content_type stringclasses 8
values | main_lang stringclasses 7
values | message stringlengths 1 50 | sha stringlengths 40 40 | patch stringlengths 52 962k | file_count int64 1 300 |
|---|---|---|---|---|---|
Javascript | Javascript | enable strict mode for vm tests | 15f13cd74a8bb9e62b871076328447b4ead7e8b6 | <ide><path>test/parallel/test-vm-new-script-new-context.js
<del>/* eslint-disable strict */
<del>var common = require('../common');
<del>var assert = require('assert');
<del>var Script = require('vm').Script;
<add>'use strict';
<add>const common = require('../common');
<add>const assert = require('assert');
<add>const Script = require('vm').Script;
<ide>
<ide> common.globalCheck = false;
<ide>
<ide> console.error('run a string');
<ide> var script = new Script('\'passed\';');
<ide> console.error('script created');
<del>var result1 = script.runInNewContext();
<del>var result2 = script.runInNewContext();
<add>const result1 = script.runInNewContext();
<add>const result2 = script.runInNewContext();
<ide> assert.equal('passed', result1);
<ide> assert.equal('passed', result2);
<ide>
<ide> assert.throws(function() {
<ide> }, /not defined/);
<ide>
<ide>
<del>hello = 5;
<add>global.hello = 5;
<ide> script = new Script('hello = 2');
<ide> script.runInNewContext();
<del>assert.equal(5, hello);
<add>assert.equal(5, global.hello);
<ide>
<ide>
<ide> console.error('pass values in and out');
<del>code = 'foo = 1;' +
<del> 'bar = 2;' +
<del> 'if (baz !== 3) throw new Error(\'test fail\');';
<del>foo = 2;
<del>obj = { foo: 0, baz: 3 };
<del>script = new Script(code);
<add>global.code = 'foo = 1;' +
<add> 'bar = 2;' +
<add> 'if (baz !== 3) throw new Error(\'test fail\');';
<add>global.foo = 2;
<add>global.obj = { foo: 0, baz: 3 };
<add>script = new Script(global.code);
<ide> /* eslint-disable no-unused-vars */
<del>var baz = script.runInNewContext(obj);
<add>var baz = script.runInNewContext(global.obj);
<ide> /* eslint-enable no-unused-vars */
<del>assert.equal(1, obj.foo);
<del>assert.equal(2, obj.bar);
<del>assert.equal(2, foo);
<add>assert.equal(1, global.obj.foo);
<add>assert.equal(2, global.obj.bar);
<add>assert.equal(2, global.foo);
<ide>
<ide> console.error('call a function by reference');
<ide> script = new Script('f()');
<del>function changeFoo() { foo = 100; }
<add>function changeFoo() { global.foo = 100; }
<ide> script.runInNewContext({ f: changeFoo });
<del>assert.equal(foo, 100);
<add>assert.equal(global.foo, 100);
<ide>
<ide> console.error('modify an object by reference');
<ide> script = new Script('f.a = 2');
<ide><path>test/parallel/test-vm-new-script-this-context.js
<del>/* eslint-disable strict */
<del>var common = require('../common');
<del>var assert = require('assert');
<del>var Script = require('vm').Script;
<add>'use strict';
<add>const common = require('../common');
<add>const assert = require('assert');
<add>const Script = require('vm').Script;
<ide>
<ide> common.globalCheck = false;
<ide>
<ide> console.error('run a string');
<ide> var script = new Script('\'passed\';');
<del>var result = script.runInThisContext(script);
<add>const result = script.runInThisContext(script);
<ide> assert.equal('passed', result);
<ide>
<ide> console.error('thrown error');
<ide> assert.throws(function() {
<ide> script.runInThisContext(script);
<ide> });
<ide>
<del>hello = 5;
<add>global.hello = 5;
<ide> script = new Script('hello = 2');
<ide> script.runInThisContext(script);
<del>assert.equal(2, hello);
<add>assert.equal(2, global.hello);
<ide>
<ide>
<ide> console.error('pass values');
<del>code = 'foo = 1;' +
<del> 'bar = 2;' +
<del> 'if (typeof baz !== \'undefined\') throw new Error(\'test fail\');';
<del>foo = 2;
<del>obj = { foo: 0, baz: 3 };
<del>script = new Script(code);
<add>global.code = 'foo = 1;' +
<add> 'bar = 2;' +
<add> 'if (typeof baz !== "undefined") throw new Error("test fail");';
<add>global.foo = 2;
<add>global.obj = { foo: 0, baz: 3 };
<add>script = new Script(global.code);
<ide> script.runInThisContext(script);
<del>assert.equal(0, obj.foo);
<del>assert.equal(2, bar);
<del>assert.equal(1, foo);
<add>assert.equal(0, global.obj.foo);
<add>assert.equal(2, global.bar);
<add>assert.equal(1, global.foo);
<ide>
<ide> console.error('call a function');
<del>f = function() { foo = 100; };
<add>global.f = function() { global.foo = 100; };
<ide> script = new Script('f()');
<ide> script.runInThisContext(script);
<del>assert.equal(100, foo);
<add>assert.equal(100, global.foo);
<ide><path>test/parallel/test-vm-run-in-new-context.js
<del>/* eslint-disable strict */
<add>'use strict';
<ide> // Flags: --expose-gc
<ide>
<del>var common = require('../common');
<del>var assert = require('assert');
<del>var vm = require('vm');
<add>const common = require('../common');
<add>const assert = require('assert');
<add>const vm = require('vm');
<ide>
<ide> assert.equal(typeof gc, 'function', 'Run this test with --expose-gc');
<ide>
<ide> common.globalCheck = false;
<ide>
<ide> console.error('run a string');
<del>var result = vm.runInNewContext('\'passed\';');
<add>const result = vm.runInNewContext('\'passed\';');
<ide> assert.equal('passed', result);
<ide>
<ide> console.error('thrown error');
<ide> assert.throws(function() {
<ide> vm.runInNewContext('throw new Error(\'test\');');
<ide> });
<ide>
<del>hello = 5;
<add>global.hello = 5;
<ide> vm.runInNewContext('hello = 2');
<del>assert.equal(5, hello);
<add>assert.equal(5, global.hello);
<ide>
<ide>
<ide> console.error('pass values in and out');
<del>code = 'foo = 1;' +
<del> 'bar = 2;' +
<del> 'if (baz !== 3) throw new Error(\'test fail\');';
<del>foo = 2;
<del>obj = { foo: 0, baz: 3 };
<add>global.code = 'foo = 1;' +
<add> 'bar = 2;' +
<add> 'if (baz !== 3) throw new Error(\'test fail\');';
<add>global.foo = 2;
<add>global.obj = { foo: 0, baz: 3 };
<ide> /* eslint-disable no-unused-vars */
<del>var baz = vm.runInNewContext(code, obj);
<add>var baz = vm.runInNewContext(global.code, global.obj);
<ide> /* eslint-enable no-unused-vars */
<del>assert.equal(1, obj.foo);
<del>assert.equal(2, obj.bar);
<del>assert.equal(2, foo);
<add>assert.equal(1, global.obj.foo);
<add>assert.equal(2, global.obj.bar);
<add>assert.equal(2, global.foo);
<ide>
<ide> console.error('call a function by reference');
<del>function changeFoo() { foo = 100; }
<add>function changeFoo() { global.foo = 100; }
<ide> vm.runInNewContext('f()', { f: changeFoo });
<del>assert.equal(foo, 100);
<add>assert.equal(global.foo, 100);
<ide>
<ide> console.error('modify an object by reference');
<ide> var f = { a: 1 }; | 3 |
Java | Java | add logging of statistics of fabric commits | 08aab2b4e1cfc0202dca544d22bc2cde82be7b39 | <ide><path>ReactAndroid/src/main/java/com/facebook/react/fabric/FabricUIManager.java
<ide> private void scheduleMountItem(
<ide> // When Binding.cpp calls scheduleMountItems during a commit phase, it always calls with
<ide> // a BatchMountItem. No other sites call into this with a BatchMountItem, and Binding.cpp only
<ide> // calls scheduleMountItems with a BatchMountItem.
<add> long scheduleMountItemStartTime = SystemClock.uptimeMillis();
<ide> boolean isBatchMountItem = mountItem instanceof IntBufferBatchMountItem;
<ide> boolean shouldSchedule =
<ide> (isBatchMountItem && ((IntBufferBatchMountItem) mountItem).shouldSchedule())
<ide> private void scheduleMountItem(
<ide> mCommitStartTime = commitStartTime;
<ide> mLayoutTime = layoutEndTime - layoutStartTime;
<ide> mFinishTransactionCPPTime = finishTransactionEndTime - finishTransactionStartTime;
<del> mFinishTransactionTime = SystemClock.uptimeMillis() - finishTransactionStartTime;
<add> mFinishTransactionTime = scheduleMountItemStartTime - finishTransactionStartTime;
<ide> mDispatchViewUpdatesTime = SystemClock.uptimeMillis();
<ide> }
<ide>
<ide> private void scheduleMountItem(
<ide> ReactMarker.logFabricMarker(
<ide> ReactMarkerConstants.FABRIC_LAYOUT_END, null, commitNumber, layoutEndTime);
<ide> ReactMarker.logFabricMarker(ReactMarkerConstants.FABRIC_COMMIT_END, null, commitNumber);
<add>
<add> if (ENABLE_FABRIC_LOGS) {
<add> FLog.e(
<add> TAG,
<add> "Statistic of Fabric commit #: "
<add> + commitNumber
<add> + "\n - Total commit time: "
<add> + (finishTransactionEndTime - commitStartTime)
<add> + " ms.\n - Layout: "
<add> + mLayoutTime
<add> + " ms.\n - Diffing: "
<add> + (diffEndTime - diffStartTime)
<add> + " ms.\n"
<add> + " - FinishTransaction (Diffing + Processing + Serialization of MountingInstructions): "
<add> + mFinishTransactionCPPTime
<add> + " ms.");
<add> }
<ide> }
<ide> }
<ide> | 1 |
Javascript | Javascript | put mask data to the canvas in small slices | 1ec3c341cc4416a35714ccecfb9e9e6a4bc1d283 | <ide><path>src/display/canvas.js
<ide> var CanvasGraphics = (function CanvasGraphicsClosure() {
<ide> }
<ide>
<ide> function putBinaryImageMask(ctx, imgData) {
<del> var width = imgData.width, height = imgData.height;
<del> var tmpImgData = ctx.createImageData(width, height);
<del> var data = imgData.data;
<del> var tmpImgDataPixels = tmpImgData.data;
<del> var dataPos = 0;
<del>
<del> // Expand the mask so it can be used by the canvas. Any required inversion
<del> // has already been handled.
<del> var tmpPos = 3; // alpha component offset
<del> for (var i = 0; i < height; i++) {
<del> var mask = 0;
<del> for (var j = 0; j < width; j++) {
<del> if (!mask) {
<del> var elem = data[dataPos++];
<del> mask = 128;
<del> }
<del> if (!(elem & mask)) {
<del> tmpImgDataPixels[tmpPos] = 255;
<add> var height = imgData.height, width = imgData.width;
<add> var fullChunkHeight = 16;
<add> var fracChunks = height / fullChunkHeight;
<add> var fullChunks = Math.floor(fracChunks);
<add> var totalChunks = Math.ceil(fracChunks);
<add> var partialChunkHeight = height - fullChunks * fullChunkHeight;
<add>
<add> var chunkImgData = ctx.createImageData(width, fullChunkHeight);
<add> var srcPos = 0;
<add> var src = imgData.data;
<add> var dest = chunkImgData.data;
<add>
<add> for (var i = 0; i < totalChunks; i++) {
<add> var thisChunkHeight =
<add> (i < fullChunks) ? fullChunkHeight : partialChunkHeight;
<add>
<add> // Expand the mask so it can be used by the canvas. Any required
<add> // inversion has already been handled.
<add> var destPos = 3; // alpha component offset
<add> for (var j = 0; j < thisChunkHeight; j++) {
<add> var mask = 0;
<add> for (var k = 0; k < width; k++) {
<add> if (!mask) {
<add> var elem = src[srcPos++];
<add> mask = 128;
<add> }
<add> dest[destPos] = (elem & mask) ? 0 : 255;
<add> destPos += 4;
<add> mask >>= 1;
<ide> }
<del> tmpPos += 4;
<del> mask >>= 1;
<ide> }
<add> ctx.putImageData(chunkImgData, 0, i * fullChunkHeight);
<ide> }
<del>
<del> ctx.putImageData(tmpImgData, 0, 0);
<ide> }
<ide>
<ide> function copyCtxState(sourceCtx, destCtx) { | 1 |
Text | Text | add v3.27.0 to changelog | 3afd20cb64c334d86c657d021fdd52de1fb0c1d6 | <ide><path>CHANGELOG.md
<ide> # Ember Changelog
<ide>
<del>### v3.27.0-beta.4 (April 21, 2021)
<del>
<del>- [#19499](https://github.com/emberjs/ember.js/pull/19499) [BUGFIX beta] [DEPRECATION] Deprecate `@foo={{helper}}`
<del>- [#19499](https://github.com/emberjs/ember.js/pull/19499) [BUGFIX beta] `<:else>` and `<:inverse>` should be aliases (see )
<del>- [#19499](https://github.com/emberjs/ember.js/pull/19499) [BUGFIX beta] Fix nested calls to helpers in dynamic helpers (see https://github.com/glimmerjs/glimmer-vm/pull/1293)
<del>
<del>### v3.27.0-beta.3 (March 30, 2021)
<del>
<del>- [#19477](https://github.com/emberjs/ember.js/pull/19477) [BUGFIX] Move `LinkTo` assertion into a method so `LinkToExternal` can override it
<del>- [#19481](https://github.com/emberjs/ember.js/pull/19481) [BUGFIX] Export `on` from correct path
<del>- [#19487](https://github.com/emberjs/ember.js/pull/19487) [BUGFIX] Allow passing a string literal to {{helper}} and {{modifier}}
<del>
<del>### v3.27.0-beta.2 (March 25, 2021)
<del>
<del>- [#19473](https://github.com/emberjs/ember.js/pull/19473) Update GlimmerVM to latest (fix compatibility for template import proposals)
<del>- [#19474](https://github.com/emberjs/ember.js/pull/19474) [FEATURE] Enable `(helper` and `(modifier` helpers
<del>
<del>### v3.27.0-beta.1 (March 22, 2021)
<add>### v3.27.0 (May 3, 2021)
<ide>
<add>- [#19309](https://github.com/emberjs/ember.js/pull/19309) / [#19487](https://github.com/emberjs/ember.js/pull/19487) / [#19474](https://github.com/emberjs/ember.js/pull/19474) [FEATURE] Enable `(helper` and `(modifier` helpers per [RFC #432](https://github.com/emberjs/rfcs/blob/master/text/0432-contextual-helpers.md).
<ide> - [#19382](https://github.com/emberjs/ember.js/pull/19382) / [#19430](https://github.com/emberjs/ember.js/pull/19430) [FEATURE] Remaining implementation work per [RFC #671](https://github.com/emberjs/rfcs/blob/master/text/0671-modernize-built-in-components-1.md).
<ide> - [#19457](https://github.com/emberjs/ember.js/pull/19457) / [#19463](https://github.com/emberjs/ember.js/pull/19463) / [#19464](https://github.com/emberjs/ember.js/pull/19464) / [#19467](https://github.com/emberjs/ember.js/pull/19467) [DEPRECATION] Add deprecation for the Ember Global per [RFC #706](https://github.com/emberjs/rfcs/blob/master/text/0706-deprecate-ember-global.md).
<ide> - [#19407](https://github.com/emberjs/ember.js/pull/19407) [DEPRECATION] Add deprecation for `Route#disconnectOutlet` per [RFC #491](https://github.com/emberjs/rfcs/blob/master/text/0491-deprecate-disconnect-outlet.md).
<ide> - [#19433](https://github.com/emberjs/ember.js/pull/19433) [DEPRECATION] Add deprecation for `Route#renderTemplate` per [RFC #418](https://github.com/emberjs/rfcs/blob/master/text/0418-deprecate-route-render-methods.md).
<ide> - [#19442](https://github.com/emberjs/ember.js/pull/19442) [DEPRECATION] Add deprecation for `Route#render` method per [RFC #418](https://github.com/emberjs/rfcs/blob/master/text/0418-deprecate-route-render-methods.md).
<ide> - [#19429](https://github.com/emberjs/ember.js/pull/19429) [DEPRECATION] `registerPlugin` / `unregisterPlugin` and legacy class based AST plugins (private APIs)
<add>- [#19499](https://github.com/emberjs/ember.js/pull/19499) [DEPRECATION] Deprecate `@foo={{helper}}` per [RFC #496](https://github.com/emberjs/rfcs/blob/master/text/0496-handlebars-strict-mode.md#3-no-implicit-invocation-of-argument-less-helpers).
<add>- [#19499](https://github.com/emberjs/ember.js/pull/19499) [BUGFIX] Update rendering engine to `@glimmer/*` 0.78.2 for fixes including:
<add> - `<:else>` and `<:inverse>` should be aliases (see https://github.com/glimmerjs/glimmer-vm/pull/1296)
<add> - Fix nested calls to helpers in dynamic helpers (see https://github.com/glimmerjs/glimmer-vm/pull/1293)
<add>- [#19477](https://github.com/emberjs/ember.js/pull/19477) [BUGFIX] Allow `<LinkToExternal />` to override internal assertion
<add>- [#19481](https://github.com/emberjs/ember.js/pull/19481) [BUGFIX] Export `on` from correct path
<ide> - [#19466](https://github.com/emberjs/ember.js/pull/19466) [BUGFIX] Rename private runloop functions
<ide> - [#19384](https://github.com/emberjs/ember.js/pull/19384) Use qunit-dom in helper and component test blueprints
<ide> - [#19390](https://github.com/emberjs/ember.js/pull/19390) Refactor the internal Ember loader to use the standard Ember CLI loader | 1 |
Text | Text | add reference to deployment wiki page | 9acc7cfc68186779ef485689c3602a00d1ffb064 | <ide><path>readme.md
<ide> componentWillReceiveProps(nextProps) {
<ide> ```
<ide>
<ide> > NOTES:
<del>>
<add>>
<ide> > Shallow routing works **only** for same page URL changes. For an example, let's assume we've another page called `about`, and you run this:
<ide> > ```js
<ide> > Router.push('/about?counter=10', '/about?counter=10', { shallow: true })
<ide> Here's an example `.babelrc` file:
<ide>
<ide> ## Production deployment
<ide>
<del>To deploy, instead of running `next`, you probably want to build ahead of time. Therefore, building and starting are separate commands:
<add>To deploy, instead of running `next`, you want to build for production usage ahead of time. Therefore, building and starting are separate commands:
<ide>
<ide> ```bash
<ide> next build
<ide> For example, to deploy with [`now`](https://zeit.co/now) a `package.json` like f
<ide>
<ide> Then run `now` and enjoy!
<ide>
<add>Next.js can be deployed to other hosting solutions too. Please have a look at the ['Deployment']('https://github.com/zeit/next.js/wiki/Deployment') section of the wiki.
<add>
<ide> Note: we recommend putting `.next` in `.npmignore` or `.gitignore`. Otherwise, use `files` or `now.files` to opt-into a whitelist of files you want to deploy (and obviously exclude `.next`)
<ide>
<ide> ## FAQ | 1 |
Java | Java | introduce "aware" superinterface | 5e6912302af99d78d05aff7080ed9b8bb33df913 | <ide><path>org.springframework.beans/src/main/java/org/springframework/beans/factory/Aware.java
<add>/*
<add> * Copyright 2002-2011 the original author or authors.
<add> *
<add> * Licensed under the Apache License, Version 2.0 (the "License");
<add> * you may not use this file except in compliance with the License.
<add> * You may obtain a copy of the License at
<add> *
<add> * http://www.apache.org/licenses/LICENSE-2.0
<add> *
<add> * Unless required by applicable law or agreed to in writing, software
<add> * distributed under the License is distributed on an "AS IS" BASIS,
<add> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
<add> * See the License for the specific language governing permissions and
<add> * limitations under the License.
<add> */
<add>
<add>package org.springframework.beans.factory;
<add>
<add>/**
<add> * Marker superinterface indicating that a bean is eligible to be
<add> * notified by the Spring container of a particular framework object
<add> * through a callback-style method. Actual method signature is
<add> * determined by individual subinterfaces, but should typically
<add> * consist of just one void-returning method that accepts a single
<add> * argument.
<add> *
<add> * <p>Note that merely implementing {@link Aware} provides no default
<add> * functionality. Rather, processing must be done explicitly, for example
<add> * in a {@link org.springframework.beans.factory.config.BeanPostProcessor BeanPostProcessor}.
<add> * Refer to {@link org.springframework.context.support.ApplicationContextAwareProcessor}
<add> * and {@link org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory}
<add> * for examples of processing {@code *Aware} interface callbacks.
<add> *
<add> * @author Chris Beams
<add> * @since 3.1
<add> */
<add>public interface Aware {
<add>
<add>}
<ide><path>org.springframework.beans/src/main/java/org/springframework/beans/factory/BeanClassLoaderAware.java
<ide> /*
<del> * Copyright 2002-2006 the original author or authors.
<add> * Copyright 2002-2011 the original author or authors.
<ide> *
<ide> * Licensed under the Apache License, Version 2.0 (the "License");
<ide> * you may not use this file except in compliance with the License.
<ide> * {@link BeanFactory BeanFactory javadocs}.
<ide> *
<ide> * @author Juergen Hoeller
<add> * @author Chris Beams
<ide> * @since 2.0
<ide> * @see BeanNameAware
<ide> * @see BeanFactoryAware
<ide> * @see InitializingBean
<ide> */
<del>public interface BeanClassLoaderAware {
<add>public interface BeanClassLoaderAware extends Aware {
<ide>
<ide> /**
<ide> * Callback that supplies the bean {@link ClassLoader class loader} to
<ide><path>org.springframework.beans/src/main/java/org/springframework/beans/factory/BeanFactoryAware.java
<ide> /*
<del> * Copyright 2002-2007 the original author or authors.
<add> * Copyright 2002-2010 the original author or authors.
<ide> *
<ide> * Licensed under the Apache License, Version 2.0 (the "License");
<ide> * you may not use this file except in compliance with the License.
<ide> * {@link BeanFactory BeanFactory javadocs}.
<ide> *
<ide> * @author Rod Johnson
<add> * @author Chris Beams
<ide> * @since 11.03.2003
<ide> * @see BeanNameAware
<ide> * @see BeanClassLoaderAware
<ide> * @see InitializingBean
<ide> * @see org.springframework.context.ApplicationContextAware
<ide> */
<del>public interface BeanFactoryAware {
<add>public interface BeanFactoryAware extends Aware {
<ide>
<ide> /**
<ide> * Callback that supplies the owning factory to a bean instance.
<ide><path>org.springframework.beans/src/main/java/org/springframework/beans/factory/BeanNameAware.java
<ide> /*
<del> * Copyright 2002-2007 the original author or authors.
<add> * Copyright 2002-2011 the original author or authors.
<ide> *
<ide> * Licensed under the Apache License, Version 2.0 (the "License");
<ide> * you may not use this file except in compliance with the License.
<ide> * {@link BeanFactory BeanFactory javadocs}.
<ide> *
<ide> * @author Juergen Hoeller
<add> * @author Chris Beams
<ide> * @since 01.11.2003
<ide> * @see BeanClassLoaderAware
<ide> * @see BeanFactoryAware
<ide> * @see InitializingBean
<ide> */
<del>public interface BeanNameAware {
<add>public interface BeanNameAware extends Aware {
<ide>
<ide> /**
<ide> * Set the name of the bean in the bean factory that created this bean.
<ide><path>org.springframework.beans/src/main/java/org/springframework/beans/factory/support/AbstractAutowireCapableBeanFactory.java
<ide> import org.springframework.beans.PropertyValue;
<ide> import org.springframework.beans.PropertyValues;
<ide> import org.springframework.beans.TypeConverter;
<add>import org.springframework.beans.factory.Aware;
<ide> import org.springframework.beans.factory.BeanClassLoaderAware;
<ide> import org.springframework.beans.factory.BeanCreationException;
<ide> import org.springframework.beans.factory.BeanCurrentlyInCreationException;
<ide> public Object run() {
<ide> }
<ide> return wrappedBean;
<ide> }
<del>
<add>
<ide> private void invokeAwareMethods(final String beanName, final Object bean) {
<del> if (bean instanceof BeanNameAware) {
<del> ((BeanNameAware) bean).setBeanName(beanName);
<del> }
<del> if (bean instanceof BeanClassLoaderAware) {
<del> ((BeanClassLoaderAware) bean).setBeanClassLoader(getBeanClassLoader());
<del> }
<del> if (bean instanceof BeanFactoryAware) {
<del> ((BeanFactoryAware) bean).setBeanFactory(AbstractAutowireCapableBeanFactory.this);
<add> if (bean instanceof Aware) {
<add> if (bean instanceof BeanNameAware) {
<add> ((BeanNameAware) bean).setBeanName(beanName);
<add> }
<add> if (bean instanceof BeanClassLoaderAware) {
<add> ((BeanClassLoaderAware) bean).setBeanClassLoader(getBeanClassLoader());
<add> }
<add> if (bean instanceof BeanFactoryAware) {
<add> ((BeanFactoryAware) bean).setBeanFactory(AbstractAutowireCapableBeanFactory.this);
<add> }
<ide> }
<ide> }
<ide>
<ide><path>org.springframework.beans/src/main/java/org/springframework/beans/factory/support/DefaultListableBeanFactory.java
<ide> public void setAllowEagerClassLoading(boolean allowEagerClassLoading) {
<ide> */
<ide> public void setAutowireCandidateResolver(final AutowireCandidateResolver autowireCandidateResolver) {
<ide> Assert.notNull(autowireCandidateResolver, "AutowireCandidateResolver must not be null");
<del> // TODO SPR-7515: should also do EnvironmentAware injection here?
<ide> if (autowireCandidateResolver instanceof BeanFactoryAware) {
<ide> if (System.getSecurityManager() != null) {
<ide> final BeanFactory target = this;
<ide><path>org.springframework.context.support/src/main/java/org/springframework/scheduling/quartz/SchedulerContextAware.java
<ide> /*
<del> * Copyright 2002-2006 the original author or authors.
<add> * Copyright 2002-2011 the original author or authors.
<ide> *
<ide> * Licensed under the Apache License, Version 2.0 (the "License");
<ide> * you may not use this file except in compliance with the License.
<ide> package org.springframework.scheduling.quartz;
<ide>
<ide> import org.quartz.SchedulerContext;
<add>import org.springframework.beans.factory.Aware;
<ide>
<ide> /**
<ide> * Callback interface to be implemented by Spring-managed
<ide> * that are passed in via Spring's SchedulerFactoryBean.
<ide> *
<ide> * @author Juergen Hoeller
<add> * @author Chris Beams
<ide> * @since 2.0
<ide> * @see org.quartz.spi.JobFactory
<ide> * @see SchedulerFactoryBean#setJobFactory
<ide> */
<del>public interface SchedulerContextAware {
<add>public interface SchedulerContextAware extends Aware {
<ide>
<ide> /**
<ide> * Set the SchedulerContext of the current Quartz Scheduler.
<ide><path>org.springframework.context/src/main/java/org/springframework/context/ApplicationContextAware.java
<ide> /*
<del> * Copyright 2002-2007 the original author or authors.
<add> * Copyright 2002-2011 the original author or authors.
<ide> *
<ide> * Licensed under the Apache License, Version 2.0 (the "License");
<ide> * you may not use this file except in compliance with the License.
<ide> package org.springframework.context;
<ide>
<ide> import org.springframework.beans.BeansException;
<add>import org.springframework.beans.factory.Aware;
<ide>
<ide> /**
<ide> * Interface to be implemented by any object that wishes to be notified
<ide> *
<ide> * @author Rod Johnson
<ide> * @author Juergen Hoeller
<add> * @author Chris Beams
<ide> * @see ResourceLoaderAware
<ide> * @see ApplicationEventPublisherAware
<ide> * @see MessageSourceAware
<ide> * @see org.springframework.context.support.ApplicationObjectSupport
<ide> * @see org.springframework.beans.factory.BeanFactoryAware
<ide> */
<del>public interface ApplicationContextAware {
<add>public interface ApplicationContextAware extends Aware {
<ide>
<ide> /**
<ide> * Set the ApplicationContext that this object runs in.
<ide><path>org.springframework.context/src/main/java/org/springframework/context/ApplicationEventPublisherAware.java
<ide> /*
<del> * Copyright 2002-2005 the original author or authors.
<add> * Copyright 2002-2011 the original author or authors.
<ide> *
<ide> * Licensed under the Apache License, Version 2.0 (the "License");
<ide> * you may not use this file except in compliance with the License.
<ide>
<ide> package org.springframework.context;
<ide>
<add>import org.springframework.beans.factory.Aware;
<add>
<ide> /**
<ide> * Interface to be implemented by any object that wishes to be notified
<ide> * of the ApplicationEventPublisher (typically the ApplicationContext)
<ide> * that it runs in.
<ide> *
<ide> * @author Juergen Hoeller
<add> * @author Chris Beams
<ide> * @since 1.1.1
<ide> * @see ApplicationContextAware
<ide> */
<del>public interface ApplicationEventPublisherAware {
<add>public interface ApplicationEventPublisherAware extends Aware {
<ide>
<ide> /**
<ide> * Set the ApplicationEventPublisher that this object runs in.
<ide><path>org.springframework.context/src/main/java/org/springframework/context/EmbeddedValueResolverAware.java
<ide> /*
<del> * Copyright 2002-2010 the original author or authors.
<add> * Copyright 2002-2011 the original author or authors.
<ide> *
<ide> * Licensed under the Apache License, Version 2.0 (the "License");
<ide> * you may not use this file except in compliance with the License.
<ide>
<ide> package org.springframework.context;
<ide>
<add>import org.springframework.beans.factory.Aware;
<ide> import org.springframework.util.StringValueResolver;
<ide>
<ide> /**
<ide> * ApplicationContextAware/BeanFactoryAware interfaces.
<ide> *
<ide> * @author Juergen Hoeller
<add> * @author Chris Beams
<ide> * @since 3.0.3
<ide> * @see org.springframework.beans.factory.config.ConfigurableBeanFactory#resolveEmbeddedValue
<ide> */
<del>public interface EmbeddedValueResolverAware {
<add>public interface EmbeddedValueResolverAware extends Aware {
<ide>
<ide> /**
<ide> * Set the StringValueResolver to use for resolving embedded definition values.
<ide><path>org.springframework.context/src/main/java/org/springframework/context/EnvironmentAware.java
<ide> /*
<del> * Copyright 2002-2010 the original author or authors.
<add> * Copyright 2002-2011 the original author or authors.
<ide> *
<ide> * Licensed under the Apache License, Version 2.0 (the "License");
<ide> * you may not use this file except in compliance with the License.
<ide>
<ide> package org.springframework.context;
<ide>
<add>import org.springframework.beans.factory.Aware;
<ide> import org.springframework.core.env.Environment;
<ide>
<ide> /**
<ide> * @author Chris Beams
<ide> * @since 3.1
<ide> */
<del>public interface EnvironmentAware {
<add>public interface EnvironmentAware extends Aware {
<ide>
<ide> /**
<ide> * Set the {@code Environment} that this object runs in.
<ide><path>org.springframework.context/src/main/java/org/springframework/context/MessageSourceAware.java
<ide> /*
<del> * Copyright 2002-2005 the original author or authors.
<add> * Copyright 2002-2011 the original author or authors.
<ide> *
<ide> * Licensed under the Apache License, Version 2.0 (the "License");
<ide> * you may not use this file except in compliance with the License.
<ide>
<ide> package org.springframework.context;
<ide>
<add>import org.springframework.beans.factory.Aware;
<add>
<ide> /**
<ide> * Interface to be implemented by any object that wishes to be notified
<ide> * of the MessageSource (typically the ApplicationContext) that it runs in.
<ide> * it is defined as bean with name "messageSource" in the application context.
<ide> *
<ide> * @author Juergen Hoeller
<add> * @author Chris Beams
<ide> * @since 1.1.1
<ide> * @see ApplicationContextAware
<ide> */
<del>public interface MessageSourceAware {
<add>public interface MessageSourceAware extends Aware {
<ide>
<ide> /**
<ide> * Set the MessageSource that this object runs in.
<ide><path>org.springframework.context/src/main/java/org/springframework/context/ResourceLoaderAware.java
<ide> /*
<del> * Copyright 2002-2006 the original author or authors.
<add> * Copyright 2002-2011 the original author or authors.
<ide> *
<ide> * Licensed under the Apache License, Version 2.0 (the "License");
<ide> * you may not use this file except in compliance with the License.
<ide>
<ide> package org.springframework.context;
<ide>
<add>import org.springframework.beans.factory.Aware;
<ide> import org.springframework.core.io.ResourceLoader;
<ide>
<ide> /**
<ide> * automatic type conversion by the bean factory.
<ide> *
<ide> * @author Juergen Hoeller
<add> * @author Chris Beams
<ide> * @since 10.03.2004
<ide> * @see ApplicationContextAware
<ide> * @see org.springframework.beans.factory.InitializingBean
<ide> * @see org.springframework.core.io.support.PathMatchingResourcePatternResolver
<ide> * @see org.springframework.context.support.ReloadableResourceBundleMessageSource
<ide> */
<del>public interface ResourceLoaderAware {
<add>public interface ResourceLoaderAware extends Aware {
<ide>
<ide> /**
<ide> * Set the ResourceLoader that this object runs in.
<ide><path>org.springframework.context/src/main/java/org/springframework/context/support/ApplicationContextAwareProcessor.java
<ide> import java.security.PrivilegedAction;
<ide>
<ide> import org.springframework.beans.BeansException;
<add>import org.springframework.beans.factory.Aware;
<ide> import org.springframework.beans.factory.config.BeanPostProcessor;
<ide> import org.springframework.beans.factory.config.ConfigurableBeanFactory;
<ide> import org.springframework.context.ApplicationContextAware;
<ide> public Object run() {
<ide> }
<ide>
<ide> private void invokeAwareInterfaces(Object bean) {
<del> if (bean instanceof EmbeddedValueResolverAware) {
<del> ((EmbeddedValueResolverAware) bean).setEmbeddedValueResolver(
<del> new EmbeddedValueResolver(this.applicationContext.getBeanFactory()));
<del> }
<del> if (bean instanceof ResourceLoaderAware) {
<del> ((ResourceLoaderAware) bean).setResourceLoader(this.applicationContext);
<del> }
<del> if (bean instanceof ApplicationEventPublisherAware) {
<del> ((ApplicationEventPublisherAware) bean).setApplicationEventPublisher(this.applicationContext);
<del> }
<del> if (bean instanceof MessageSourceAware) {
<del> ((MessageSourceAware) bean).setMessageSource(this.applicationContext);
<del> }
<del> if (bean instanceof ApplicationContextAware) {
<del> ((ApplicationContextAware) bean).setApplicationContext(this.applicationContext);
<del> }
<del> if (bean instanceof EnvironmentAware) {
<del> ((EnvironmentAware) bean).setEnvironment(this.applicationContext.getEnvironment());
<add> if (bean instanceof Aware) {
<add> if (bean instanceof EmbeddedValueResolverAware) {
<add> ((EmbeddedValueResolverAware) bean).setEmbeddedValueResolver(
<add> new EmbeddedValueResolver(this.applicationContext.getBeanFactory()));
<add> }
<add> if (bean instanceof ResourceLoaderAware) {
<add> ((ResourceLoaderAware) bean).setResourceLoader(this.applicationContext);
<add> }
<add> if (bean instanceof ApplicationEventPublisherAware) {
<add> ((ApplicationEventPublisherAware) bean).setApplicationEventPublisher(this.applicationContext);
<add> }
<add> if (bean instanceof MessageSourceAware) {
<add> ((MessageSourceAware) bean).setMessageSource(this.applicationContext);
<add> }
<add> if (bean instanceof ApplicationContextAware) {
<add> ((ApplicationContextAware) bean).setApplicationContext(this.applicationContext);
<add> }
<add> if (bean instanceof EnvironmentAware) {
<add> ((EnvironmentAware) bean).setEnvironment(this.applicationContext.getEnvironment());
<add> }
<ide> }
<ide> }
<ide>
<ide><path>org.springframework.context/src/main/java/org/springframework/context/weaving/LoadTimeWeaverAware.java
<ide> /*
<del> * Copyright 2002-2007 the original author or authors.
<add> * Copyright 2002-2011 the original author or authors.
<ide> *
<ide> * Licensed under the Apache License, Version 2.0 (the "License");
<ide> * you may not use this file except in compliance with the License.
<ide>
<ide> package org.springframework.context.weaving;
<ide>
<add>import org.springframework.beans.factory.Aware;
<ide> import org.springframework.instrument.classloading.LoadTimeWeaver;
<ide>
<ide> /**
<ide> * Interface to be implemented by any object that wishes to be notified
<ide> * of the application context's default {@link LoadTimeWeaver}.
<ide> *
<ide> * @author Juergen Hoeller
<add> * @author Chris Beams
<ide> * @since 2.5
<ide> * @see org.springframework.context.ConfigurableApplicationContext#LOAD_TIME_WEAVER_BEAN_NAME
<ide> */
<del>public interface LoadTimeWeaverAware {
<add>public interface LoadTimeWeaverAware extends Aware {
<ide>
<ide> /**
<ide> * Set the {@link LoadTimeWeaver} of this object's containing
<ide><path>org.springframework.context/src/main/java/org/springframework/jmx/export/notification/NotificationPublisherAware.java
<ide> /*
<del> * Copyright 2002-2007 the original author or authors.
<add> * Copyright 2002-2011 the original author or authors.
<ide> *
<ide> * Licensed under the Apache License, Version 2.0 (the "License");
<ide> * you may not use this file except in compliance with the License.
<ide>
<ide> package org.springframework.jmx.export.notification;
<ide>
<add>import org.springframework.beans.factory.Aware;
<add>
<ide> /**
<ide> * Interface to be implemented by any Spring-managed resource that is to be
<ide> * registered with an {@link javax.management.MBeanServer} and wishes to send
<ide> * interface (or implementing a full {@link javax.management.modelmbean.ModelMBean}).
<ide> *
<ide> * @author Rob Harrop
<add> * @author Chris Beams
<ide> * @since 2.0
<ide> * @see NotificationPublisher
<ide> */
<del>public interface NotificationPublisherAware {
<add>public interface NotificationPublisherAware extends Aware {
<ide>
<ide> /**
<ide> * Set the {@link NotificationPublisher} instance for the current managed resource instance.
<ide><path>org.springframework.transaction/src/main/java/org/springframework/jca/context/BootstrapContextAware.java
<ide> /*
<del> * Copyright 2002-2007 the original author or authors.
<add> * Copyright 2002-2011 the original author or authors.
<ide> *
<ide> * Licensed under the Apache License, Version 2.0 (the "License");
<ide> * you may not use this file except in compliance with the License.
<ide>
<ide> import javax.resource.spi.BootstrapContext;
<ide>
<add>import org.springframework.beans.factory.Aware;
<add>
<ide> /**
<ide> * Interface to be implemented by any object that wishes to be
<ide> * notified of the BootstrapContext (typically determined by the
<ide> * {@link ResourceAdapterApplicationContext}) that it runs in.
<ide> *
<ide> * @author Juergen Hoeller
<add> * @author Chris Beams
<ide> * @since 2.5
<ide> * @see javax.resource.spi.BootstrapContext
<ide> */
<del>public interface BootstrapContextAware {
<add>public interface BootstrapContextAware extends Aware {
<ide>
<ide> /**
<ide> * Set the BootstrapContext that this object runs in.
<ide><path>org.springframework.web.portlet/src/main/java/org/springframework/web/portlet/context/PortletConfigAware.java
<ide> /*
<del> * Copyright 2002-2005 the original author or authors.
<add> * Copyright 2002-2011 the original author or authors.
<ide> *
<ide> * Licensed under the Apache License, Version 2.0 (the "License");
<ide> * you may not use this file except in compliance with the License.
<ide>
<ide> import javax.portlet.PortletConfig;
<ide>
<add>import org.springframework.beans.factory.Aware;
<add>
<ide> /**
<ide> * Interface to be implemented by any object that wishes to be notified
<ide> * of the PortletConfig (typically determined by the PortletApplicationContext)
<ide> * that it runs in.
<ide> *
<ide> * @author Juergen Hoeller
<add> * @author Chris Beams
<ide> * @since 2.0
<ide> * @see PortletContextAware
<ide> */
<del>public interface PortletConfigAware {
<add>public interface PortletConfigAware extends Aware {
<ide>
<ide> /**
<ide> * Set the PortletConfigthat this object runs in.
<ide><path>org.springframework.web.portlet/src/main/java/org/springframework/web/portlet/context/PortletContextAware.java
<ide> /*
<del> * Copyright 2002-2005 the original author or authors.
<add> * Copyright 2002-2011 the original author or authors.
<ide> *
<ide> * Licensed under the Apache License, Version 2.0 (the "License");
<ide> * you may not use this file except in compliance with the License.
<ide>
<ide> import javax.portlet.PortletContext;
<ide>
<add>import org.springframework.beans.factory.Aware;
<add>
<ide> /**
<ide> * Interface to be implemented by any object that wishes to be notified
<ide> * of the PortletContext (typically determined by the PortletApplicationContext)
<ide> * that it runs in.
<ide> *
<ide> * @author Juergen Hoeller
<ide> * @author William G. Thompson, Jr.
<add> * @author Chris Beams
<ide> * @since 2.0
<ide> * @see PortletConfigAware
<ide> */
<del>public interface PortletContextAware {
<add>public interface PortletContextAware extends Aware {
<ide>
<ide> /**
<ide> * Set the PortletContext that this object runs in.
<ide><path>org.springframework.web/src/main/java/org/springframework/web/context/ServletConfigAware.java
<ide> /*
<del> * Copyright 2002-2006 the original author or authors.
<add> * Copyright 2002-2011 the original author or authors.
<ide> *
<ide> * Licensed under the Apache License, Version 2.0 (the "License");
<ide> * you may not use this file except in compliance with the License.
<ide>
<ide> import javax.servlet.ServletConfig;
<ide>
<add>import org.springframework.beans.factory.Aware;
<add>
<ide> /**
<ide> * Interface to be implemented by any object that wishes to be notified
<ide> * of the ServletConfig (typically determined by the WebApplicationContext)
<ide> * elsewhere, an exception will be thrown on bean creation.
<ide> *
<ide> * @author Juergen Hoeller
<add> * @author Chris Beams
<ide> * @since 2.0
<ide> * @see ServletContextAware
<ide> */
<del>public interface ServletConfigAware {
<add>public interface ServletConfigAware extends Aware {
<ide>
<ide> /**
<ide> * Set the ServletConfig that this object runs in.
<ide><path>org.springframework.web/src/main/java/org/springframework/web/context/ServletContextAware.java
<ide> /*
<del> * Copyright 2002-2005 the original author or authors.
<add> * Copyright 2002-2011 the original author or authors.
<ide> *
<ide> * Licensed under the Apache License, Version 2.0 (the "License");
<ide> * you may not use this file except in compliance with the License.
<ide>
<ide> import javax.servlet.ServletContext;
<ide>
<add>import org.springframework.beans.factory.Aware;
<add>
<ide> /**
<ide> * Interface to be implemented by any object that wishes to be notified
<ide> * of the ServletContext (typically determined by the WebApplicationContext)
<ide> * that it runs in.
<ide> *
<ide> * @author Juergen Hoeller
<add> * @author Chris Beams
<ide> * @since 12.03.2004
<ide> * @see ServletConfigAware
<ide> */
<del>public interface ServletContextAware {
<add>public interface ServletContextAware extends Aware {
<ide>
<ide> /**
<ide> * Set the ServletContext that this object runs in. | 21 |
Javascript | Javascript | add workaround for invalid rawmodel entries | e507c44e4c853e92475cddf86105831fe468481d | <ide><path>examples/js/loaders/FBXLoader.js
<ide> THREE.FBXLoader = ( function () {
<ide> if ( modelID !== undefined ) {
<ide>
<ide> var rawModel = fbxTree.Objects.Model[ modelID.toString() ];
<add>
<add> if (!rawModel) {
<add> console.log("invalid rawModel", modelID, child);
<add> return;
<add> }
<ide>
<ide> var node = {
<ide> | 1 |
Ruby | Ruby | prevent possible bug in build's optlink | feb77b208566eed340197dc17d27e0de5c1e16f7 | <ide><path>Library/Homebrew/build.rb
<ide> def fixopt f
<ide> f.linked_keg.readlink
<ide> elsif f.prefix.directory?
<ide> f.prefix
<del> elsif (kids = f.rack.children).size == 1
<add> elsif (kids = f.rack.children).size == 1 and kids.first.directory?
<ide> kids.first
<ide> else
<ide> raise | 1 |
PHP | PHP | use "expire" to be consistent with other drivers | 880107fa8c24d4b31f155bd1b92fd4bfa1a1ac52 | <ide><path>src/Illuminate/Queue/Connectors/BeanstalkdConnector.php
<ide> public function connect(array $config)
<ide> $pheanstalk = new Pheanstalk($config['host'], Arr::get($config, 'port', PheanstalkInterface::DEFAULT_PORT));
<ide>
<ide> return new BeanstalkdQueue(
<del> $pheanstalk, $config['queue'], Arr::get($config, 'ttr', Pheanstalk::DEFAULT_TTR)
<add> $pheanstalk, $config['queue'], Arr::get($config, 'expire', Pheanstalk::DEFAULT_TTR)
<ide> );
<ide> }
<ide> } | 1 |
PHP | PHP | fix each when there's no order by | 408aba4ec776e0a8bf016bb090eaa354d3725abc | <ide><path>src/Illuminate/Database/Eloquent/Builder.php
<ide> public function chunk($count, callable $callback)
<ide> */
<ide> public function each(callable $callback, $count = 1000)
<ide> {
<add> if (is_null($this->getOrderBys())) {
<add> $this->orderBy('id', 'asc');
<add> }
<add>
<ide> return $this->chunk($count, function ($results) use ($callback) {
<ide> foreach ($results as $key => $value) {
<ide> if ($callback($item, $key) === false) {
<ide><path>src/Illuminate/Database/Query/Builder.php
<ide> public function chunk($count, callable $callback)
<ide> */
<ide> public function each(callable $callback, $count = 1000)
<ide> {
<add> if (is_null($this->getOrderBys())) {
<add> $this->orderBy('id', 'asc');
<add> }
<add>
<ide> return $this->chunk($count, function ($results) use ($callback) {
<ide> foreach ($results as $key => $value) {
<ide> if ($callback($item, $key) === false) {
<ide> public function each(callable $callback, $count = 1000)
<ide> });
<ide> }
<ide>
<add> /**
<add> * Returns the currently set ordering.
<add> *
<add> * @return array|null
<add> */
<add> public function getOrderBys()
<add> {
<add> $property = $this->unions ? 'unionOrders' : 'orders';
<add>
<add> return $this->{$property};
<add> }
<add>
<ide> /**
<ide> * Get an array with the values of a given column.
<ide> * | 2 |
Python | Python | fix broken mssql test | 933d863d6d39198dee40bd100658aa69e95d1895 | <ide><path>tests/task/task_runner/test_standard_task_runner.py
<ide> import psutil
<ide> import pytest
<ide>
<del>from airflow import models, settings
<ide> from airflow.jobs.local_task_job import LocalTaskJob
<del>from airflow.models import TaskInstance as TI
<add>from airflow.models.dagbag import DagBag
<add>from airflow.models.taskinstance import TaskInstance
<ide> from airflow.task.task_runner.standard_task_runner import StandardTaskRunner
<ide> from airflow.utils import timezone
<ide> from airflow.utils.platform import getuser
<add>from airflow.utils.session import create_session
<ide> from airflow.utils.state import State
<ide> from tests.test_utils.db import clear_db_runs
<ide>
<ide> def logging_and_db(self):
<ide> (as the test environment does not have enough context for the normal
<ide> way to run) and ensures they reset back to normal on the way out.
<ide> """
<add> clear_db_runs()
<ide> dictConfig(LOGGING_CONFIG)
<ide> yield
<ide> airflow_logger = logging.getLogger('airflow')
<ide> airflow_logger.handlers = []
<del> try:
<del> clear_db_runs()
<del> except Exception:
<del> # It might happen that we lost connection to the server here so we need to ignore any errors here
<del> pass
<add> clear_db_runs()
<ide>
<ide> def test_start_and_terminate(self):
<ide> local_task_job = mock.Mock()
<ide> def test_on_kill(self):
<ide> except OSError:
<ide> pass
<ide>
<del> dagbag = models.DagBag(
<add> dagbag = DagBag(
<ide> dag_folder=TEST_DAG_FOLDER,
<ide> include_examples=False,
<ide> )
<ide> dag = dagbag.dags.get('test_on_kill')
<ide> task = dag.get_task('task1')
<ide>
<del> session = settings.Session()
<add> with create_session() as session:
<add> dag.create_dagrun(
<add> run_id="test",
<add> state=State.RUNNING,
<add> execution_date=DEFAULT_DATE,
<add> start_date=DEFAULT_DATE,
<add> session=session,
<add> )
<add> ti = TaskInstance(task=task, execution_date=DEFAULT_DATE)
<add> job1 = LocalTaskJob(task_instance=ti, ignore_ti_state=True)
<add> session.commit()
<ide>
<del> dag.clear()
<del> dag.create_dagrun(
<del> run_id="test",
<del> state=State.RUNNING,
<del> execution_date=DEFAULT_DATE,
<del> start_date=DEFAULT_DATE,
<del> session=session,
<del> )
<del> ti = TI(task=task, execution_date=DEFAULT_DATE)
<del> job1 = LocalTaskJob(task_instance=ti, ignore_ti_state=True)
<del> session.commit()
<add> runner = StandardTaskRunner(job1)
<add> runner.start()
<ide>
<del> runner = StandardTaskRunner(job1)
<del> runner.start()
<add> # give the task some time to startup
<add> time.sleep(3)
<ide>
<del> # give the task some time to startup
<del> time.sleep(3)
<add> pgid = os.getpgid(runner.process.pid)
<add> assert pgid > 0
<add> assert pgid != os.getpgid(0), "Task should be in a different process group to us"
<ide>
<del> pgid = os.getpgid(runner.process.pid)
<del> assert pgid > 0
<del> assert pgid != os.getpgid(0), "Task should be in a different process group to us"
<add> processes = list(self._procs_in_pgroup(pgid))
<ide>
<del> processes = list(self._procs_in_pgroup(pgid))
<add> runner.terminate()
<ide>
<del> runner.terminate()
<add> session.close() # explicitly close as `create_session`s commit will blow up otherwise
<ide>
<ide> # Wait some time for the result
<ide> for _ in range(20): | 1 |
Mixed | Python | use flags utils and distribution_utils | 252e63849b5cf8dd12d6930dc2e9f8c51ea70251 | <ide><path>official/nlp/bert/README.md
<ide> script should run with `tf-nightly`.
<ide> Just add the following flags to `run_classifier.py` or `run_squad.py`:
<ide>
<ide> ```shell
<del> --strategy_type=tpu
<add> --distribution_strategy=tpu
<ide> --tpu=grpc://${TPU_IP_ADDRESS}:8470
<ide> ```
<ide>
<ide> python run_classifier.py \
<ide> --learning_rate=2e-5 \
<ide> --num_train_epochs=3 \
<ide> --model_dir=${MODEL_DIR} \
<del> --strategy_type=mirror
<add> --distribution_strategy=mirror
<ide> ```
<ide>
<ide> To use TPU, you only need to switch distribution strategy type to `tpu` with TPU
<ide> python run_classifier.py \
<ide> --learning_rate=2e-5 \
<ide> --num_train_epochs=3 \
<ide> --model_dir=${MODEL_DIR} \
<del> --strategy_type=tpu \
<add> --distribution_strategy=tpu \
<ide> --tpu=grpc://${TPU_IP_ADDRESS}:8470
<ide> ```
<ide>
<ide> python run_squad.py \
<ide> --learning_rate=8e-5 \
<ide> --num_train_epochs=2 \
<ide> --model_dir=${MODEL_DIR} \
<del> --strategy_type=mirror
<add> --distribution_strategy=mirror
<ide> ```
<ide>
<ide> To use TPU, you need switch distribution strategy type to `tpu` with TPU
<ide> python run_squad.py \
<ide> --learning_rate=8e-5 \
<ide> --num_train_epochs=2 \
<ide> --model_dir=${MODEL_DIR} \
<del> --strategy_type=tpu \
<add> --distribution_strategy=tpu \
<ide> --tpu=grpc://${TPU_IP_ADDRESS}:8470
<ide> ```
<ide>
<ide><path>official/nlp/bert/common_flags.py
<ide>
<ide> def define_common_bert_flags():
<ide> """Define common flags for BERT tasks."""
<add> flags_core.define_base(
<add> data_dir=False,
<add> model_dir=True,
<add> clean=False,
<add> train_epochs=False,
<add> epochs_between_evals=False,
<add> stop_threshold=False,
<add> batch_size=False,
<add> num_gpu=True,
<add> hooks=False,
<add> export_dir=False,
<add> distribution_strategy=True,
<add> run_eagerly=True)
<ide> flags.DEFINE_string('bert_config_file', None,
<ide> 'Bert configuration file to define core bert layers.')
<del> flags.DEFINE_string('model_dir', None, (
<del> 'The directory where the model weights and training/evaluation summaries '
<del> 'are stored. If not specified, save to /tmp/bert20/.'))
<ide> flags.DEFINE_string(
<ide> 'model_export_path', None,
<ide> 'Path to the directory, where trainined model will be '
<ide> def define_common_bert_flags():
<ide> flags.DEFINE_string(
<ide> 'init_checkpoint', None,
<ide> 'Initial checkpoint (usually from a pre-trained BERT model).')
<del> flags.DEFINE_enum(
<del> 'strategy_type', 'mirror', ['tpu', 'mirror', 'multi_worker_mirror'],
<del> 'Distribution Strategy type to use for training. `tpu` uses '
<del> 'TPUStrategy for running on TPUs, `mirror` uses GPUs with single host, '
<del> '`multi_worker_mirror` uses CPUs or GPUs with multiple hosts.')
<ide> flags.DEFINE_integer('num_train_epochs', 3,
<ide> 'Total number of training epochs to perform.')
<ide> flags.DEFINE_integer(
<ide> def define_common_bert_flags():
<ide> 'inside.')
<ide> flags.DEFINE_float('learning_rate', 5e-5,
<ide> 'The initial learning rate for Adam.')
<del> flags.DEFINE_boolean(
<del> 'run_eagerly', False,
<del> 'Run the model op by op without building a model function.')
<ide> flags.DEFINE_boolean(
<ide> 'scale_loss', False,
<ide> 'Whether to divide the loss by number of replica inside the per-replica '
<ide><path>official/nlp/bert/run_classifier.py
<ide> from official.nlp.bert import common_flags
<ide> from official.nlp.bert import input_pipeline
<ide> from official.nlp.bert import model_saving_utils
<add>from official.utils.misc import distribution_utils
<ide> from official.utils.misc import keras_utils
<del>from official.utils.misc import tpu_lib
<ide>
<ide> flags.DEFINE_enum(
<ide> 'mode', 'train_and_eval', ['train_and_eval', 'export_only'],
<ide> def main(_):
<ide> if not FLAGS.model_dir:
<ide> FLAGS.model_dir = '/tmp/bert20/'
<ide>
<del> strategy = None
<del> if FLAGS.strategy_type == 'mirror':
<del> strategy = tf.distribute.MirroredStrategy()
<del> elif FLAGS.strategy_type == 'tpu':
<del> cluster_resolver = tpu_lib.tpu_initialize(FLAGS.tpu)
<del> strategy = tf.distribute.experimental.TPUStrategy(cluster_resolver)
<del> else:
<del> raise ValueError('The distribution strategy type is not supported: %s' %
<del> FLAGS.strategy_type)
<del>
<add> strategy = distribution_utils.get_distribution_strategy(
<add> distribution_strategy=FLAGS.distribution_strategy,
<add> num_gpus=FLAGS.num_gpus,
<add> tpu_address=FLAGS.tpu)
<ide> max_seq_length = input_meta_data['max_seq_length']
<ide> train_input_fn = get_dataset_fn(
<ide> FLAGS.train_data_path,
<ide><path>official/nlp/bert/run_pretraining.py
<ide> from official.nlp.bert import common_flags
<ide> from official.nlp.bert import input_pipeline
<ide> from official.nlp.bert import model_saving_utils
<add>from official.utils.misc import distribution_utils
<ide> from official.utils.misc import tpu_lib
<ide>
<ide> flags.DEFINE_string('input_files', None,
<ide> def main(_):
<ide>
<ide> if not FLAGS.model_dir:
<ide> FLAGS.model_dir = '/tmp/bert20/'
<del> strategy = None
<del> if FLAGS.strategy_type == 'mirror':
<del> strategy = tf.distribute.MirroredStrategy()
<del> elif FLAGS.strategy_type == 'tpu':
<del> cluster_resolver = tpu_lib.tpu_initialize(FLAGS.tpu)
<del> strategy = tf.distribute.experimental.TPUStrategy(cluster_resolver)
<del> else:
<del> raise ValueError('The distribution strategy type is not supported: %s' %
<del> FLAGS.strategy_type)
<add> strategy = distribution_utils.get_distribution_strategy(
<add> distribution_strategy=FLAGS.distribution_strategy,
<add> num_gpus=FLAGS.num_gpus,
<add> tpu_address=FLAGS.tpu)
<ide> if strategy:
<ide> print('***** Number of cores used : ', strategy.num_replicas_in_sync)
<ide>
<ide><path>official/nlp/bert/run_squad.py
<ide> from official.nlp.bert import model_saving_utils
<ide> from official.nlp.bert import squad_lib
<ide> from official.nlp.bert import tokenization
<add>from official.utils.misc import distribution_utils
<ide> from official.utils.misc import keras_utils
<ide> from official.utils.misc import tpu_lib
<ide>
<ide> def main(_):
<ide> export_squad(FLAGS.model_export_path, input_meta_data)
<ide> return
<ide>
<del> strategy = None
<del> if FLAGS.strategy_type == 'mirror':
<del> strategy = tf.distribute.MirroredStrategy()
<del> elif FLAGS.strategy_type == 'multi_worker_mirror':
<del> strategy = tf.distribute.experimental.MultiWorkerMirroredStrategy()
<del> elif FLAGS.strategy_type == 'tpu':
<del> cluster_resolver = tpu_lib.tpu_initialize(FLAGS.tpu)
<del> strategy = tf.distribute.experimental.TPUStrategy(cluster_resolver)
<del> else:
<del> raise ValueError('The distribution strategy type is not supported: %s' %
<del> FLAGS.strategy_type)
<add> strategy = distribution_utils.get_distribution_strategy(
<add> distribution_strategy=FLAGS.distribution_strategy,
<add> num_gpus=FLAGS.num_gpus,
<add> tpu_address=FLAGS.tpu)
<ide> if FLAGS.mode in ('train', 'train_and_predict'):
<ide> train_squad(strategy, input_meta_data)
<ide> if FLAGS.mode in ('predict', 'train_and_predict'): | 5 |
Go | Go | log output as string | d898372568a3e00d4bda0359393d833a9f30fe1d | <ide><path>integration-cli/docker_cli_exec_test.go
<ide> func (s *DockerSuite) TestExecParseError(c *check.C) {
<ide> }
<ide>
<ide> func (s *DockerSuite) TestExecStopNotHanging(c *check.C) {
<del> if out, err := exec.Command(dockerBinary, "run", "-d", "--name", "testing", "busybox", "top").CombinedOutput(); err != nil {
<add> runCmd := exec.Command(dockerBinary, "run", "-d", "--name", "testing", "busybox", "top")
<add> if out, _, err := runCommandWithOutput(runCmd); err != nil {
<ide> c.Fatal(out, err)
<ide> }
<ide> | 1 |
PHP | PHP | add test for #5a6c10d | 9b7eea0fa73c0bc359bec0132ccb8afec66084a0 | <ide><path>tests/Validation/ValidationValidatorTest.php
<ide> public function testAttributeNamesAreReplaced()
<ide> $this->assertFalse($v->passes());
<ide> $v->messages()->setFormat(':message');
<ide> $this->assertEquals('Name is required!', $v->messages()->first('name'));
<add>
<add> //set customAttributes by setter
<add> $trans = $this->getRealTranslator();
<add> $trans->addResource('array', array('validation.required' => ':attribute is required!'), 'en', 'messages');
<add> $customAttributes = array('name' => 'Name');
<add> $v = new Validator($trans, array('name' => ''), array('name' => 'Required'));
<add> $v->setCustomAttributes($customAttributes);
<add> $this->assertFalse($v->passes());
<add> $v->messages()->setFormat(':message');
<add> $this->assertEquals('Name is required!', $v->messages()->first('name'));
<add>
<ide>
<ide> $trans = $this->getRealTranslator();
<ide> $trans->addResource('array', array('validation.required' => ':attribute is required!'), 'en', 'messages');
<ide> public function testDisplayableValuesAreReplaced()
<ide> $this->assertFalse($v->passes());
<ide> $v->messages()->setFormat(':message');
<ide> $this->assertEquals('type must be included in Short, Long.', $v->messages()->first('type'));
<del>
<add>
<add> // set custom values by setter
<add> $trans = $this->getRealTranslator();
<add> $trans->addResource('array', array('validation.in' => ':attribute must be included in :values.'), 'en', 'messages');
<add> $customValues = array(
<add> 'type' =>
<add> array(
<add> '5' => 'Short',
<add> '300' => 'Long',
<add> )
<add> );
<add> $v = new Validator($trans, array('type' => '4'), array('type' => 'in:5,300'));
<add> $v->setCustomValues($customValues);
<add> $this->assertFalse($v->passes());
<add> $v->messages()->setFormat(':message');
<add> $this->assertEquals('type must be included in Short, Long.', $v->messages()->first('type'));
<ide> }
<ide>
<ide> | 1 |
Go | Go | fix goroutine leak | 2a331a5ef7f2d3f044773010ff3221d2ea36a921 | <ide><path>pkg/system/rm_test.go
<ide> func TestEnsureRemoveAllWithMount(t *testing.T) {
<ide> t.Fatal(err)
<ide> }
<ide>
<del> done := make(chan struct{})
<add> done := make(chan struct{}, 1)
<ide> go func() {
<ide> err = EnsureRemoveAll(dir1)
<ide> close(done) | 1 |
Ruby | Ruby | reduce odeprecated warnings | 459b113e28cab57cde94625b3f284e54986c2167 | <ide><path>Library/Homebrew/utils.rb
<ide> def odeprecated(method, replacement = nil, disable: false, disable_on: nil, call
<ide> # - Location of caller of deprecated method (if all else fails).
<ide> backtrace = caller
<ide> tap_message = nil
<add>
<add> # Don't throw deprecations at all for cached or .brew formulae.
<add> return if backtrace.any? do |line|
<add> line.include?(HOMEBREW_CACHE) || line.include?("/.brew/")
<add> end
<add>
<ide> caller_message = backtrace.detect do |line|
<ide> next unless line =~ %r{^#{Regexp.escape(HOMEBREW_LIBRARY)}/Taps/([^/]+/[^/]+)/}
<ide> tap = Tap.fetch Regexp.last_match(1)
<ide> tap_message = "\nPlease report this to the #{tap} tap!"
<ide> true
<ide> end
<ide> caller_message ||= backtrace.detect do |line|
<del> # Don't throw deprecations at all for cached or .brew formulae.
<del> next false if line.include?(HOMEBREW_CACHE)
<del> next false if line.include?("/.brew/")
<ide> !line.start_with?("#{HOMEBREW_LIBRARY_PATH}/compat/")
<ide> end
<ide> caller_message ||= backtrace[1] | 1 |
Go | Go | improve routes initialization | 8f68adfaf0231ded0ba6bd2ec522711752031885 | <ide><path>cmd/dockerd/daemon.go
<ide> func (cli *DaemonCli) start(opts *daemonOptions) (err error) {
<ide> }()
<ide> }
<ide>
<add> // TODO: extract to newApiServerConfig()
<ide> serverConfig := &apiserver.Config{
<ide> Logging: true,
<ide> SocketGroup: cli.Config.SocketGroup,
<ide> func (cli *DaemonCli) start(opts *daemonOptions) (err error) {
<ide> cli.Config.Hosts = make([]string, 1)
<ide> }
<ide>
<del> api := apiserver.New(serverConfig)
<del> cli.api = api
<add> cli.api = apiserver.New(serverConfig)
<ide>
<ide> var hosts []string
<ide>
<ide> func (cli *DaemonCli) start(opts *daemonOptions) (err error) {
<ide> }
<ide> logrus.Debugf("Listener created for HTTP on %s (%s)", proto, addr)
<ide> hosts = append(hosts, protoAddrParts[1])
<del> api.Accept(addr, ls...)
<add> cli.api.Accept(addr, ls...)
<ide> }
<ide>
<ide> registryService := registry.NewService(cli.Config.ServiceOptions)
<ide> func (cli *DaemonCli) start(opts *daemonOptions) (err error) {
<ide>
<ide> pluginStore := plugin.NewStore()
<ide>
<del> if err := cli.initMiddlewares(api, serverConfig, pluginStore); err != nil {
<add> if err := cli.initMiddlewares(cli.api, serverConfig, pluginStore); err != nil {
<ide> logrus.Fatalf("Error creating middlewares: %v", err)
<ide> }
<ide>
<ide> if system.LCOWSupported() {
<ide> logrus.Warnln("LCOW support is enabled - this feature is incomplete")
<ide> }
<ide>
<del> sm, err := session.NewManager()
<del> if err != nil {
<del> return errors.Wrap(err, "failed to create sessionmanager")
<del> }
<del>
<ide> d, err := daemon.NewDaemon(cli.Config, registryService, containerdRemote, pluginStore)
<ide> if err != nil {
<ide> return fmt.Errorf("Error starting daemon: %v", err)
<ide> func (cli *DaemonCli) start(opts *daemonOptions) (err error) {
<ide> return fmt.Errorf("Error validating authorization plugin: %v", err)
<ide> }
<ide>
<add> // TODO: move into startMetricsServer()
<ide> if cli.Config.MetricsAddress != "" {
<ide> if !d.HasExperimental() {
<ide> return fmt.Errorf("metrics-addr is only supported when experimental is enabled")
<ide> func (cli *DaemonCli) start(opts *daemonOptions) (err error) {
<ide> }
<ide> }
<ide>
<add> // TODO: createAndStartCluster()
<ide> name, _ := os.Hostname()
<ide>
<ide> // Use a buffered channel to pass changes from store watch API to daemon
<ide> func (cli *DaemonCli) start(opts *daemonOptions) (err error) {
<ide> logrus.Fatalf("Error starting cluster component: %v", err)
<ide> }
<ide>
<del> builderStateDir := filepath.Join(cli.Config.Root, "builder")
<del>
<del> fsCache, err := fscache.NewFSCache(fscache.Opt{
<del> Backend: fscache.NewNaiveCacheBackend(builderStateDir),
<del> Root: builderStateDir,
<del> GCPolicy: fscache.GCPolicy{ // TODO: expose this in config
<del> MaxSize: 1024 * 1024 * 512, // 512MB
<del> MaxKeepDuration: 7 * 24 * time.Hour, // 1 week
<del> },
<del> })
<del> if err != nil {
<del> return errors.Wrap(err, "failed to create fscache")
<del> }
<del>
<del> manager, err := dockerfile.NewBuildManager(d, sm, fsCache, d.IDMappings())
<del> if err != nil {
<del> return err
<del> }
<del>
<del> bb, err := buildbackend.NewBackend(d, manager, fsCache)
<del> if err != nil {
<del> return errors.Wrap(err, "failed to create buildmanager")
<del> }
<del>
<ide> // Restart all autostart containers which has a swarm endpoint
<ide> // and is not yet running now that we have successfully
<ide> // initialized the cluster.
<ide> func (cli *DaemonCli) start(opts *daemonOptions) (err error) {
<ide>
<ide> cli.d = d
<ide>
<del> initRouter(api, d, c, sm, bb, fsCache)
<add> routerOptions, err := newRouterOptions(cli.Config, d)
<add> if err != nil {
<add> return err
<add> }
<add> routerOptions.api = cli.api
<add> routerOptions.cluster = c
<add>
<add> initRouter(routerOptions)
<ide>
<ide> // process cluster change notifications
<ide> watchCtx, cancel := context.WithCancel(context.Background())
<ide> func (cli *DaemonCli) start(opts *daemonOptions) (err error) {
<ide> // We need to start it as a goroutine and wait on it so
<ide> // daemon doesn't exit
<ide> serveAPIWait := make(chan error)
<del> go api.Wait(serveAPIWait)
<add> go cli.api.Wait(serveAPIWait)
<ide>
<ide> // after the daemon is done setting up we can notify systemd api
<ide> notifySystem()
<ide> func (cli *DaemonCli) start(opts *daemonOptions) (err error) {
<ide> return nil
<ide> }
<ide>
<add>type routerOptions struct {
<add> sessionManager *session.Manager
<add> buildBackend *buildbackend.Backend
<add> buildCache *fscache.FSCache
<add> daemon *daemon.Daemon
<add> api *apiserver.Server
<add> cluster *cluster.Cluster
<add>}
<add>
<add>func newRouterOptions(config *config.Config, daemon *daemon.Daemon) (routerOptions, error) {
<add> opts := routerOptions{}
<add> sm, err := session.NewManager()
<add> if err != nil {
<add> return opts, errors.Wrap(err, "failed to create sessionmanager")
<add> }
<add>
<add> builderStateDir := filepath.Join(config.Root, "builder")
<add>
<add> buildCache, err := fscache.NewFSCache(fscache.Opt{
<add> Backend: fscache.NewNaiveCacheBackend(builderStateDir),
<add> Root: builderStateDir,
<add> GCPolicy: fscache.GCPolicy{ // TODO: expose this in config
<add> MaxSize: 1024 * 1024 * 512, // 512MB
<add> MaxKeepDuration: 7 * 24 * time.Hour, // 1 week
<add> },
<add> })
<add> if err != nil {
<add> return opts, errors.Wrap(err, "failed to create fscache")
<add> }
<add>
<add> manager, err := dockerfile.NewBuildManager(daemon, sm, buildCache, daemon.IDMappings())
<add> if err != nil {
<add> return opts, err
<add> }
<add>
<add> bb, err := buildbackend.NewBackend(daemon, manager, buildCache)
<add> if err != nil {
<add> return opts, errors.Wrap(err, "failed to create buildmanager")
<add> }
<add>
<add> return routerOptions{
<add> sessionManager: sm,
<add> buildBackend: bb,
<add> buildCache: buildCache,
<add> daemon: daemon,
<add> }, nil
<add>}
<add>
<ide> func (cli *DaemonCli) reloadConfig() {
<ide> reload := func(config *config.Config) {
<ide>
<ide> func loadDaemonCliConfig(opts *daemonOptions) (*config.Config, error) {
<ide> return conf, nil
<ide> }
<ide>
<del>func initRouter(s *apiserver.Server, d *daemon.Daemon, c *cluster.Cluster, sm *session.Manager, bb *buildbackend.Backend, bc *fscache.FSCache) {
<add>func initRouter(opts routerOptions) {
<ide> decoder := runconfig.ContainerDecoder{}
<ide>
<ide> routers := []router.Router{
<ide> // we need to add the checkpoint router before the container router or the DELETE gets masked
<del> checkpointrouter.NewRouter(d, decoder),
<del> container.NewRouter(d, decoder),
<del> image.NewRouter(d, decoder),
<del> systemrouter.NewRouter(d, c, bc),
<del> volume.NewRouter(d),
<del> build.NewRouter(bb, d),
<del> sessionrouter.NewRouter(sm),
<del> swarmrouter.NewRouter(c),
<del> pluginrouter.NewRouter(d.PluginManager()),
<del> distributionrouter.NewRouter(d),
<add> checkpointrouter.NewRouter(opts.daemon, decoder),
<add> container.NewRouter(opts.daemon, decoder),
<add> image.NewRouter(opts.daemon, decoder),
<add> systemrouter.NewRouter(opts.daemon, opts.cluster, opts.buildCache),
<add> volume.NewRouter(opts.daemon),
<add> build.NewRouter(opts.buildBackend, opts.daemon),
<add> sessionrouter.NewRouter(opts.sessionManager),
<add> swarmrouter.NewRouter(opts.cluster),
<add> pluginrouter.NewRouter(opts.daemon.PluginManager()),
<add> distributionrouter.NewRouter(opts.daemon),
<ide> }
<ide>
<del> if d.NetworkControllerEnabled() {
<del> routers = append(routers, network.NewRouter(d, c))
<add> if opts.daemon.NetworkControllerEnabled() {
<add> routers = append(routers, network.NewRouter(opts.daemon, opts.cluster))
<ide> }
<ide>
<del> if d.HasExperimental() {
<add> if opts.daemon.HasExperimental() {
<ide> for _, r := range routers {
<ide> for _, route := range r.Routes() {
<ide> if experimental, ok := route.(router.ExperimentalRoute); ok {
<ide> func initRouter(s *apiserver.Server, d *daemon.Daemon, c *cluster.Cluster, sm *s
<ide> }
<ide> }
<ide>
<del> s.InitRouter(debug.IsEnabled(), routers...)
<add> opts.api.InitRouter(debug.IsEnabled(), routers...)
<ide> }
<ide>
<add>// TODO: remove this from cli and return the authzMiddleware
<ide> func (cli *DaemonCli) initMiddlewares(s *apiserver.Server, cfg *apiserver.Config, pluginStore *plugin.Store) error {
<ide> v := cfg.Version
<ide> | 1 |
Python | Python | add simpler verbose mode to sequential model | 731f0ab42b808b0f90d4c1e97fcd80dbc214191c | <ide><path>keras/models.py
<ide> def fit(self, X, y, batch_size=128, nb_epoch=100, verbose=1,
<ide> np.random.shuffle(index_array)
<ide>
<ide> batches = make_batches(len(X), batch_size)
<del> progbar = Progbar(target=len(X))
<add> if verbose==1:
<add> progbar = Progbar(target=len(X))
<ide> for batch_index, (batch_start, batch_end) in enumerate(batches):
<ide> if shuffle:
<ide> batch_ids = index_array[batch_start:batch_end]
<ide> def fit(self, X, y, batch_size=128, nb_epoch=100, verbose=1,
<ide> # logging
<ide> if verbose:
<ide> is_last_batch = (batch_index == len(batches) - 1)
<del> if not is_last_batch or not do_validation:
<del> if show_accuracy:
<del> progbar.update(batch_end, [('loss', loss), ('acc.', acc)])
<del> else:
<del> progbar.update(batch_end, [('loss', loss)])
<add> if (not is_last_batch or not do_validation):
<add> if verbose==1:
<add> if show_accuracy:
<add> progbar.update(batch_end, [('loss', loss), ('acc.', acc)])
<add> else:
<add> progbar.update(batch_end, [('loss', loss)])
<ide> else:
<ide> if show_accuracy:
<ide> val_loss, val_acc = self.test(X_val, y_val, accuracy=True)
<del> progbar.update(batch_end, [('loss', loss), ('acc.', acc), ('val. loss', val_loss), ('val. acc.', val_acc)])
<add> if verbose==1:
<add> progbar.update(batch_end, [('loss', loss), ('acc.', acc), ('val. loss', val_loss), ('val. acc.', val_acc)])
<add> if verbose==2:
<add> print("loss: %.4f - acc.: %.4f - val. loss: %.4f - val. acc.: %.4f" % (loss, acc, val_loss, val_acc))
<ide> else:
<ide> val_loss = self.test(X_val, y_val, accuracy=False)
<del> progbar.update(batch_end, [('loss', loss), ('val. loss', val_loss)])
<add> if verbose==1:
<add> progbar.update(batch_end, [('loss', loss), ('val. loss', val_loss)])
<add> if verbose==2:
<add> print("loss: %.4f - acc.: %.4f" % (loss, acc))
<ide>
<ide>
<ide> def predict_proba(self, X, batch_size=128, verbose=1):
<ide> batches = make_batches(len(X), batch_size)
<del> if verbose:
<add> if verbose==1:
<ide> progbar = Progbar(target=len(X))
<ide> for batch_index, (batch_start, batch_end) in enumerate(batches):
<ide> X_batch = X[batch_start:batch_end]
<ide> def predict_proba(self, X, batch_size=128, verbose=1):
<ide> preds = np.zeros(shape)
<ide> preds[batch_start:batch_end] = batch_preds
<ide>
<del> if verbose:
<add> if verbose==1:
<ide> progbar.update(batch_end)
<add>
<ide> return preds
<ide>
<ide>
<ide> def evaluate(self, X, y, batch_size=128, show_accuracy=False, verbose=1):
<ide> tot_score += loss
<ide>
<ide> if verbose:
<del> if show_accuracy:
<del> progbar.update(batch_end, [('loss', loss), ('acc.', acc)])
<del> else:
<del> progbar.update(batch_end, [('loss', loss)])
<add> if verbose==1:
<add> if show_accuracy:
<add> progbar.update(batch_end, [('loss', loss), ('acc.', acc)])
<add> else:
<add> progbar.update(batch_end, [('loss', loss)])
<add> if batch_index == len(batches) and verbose==2:
<add> if show_accuracy:
<add> print("loss: %.4f - acc.: %.4f" % (loss, acc))
<add> else:
<add> print("loss: %.4f")
<ide>
<ide> if show_accuracy:
<ide> return tot_score/len(batches), tot_acc/len(batches) | 1 |
Ruby | Ruby | improve dependency listing | e02d6f2500fc81f98645d16b614ae83a76713a78 | <ide><path>Library/Homebrew/cmd/info.rb
<ide> def info_formula f
<ide> puts
<ide> end
<ide>
<del> puts "Depends on: #{f.deps*', '}" unless f.deps.empty?
<del> conflicts = f.conflicts.map { |c| c.formula }.sort
<add> conflicts = f.conflicts.map(&:formula).sort!
<ide> puts "Conflicts with: #{conflicts*', '}" unless conflicts.empty?
<ide>
<ide> if f.rack.directory?
<ide> def info_formula f
<ide> history = github_info(f)
<ide> puts history if history
<ide>
<add> unless f.deps.empty?
<add> ohai "Dependencies"
<add> %w{build required recommended optional}.map do |type|
<add> deps = f.deps.send(type)
<add> puts "#{type.capitalize}: #{deps*', '}" unless deps.empty?
<add> end
<add> end
<add>
<ide> unless f.build.empty?
<ide> require 'cmd/options'
<ide> ohai "Options" | 1 |
Java | Java | add support for mime-based message conversion | 7d3b6497b5e95eaa212cf2dd985d6a7e686e2940 | <ide><path>spring-messaging/src/main/java/org/springframework/messaging/core/AbstractDestinationResolvingMessagingTemplate.java
<ide> */
<ide> package org.springframework.messaging.core;
<ide>
<add>import java.util.Map;
<add>
<ide> import org.springframework.messaging.Message;
<ide> import org.springframework.util.Assert;
<ide>
<ide>
<ide> /**
<add> * Base class for a messaging template that can resolve String-based destinations.
<add> *
<ide> * @author Mark Fisher
<add> * @author Rossen Stoyanchev
<ide> * @since 4.0
<ide> */
<del>public abstract class AbstractDestinationResolvingMessagingTemplate<D> extends AbstractMessagingTemplate<D>
<del> implements DestinationResolvingMessageSendingOperations<D>,
<add>public abstract class AbstractDestinationResolvingMessagingTemplate<D> extends
<add> AbstractMessagingTemplate<D> implements
<add> DestinationResolvingMessageSendingOperations<D>,
<ide> DestinationResolvingMessageReceivingOperations<D>,
<ide> DestinationResolvingMessageRequestReplyOperations<D> {
<ide>
<ide> protected final D resolveDestination(String destinationName) {
<ide> }
<ide>
<ide> @Override
<del> public <T> void convertAndSend(String destinationName, T message) {
<del> this.convertAndSend(destinationName, message, null);
<add> public <T> void convertAndSend(String destinationName, T payload) {
<add> Map<String, Object> headers = null;
<add> this.convertAndSend(destinationName, payload, headers);
<add> }
<add>
<add> @Override
<add> public <T> void convertAndSend(String destinationName, T payload, Map<String, Object> headers) {
<add> MessagePostProcessor postProcessor = null;
<add> this.convertAndSend(destinationName, payload, headers, postProcessor);
<ide> }
<ide>
<ide> @Override
<del> public <T> void convertAndSend(String destinationName, T message, MessagePostProcessor postProcessor) {
<add> public <T> void convertAndSend(String destinationName, T payload, MessagePostProcessor postProcessor) {
<add> Map<String, Object> headers = null;
<add> this.convertAndSend(destinationName, payload, headers, postProcessor);
<add> }
<add>
<add> @Override
<add> public <T> void convertAndSend(String destinationName, T payload, Map<String, Object> headers,
<add> MessagePostProcessor postProcessor) {
<add>
<ide> D destination = resolveDestination(destinationName);
<del> super.convertAndSend(destination, message, postProcessor);
<add> super.convertAndSend(destination, payload, headers, postProcessor);
<ide> }
<ide>
<ide> @Override
<ide> public <P> Message<P> receive(String destinationName) {
<ide> }
<ide>
<ide> @Override
<del> public Object receiveAndConvert(String destinationName) {
<add> public <T> T receiveAndConvert(String destinationName, Class<T> targetClass) {
<ide> D destination = resolveDestination(destinationName);
<del> return super.receiveAndConvert(destination);
<add> return super.receiveAndConvert(destination, targetClass);
<ide> }
<ide>
<ide> @Override
<ide> public Message<?> sendAndReceive(String destinationName, Message<?> requestMessa
<ide> }
<ide>
<ide> @Override
<del> public Object convertSendAndReceive(String destinationName, Object request) {
<add> public <T> T convertSendAndReceive(String destinationName, Object request, Class<T> targetClass) {
<add> D destination = resolveDestination(destinationName);
<add> return super.convertSendAndReceive(destination, request, targetClass);
<add> }
<add>
<add> @Override
<add> public <T> T convertSendAndReceive(String destinationName, Object request, Map<String, Object> headers,
<add> Class<T> targetClass) {
<add>
<add> D destination = resolveDestination(destinationName);
<add> return super.convertSendAndReceive(destination, request, headers, targetClass);
<add> }
<add>
<add> @Override
<add> public <T> T convertSendAndReceive(String destinationName, Object request, Class<T> targetClass,
<add> MessagePostProcessor postProcessor) {
<add>
<ide> D destination = resolveDestination(destinationName);
<del> return super.convertSendAndReceive(destination, request);
<add> return super.convertSendAndReceive(destination, request, targetClass, postProcessor);
<ide> }
<ide>
<ide> @Override
<del> public Object convertSendAndReceive(String destinationName, Object request, MessagePostProcessor postProcessor) {
<add> public <T> T convertSendAndReceive(String destinationName, Object request, Map<String, Object> headers,
<add> Class<T> targetClass, MessagePostProcessor postProcessor) {
<add>
<ide> D destination = resolveDestination(destinationName);
<del> return super.convertSendAndReceive(destination, request, postProcessor);
<add> return super.convertSendAndReceive(destination, request, headers, targetClass, postProcessor);
<ide> }
<ide>
<ide> }
<ide><path>spring-messaging/src/main/java/org/springframework/messaging/core/AbstractMessageSendingTemplate.java
<ide> */
<ide> package org.springframework.messaging.core;
<ide>
<add>import java.util.ArrayList;
<add>import java.util.Collection;
<add>import java.util.Map;
<add>
<ide> import org.apache.commons.logging.Log;
<ide> import org.apache.commons.logging.LogFactory;
<ide> import org.springframework.messaging.Message;
<add>import org.springframework.messaging.MessageHeaders;
<ide> import org.springframework.messaging.MessagingException;
<add>import org.springframework.messaging.support.converter.ByteArrayMessageConverter;
<add>import org.springframework.messaging.support.converter.CompositeMessageConverter;
<ide> import org.springframework.messaging.support.converter.MessageConverter;
<del>import org.springframework.messaging.support.converter.SimplePayloadMessageConverter;
<add>import org.springframework.messaging.support.converter.StringMessageConverter;
<ide> import org.springframework.util.Assert;
<ide>
<ide>
<ide> /**
<add> * Base class for templates that support sending messages.
<add> *
<ide> * @author Mark Fisher
<add> * @author Rossen Stoyanchev
<ide> * @since 4.0
<ide> */
<ide> public abstract class AbstractMessageSendingTemplate<D> implements MessageSendingOperations<D> {
<ide>
<ide> private volatile D defaultDestination;
<ide>
<del> private volatile MessageConverter converter = new SimplePayloadMessageConverter();
<add> private volatile MessageConverter converter;
<ide>
<ide>
<add> public AbstractMessageSendingTemplate() {
<add> Collection<MessageConverter> converters = new ArrayList<MessageConverter>();
<add> converters.add(new StringMessageConverter());
<add> converters.add(new ByteArrayMessageConverter());
<add> this.converter = new CompositeMessageConverter(converters);
<add> }
<add>
<ide> public void setDefaultDestination(D defaultDestination) {
<ide> this.defaultDestination = defaultDestination;
<ide> }
<ide> public void setMessageConverter(MessageConverter messageConverter) {
<ide> /**
<ide> * @return the configured {@link MessageConverter}
<ide> */
<del> public MessageConverter getConverter() {
<add> public MessageConverter getMessageConverter() {
<ide> return this.converter;
<ide> }
<ide>
<del> /**
<del> * @param converter the converter to set
<del> */
<del> public void setConverter(MessageConverter converter) {
<del> this.converter = converter;
<del> }
<ide>
<ide> @Override
<del> public <P> void send(Message<P> message) {
<add> public void send(Message<?> message) {
<ide> this.send(getRequiredDefaultDestination(), message);
<ide> }
<ide>
<ide> protected final D getRequiredDefaultDestination() {
<ide> }
<ide>
<ide> @Override
<del> public <P> void send(D destination, Message<P> message) {
<add> public void send(D destination, Message<?> message) {
<ide> this.doSend(destination, message);
<ide> }
<ide>
<ide> protected abstract void doSend(D destination, Message<?> message);
<ide>
<ide>
<ide> @Override
<del> public <T> void convertAndSend(T message) {
<add> public void convertAndSend(Object message) throws MessagingException {
<ide> this.convertAndSend(getRequiredDefaultDestination(), message);
<ide> }
<ide>
<ide> @Override
<del> public <T> void convertAndSend(D destination, T object) {
<del> this.convertAndSend(destination, object, null);
<add> public void convertAndSend(D destination, Object payload) throws MessagingException {
<add> this.convertAndSend(destination, payload, (Map<String, Object>) null);
<ide> }
<ide>
<ide> @Override
<del> public <T> void convertAndSend(T object, MessagePostProcessor postProcessor) {
<del> this.convertAndSend(getRequiredDefaultDestination(), object, postProcessor);
<add> public void convertAndSend(D destination, Object payload, Map<String, Object> headers) throws MessagingException {
<add> MessagePostProcessor postProcessor = null;
<add> this.convertAndSend(destination, payload, headers, postProcessor);
<ide> }
<ide>
<ide> @Override
<del> public <T> void convertAndSend(D destination, T object, MessagePostProcessor postProcessor)
<add> public void convertAndSend(Object payload, MessagePostProcessor postProcessor) throws MessagingException {
<add> this.convertAndSend(getRequiredDefaultDestination(), payload, postProcessor);
<add> }
<add>
<add> @Override
<add> public void convertAndSend(D destination, Object payload, MessagePostProcessor postProcessor)
<ide> throws MessagingException {
<ide>
<del> @SuppressWarnings("unchecked")
<del> Message<?> message = this.converter.toMessage(object);
<add> Map<String, Object> headers = null;
<add> this.convertAndSend(destination, payload, headers, postProcessor);
<add> }
<add>
<add> @Override
<add> public void convertAndSend(D destination, Object payload, Map<String, Object> headers,
<add> MessagePostProcessor postProcessor) throws MessagingException {
<add>
<add> MessageHeaders messageHeaders = (headers != null) ? new MessageHeaders(headers) : null;
<add> Message<?> message = this.converter.toMessage(payload, messageHeaders);
<ide> if (postProcessor != null) {
<ide> message = postProcessor.postProcessMessage(message);
<ide> }
<ide><path>spring-messaging/src/main/java/org/springframework/messaging/core/AbstractMessagingTemplate.java
<ide> */
<ide> package org.springframework.messaging.core;
<ide>
<add>import java.util.Map;
<add>
<ide> import org.springframework.messaging.Message;
<add>import org.springframework.messaging.MessageHeaders;
<ide>
<ide>
<ide> /**
<add> * Base class for a messaging template that send and receive messages.
<add> *
<ide> * @author Mark Fisher
<add> * @author Rossen Stoyanchev
<ide> * @since 4.0
<ide> */
<ide> public abstract class AbstractMessagingTemplate<D> extends AbstractMessageSendingTemplate<D>
<ide> public <P> Message<P> receive(D destination) {
<ide>
<ide>
<ide> @Override
<del> public Object receiveAndConvert() {
<del> return this.receiveAndConvert(getRequiredDefaultDestination());
<add> public <T> T receiveAndConvert(Class<T> targetClass) {
<add> return this.receiveAndConvert(getRequiredDefaultDestination(), targetClass);
<ide> }
<ide>
<ide> @SuppressWarnings("unchecked")
<ide> @Override
<del> public Object receiveAndConvert(D destination) {
<add> public <T> T receiveAndConvert(D destination, Class<T> targetClass) {
<ide> Message<?> message = this.doReceive(destination);
<del> return (message != null) ? getConverter().fromMessage(message, null) : null;
<add> if (message != null) {
<add> return (T) getMessageConverter().fromMessage(message, targetClass);
<add> }
<add> else {
<add> return null;
<add> }
<ide> }
<ide>
<del>
<ide> @Override
<ide> public Message<?> sendAndReceive(Message<?> requestMessage) {
<ide> return this.sendAndReceive(getRequiredDefaultDestination(), requestMessage);
<ide> public Message<?> sendAndReceive(D destination, Message<?> requestMessage) {
<ide>
<ide>
<ide> @Override
<del> public Object convertSendAndReceive(Object request) {
<del> return this.convertSendAndReceive(getRequiredDefaultDestination(), request);
<add> public <T> T convertSendAndReceive(Object request, Class<T> targetClass) {
<add> return this.convertSendAndReceive(getRequiredDefaultDestination(), request, targetClass);
<add> }
<add>
<add> @Override
<add> public <T> T convertSendAndReceive(D destination, Object request, Class<T> targetClass) {
<add> Map<String, Object> headers = null;
<add> return this.convertSendAndReceive(destination, request, headers, targetClass);
<ide> }
<ide>
<ide> @Override
<del> public Object convertSendAndReceive(D destination, Object request) {
<add> public <T> T convertSendAndReceive(D destination, Object request, Map<String, Object> headers,
<add> Class<T> targetClass) {
<add>
<ide> return this.convertSendAndReceive(destination, request, null);
<ide> }
<ide>
<ide> @Override
<del> public Object convertSendAndReceive(Object request, MessagePostProcessor postProcessor) {
<del> return this.convertSendAndReceive(getRequiredDefaultDestination(), request, postProcessor);
<add> public <T> T convertSendAndReceive(Object request, Class<T> targetClass, MessagePostProcessor postProcessor) {
<add> return this.convertSendAndReceive(getRequiredDefaultDestination(), request, targetClass, postProcessor);
<add> }
<add>
<add> @Override
<add> public <T> T convertSendAndReceive(D destination, Object request, Class<T> targetClass,
<add> MessagePostProcessor postProcessor) {
<add>
<add> Map<String, Object> headers = null;
<add> return this.convertSendAndReceive(destination, request, headers, targetClass, postProcessor);
<ide> }
<ide>
<ide> @SuppressWarnings("unchecked")
<ide> @Override
<del> public Object convertSendAndReceive(D destination, Object request, MessagePostProcessor postProcessor) {
<del> Message<?> requestMessage = getConverter().toMessage(request);
<add> public <T> T convertSendAndReceive(D destination, Object request, Map<String, Object> headers,
<add> Class<T> targetClass, MessagePostProcessor postProcessor) {
<add>
<add> MessageHeaders messageHeaders = (headers != null) ? new MessageHeaders(headers) : null;
<add> Message<?> requestMessage = getMessageConverter().toMessage(request, messageHeaders);
<ide> if (postProcessor != null) {
<ide> requestMessage = postProcessor.postProcessMessage(requestMessage);
<ide> }
<ide> Message<?> replyMessage = this.sendAndReceive(destination, requestMessage);
<del> return getConverter().fromMessage(replyMessage, null);
<add> return (T) getMessageConverter().fromMessage(replyMessage, targetClass);
<ide> }
<ide>
<ide> }
<ide><path>spring-messaging/src/main/java/org/springframework/messaging/core/DestinationResolvingMessageReceivingOperations.java
<ide>
<ide>
<ide> /**
<add> * A {@link MessageReceivingOperations} that can resolve a String-based destinations.
<add> *
<ide> * @author Mark Fisher
<add> * @author Rossen Stoyanchev
<ide> * @since 4.0
<ide> */
<ide> public interface DestinationResolvingMessageReceivingOperations<D> extends MessageReceivingOperations<D> {
<ide>
<ide> <P> Message<P> receive(String destinationName) throws MessagingException;
<ide>
<del> Object receiveAndConvert(String destinationName) throws MessagingException;
<add> <T> T receiveAndConvert(String destinationName, Class<T> targetClass) throws MessagingException;
<ide>
<ide> }
<ide><path>spring-messaging/src/main/java/org/springframework/messaging/core/DestinationResolvingMessageRequestReplyOperations.java
<ide> */
<ide> package org.springframework.messaging.core;
<ide>
<add>import java.util.Map;
<add>
<ide> import org.springframework.messaging.Message;
<add>import org.springframework.messaging.MessagingException;
<ide>
<ide>
<ide> /**
<add> * A {@link MessageRequestReplyOperations} that can resolve a String-based destinations.
<add> *
<ide> * @author Mark Fisher
<add> * @author Rossen Stoyanchev
<ide> * @since 4.0
<ide> */
<ide> public interface DestinationResolvingMessageRequestReplyOperations<D> extends MessageRequestReplyOperations<D> {
<ide>
<del> Message<?> sendAndReceive(String destinationName, Message<?> requestMessage);
<add> Message<?> sendAndReceive(String destinationName, Message<?> requestMessage) throws MessagingException;
<add>
<add> <T> T convertSendAndReceive(String destinationName, Object request, Class<T> targetClass)
<add> throws MessagingException;
<add>
<add> <T> T convertSendAndReceive(String destinationName, Object request, Map<String, Object> headers,
<add> Class<T> targetClass) throws MessagingException;
<ide>
<del> Object convertSendAndReceive(String destinationName, Object request);
<add> <T> T convertSendAndReceive(String destinationName, Object request,
<add> Class<T> targetClass, MessagePostProcessor requestPostProcessor) throws MessagingException;
<ide>
<del> Object convertSendAndReceive(String destinationName, Object request, MessagePostProcessor requestPostProcessor);
<add> <T> T convertSendAndReceive(String destinationName, Object request, Map<String, Object> headers,
<add> Class<T> targetClass, MessagePostProcessor requestPostProcessor) throws MessagingException;
<ide>
<ide> }
<ide><path>spring-messaging/src/main/java/org/springframework/messaging/core/DestinationResolvingMessageSendingOperations.java
<ide> */
<ide> package org.springframework.messaging.core;
<ide>
<add>import java.util.Map;
<add>
<ide> import org.springframework.messaging.Message;
<ide> import org.springframework.messaging.MessagingException;
<ide>
<ide>
<ide> /**
<add> * A {@link MessageSendingOperations} that can resolve a String-based destinations.
<add> *
<ide> * @author Mark Fisher
<add> * @author Rossen Stoyanchev
<ide> * @since 4.0
<ide> */
<ide> public interface DestinationResolvingMessageSendingOperations<D> extends MessageSendingOperations<D> {
<ide>
<ide> <T> void convertAndSend(String destinationName, T payload) throws MessagingException;
<ide>
<del> <T> void convertAndSend(String destinationName, T payload, MessagePostProcessor postProcessor)
<del> throws MessagingException;
<add> <T> void convertAndSend(String destinationName, T payload, Map<String, Object> headers) throws MessagingException;
<add>
<add> <T> void convertAndSend(String destinationName, T payload,
<add> MessagePostProcessor postProcessor) throws MessagingException;
<add>
<add> <T> void convertAndSend(String destinationName, T payload, Map<String, Object> headers,
<add> MessagePostProcessor postProcessor) throws MessagingException;
<ide>
<ide> }
<ide><path>spring-messaging/src/main/java/org/springframework/messaging/core/GenericMessagingTemplate.java
<ide>
<ide>
<ide> /**
<add> * A messaging template for sending to and/or receiving messages from a
<add> * {@link MessageChannel}.
<add> *
<ide> * @author Mark Fisher
<ide> * @since 4.0
<ide> */
<ide><path>spring-messaging/src/main/java/org/springframework/messaging/core/MessageReceivingOperations.java
<ide>
<ide>
<ide> /**
<add> * A set of operations receiving messages from a destination.
<add> *
<add> * @param <D> the type of destination from which messages can be received
<add> *
<ide> * @author Mark Fisher
<add> * @author Rossen Stoyanchev
<ide> * @since 4.0
<ide> */
<ide> public interface MessageReceivingOperations<D> {
<ide>
<ide> <P> Message<P> receive(D destination) throws MessagingException;
<ide>
<del> Object receiveAndConvert() throws MessagingException;
<add> <T> T receiveAndConvert(Class<T> targetClass) throws MessagingException;
<ide>
<del> Object receiveAndConvert(D destination) throws MessagingException;
<add> <T> T receiveAndConvert(D destination, Class<T> targetClass) throws MessagingException;
<ide>
<ide> }
<add>
<add>
<ide><path>spring-messaging/src/main/java/org/springframework/messaging/core/MessageRequestReplyOperations.java
<ide> */
<ide> package org.springframework.messaging.core;
<ide>
<add>import java.util.Map;
<add>
<ide> import org.springframework.messaging.Message;
<add>import org.springframework.messaging.MessagingException;
<ide>
<ide>
<ide> /**
<add> * A set of operations for exchanging messages to and from a destination.
<add> *
<add> * @param <D> the type of destination to send and receive messages from
<add> *
<ide> * @author Mark Fisher
<add> * @author Rossen Stoyanchev
<ide> * @since 4.0
<ide> */
<ide> public interface MessageRequestReplyOperations<D> {
<ide>
<del> Message<?> sendAndReceive(Message<?> requestMessage);
<add> Message<?> sendAndReceive(Message<?> requestMessage) throws MessagingException;
<add>
<add> Message<?> sendAndReceive(D destination, Message<?> requestMessage) throws MessagingException;
<add>
<add> <T> T convertSendAndReceive(Object request, Class<T> targetClass) throws MessagingException;
<ide>
<del> Message<?> sendAndReceive(D destination, Message<?> requestMessage);
<add> <T> T convertSendAndReceive(D destination, Object request, Class<T> targetClass) throws MessagingException;
<ide>
<del> Object convertSendAndReceive(Object request);
<add> <T> T convertSendAndReceive(D destination, Object request, Map<String, Object> headers, Class<T> targetClass)
<add> throws MessagingException;
<ide>
<del> Object convertSendAndReceive(D destination, Object request);
<add> <T> T convertSendAndReceive(Object request, Class<T> targetClass, MessagePostProcessor requestPostProcessor)
<add> throws MessagingException;
<ide>
<del> Object convertSendAndReceive(Object request, MessagePostProcessor requestPostProcessor);
<add> <T> T convertSendAndReceive(D destination, Object request, Class<T> targetClass,
<add> MessagePostProcessor requestPostProcessor) throws MessagingException;
<ide>
<del> Object convertSendAndReceive(D destination, Object request, MessagePostProcessor requestPostProcessor);
<add> <T> T convertSendAndReceive(D destination, Object request, Map<String, Object> headers,
<add> Class<T> targetClass, MessagePostProcessor requestPostProcessor) throws MessagingException;
<ide>
<ide> }
<ide><path>spring-messaging/src/main/java/org/springframework/messaging/core/MessageSendingOperations.java
<ide> */
<ide> package org.springframework.messaging.core;
<ide>
<add>import java.util.Map;
<add>
<ide> import org.springframework.messaging.Message;
<ide> import org.springframework.messaging.MessagingException;
<ide>
<ide>
<ide> /**
<add> * A set of operations sending messages to a destination.
<add> *
<add> * @param <D> the type of destination to which messages can be sent
<add> *
<ide> * @author Mark Fisher
<add> * @author Rossen Stoyanchev
<ide> * @since 4.0
<ide> */
<ide> public interface MessageSendingOperations<D> {
<ide>
<del> <P> void send(Message<P> message) throws MessagingException;
<add> void send(Message<?> message) throws MessagingException;
<add>
<add> void send(D destination, Message<?> message) throws MessagingException;
<add>
<add> void convertAndSend(Object payload) throws MessagingException;
<ide>
<del> <P> void send(D destination, Message<P> message) throws MessagingException;
<add> void convertAndSend(D destination, Object payload) throws MessagingException;
<ide>
<del> <T> void convertAndSend(T payload) throws MessagingException;
<add> void convertAndSend(D destination, Object payload, Map<String, Object> headers) throws MessagingException;
<ide>
<del> <T> void convertAndSend(D destination, T payload) throws MessagingException;
<add> void convertAndSend(Object payload, MessagePostProcessor postProcessor) throws MessagingException;
<ide>
<del> <T> void convertAndSend(T payload, MessagePostProcessor postProcessor) throws MessagingException;
<add> void convertAndSend(D destination, Object payload,
<add> MessagePostProcessor postProcessor) throws MessagingException;
<ide>
<del> <T> void convertAndSend(D destination, T payload, MessagePostProcessor postProcessor) throws MessagingException;
<add> void convertAndSend(D destination, Object payload, Map<String, Object> headers,
<add> MessagePostProcessor postProcessor) throws MessagingException;
<ide>
<ide> }
<ide><path>spring-messaging/src/main/java/org/springframework/messaging/handler/annotation/support/MessageBodyMethodArgumentResolver.java
<ide> */
<ide> public class MessageBodyMethodArgumentResolver implements HandlerMethodArgumentResolver {
<ide>
<del> private final MessageConverter<?> converter;
<add> private final MessageConverter converter;
<ide>
<ide>
<del> public MessageBodyMethodArgumentResolver(MessageConverter<?> converter) {
<add> public MessageBodyMethodArgumentResolver(MessageConverter converter) {
<ide> Assert.notNull(converter, "converter is required");
<ide> this.converter = converter;
<ide> }
<ide><path>spring-messaging/src/main/java/org/springframework/messaging/simp/SimpMessageHeaderAccessor.java
<ide> import java.util.List;
<ide> import java.util.Map;
<ide>
<del>import org.springframework.http.MediaType;
<ide> import org.springframework.messaging.Message;
<del>import org.springframework.messaging.MessageHeaders;
<ide> import org.springframework.messaging.support.NativeMessageHeaderAccessor;
<ide> import org.springframework.util.Assert;
<ide>
<ide> public String getDestination() {
<ide> return (String) getHeader(DESTINATION_HEADER);
<ide> }
<ide>
<del> public MediaType getContentType() {
<del> return (MediaType) getHeader(MessageHeaders.CONTENT_TYPE);
<del> }
<del>
<del> public void setContentType(MediaType contentType) {
<del> setHeader(MessageHeaders.CONTENT_TYPE, contentType);
<del> }
<del>
<ide> public String getSubscriptionId() {
<ide> return (String) getHeader(SUBSCRIPTION_ID_HEADER);
<ide> }
<ide><path>spring-messaging/src/main/java/org/springframework/messaging/simp/SimpMessageSendingOperations.java
<ide>
<ide> package org.springframework.messaging.simp;
<ide>
<add>import java.util.Map;
<add>
<ide> import org.springframework.messaging.MessagingException;
<ide> import org.springframework.messaging.core.MessagePostProcessor;
<ide> import org.springframework.messaging.core.MessageSendingOperations;
<ide> public interface SimpMessageSendingOperations extends MessageSendingOperations<S
<ide> *
<ide> * @param user the user that should receive the message.
<ide> * @param destination the destination to send the message to.
<del> * @param message the message to send
<add> * @param payload the payload to send
<ide> */
<del> <T> void convertAndSendToUser(String user, String destination, T message) throws MessagingException;
<add> void convertAndSendToUser(String user, String destination, Object payload) throws MessagingException;
<add>
<add> void convertAndSendToUser(String user, String destination, Object payload, Map<String, Object> headers)
<add> throws MessagingException;
<ide>
<ide> /**
<ide> * Send a message to a specific user.
<ide> *
<ide> * @param user the user that should receive the message.
<ide> * @param destination the destination to send the message to.
<del> * @param message the message to send
<add> * @param payload the payload to send
<ide> * @param postProcessor a postProcessor to post-process or modify the created message
<ide> */
<del> <T> void convertAndSendToUser(String user, String destination, T message, MessagePostProcessor postProcessor)
<del> throws MessagingException;
<add> void convertAndSendToUser(String user, String destination, Object payload,
<add> MessagePostProcessor postProcessor) throws MessagingException;
<add>
<add> void convertAndSendToUser(String user, String destination, Object payload, Map<String, Object> headers,
<add> MessagePostProcessor postProcessor) throws MessagingException;
<ide>
<ide> }
<ide><path>spring-messaging/src/main/java/org/springframework/messaging/simp/SimpMessagingTemplate.java
<ide> */
<ide> package org.springframework.messaging.simp;
<ide>
<add>import java.util.Map;
<add>
<ide> import org.springframework.messaging.Message;
<ide> import org.springframework.messaging.MessageChannel;
<ide> import org.springframework.messaging.MessageDeliveryException;
<ide> public long getSendTimeout() {
<ide>
<ide>
<ide> @Override
<del> public <P> void send(Message<P> message) {
<add> public void send(Message<?> message) {
<ide> SimpMessageHeaderAccessor headers = SimpMessageHeaderAccessor.wrap(message);
<ide> String destination = headers.getDestination();
<ide> destination = (destination != null) ? destination : getRequiredDefaultDestination();
<ide> protected void doSend(String destination, Message<?> message) {
<ide>
<ide>
<ide> @Override
<del> public <T> void convertAndSendToUser(String user, String destination, T message) throws MessagingException {
<del> convertAndSendToUser(user, destination, message, null);
<add> public void convertAndSendToUser(String user, String destination, Object payload) throws MessagingException {
<add> MessagePostProcessor postProcessor = null;
<add> this.convertAndSendToUser(user, destination, payload, postProcessor);
<add> }
<add>
<add> @Override
<add> public void convertAndSendToUser(String user, String destination, Object payload,
<add> Map<String, Object> headers) throws MessagingException {
<add>
<add> MessagePostProcessor postProcessor = null;
<add> this.convertAndSendToUser(user, destination, payload, headers, postProcessor);
<add> }
<add>
<add> @Override
<add> public void convertAndSendToUser(String user, String destination, Object payload,
<add> MessagePostProcessor postProcessor) throws MessagingException {
<add>
<add> Map<String, Object> headers = null;
<add> this.convertAndSendToUser(user, destination, payload, headers, postProcessor);
<ide> }
<ide>
<ide> @Override
<del> public <T> void convertAndSendToUser(String user, String destination, T message,
<add> public void convertAndSendToUser(String user, String destination, Object payload, Map<String, Object> headers,
<ide> MessagePostProcessor postProcessor) throws MessagingException {
<ide>
<ide> Assert.notNull(user, "user is required");
<del> convertAndSend(this.userDestinationPrefix + user + destination, message, postProcessor);
<add> super.convertAndSend(this.userDestinationPrefix + user + destination, payload, headers, postProcessor);
<ide> }
<ide>
<ide> }
<ide><path>spring-messaging/src/main/java/org/springframework/messaging/simp/config/WebSocketMessageBrokerConfigurationSupport.java
<ide>
<ide> package org.springframework.messaging.simp.config;
<ide>
<add>import java.util.ArrayList;
<add>import java.util.List;
<add>
<ide> import org.springframework.context.annotation.Bean;
<ide> import org.springframework.messaging.Message;
<ide> import org.springframework.messaging.SubscribableChannel;
<ide> import org.springframework.messaging.simp.handler.SimpleUserQueueSuffixResolver;
<ide> import org.springframework.messaging.simp.handler.UserDestinationMessageHandler;
<ide> import org.springframework.messaging.support.channel.ExecutorSubscribableChannel;
<add>import org.springframework.messaging.support.converter.ByteArrayMessageConverter;
<add>import org.springframework.messaging.support.converter.CompositeMessageConverter;
<add>import org.springframework.messaging.support.converter.DefaultContentTypeResolver;
<ide> import org.springframework.messaging.support.converter.MappingJackson2MessageConverter;
<ide> import org.springframework.messaging.support.converter.MessageConverter;
<add>import org.springframework.messaging.support.converter.StringMessageConverter;
<ide> import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
<ide> import org.springframework.scheduling.concurrent.ThreadPoolTaskScheduler;
<add>import org.springframework.util.ClassUtils;
<add>import org.springframework.util.MimeTypeUtils;
<ide> import org.springframework.web.servlet.HandlerMapping;
<add>import org.springframework.web.servlet.config.annotation.WebMvcConfigurationSupport;
<ide> import org.springframework.web.servlet.handler.AbstractHandlerMapping;
<ide> import org.springframework.web.socket.WebSocketHandler;
<ide> import org.springframework.web.socket.server.config.SockJsServiceRegistration;
<ide> */
<ide> public abstract class WebSocketMessageBrokerConfigurationSupport {
<ide>
<add> private static final boolean jackson2Present =
<add> ClassUtils.isPresent("com.fasterxml.jackson.databind.ObjectMapper", WebMvcConfigurationSupport.class.getClassLoader()) &&
<add> ClassUtils.isPresent("com.fasterxml.jackson.core.JsonGenerator", WebMvcConfigurationSupport.class.getClassLoader());
<add>
<ide> private MessageBrokerConfigurer messageBrokerConfigurer;
<ide>
<ide>
<ide> public AnnotationMethodMessageHandler annotationMethodMessageHandler() {
<ide> AnnotationMethodMessageHandler handler =
<ide> new AnnotationMethodMessageHandler(brokerMessagingTemplate(), webSocketResponseChannel());
<ide> handler.setDestinationPrefixes(getMessageBrokerConfigurer().getAnnotationMethodDestinationPrefixes());
<del> handler.setMessageConverter(brokerMessageConverter());
<add> handler.setMessageConverter(simpMessageConverter());
<ide> webSocketRequestChannel().subscribe(handler);
<ide> return handler;
<ide> }
<ide> public UserDestinationMessageHandler userDestinationMessageHandler() {
<ide> @Bean
<ide> public SimpMessageSendingOperations brokerMessagingTemplate() {
<ide> SimpMessagingTemplate template = new SimpMessagingTemplate(brokerChannel());
<del> template.setMessageConverter(brokerMessageConverter());
<add> template.setMessageConverter(simpMessageConverter());
<ide> return template;
<ide> }
<ide>
<ide> public SubscribableChannel brokerChannel() {
<ide> }
<ide>
<ide> @Bean
<del> public MessageConverter<?> brokerMessageConverter() {
<del> return new MappingJackson2MessageConverter();
<add> public CompositeMessageConverter simpMessageConverter() {
<add> DefaultContentTypeResolver contentTypeResolver = new DefaultContentTypeResolver();
<add> List<MessageConverter> converters = new ArrayList<MessageConverter>();
<add> converters.add(new StringMessageConverter());
<add> converters.add(new ByteArrayMessageConverter());
<add> if (jackson2Present) {
<add> converters.add(new MappingJackson2MessageConverter());
<add> contentTypeResolver.setDefaultMimeType(MimeTypeUtils.APPLICATION_JSON);
<add> }
<add> return new CompositeMessageConverter(converters, contentTypeResolver);
<ide> }
<ide>
<ide>
<ide><path>spring-messaging/src/main/java/org/springframework/messaging/simp/handler/AnnotationMethodMessageHandler.java
<ide> public class AnnotationMethodMessageHandler implements MessageHandler, Applicati
<ide>
<ide> private Collection<String> destinationPrefixes = new ArrayList<String>();
<ide>
<del> private MessageConverter<?> messageConverter;
<add> private MessageConverter messageConverter;
<ide>
<ide> private ApplicationContext applicationContext;
<ide>
<ide> public Collection<String> getDestinationPrefixes() {
<ide> return this.destinationPrefixes;
<ide> }
<ide>
<del> public void setMessageConverter(MessageConverter<?> converter) {
<add> public void setMessageConverter(MessageConverter converter) {
<ide> this.messageConverter = converter;
<ide> if (converter != null) {
<ide> ((AbstractMessageSendingTemplate<?>) this.webSocketResponseTemplate).setMessageConverter(converter);
<ide> public void setCustomReturnValueHandlers(List<HandlerMethodReturnValueHandler> c
<ide> this.customReturnValueHandlers = customReturnValueHandlers;
<ide> }
<ide>
<del> public MessageConverter<?> getMessageConverter() {
<add> public MessageConverter getMessageConverter() {
<ide> return this.messageConverter;
<ide> }
<ide>
<ide><path>spring-messaging/src/main/java/org/springframework/messaging/simp/stomp/StompHeaderAccessor.java
<ide> import java.util.Set;
<ide> import java.util.concurrent.atomic.AtomicLong;
<ide>
<del>import org.springframework.http.MediaType;
<ide> import org.springframework.messaging.Message;
<ide> import org.springframework.messaging.simp.SimpMessageHeaderAccessor;
<ide> import org.springframework.messaging.simp.SimpMessageType;
<ide> import org.springframework.util.Assert;
<ide> import org.springframework.util.CollectionUtils;
<add>import org.springframework.util.MimeType;
<add>import org.springframework.util.MimeTypeUtils;
<ide> import org.springframework.util.StringUtils;
<ide>
<ide>
<ide> private void setSimpMessageHeaders(StompCommand command, Map<String, List<String
<ide>
<ide> values = extHeaders.get(StompHeaderAccessor.STOMP_CONTENT_TYPE_HEADER);
<ide> if (!CollectionUtils.isEmpty(values)) {
<del> super.setContentType(MediaType.parseMediaType(values.get(0)));
<add> super.setContentType(MimeTypeUtils.parseMimeType(values.get(0)));
<ide> }
<ide>
<ide> if (StompCommand.SUBSCRIBE.equals(command) || StompCommand.UNSUBSCRIBE.equals(command)) {
<ide> public Map<String, List<String>> toNativeHeaderMap() {
<ide> result.put(STOMP_DESTINATION_HEADER, Arrays.asList(destination));
<ide> }
<ide>
<del> MediaType contentType = getContentType();
<add> MimeType contentType = super.getContentType();
<ide> if (contentType != null) {
<ide> result.put(STOMP_CONTENT_TYPE_HEADER, Arrays.asList(contentType.toString()));
<ide> }
<ide> public long[] getHeartbeat() {
<ide> return new long[] { Long.valueOf(rawValues[0]), Long.valueOf(rawValues[1])};
<ide> }
<ide>
<del> public void setContentType(MediaType mediaType) {
<del> if (mediaType != null) {
<del> super.setContentType(mediaType);
<del> setNativeHeader(STOMP_CONTENT_TYPE_HEADER, mediaType.toString());
<del> }
<del> }
<del>
<del> public MediaType getContentType() {
<del> String value = getFirstNativeHeader(STOMP_CONTENT_TYPE_HEADER);
<del> return (value != null) ? MediaType.parseMediaType(value) : null;
<add> public void setContentType(MimeType contentType) {
<add> super.setContentType(contentType);
<add> setNativeHeader(STOMP_CONTENT_TYPE_HEADER, contentType.toString());
<ide> }
<ide>
<ide> public Integer getContentLength() {
<ide><path>spring-messaging/src/main/java/org/springframework/messaging/support/MessageHeaderAccessor.java
<ide> import org.springframework.messaging.MessageChannel;
<ide> import org.springframework.messaging.MessageHeaders;
<ide> import org.springframework.util.Assert;
<add>import org.springframework.util.MimeType;
<ide> import org.springframework.util.ObjectUtils;
<ide> import org.springframework.util.PatternMatchUtils;
<ide> import org.springframework.util.StringUtils;
<ide> public void setErrorChannelName(String errorChannelName) {
<ide> setHeader(MessageHeaders.ERROR_CHANNEL, errorChannelName);
<ide> }
<ide>
<add> public MimeType getContentType() {
<add> return (MimeType) getHeader(MessageHeaders.CONTENT_TYPE);
<add> }
<add>
<add> public void setContentType(MimeType contentType) {
<add> setHeader(MessageHeaders.CONTENT_TYPE, contentType);
<add> }
<add>
<add>
<ide> @Override
<ide> public String toString() {
<ide> return getClass().getSimpleName() + " [originalHeaders=" + this.originalHeaders
<ide><path>spring-messaging/src/main/java/org/springframework/messaging/support/converter/AbstractMessageConverter.java
<add>/*
<add> * Copyright 2002-2013 the original author or authors.
<add> *
<add> * Licensed under the Apache License, Version 2.0 (the "License");
<add> * you may not use this file except in compliance with the License.
<add> * You may obtain a copy of the License at
<add> *
<add> * http://www.apache.org/licenses/LICENSE-2.0
<add> *
<add> * Unless required by applicable law or agreed to in writing, software
<add> * distributed under the License is distributed on an "AS IS" BASIS,
<add> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
<add> * See the License for the specific language governing permissions and
<add> * limitations under the License.
<add> */
<add>
<add>package org.springframework.messaging.support.converter;
<add>
<add>import java.util.ArrayList;
<add>import java.util.Collection;
<add>import java.util.Collections;
<add>import java.util.List;
<add>
<add>import org.apache.commons.logging.Log;
<add>import org.apache.commons.logging.LogFactory;
<add>import org.springframework.messaging.Message;
<add>import org.springframework.messaging.MessageHeaders;
<add>import org.springframework.messaging.support.MessageBuilder;
<add>import org.springframework.util.Assert;
<add>import org.springframework.util.MimeType;
<add>
<add>
<add>/**
<add> * Abstract base class for {@link MessageConverter} implementations including support for
<add> * common properties and a partial implementation of the conversion methods mainly to
<add> * check if the converter supports the conversion based on the payload class and MIME
<add> * type.
<add> *
<add> * @author Rossen Stoyanchev
<add> * @since 4.0
<add> */
<add>public abstract class AbstractMessageConverter implements MessageConverter {
<add>
<add> protected final Log logger = LogFactory.getLog(getClass());
<add>
<add> private final List<MimeType> supportedMimeTypes;
<add>
<add> private Class<?> serializedPayloadClass = byte[].class;
<add>
<add> private ContentTypeResolver contentTypeResolver;
<add>
<add>
<add> /**
<add> * Construct an {@code AbstractMessageConverter} with one supported MIME type.
<add> * @param supportedMimeType the supported MIME type
<add> */
<add> protected AbstractMessageConverter(MimeType supportedMimeType) {
<add> this.supportedMimeTypes = Collections.<MimeType>singletonList(supportedMimeType);
<add> }
<add>
<add> /**
<add> * Construct an {@code AbstractMessageConverter} with multiple supported MIME type.
<add> * @param supportedMimeTypes the supported MIME types
<add> */
<add> protected AbstractMessageConverter(Collection<MimeType> supportedMimeTypes) {
<add> Assert.notNull(supportedMimeTypes, "'supportedMimeTypes' is required");
<add> this.supportedMimeTypes = new ArrayList<MimeType>(supportedMimeTypes);
<add> }
<add>
<add>
<add> /**
<add> * Return the configured supported MIME types.
<add> */
<add> public List<MimeType> getSupportedMimeTypes() {
<add> return Collections.unmodifiableList(this.supportedMimeTypes);
<add> }
<add>
<add> /**
<add> * Configure the {@link ContentTypeResolver} to use.
<add> * <p>
<add> * The default value is {@code null}. However when {@link CompositeMessageConverter}
<add> * is used it configures all of its delegates with a default resolver.
<add> */
<add> public void setContentTypeResolver(ContentTypeResolver resolver) {
<add> this.contentTypeResolver = resolver;
<add> }
<add>
<add> /**
<add> * Return the default {@link ContentTypeResolver}.
<add> */
<add> public ContentTypeResolver getContentTypeResolver() {
<add> return this.contentTypeResolver;
<add> }
<add>
<add> /**
<add> * Configure the preferred serialization class to use (byte[] or String) when
<add> * converting an Object payload to a {@link Message}.
<add> * <p>
<add> * The default value is byte[].
<add> *
<add> * @param clazz either byte[] or String
<add> */
<add> public void setSerializedPayloadClass(Class<?> clazz) {
<add> Assert.isTrue(byte[].class.equals(clazz) || String.class.equals(clazz),
<add> "Payload class must be byte[] or String: " + clazz);
<add> this.serializedPayloadClass = clazz;
<add> }
<add>
<add> /**
<add> * Return the configured preferred serialization payload class.
<add> */
<add> public Class<?> getSerializedPayloadClass() {
<add> return this.serializedPayloadClass;
<add> }
<add>
<add> /**
<add> * Returns the default content type for the payload. Called when
<add> * {@link #toMessage(Object, MessageHeaders)} is invoked without message headers or
<add> * without a content type header.
<add> * <p>
<add> * By default, this returns the first element of the {@link #getSupportedMimeTypes()
<add> * supportedMimeTypes}, if any. Can be overridden in sub-classes.
<add> *
<add> * @param payload the payload being converted to message
<add> * @return the content type, or {@code null} if not known
<add> */
<add> protected MimeType getDefaultContentType(Object payload) {
<add> List<MimeType> mimeTypes = getSupportedMimeTypes();
<add> return (!mimeTypes.isEmpty() ? mimeTypes.get(0) : null);
<add> }
<add>
<add> /**
<add> * Whether the given class is supported by this converter.
<add> *
<add> * @param clazz the class to test for support
<add> * @return {@code true} if supported; {@code false} otherwise
<add> */
<add> protected abstract boolean supports(Class<?> clazz);
<add>
<add>
<add> @Override
<add> public final Object fromMessage(Message<?> message, Class<?> targetClass) {
<add> if (!canConvertFrom(message, targetClass)) {
<add> return null;
<add> }
<add> return convertFromInternal(message, targetClass);
<add> }
<add>
<add> protected boolean canConvertFrom(Message<?> message, Class<?> targetClass) {
<add> return (supports(targetClass) && supportsMimeType(message.getHeaders()));
<add> }
<add>
<add> /**
<add> * Convert the message payload from serialized form to an Object.
<add> */
<add> public abstract Object convertFromInternal(Message<?> message, Class<?> targetClass);
<add>
<add> @Override
<add> public final Message<?> toMessage(Object payload, MessageHeaders headers) {
<add> if (!canConvertTo(payload, headers)) {
<add> return null;
<add> }
<add> payload = convertToInternal(payload, headers);
<add> MessageBuilder<?> builder = MessageBuilder.withPayload(payload);
<add> if (headers != null) {
<add> builder.copyHeaders(headers);
<add> }
<add> MimeType mimeType = getDefaultContentType(payload);
<add> if (mimeType != null) {
<add> builder.setHeaderIfAbsent(MessageHeaders.CONTENT_TYPE, mimeType);
<add> }
<add> return builder.build();
<add> }
<add>
<add> protected boolean canConvertTo(Object payload, MessageHeaders headers) {
<add> Class<?> clazz = (payload != null) ? payload.getClass() : null;
<add> return (supports(clazz) && supportsMimeType(headers));
<add> }
<add>
<add> /**
<add> * Convert the payload object to serialized form.
<add> */
<add> public abstract Object convertToInternal(Object payload, MessageHeaders headers);
<add>
<add> protected boolean supportsMimeType(MessageHeaders headers) {
<add> MimeType mimeType = getMimeType(headers);
<add> if (mimeType == null) {
<add> return true;
<add> }
<add> if (getSupportedMimeTypes().isEmpty()) {
<add> return true;
<add> }
<add> for (MimeType supported : getSupportedMimeTypes()) {
<add> if (supported.getType().equals(mimeType.getType()) &&
<add> supported.getSubtype().equals(mimeType.getSubtype())) {
<add> return true;
<add> }
<add> }
<add> return false;
<add> }
<add>
<add> protected MimeType getMimeType(MessageHeaders headers) {
<add> return (this.contentTypeResolver != null) ? this.contentTypeResolver.resolve(headers) : null;
<add> }
<add>
<add>}
<ide><path>spring-messaging/src/main/java/org/springframework/messaging/support/converter/ByteArrayMessageConverter.java
<add>/*
<add> * Copyright 2002-2013 the original author or authors.
<add> *
<add> * Licensed under the Apache License, Version 2.0 (the "License");
<add> * you may not use this file except in compliance with the License.
<add> * You may obtain a copy of the License at
<add> *
<add> * http://www.apache.org/licenses/LICENSE-2.0
<add> *
<add> * Unless required by applicable law or agreed to in writing, software
<add> * distributed under the License is distributed on an "AS IS" BASIS,
<add> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
<add> * See the License for the specific language governing permissions and
<add> * limitations under the License.
<add> */
<add>
<add>package org.springframework.messaging.support.converter;
<add>
<add>import org.springframework.messaging.Message;
<add>import org.springframework.messaging.MessageHeaders;
<add>import org.springframework.util.MimeTypeUtils;
<add>
<add>
<add>/**
<add> * A {@link MessageConverter} that supports MIME type "application/octet-stream" with the
<add> * payload converted to and from a byte[].
<add> *
<add> * @author Rossen Stoyanchev
<add> * @since 4.0
<add> */
<add>public class ByteArrayMessageConverter extends AbstractMessageConverter {
<add>
<add>
<add> public ByteArrayMessageConverter() {
<add> super(MimeTypeUtils.APPLICATION_OCTET_STREAM);
<add> }
<add>
<add>
<add> @Override
<add> protected boolean supports(Class<?> clazz) {
<add> return byte[].class.equals(clazz);
<add> }
<add>
<add> @Override
<add> public Object convertFromInternal(Message<?> message, Class<?> targetClass) {
<add> return message.getPayload();
<add> }
<add>
<add> @Override
<add> public Object convertToInternal(Object payload, MessageHeaders headers) {
<add> return payload;
<add> }
<add>
<add>}
<ide><path>spring-messaging/src/main/java/org/springframework/messaging/support/converter/CompositeMessageConverter.java
<add>/*
<add> * Copyright 2002-2013 the original author or authors.
<add> *
<add> * Licensed under the Apache License, Version 2.0 (the "License");
<add> * you may not use this file except in compliance with the License.
<add> * You may obtain a copy of the License at
<add> *
<add> * http://www.apache.org/licenses/LICENSE-2.0
<add> *
<add> * Unless required by applicable law or agreed to in writing, software
<add> * distributed under the License is distributed on an "AS IS" BASIS,
<add> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
<add> * See the License for the specific language governing permissions and
<add> * limitations under the License.
<add> */
<add>
<add>package org.springframework.messaging.support.converter;
<add>
<add>import java.util.ArrayList;
<add>import java.util.Collection;
<add>import java.util.List;
<add>
<add>import org.springframework.messaging.Message;
<add>import org.springframework.messaging.MessageHeaders;
<add>import org.springframework.util.Assert;
<add>
<add>
<add>/**
<add> * A {@link MessageConverter} that delegates to a list of other converters to invoke until
<add> * one of them returns a non-null value.
<add> *
<add> * @author Rossen Stoyanchev
<add> * @since 4.0
<add> */
<add>public class CompositeMessageConverter implements MessageConverter {
<add>
<add> private final List<MessageConverter> converters;
<add>
<add> private ContentTypeResolver contentTypeResolver;
<add>
<add>
<add> /**
<add> * Create a new instance with the given {@link MessageConverter}s in turn configuring
<add> * each with a {@link DefaultContentTypeResolver}.
<add> */
<add> public CompositeMessageConverter(Collection<MessageConverter> converters) {
<add> this(new ArrayList<MessageConverter>(converters), new DefaultContentTypeResolver());
<add> }
<add>
<add> /**
<add> * Create an instance with the given {@link MessageConverter}s and configure all with
<add> * the given {@link ContentTypeResolver}.
<add> */
<add> public CompositeMessageConverter(Collection<MessageConverter> converters, ContentTypeResolver resolver) {
<add> Assert.notEmpty(converters, "converters is required");
<add> Assert.notNull(resolver, "contentTypeResolver is required");
<add> this.converters = new ArrayList<MessageConverter>(converters);
<add> this.contentTypeResolver = resolver;
<add> applyContentTypeResolver(converters, resolver);
<add> }
<add>
<add>
<add> private static void applyContentTypeResolver(Collection<MessageConverter> converters,
<add> ContentTypeResolver resolver) {
<add>
<add> for (MessageConverter converter : converters) {
<add> if (converter instanceof AbstractMessageConverter) {
<add> ((AbstractMessageConverter) converter).setContentTypeResolver(resolver);
<add> }
<add> }
<add> }
<add>
<add>
<add> public void setContentTypeResolver(ContentTypeResolver resolver) {
<add> this.contentTypeResolver = resolver;
<add> applyContentTypeResolver(getConverters(), resolver);
<add> }
<add>
<add> public ContentTypeResolver getContentTypeResolver() {
<add> return this.contentTypeResolver;
<add> }
<add>
<add> public Collection<MessageConverter> getConverters() {
<add> return this.converters;
<add> }
<add>
<add>
<add> @Override
<add> public Object fromMessage(Message<?> message, Class<?> targetClass) {
<add> for (MessageConverter converter : this.converters) {
<add> Object result = converter.fromMessage(message, targetClass);
<add> if (result != null) {
<add> return result;
<add> }
<add> }
<add> return null;
<add> }
<add>
<add> @Override
<add> public Message<?> toMessage(Object payload, MessageHeaders headers) {
<add> for (MessageConverter converter : this.converters) {
<add> Message<?> result = converter.toMessage(payload, headers);
<add> if (result != null) {
<add> return result;
<add> }
<add> }
<add> return null;
<add> }
<add>
<add>}
<add><path>spring-messaging/src/main/java/org/springframework/messaging/support/converter/ContentTypeResolver.java
<del><path>spring-messaging/src/main/java/org/springframework/messaging/support/converter/SimplePayloadMessageConverter.java
<ide> * you may not use this file except in compliance with the License.
<ide> * You may obtain a copy of the License at
<ide> *
<del> * http://www.apache.org/licenses/LICENSE-2.0
<add> * http://www.apache.org/licenses/LICENSE-2.0
<ide> *
<ide> * Unless required by applicable law or agreed to in writing, software
<ide> * distributed under the License is distributed on an "AS IS" BASIS,
<ide> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
<ide> * See the License for the specific language governing permissions and
<ide> * limitations under the License.
<ide> */
<del>package org.springframework.messaging.support.converter;
<ide>
<del>import java.lang.reflect.Type;
<add>package org.springframework.messaging.support.converter;
<ide>
<del>import org.springframework.messaging.Message;
<del>import org.springframework.messaging.support.MessageBuilder;
<add>import org.springframework.messaging.MessageHeaders;
<add>import org.springframework.util.MimeType;
<ide>
<ide>
<ide> /**
<del> * @author Mark Fisher
<add> * Resolve the content type for a message given a set of {@link MessageHeaders}.
<add> *
<add> * @author Rossen Stoyanchev
<ide> * @since 4.0
<ide> */
<del>public class SimplePayloadMessageConverter implements MessageConverter<Object> {
<del>
<del> @Override
<del> public Message<Object> toMessage(Object object) {
<del> return MessageBuilder.withPayload(object).build();
<del> }
<add>public interface ContentTypeResolver {
<ide>
<del> @Override
<del> public Object fromMessage(Message<?> message, Type targetClass) {
<del> return message.getPayload();
<del> }
<add> MimeType resolve(MessageHeaders headers);
<ide>
<ide> }
<ide><path>spring-messaging/src/main/java/org/springframework/messaging/support/converter/DefaultContentTypeResolver.java
<add>/*
<add> * Copyright 2002-2013 the original author or authors.
<add> *
<add> * Licensed under the Apache License, Version 2.0 (the "License");
<add> * you may not use this file except in compliance with the License.
<add> * You may obtain a copy of the License at
<add> *
<add> * http://www.apache.org/licenses/LICENSE-2.0
<add> *
<add> * Unless required by applicable law or agreed to in writing, software
<add> * distributed under the License is distributed on an "AS IS" BASIS,
<add> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
<add> * See the License for the specific language governing permissions and
<add> * limitations under the License.
<add> */
<add>
<add>package org.springframework.messaging.support.converter;
<add>
<add>import org.springframework.messaging.MessageHeaders;
<add>import org.springframework.util.MimeType;
<add>
<add>
<add>/**
<add> * A default {@link ContentTypeResolver} that checks the
<add> * {@link MessageHeaders#CONTENT_TYPE} header or falls back to a default, if a default is
<add> * configured.
<add> *
<add> * @author Rossen Stoyanchev
<add> * @since 4.0
<add> */
<add>public class DefaultContentTypeResolver implements ContentTypeResolver {
<add>
<add> private MimeType defaultMimeType;
<add>
<add>
<add> /**
<add> * Set the default MIME type to use, if the message headers don't have one.
<add> * By default this property is set to {@code null}.
<add> */
<add> public void setDefaultMimeType(MimeType defaultMimeType) {
<add> this.defaultMimeType = defaultMimeType;
<add> }
<add>
<add> /**
<add> * Return the default MIME type to use.
<add> */
<add> public MimeType getDefaultMimeType() {
<add> return this.defaultMimeType;
<add> }
<add>
<add> @Override
<add> public MimeType resolve(MessageHeaders headers) {
<add> MimeType mimeType = null;
<add> if (headers != null) {
<add> mimeType = headers.get(MessageHeaders.CONTENT_TYPE, MimeType.class);
<add> }
<add> return (mimeType != null) ? mimeType : this.defaultMimeType;
<add> }
<add>
<add>}
<ide><path>spring-messaging/src/main/java/org/springframework/messaging/support/converter/MappingJackson2MessageConverter.java
<ide> import java.io.IOException;
<ide> import java.io.StringWriter;
<ide> import java.io.Writer;
<del>import java.lang.reflect.Type;
<del>import java.util.Map;
<add>import java.nio.charset.Charset;
<ide>
<ide> import org.springframework.messaging.Message;
<del>import org.springframework.messaging.support.MessageBuilder;
<del>import org.springframework.util.Assert;
<add>import org.springframework.messaging.MessageHeaders;
<add>import org.springframework.util.MimeType;
<ide>
<add>import com.fasterxml.jackson.core.JsonEncoding;
<add>import com.fasterxml.jackson.core.JsonGenerator;
<add>import com.fasterxml.jackson.core.util.DefaultPrettyPrinter;
<ide> import com.fasterxml.jackson.databind.JavaType;
<ide> import com.fasterxml.jackson.databind.ObjectMapper;
<add>import com.fasterxml.jackson.databind.SerializationFeature;
<ide>
<ide>
<ide> /**
<add> * A Jackson 2 based {@link MessageConverter} implementation.
<add> *
<ide> * @author Rossen Stoyanchev
<del> * @sicne 4.0
<add> * @since 4.0
<ide> */
<del>public class MappingJackson2MessageConverter implements MessageConverter<Object> {
<add>public class MappingJackson2MessageConverter extends AbstractMessageConverter {
<ide>
<ide> private ObjectMapper objectMapper = new ObjectMapper();
<ide>
<del> private Type defaultObjectType = Map.class;
<add> private Boolean prettyPrint;
<add>
<ide>
<del> private Class<?> defaultMessagePayloadClass = byte[].class;
<add> public MappingJackson2MessageConverter() {
<add> super(new MimeType("application", "json", Charset.forName("UTF-8")));
<add> }
<ide>
<ide>
<ide> /**
<del> * Set the default target Object class to convert to in
<del> * {@link #fromMessage(Message, Class)}.
<add> * Whether to use the {@link DefaultPrettyPrinter} when writing JSON.
<add> * This is a shortcut for setting up an {@code ObjectMapper} as follows:
<add> * <pre class="code">
<add> * ObjectMapper mapper = new ObjectMapper();
<add> * mapper.configure(SerializationFeature.INDENT_OUTPUT, true);
<add> * converter.setObjectMapper(mapper);
<add> * </pre>
<ide> */
<del> public void setDefaultObjectClass(Type defaultObjectType) {
<del> Assert.notNull(defaultObjectType, "defaultObjectType is required");
<del> this.defaultObjectType = defaultObjectType;
<add> public void setPrettyPrint(boolean prettyPrint) {
<add> this.prettyPrint = prettyPrint;
<add> configurePrettyPrint();
<ide> }
<ide>
<del> /**
<del> * Set the type of Message payload to convert to in {@link #toMessage(Object)}.
<del> * @param payloadClass either byte[] or String
<del> */
<del> public void setDefaultTargetPayloadClass(Class<?> payloadClass) {
<del> Assert.isTrue(byte[].class.equals(payloadClass) || String.class.equals(payloadClass),
<del> "Payload class must be byte[] or String: " + payloadClass);
<del> this.defaultMessagePayloadClass = payloadClass;
<add> private void configurePrettyPrint() {
<add> if (this.prettyPrint != null) {
<add> this.objectMapper.configure(SerializationFeature.INDENT_OUTPUT, this.prettyPrint);
<add> }
<ide> }
<ide>
<ide> @Override
<del> public Object fromMessage(Message<?> message, Type objectType) {
<add> protected boolean canConvertFrom(Message<?> message, Class<?> targetClass) {
<add> if (targetClass == null) {
<add> return false;
<add> }
<add> JavaType type = this.objectMapper.constructType(targetClass);
<add> return (this.objectMapper.canDeserialize(type) && supportsMimeType(message.getHeaders()));
<add> }
<ide>
<del> JavaType javaType = (objectType != null) ?
<del> this.objectMapper.constructType(objectType) :
<del> this.objectMapper.constructType(this.defaultObjectType);
<add> @Override
<add> protected boolean canConvertTo(Object payload, MessageHeaders headers) {
<add> return (this.objectMapper.canSerialize(payload.getClass()) && supportsMimeType(headers));
<add> }
<ide>
<add> @Override
<add> protected boolean supports(Class<?> clazz) {
<add> // should not be called, since we override canConvertFrom/canConvertTo instead
<add> throw new UnsupportedOperationException();
<add> }
<add>
<add> @Override
<add> public Object convertFromInternal(Message<?> message, Class<?> targetClass) {
<add> JavaType javaType = this.objectMapper.constructType(targetClass);
<ide> Object payload = message.getPayload();
<ide> try {
<ide> if (payload instanceof byte[]) {
<ide> return this.objectMapper.readValue((byte[]) payload, javaType);
<ide> }
<del> else if (payload instanceof String) {
<del> return this.objectMapper.readValue((String) payload, javaType);
<del> }
<ide> else {
<del> throw new IllegalArgumentException("Unexpected message payload type: " + payload);
<add> return this.objectMapper.readValue((String) payload, javaType);
<ide> }
<ide> }
<ide> catch (IOException ex) {
<ide> throw new MessageConversionException(message, "Could not read JSON: " + ex.getMessage(), ex);
<ide> }
<ide> }
<ide>
<del> @SuppressWarnings("unchecked")
<ide> @Override
<del> public <P> Message<P> toMessage(Object object) {
<del> P payload;
<add> public Object convertToInternal(Object payload, MessageHeaders headers) {
<ide> try {
<del> if (byte[].class.equals(this.defaultMessagePayloadClass)) {
<add> if (byte[].class.equals(getSerializedPayloadClass())) {
<ide> ByteArrayOutputStream out = new ByteArrayOutputStream();
<del> this.objectMapper.writeValue(out, object);
<del> payload = (P) out.toByteArray();
<del> }
<del> else if (String.class.equals(this.defaultMessagePayloadClass)) {
<del> Writer writer = new StringWriter();
<del> this.objectMapper.writeValue(writer, object);
<del> payload = (P) writer.toString();
<add> JsonEncoding encoding = getJsonEncoding(getMimeType(headers));
<add>
<add> // The following has been deprecated as late as Jackson 2.2 (April 2013);
<add> // preserved for the time being, for Jackson 2.0/2.1 compatibility.
<add> @SuppressWarnings("deprecation")
<add> JsonGenerator generator = this.objectMapper.getJsonFactory().createJsonGenerator(out, encoding);
<add>
<add> // A workaround for JsonGenerators not applying serialization features
<add> // https://github.com/FasterXML/jackson-databind/issues/12
<add> if (this.objectMapper.isEnabled(SerializationFeature.INDENT_OUTPUT)) {
<add> generator.useDefaultPrettyPrinter();
<add> }
<add>
<add> this.objectMapper.writeValue(generator, payload);
<add> payload = out.toByteArray();
<ide> }
<ide> else {
<del> // Should never happen..
<del> throw new IllegalStateException("Unexpected payload class: " + defaultMessagePayloadClass);
<add> Writer writer = new StringWriter();
<add> this.objectMapper.writeValue(writer, payload);
<add> payload = writer.toString();
<ide> }
<ide> }
<ide> catch (IOException ex) {
<ide> throw new MessageConversionException("Could not write JSON: " + ex.getMessage(), ex);
<ide> }
<del> return MessageBuilder.withPayload(payload).build();
<add> return payload;
<add> }
<add>
<add> /**
<add> * Determine the JSON encoding to use for the given content type.
<add> *
<add> * @param contentType the MIME type from the MessageHeaders, if any
<add> * @return the JSON encoding to use (never {@code null})
<add> */
<add> protected JsonEncoding getJsonEncoding(MimeType contentType) {
<add> if ((contentType != null) && (contentType.getCharSet() != null)) {
<add> Charset charset = contentType.getCharSet();
<add> for (JsonEncoding encoding : JsonEncoding.values()) {
<add> if (charset.name().equals(encoding.getJavaName())) {
<add> return encoding;
<add> }
<add> }
<add> }
<add> return JsonEncoding.UTF8;
<ide> }
<ide>
<ide> }
<ide><path>spring-messaging/src/main/java/org/springframework/messaging/support/converter/MessageConverter.java
<ide>
<ide> package org.springframework.messaging.support.converter;
<ide>
<del>import java.lang.reflect.Type;
<del>
<ide> import org.springframework.messaging.Message;
<add>import org.springframework.messaging.MessageHeaders;
<ide>
<ide>
<ide> /**
<add> * A converter to turn the payload of a {@link Message} from serialized form to a typed
<add> * Object and vice versa. The {@link MessageHeaders#CONTENT_TYPE} message header may be
<add> * used to specify the media type of the message content.
<add> *
<ide> * @author Mark Fisher
<add> * @author Rossen Stoyanchev
<ide> * @since 4.0
<ide> */
<del>public interface MessageConverter<T> {
<add>public interface MessageConverter {
<ide>
<del> <P> Message<P> toMessage(T object);
<add> /**
<add> * Convert the payload of a {@link Message} from serialized form to a typed Object of
<add> * the specified target class. The {@link MessageHeaders#CONTENT_TYPE} header should
<add> * indicate the MIME type to convert from.
<add> * <p>
<add> * If the converter does not support the specified media type or cannot perform the
<add> * conversion, it should return {@code null}.
<add> *
<add> * @param message the input message
<add> * @param targetClass the target class for the conversion
<add> *
<add> * @return the result of the conversion or {@code null} if the converter cannot
<add> * perform the conversion
<add> */
<add> Object fromMessage(Message<?> message, Class<?> targetClass);
<ide>
<del> T fromMessage(Message<?> message, Type targetClass);
<add> /**
<add> * Create a {@link Message} whose payload is the result of converting the given
<add> * payload Object to serialized form. The optional {@link MessageHeaders} parameter
<add> * may contain a {@link MessageHeaders#CONTENT_TYPE} header to specify the target
<add> * media type for the conversion and it may contain additional headers to be added to
<add> * the message.
<add> * <p>
<add> * If the converter does not support the specified media type or cannot perform the
<add> * conversion, it should return {@code null}.
<add> *
<add> * @param payload the Object to convert
<add>	 * @param headers optional headers for the message, may be {@code null}
<add>	 *
<add>	 * @return the new message or {@code null} if the converter does not support the
<add>	 * Object type or the target media type
<add>	 */
<add>	Message<?> toMessage(Object payload, MessageHeaders headers);
<ide>
<ide> }
<ide><path>spring-messaging/src/main/java/org/springframework/messaging/support/converter/StringMessageConverter.java
<add>/*
<add> * Copyright 2002-2013 the original author or authors.
<add> *
<add> * Licensed under the Apache License, Version 2.0 (the "License");
<add> * you may not use this file except in compliance with the License.
<add> * You may obtain a copy of the License at
<add> *
<add> * http://www.apache.org/licenses/LICENSE-2.0
<add> *
<add> * Unless required by applicable law or agreed to in writing, software
<add> * distributed under the License is distributed on an "AS IS" BASIS,
<add> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
<add> * See the License for the specific language governing permissions and
<add> * limitations under the License.
<add> */
<add>
<add>package org.springframework.messaging.support.converter;
<add>
<add>import java.nio.charset.Charset;
<add>
<add>import org.springframework.messaging.Message;
<add>import org.springframework.messaging.MessageHeaders;
<add>import org.springframework.util.MimeType;
<add>
<add>
<add>/**
<add> * A {@link MessageConverter} that supports MIME type "text/plain" with the
<add> * payload converted to and from a String.
<add> *
<add> * @author Rossen Stoyanchev
<add> * @since 4.0
<add> */
<add>public class StringMessageConverter extends AbstractMessageConverter {
<add>
<add> private final Charset defaultCharset;
<add>
<add>
<add> public StringMessageConverter() {
<add> this(Charset.forName("UTF-8"));
<add> }
<add>
<add> public StringMessageConverter(Charset defaultCharset) {
<add> super(new MimeType("text", "plain", defaultCharset));
<add> this.defaultCharset = defaultCharset;
<add> }
<add>
<add>
<add> @Override
<add> protected boolean supports(Class<?> clazz) {
<add> return String.class.equals(clazz);
<add> }
<add>
<add> @Override
<add> public Object convertFromInternal(Message<?> message, Class<?> targetClass) {
<add> Charset charset = getContentTypeCharset(getMimeType(message.getHeaders()));
<add> Object payload = message.getPayload();
<add> return (payload instanceof String) ? payload : new String((byte[]) payload, charset);
<add> }
<add>
<add> @Override
<add> public Object convertToInternal(Object payload, MessageHeaders headers) {
<add> if (byte[].class.equals(getSerializedPayloadClass())) {
<add> Charset charset = getContentTypeCharset(getMimeType(headers));
<add> payload = ((String) payload).getBytes(charset);
<add> }
<add> return payload;
<add> }
<add>
<add> private Charset getContentTypeCharset(MimeType mimeType) {
<add> if (mimeType != null && mimeType.getCharSet() != null) {
<add> return mimeType.getCharSet();
<add> }
<add> else {
<add> return this.defaultCharset;
<add> }
<add> }
<add>
<add>}
<ide><path>spring-messaging/src/test/java/org/springframework/messaging/simp/annotation/support/SendToMethodReturnValueHandlerTests.java
<ide> public void setup() throws Exception {
<ide>
<ide> MockitoAnnotations.initMocks(this);
<ide>
<del> Message<String> message = MessageBuilder.withPayload(payloadContent).build();
<del> when(this.messageConverter.toMessage(payloadContent)).thenReturn(message);
<add> Message message = MessageBuilder.withPayload(payloadContent).build();
<add> when(this.messageConverter.toMessage(payloadContent, null)).thenReturn(message);
<ide>
<ide> SimpMessagingTemplate messagingTemplate = new SimpMessagingTemplate(this.messageChannel);
<del> messagingTemplate.setConverter(this.messageConverter);
<add> messagingTemplate.setMessageConverter(this.messageConverter);
<ide>
<ide> this.handler = new SendToMethodReturnValueHandler(messagingTemplate, true);
<ide> this.handlerAnnotationNotRequired = new SendToMethodReturnValueHandler(messagingTemplate, false);
<ide><path>spring-messaging/src/test/java/org/springframework/messaging/simp/annotation/support/SubscriptionMethodReturnValueHandlerTests.java
<ide> public void setup() throws Exception {
<ide>
<ide> MockitoAnnotations.initMocks(this);
<ide>
<del> Message<String> message = MessageBuilder.withPayload(payloadContent).build();
<del> when(this.messageConverter.toMessage(payloadContent)).thenReturn(message);
<add> Message message = MessageBuilder.withPayload(payloadContent).build();
<add> when(this.messageConverter.toMessage(payloadContent, null)).thenReturn(message);
<ide>
<ide> SimpMessagingTemplate messagingTemplate = new SimpMessagingTemplate(this.messageChannel);
<del> messagingTemplate.setConverter(this.messageConverter);
<add> messagingTemplate.setMessageConverter(this.messageConverter);
<ide>
<ide> this.handler = new SubscriptionMethodReturnValueHandler(messagingTemplate);
<ide>
<ide><path>spring-messaging/src/test/java/org/springframework/messaging/simp/config/WebSocketMessageBrokerConfigurationSupportTests.java
<ide> import org.springframework.messaging.simp.stomp.StompHeaderAccessor;
<ide> import org.springframework.messaging.simp.stomp.StompTextMessageBuilder;
<ide> import org.springframework.messaging.support.MessageBuilder;
<add>import org.springframework.messaging.support.converter.CompositeMessageConverter;
<add>import org.springframework.messaging.support.converter.DefaultContentTypeResolver;
<ide> import org.springframework.stereotype.Controller;
<add>import org.springframework.util.MimeTypeUtils;
<ide> import org.springframework.web.servlet.HandlerMapping;
<ide> import org.springframework.web.servlet.handler.SimpleUrlHandlerMapping;
<ide> import org.springframework.web.socket.TextMessage;
<ide> public void brokerChannelUsedByUserDestinationMessageHandler() {
<ide> assertEquals("/foos1", headers.getDestination());
<ide> }
<ide>
<add> @Test
<add> public void messageConverter() {
<add> CompositeMessageConverter messageConverter = this.cxtStompBroker.getBean(
<add> "simpMessageConverter", CompositeMessageConverter.class);
<add>
<add> DefaultContentTypeResolver resolver = (DefaultContentTypeResolver) messageConverter.getContentTypeResolver();
<add> assertEquals(MimeTypeUtils.APPLICATION_JSON, resolver.getDefaultMimeType());
<add> }
<add>
<ide>
<ide> @Controller
<ide> static class TestController {
<ide><path>spring-messaging/src/test/java/org/springframework/messaging/simp/stomp/StompHeaderAccessorTests.java
<ide>
<ide> import org.junit.Test;
<ide> import org.springframework.http.MediaType;
<add>import org.springframework.messaging.Message;
<add>import org.springframework.messaging.MessageHeaders;
<ide> import org.springframework.messaging.simp.SimpMessageType;
<add>import org.springframework.messaging.support.MessageBuilder;
<ide> import org.springframework.util.LinkedMultiValueMap;
<add>import org.springframework.util.MimeTypeUtils;
<ide> import org.springframework.util.MultiValueMap;
<ide>
<ide> import static org.junit.Assert.*;
<ide> public void toNativeHeadersMessageFrame() {
<ide> assertNotNull("message-id was not created", actual.get(StompHeaderAccessor.STOMP_MESSAGE_ID_HEADER).get(0));
<ide> }
<ide>
<add> @Test
<add> public void toNativeHeadersContentType() {
<add>
<add> Message<byte[]> message = MessageBuilder.withPayload(new byte[0])
<add> .setHeader(MessageHeaders.CONTENT_TYPE, MimeTypeUtils.APPLICATION_ATOM_XML).build();
<add>
<add> StompHeaderAccessor headers = StompHeaderAccessor.wrap(message);
<add> Map<String, List<String>> map = headers.toNativeHeaderMap();
<add>
<add> assertEquals("application/atom+xml", map.get(StompHeaderAccessor.STOMP_CONTENT_TYPE_HEADER).get(0));
<add> }
<add>
<ide> @Test
<ide> public void modifyCustomNativeHeader() {
<ide>
<ide><path>spring-messaging/src/test/java/org/springframework/messaging/support/converter/AbstractMessageConverterTests.java
<add>/*
<add> * Copyright 2002-2013 the original author or authors.
<add> *
<add> * Licensed under the Apache License, Version 2.0 (the "License");
<add> * you may not use this file except in compliance with the License.
<add> * You may obtain a copy of the License at
<add> *
<add> * http://www.apache.org/licenses/LICENSE-2.0
<add> *
<add> * Unless required by applicable law or agreed to in writing, software
<add> * distributed under the License is distributed on an "AS IS" BASIS,
<add> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
<add> * See the License for the specific language governing permissions and
<add> * limitations under the License.
<add> */
<add>
<add>package org.springframework.messaging.support.converter;
<add>
<add>import java.util.Collection;
<add>import java.util.Collections;
<add>import java.util.HashMap;
<add>import java.util.Map;
<add>
<add>import org.junit.Before;
<add>import org.junit.Test;
<add>import org.springframework.messaging.Message;
<add>import org.springframework.messaging.MessageHeaders;
<add>import org.springframework.messaging.support.MessageBuilder;
<add>import org.springframework.util.MimeType;
<add>import org.springframework.util.MimeTypeUtils;
<add>
<add>import static org.junit.Assert.*;
<add>
<add>
<add>/**
<add> * Test fixture for {@link AbstractMessageConverter}.
<add> *
<add> * @author Rossen Stoyanchev
<add> */
<add>public class AbstractMessageConverterTests {
<add>
<add> private TestMessageConverter converter;
<add>
<add>
<add> @Before
<add> public void setup() {
<add> this.converter = new TestMessageConverter();
<add> this.converter.setContentTypeResolver(new DefaultContentTypeResolver());
<add> }
<add>
<add> @Test
<add> public void supportsTargetClass() {
<add> Message<String> message = MessageBuilder.withPayload("ABC").build();
<add>
<add> assertEquals("success-from", this.converter.fromMessage(message, String.class));
<add> assertNull(this.converter.fromMessage(message, Integer.class));
<add> }
<add>
<add> @Test
<add> public void supportsMimeType() {
<add> Message<String> message = MessageBuilder.withPayload(
<add> "ABC").setHeader(MessageHeaders.CONTENT_TYPE, MimeTypeUtils.TEXT_PLAIN).build();
<add>
<add> assertEquals("success-from", this.converter.fromMessage(message, String.class));
<add> }
<add>
<add> @Test
<add> public void supportsMimeTypeNotSupported() {
<add> Message<String> message = MessageBuilder.withPayload(
<add> "ABC").setHeader(MessageHeaders.CONTENT_TYPE, MimeTypeUtils.APPLICATION_JSON).build();
<add>
<add> assertNull(this.converter.fromMessage(message, String.class));
<add> }
<add>
<add> @Test
<add> public void supportsMimeTypeNotSpecified() {
<add> Message<String> message = MessageBuilder.withPayload("ABC").build();
<add> assertEquals("success-from", this.converter.fromMessage(message, String.class));
<add> }
<add>
<add> @Test
<add> public void supportsMimeTypeNoneConfigured() {
<add>
<add> Message<String> message = MessageBuilder.withPayload(
<add> "ABC").setHeader(MessageHeaders.CONTENT_TYPE, MimeTypeUtils.APPLICATION_JSON).build();
<add>
<add> this.converter = new TestMessageConverter(Collections.<MimeType>emptyList());
<add> this.converter.setContentTypeResolver(new DefaultContentTypeResolver());
<add>
<add> assertEquals("success-from", this.converter.fromMessage(message, String.class));
<add> }
<add>
<add> @Test
<add> public void toMessageHeadersCopied() {
<add> Map<String, Object> map = new HashMap<String, Object>();
<add> map.put("foo", "bar");
<add>		MessageHeaders headers = new MessageHeaders(map);
<add> Message<?> message = this.converter.toMessage("ABC", headers);
<add>
<add> assertEquals("bar", message.getHeaders().get("foo"));
<add> }
<add>
<add> @Test
<add> public void toMessageContentTypeHeader() {
<add> Message<?> message = this.converter.toMessage("ABC", null);
<add> assertEquals(MimeTypeUtils.TEXT_PLAIN, message.getHeaders().get(MessageHeaders.CONTENT_TYPE));
<add> }
<add>
<add>
<add> private static class TestMessageConverter extends AbstractMessageConverter {
<add>
<add> public TestMessageConverter() {
<add> super(MimeTypeUtils.TEXT_PLAIN);
<add> }
<add>
<add> public TestMessageConverter(Collection<MimeType> supportedMimeTypes) {
<add> super(supportedMimeTypes);
<add> }
<add>
<add> @Override
<add> protected boolean supports(Class<?> clazz) {
<add> return String.class.equals(clazz);
<add> }
<add>
<add> @Override
<add> public Object convertFromInternal(Message<?> message, Class<?> targetClass) {
<add> return "success-from";
<add> }
<add>
<add> @Override
<add> public Object convertToInternal(Object payload, MessageHeaders headers) {
<add> return "success-to";
<add> }
<add> }
<add>
<add>}
<ide><path>spring-messaging/src/test/java/org/springframework/messaging/support/converter/DefaultContentTypeResolverTests.java
<add>/*
<add> * Copyright 2002-2013 the original author or authors.
<add> *
<add> * Licensed under the Apache License, Version 2.0 (the "License");
<add> * you may not use this file except in compliance with the License.
<add> * You may obtain a copy of the License at
<add> *
<add> * http://www.apache.org/licenses/LICENSE-2.0
<add> *
<add> * Unless required by applicable law or agreed to in writing, software
<add> * distributed under the License is distributed on an "AS IS" BASIS,
<add> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
<add> * See the License for the specific language governing permissions and
<add> * limitations under the License.
<add> */
<add>
<add>package org.springframework.messaging.support.converter;
<add>
<add>import java.util.Collections;
<add>import java.util.HashMap;
<add>import java.util.Map;
<add>
<add>import org.junit.Before;
<add>import org.junit.Test;
<add>import org.springframework.messaging.MessageHeaders;
<add>import org.springframework.util.MimeTypeUtils;
<add>
<add>import static org.junit.Assert.*;
<add>
<add>
<add>/**
<add> * Test fixture for {@link DefaultContentTypeResolver}.
<add> *
<add> * @author Rossen Stoyanchev
<add> */
<add>public class DefaultContentTypeResolverTests {
<add>
<add> private DefaultContentTypeResolver resolver;
<add>
<add>
<add> @Before
<add> public void setup() {
<add> this.resolver = new DefaultContentTypeResolver();
<add> }
<add>
<add> @Test
<add> public void resolve() {
<add> Map<String, Object> map = new HashMap<String, Object>();
<add> map.put(MessageHeaders.CONTENT_TYPE, MimeTypeUtils.APPLICATION_JSON);
<add> MessageHeaders headers = new MessageHeaders(map);
<add>
<add> assertEquals(MimeTypeUtils.APPLICATION_JSON, this.resolver.resolve(headers));
<add> }
<add>
<add> @Test
<add> public void resolveNoContentTypeHeader() {
<add> MessageHeaders headers = new MessageHeaders(Collections.<String, Object>emptyMap());
<add>
<add> assertNull(this.resolver.resolve(headers));
<add> }
<add>
<add> @Test
<add> public void resolveFromDefaultMimeType() {
<add> this.resolver.setDefaultMimeType(MimeTypeUtils.APPLICATION_JSON);
<add> MessageHeaders headers = new MessageHeaders(Collections.<String, Object>emptyMap());
<add>
<add> assertEquals(MimeTypeUtils.APPLICATION_JSON, this.resolver.resolve(headers));
<add> }
<add>
<add>}
<ide><path>spring-messaging/src/test/java/org/springframework/messaging/support/converter/MappingJackson2MessageConverterTests.java
<add>/*
<add> * Copyright 2002-2013 the original author or authors.
<add> *
<add> * Licensed under the Apache License, Version 2.0 (the "License");
<add> * you may not use this file except in compliance with the License.
<add> * You may obtain a copy of the License at
<add> *
<add> * http://www.apache.org/licenses/LICENSE-2.0
<add> *
<add> * Unless required by applicable law or agreed to in writing, software
<add> * distributed under the License is distributed on an "AS IS" BASIS,
<add> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
<add> * See the License for the specific language governing permissions and
<add> * limitations under the License.
<add> */
<add>
<add>package org.springframework.messaging.support.converter;
<add>
<add>import java.io.IOException;
<add>import java.nio.charset.Charset;
<add>import java.util.Arrays;
<add>import java.util.HashMap;
<add>import java.util.Map;
<add>
<add>import org.junit.Before;
<add>import org.junit.Test;
<add>import org.springframework.messaging.Message;
<add>import org.springframework.messaging.MessageHeaders;
<add>import org.springframework.messaging.support.MessageBuilder;
<add>import org.springframework.util.MimeType;
<add>
<add>import static org.junit.Assert.*;
<add>
<add>
<add>/**
<add> * Test fixture for {@link MappingJackson2MessageConverter}.
<add> *
<add> * @author Rossen Stoyanchev
<add> */
<add>public class MappingJackson2MessageConverterTests {
<add>
<add> private static Charset UTF_8 = Charset.forName("UTF-8");
<add>
<add> private MappingJackson2MessageConverter converter;
<add>
<add>
<add> @Before
<add> public void setup() {
<add> this.converter = new MappingJackson2MessageConverter();
<add> this.converter.setContentTypeResolver(new DefaultContentTypeResolver());
<add> }
<add>
<add> @Test
<add> public void fromMessage() throws Exception {
<add> String payload = "{\"bytes\":\"AQI=\",\"array\":[\"Foo\",\"Bar\"],"
<add> + "\"number\":42,\"string\":\"Foo\",\"bool\":true,\"fraction\":42.0}";
<add> Message<?> message = MessageBuilder.withPayload(payload.getBytes(UTF_8)).build();
<add> MyBean actual = (MyBean) this.converter.fromMessage(message, MyBean.class);
<add>
<add> assertEquals("Foo", actual.getString());
<add> assertEquals(42, actual.getNumber());
<add> assertEquals(42F, actual.getFraction(), 0F);
<add> assertArrayEquals(new String[]{"Foo", "Bar"}, actual.getArray());
<add> assertTrue(actual.isBool());
<add> assertArrayEquals(new byte[]{0x1, 0x2}, actual.getBytes());
<add> }
<add>
<add> @Test
<add> public void fromMessageUntyped() throws Exception {
<add> String payload = "{\"bytes\":\"AQI=\",\"array\":[\"Foo\",\"Bar\"],"
<add> + "\"number\":42,\"string\":\"Foo\",\"bool\":true,\"fraction\":42.0}";
<add> Message<?> message = MessageBuilder.withPayload(payload.getBytes(UTF_8)).build();
<add> @SuppressWarnings("unchecked")
<add> HashMap<String, Object> actual = (HashMap<String, Object>) this.converter.fromMessage(message, HashMap.class);
<add>
<add> assertEquals("Foo", actual.get("string"));
<add> assertEquals(42, actual.get("number"));
<add> assertEquals(42D, (Double) actual.get("fraction"), 0D);
<add> assertEquals(Arrays.asList("Foo", "Bar"), actual.get("array"));
<add> assertEquals(Boolean.TRUE, actual.get("bool"));
<add> assertEquals("AQI=", actual.get("bytes"));
<add> }
<add>
<add> @Test(expected = MessageConversionException.class)
<add> public void fromMessageInvalidJson() throws Exception {
<add> String payload = "FooBar";
<add> Message<?> message = MessageBuilder.withPayload(payload.getBytes(UTF_8)).build();
<add> this.converter.fromMessage(message, MyBean.class);
<add> }
<add>
<add> @Test(expected = MessageConversionException.class)
<add> public void fromMessageValidJsonWithUnknownProperty() throws IOException {
<add> String payload = "{\"string\":\"string\",\"unknownProperty\":\"value\"}";
<add> Message<?> message = MessageBuilder.withPayload(payload.getBytes(UTF_8)).build();
<add> this.converter.fromMessage(message, MyBean.class);
<add> }
<add>
<add> @Test
<add> public void toMessage() throws Exception {
<add> MyBean payload = new MyBean();
<add> payload.setString("Foo");
<add> payload.setNumber(42);
<add> payload.setFraction(42F);
<add> payload.setArray(new String[]{"Foo", "Bar"});
<add> payload.setBool(true);
<add> payload.setBytes(new byte[]{0x1, 0x2});
<add>
<add> Message<?> message = this.converter.toMessage(payload, null);
<add> String actual = new String((byte[]) message.getPayload(), UTF_8);
<add>
<add> assertTrue(actual.contains("\"string\":\"Foo\""));
<add> assertTrue(actual.contains("\"number\":42"));
<add> assertTrue(actual.contains("fraction\":42.0"));
<add> assertTrue(actual.contains("\"array\":[\"Foo\",\"Bar\"]"));
<add> assertTrue(actual.contains("\"bool\":true"));
<add> assertTrue(actual.contains("\"bytes\":\"AQI=\""));
<add> assertEquals("Invalid content-type", new MimeType("application", "json", UTF_8),
<add> message.getHeaders().get(MessageHeaders.CONTENT_TYPE, MimeType.class));
<add> }
<add>
<add> @Test
<add> public void toMessageUtf16() {
<add> Charset utf16 = Charset.forName("UTF-16BE");
<add> MimeType contentType = new MimeType("application", "json", utf16);
<add> Map<String, Object> map = new HashMap<>();
<add> map.put(MessageHeaders.CONTENT_TYPE, contentType);
<add> MessageHeaders headers = new MessageHeaders(map);
<add> String payload = "H\u00e9llo W\u00f6rld";
<add> Message<?> message = this.converter.toMessage(payload, headers);
<add>
<add> assertEquals("\"" + payload + "\"", new String((byte[]) message.getPayload(), utf16));
<add> assertEquals(contentType, message.getHeaders().get(MessageHeaders.CONTENT_TYPE));
<add> }
<add>
<add> @Test
<add> public void toMessageUtf16String() {
<add> this.converter.setSerializedPayloadClass(String.class);
<add>
<add> Charset utf16 = Charset.forName("UTF-16BE");
<add> MimeType contentType = new MimeType("application", "json", utf16);
<add> Map<String, Object> map = new HashMap<>();
<add> map.put(MessageHeaders.CONTENT_TYPE, contentType);
<add> MessageHeaders headers = new MessageHeaders(map);
<add> String payload = "H\u00e9llo W\u00f6rld";
<add> Message<?> message = this.converter.toMessage(payload, headers);
<add>
<add> assertEquals("\"" + payload + "\"", message.getPayload());
<add> assertEquals(contentType, message.getHeaders().get(MessageHeaders.CONTENT_TYPE));
<add> }
<add>
<add>
<add> public static class MyBean {
<add>
<add> private String string;
<add>
<add> private int number;
<add>
<add> private float fraction;
<add>
<add> private String[] array;
<add>
<add> private boolean bool;
<add>
<add> private byte[] bytes;
<add>
<add> public byte[] getBytes() {
<add> return bytes;
<add> }
<add>
<add> public void setBytes(byte[] bytes) {
<add> this.bytes = bytes;
<add> }
<add>
<add> public boolean isBool() {
<add> return bool;
<add> }
<add>
<add> public void setBool(boolean bool) {
<add> this.bool = bool;
<add> }
<add>
<add> public String getString() {
<add> return string;
<add> }
<add>
<add> public void setString(String string) {
<add> this.string = string;
<add> }
<add>
<add> public int getNumber() {
<add> return number;
<add> }
<add>
<add> public void setNumber(int number) {
<add> this.number = number;
<add> }
<add>
<add> public float getFraction() {
<add> return fraction;
<add> }
<add>
<add> public void setFraction(float fraction) {
<add> this.fraction = fraction;
<add> }
<add>
<add> public String[] getArray() {
<add> return array;
<add> }
<add>
<add> public void setArray(String[] array) {
<add> this.array = array;
<add> }
<add> }
<add>
<add>}
<ide><path>spring-messaging/src/test/java/org/springframework/messaging/support/converter/StringMessageConverterTests.java
<add>/*
<add> * Copyright 2002-2013 the original author or authors.
<add> *
<add> * Licensed under the Apache License, Version 2.0 (the "License");
<add> * you may not use this file except in compliance with the License.
<add> * You may obtain a copy of the License at
<add> *
<add> * http://www.apache.org/licenses/LICENSE-2.0
<add> *
<add> * Unless required by applicable law or agreed to in writing, software
<add> * distributed under the License is distributed on an "AS IS" BASIS,
<add> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
<add> * See the License for the specific language governing permissions and
<add> * limitations under the License.
<add> */
<add>
<add>package org.springframework.messaging.support.converter;
<add>
<add>import java.nio.charset.Charset;
<add>import java.util.HashMap;
<add>import java.util.Map;
<add>
<add>import org.junit.Before;
<add>import org.junit.Test;
<add>import org.springframework.messaging.Message;
<add>import org.springframework.messaging.MessageHeaders;
<add>import org.springframework.messaging.support.MessageBuilder;
<add>import org.springframework.util.MimeType;
<add>import org.springframework.util.MimeTypeUtils;
<add>
<add>import static org.junit.Assert.*;
<add>
<add>
<add>/**
<add> * Test fixture for {@link StringMessageConverter}.
<add> *
<add> * @author Rossen Stoyanchev
<add> */
<add>public class StringMessageConverterTests {
<add>
<add> private StringMessageConverter converter;
<add>
<add>
<add> @Before
<add> public void setUp() {
<add> this.converter = new StringMessageConverter();
<add> this.converter.setContentTypeResolver(new DefaultContentTypeResolver());
<add> }
<add>
<add>
<add> @Test
<add> public void fromByteArrayMessage() {
<add> Message<byte[]> message = MessageBuilder.withPayload(
<add> "ABC".getBytes()).setHeader(MessageHeaders.CONTENT_TYPE, MimeTypeUtils.TEXT_PLAIN).build();
<add> assertEquals("ABC", this.converter.fromMessage(message, String.class));
<add> }
<add>
<add> @Test
<add> public void fromStringMessage() {
<add> Message<String> message = MessageBuilder.withPayload(
<add> "ABC").setHeader(MessageHeaders.CONTENT_TYPE, MimeTypeUtils.TEXT_PLAIN).build();
<add> assertEquals("ABC", this.converter.fromMessage(message, String.class));
<add> }
<add>
<add> @Test
<add> public void fromMessageNoContentTypeHeader() {
<add> Message<byte[]> message = MessageBuilder.withPayload("ABC".getBytes()).build();
<add> assertEquals("ABC", this.converter.fromMessage(message, String.class));
<add> }
<add>
<add> @Test
<add> public void fromMessageCharset() {
<add> Charset iso88591 = Charset.forName("ISO-8859-1");
<add> String payload = "H\u00e9llo W\u00f6rld";
<add> Message<byte[]> message = MessageBuilder.withPayload(payload.getBytes(iso88591))
<add> .setHeader(MessageHeaders.CONTENT_TYPE, new MimeType("text", "plain", iso88591)).build();
<add>
<add> assertEquals(payload, this.converter.fromMessage(message, String.class));
<add> }
<add>
<add> @Test
<add> public void fromMessageDefaultCharset() {
<add> Charset utf8 = Charset.forName("UTF-8");
<add> String payload = "H\u00e9llo W\u00f6rld";
<add> Message<byte[]> message = MessageBuilder.withPayload(payload.getBytes(utf8)).build();
<add>
<add> assertEquals(payload, this.converter.fromMessage(message, String.class));
<add> }
<add>
<add> @Test
<add> public void fromMessageTargetClassNotSupported() {
<add> Message<byte[]> message = MessageBuilder.withPayload("ABC".getBytes()).build();
<add> assertNull(this.converter.fromMessage(message, Integer.class));
<add> }
<add>
<add> @Test
<add> public void fromMessageByteArray() {
<add> Message<byte[]> message = MessageBuilder.withPayload(
<add> "ABC".getBytes()).setHeader(MessageHeaders.CONTENT_TYPE, MimeTypeUtils.TEXT_PLAIN).build();
<add> assertEquals("ABC", this.converter.fromMessage(message, String.class));
<add> }
<add>
<add> @Test
<add> public void toMessage() {
<add> Map<String, Object> map = new HashMap<String, Object>();
<add> map.put(MessageHeaders.CONTENT_TYPE, MimeTypeUtils.TEXT_PLAIN);
<add> MessageHeaders headers = new MessageHeaders(map);
<add> Message<?> message = this.converter.toMessage("ABC", headers);
<add>
<add> assertEquals("ABC", new String(((byte[]) message.getPayload())));
<add> }
<add>
<add>} | 34 |
Python | Python | use metaclass to decorate errors | cab9209c3dfcd1b75dfe5657f10e52c4d847a3cf | <ide><path>spacy/errors.py
<ide> import warnings
<ide>
<ide>
<del>def add_codes(err_cls):
<del> """Add error codes to string messages via class attribute names."""
<del>
<del> class ErrorsWithCodes(err_cls):
<del> def __getattribute__(self, code):
<del> msg = super(ErrorsWithCodes, self).__getattribute__(code)
<del> if code.startswith("__"): # python system attributes like __class__
<del> return msg
<del> else:
<del> return "[{code}] {msg}".format(code=code, msg=msg)
<del>
<del> return ErrorsWithCodes()
<add>class ErrorsWithCodes(type):
<add> def __getattribute__(self, code):
<add> msg = super().__getattribute__(code)
<add> if code.startswith("__"): # python system attributes like __class__
<add> return msg
<add> else:
<add> return "[{code}] {msg}".format(code=code, msg=msg)
<ide>
<ide>
<ide> def setup_default_warnings():
<ide> def _escape_warning_msg(msg):
<ide>
<ide> # fmt: off
<ide>
<del>@add_codes
<del>class Warnings:
<add>class Warnings(metaclass=ErrorsWithCodes):
<ide> W005 = ("Doc object not parsed. This means displaCy won't be able to "
<ide> "generate a dependency visualization for it. Make sure the Doc "
<ide> "was processed with a model that supports dependency parsing, and "
<ide> class Warnings:
<ide> "lead to errors.")
<ide>
<ide>
<del>@add_codes
<del>class Errors:
<add>class Errors(metaclass=ErrorsWithCodes):
<ide> E001 = ("No component '{name}' found in pipeline. Available names: {opts}")
<ide> E002 = ("Can't find factory for '{name}' for language {lang} ({lang_code}). "
<ide> "This usually happens when spaCy calls `nlp.{method}` with a custom "
<ide><path>spacy/tests/test_errors.py
<ide>
<ide> import pytest
<ide>
<del>from spacy.errors import add_codes
<add>from spacy.errors import ErrorsWithCodes
<ide>
<ide>
<del>@add_codes
<del>class Errors:
<add>class Errors(metaclass=ErrorsWithCodes):
<ide> E001 = "error description"
<ide>
<ide> | 2 |
Text | Text | add changelog entry | d8faa70b648974e2681f945c66af5e326015d8d7 | <ide><path>CHANGELOG.md
<ide> * Fix `null` showing up in a warning instead of the component stack. ([@gaearon](https://github.com/gaearon) in [#10915](https://github.com/facebook/react/pull/10915))
<ide> * Fix IE11 crash in development mode. ([@leidegre](https://github.com/leidegre) in [#10921](https://github.com/facebook/react/pull/10921))
<ide> * Fix `tabIndex` not getting applied to SVG elements. ([@gaearon](http://github.com/gaearon) in [#11034](https://github.com/facebook/react/pull/11034))
<add>* Fix a crash rendering into shadow root. ([@gaearon](https://github.com/gaearon) in [#11037](https://github.com/facebook/react/pull/11037))
<ide> * Suppress the new unknown tag warning for `<dialog>` element. ([@gaearon](http://github.com/gaearon) in [#11035](https://github.com/facebook/react/pull/11035))
<ide> * Minor bundle size improvements. ([@gaearon](https://github.com/gaearon) in [#10802](https://github.com/facebook/react/pull/10802), [#10803](https://github.com/facebook/react/pull/10803))
<ide> | 1 |
Ruby | Ruby | add tests for formula#installed_prefix | 8407ad23561882b879febbf4870efa7ccca36fd7 | <ide><path>Library/Homebrew/test/test_formula.rb
<ide> def test_installed?
<ide> assert f.installed?
<ide> end
<ide>
<add> def test_installed_prefix
<add> f = Class.new(TestBall).new
<add> assert_equal f.prefix, f.installed_prefix
<add> end
<add>
<add> def test_installed_prefix_head_installed
<add> f = formula do
<add> head 'foo'
<add> devel do
<add> url 'foo'
<add> version '1.0'
<add> end
<add> end
<add> prefix = HOMEBREW_CELLAR+f.name+f.head.version
<add> prefix.mkpath
<add> assert_equal prefix, f.installed_prefix
<add> ensure
<add> prefix.rmtree
<add> end
<add>
<add> def test_installed_prefix_devel_installed
<add> f = formula do
<add> head 'foo'
<add> devel do
<add> url 'foo'
<add> version '1.0'
<add> end
<add> end
<add> prefix = HOMEBREW_CELLAR+f.name+f.devel.version
<add> prefix.mkpath
<add> assert_equal prefix, f.installed_prefix
<add> ensure
<add> prefix.rmtree
<add> end
<add>
<add> def test_installed_prefix_stable_installed
<add> f = formula do
<add> head 'foo'
<add> devel do
<add> url 'foo'
<add> version '1.0-devel'
<add> end
<add> end
<add> prefix = HOMEBREW_CELLAR+f.name+f.version
<add> prefix.mkpath
<add> assert_equal prefix, f.installed_prefix
<add> ensure
<add> prefix.rmtree
<add> end
<add>
<add> def test_installed_prefix_head_active_spec
<add> ARGV.stubs(:build_head? => true)
<add>
<add> f = formula do
<add> head 'foo'
<add> devel do
<add> url 'foo'
<add> version '1.0-devel'
<add> end
<add> end
<add> prefix = HOMEBREW_CELLAR+f.name+f.head.version
<add> assert_equal prefix, f.installed_prefix
<add> end
<add>
<add> def test_installed_prefix_devel_active_spec
<add> ARGV.stubs(:build_devel? => true)
<add>
<add> f = formula do
<add> head 'foo'
<add> devel do
<add> url 'foo'
<add> version '1.0-devel'
<add> end
<add> end
<add> prefix = HOMEBREW_CELLAR+f.name+f.devel.version
<add> assert_equal prefix, f.installed_prefix
<add> end
<add>
<ide> def test_equality
<ide> x = TestBall.new
<ide> y = TestBall.new | 1 |
Python | Python | add dependency to azure-core | 6fb4f4b260117e8b1313aa6c1c84bd9507bb194a | <ide><path>setup.py
<ide> def get_sphinx_theme_version() -> str:
<ide> # blob is the only exception
<ide> # Solution to that is being worked on in https://github.com/apache/airflow/pull/12188
<ide> # once it is merged, we can move those two back to `azure` extra.
<add> 'azure-core>=1.10.0',
<ide> 'azure-storage-blob',
<ide> 'azure-storage-common',
<ide> # Snowflake conector > 2.3.8 is needed because it has vendored urrllib3 and requests libraries which | 1 |
Mixed | Javascript | add tojson to performance class | 93f0b4d35bf72a1119b8a5bfea932b85faa1a4a6 | <ide><path>doc/api/perf_hooks.md
<ide> If the wrapped function returns a promise, a finally handler will be attached
<ide> to the promise and the duration will be reported once the finally handler is
<ide> invoked.
<ide>
<add>### `performance.toJSON()`
<add><!-- YAML
<add>added: REPLACEME
<add>-->
<add>
<add>An object which is JSON representation of the `performance` object. It
<add>is similar to [`window.performance.toJSON`][] in browsers.
<add>
<ide> ## Class: `PerformanceEntry`
<ide> <!-- YAML
<ide> added: v8.5.0
<ide> require('some-module');
<ide> [`child_process.spawnSync()`]: child_process.md#child_process_child_process_spawnsync_command_args_options
<ide> [`process.hrtime()`]: process.md#process_process_hrtime_time
<ide> [`timeOrigin`]: https://w3c.github.io/hr-time/#dom-performance-timeorigin
<add>[`window.performance.toJSON`]: https://developer.mozilla.org/en-US/docs/Web/API/Performance/toJSON
<ide> [`window.performance`]: https://developer.mozilla.org/en-US/docs/Web/API/Window/performance
<ide><path>lib/perf_hooks.js
<ide> class Performance extends EventTarget {
<ide> timeOrigin: this.timeOrigin,
<ide> }, opts)}`;
<ide> }
<add>
<add>}
<add>
<add>function toJSON() {
<add> return {
<add> nodeTiming: this.nodeTiming,
<add> timeOrigin: this.timeOrigin,
<add> eventLoopUtilization: this.eventLoopUtilization()
<add> };
<ide> }
<ide>
<ide> class InternalPerformance extends EventTarget {}
<ide> ObjectDefineProperties(Performance.prototype, {
<ide> configurable: true,
<ide> enumerable: true,
<ide> value: timeOriginTimestamp,
<add> },
<add> toJSON: {
<add> configurable: true,
<add> enumerable: true,
<add> value: toJSON,
<ide> }
<ide> });
<ide>
<ide><path>test/parallel/test-tojson-perf_hooks.js
<add>'use strict';
<add>
<add>require('../common');
<add>const assert = require('assert');
<add>const { performance } = require('perf_hooks');
<add>
<add>// Test toJSON for performance object
<add>{
<add> assert.strictEqual(typeof performance.toJSON, 'function');
<add> const jsonObject = performance.toJSON();
<add> assert.strictEqual(typeof jsonObject, 'object');
<add> assert.strictEqual(jsonObject.timeOrigin, performance.timeOrigin);
<add> assert.strictEqual(typeof jsonObject.nodeTiming, 'object');
<add>} | 3 |
Ruby | Ruby | add prefix and suffix to renamed tables, closes | 0968ee3456a874aa46d37d68b2d7d2dd0a84a5fe | <ide><path>activerecord/lib/active_record/migration.rb
<ide> def method_missing(method, *arguments, &block)
<ide> say_with_time "#{method}(#{arg_list})" do
<ide> unless arguments.empty? || method == :execute
<ide> arguments[0] = Migrator.proper_table_name(arguments.first)
<add> arguments[1] = Migrator.proper_table_name(arguments.second) if method == :rename_table
<ide> end
<ide> return super unless connection.respond_to?(method)
<ide> connection.send(method, *arguments, &block)
<ide><path>activerecord/test/cases/migration_test.rb
<ide> require 'models/developer'
<ide>
<ide> require MIGRATIONS_ROOT + "/valid/2_we_need_reminders"
<add>require MIGRATIONS_ROOT + "/rename/1_we_need_things"
<add>require MIGRATIONS_ROOT + "/rename/2_rename_things"
<ide> require MIGRATIONS_ROOT + "/decimal/1_give_me_big_numbers"
<ide>
<ide> if ActiveRecord::Base.connection.supports_migrations?
<ide> class BigNumber < ActiveRecord::Base; end
<ide>
<ide> class Reminder < ActiveRecord::Base; end
<ide>
<add> class Thing < ActiveRecord::Base; end
<add>
<ide> class ActiveRecord::Migration
<ide> class << self
<ide> attr_accessor :message_count
<ide> def teardown
<ide> ActiveRecord::Base.connection.initialize_schema_migrations_table
<ide> ActiveRecord::Base.connection.execute "DELETE FROM #{ActiveRecord::Migrator.schema_migrations_table_name}"
<ide>
<add> %w(things awesome_things prefix_things_suffix prefix_awesome_things_suffix).each do |table|
<add> Thing.connection.drop_table(table) rescue nil
<add> end
<add> Thing.reset_column_information
<add>
<ide> %w(reminders people_reminders prefix_reminders_suffix).each do |table|
<ide> Reminder.connection.drop_table(table) rescue nil
<ide> end
<ide> def test_proper_table_name
<ide> Reminder.reset_table_name
<ide> end
<ide>
<add> def test_rename_table_with_prefix_and_suffix
<add> assert !Thing.table_exists?
<add> ActiveRecord::Base.table_name_prefix = 'prefix_'
<add> ActiveRecord::Base.table_name_suffix = '_suffix'
<add> Thing.reset_table_name
<add> Thing.reset_sequence_name
<add> WeNeedThings.up
<add>
<add> assert Thing.create("content" => "hello world")
<add> assert_equal "hello world", Thing.find(:first).content
<add>
<add> RenameThings.up
<add> Thing.set_table_name("prefix_awesome_things_suffix")
<add>
<add> assert_equal "hello world", Thing.find(:first).content
<add> ensure
<add> ActiveRecord::Base.table_name_prefix = ''
<add> ActiveRecord::Base.table_name_suffix = ''
<add> Thing.reset_table_name
<add> Thing.reset_sequence_name
<add> end
<add>
<ide> def test_add_drop_table_with_prefix_and_suffix
<ide> assert !Reminder.table_exists?
<ide> ActiveRecord::Base.table_name_prefix = 'prefix_'
<ide><path>activerecord/test/migrations/rename/1_we_need_things.rb
<add>class WeNeedThings < ActiveRecord::Migration
<add> def self.up
<add> create_table("things") do |t|
<add> t.column :content, :text
<add> end
<add> end
<add>
<add> def self.down
<add> drop_table "things"
<add> end
<add>end
<ide>\ No newline at end of file
<ide><path>activerecord/test/migrations/rename/2_rename_things.rb
<add>class RenameThings < ActiveRecord::Migration
<add> def self.up
<add> rename_table "things", "awesome_things"
<add> end
<add>
<add> def self.down
<add> rename_table "awesome_things", "things"
<add> end
<add>end
<ide>\ No newline at end of file | 4 |
Python | Python | add viterbi algorithm | 7b521b66cfe3d16960c3fa8e01ff947794cc44a6 | <ide><path>dynamic_programming/viterbi.py
<add>from typing import Any
<add>
<add>
<add>def viterbi(
<add> observations_space: list,
<add> states_space: list,
<add> initial_probabilities: dict,
<add> transition_probabilities: dict,
<add> emission_probabilities: dict,
<add>) -> list:
<add> """
<add> Viterbi Algorithm, to find the most likely path of
<add> states from the start and the expected output.
<add> https://en.wikipedia.org/wiki/Viterbi_algorithm
<add> sdafads
<add> Wikipedia example
<add> >>> observations = ["normal", "cold", "dizzy"]
<add> >>> states = ["Healthy", "Fever"]
<add> >>> start_p = {"Healthy": 0.6, "Fever": 0.4}
<add> >>> trans_p = {
<add> ... "Healthy": {"Healthy": 0.7, "Fever": 0.3},
<add> ... "Fever": {"Healthy": 0.4, "Fever": 0.6},
<add> ... }
<add> >>> emit_p = {
<add> ... "Healthy": {"normal": 0.5, "cold": 0.4, "dizzy": 0.1},
<add> ... "Fever": {"normal": 0.1, "cold": 0.3, "dizzy": 0.6},
<add> ... }
<add> >>> viterbi(observations, states, start_p, trans_p, emit_p)
<add> ['Healthy', 'Healthy', 'Fever']
<add>
<add> >>> viterbi((), states, start_p, trans_p, emit_p)
<add> Traceback (most recent call last):
<add> ...
<add> ValueError: There's an empty parameter
<add>
<add> >>> viterbi(observations, (), start_p, trans_p, emit_p)
<add> Traceback (most recent call last):
<add> ...
<add> ValueError: There's an empty parameter
<add>
<add> >>> viterbi(observations, states, {}, trans_p, emit_p)
<add> Traceback (most recent call last):
<add> ...
<add> ValueError: There's an empty parameter
<add>
<add> >>> viterbi(observations, states, start_p, {}, emit_p)
<add> Traceback (most recent call last):
<add> ...
<add> ValueError: There's an empty parameter
<add>
<add> >>> viterbi(observations, states, start_p, trans_p, {})
<add> Traceback (most recent call last):
<add> ...
<add> ValueError: There's an empty parameter
<add>
<add> >>> viterbi("invalid", states, start_p, trans_p, emit_p)
<add> Traceback (most recent call last):
<add> ...
<add> ValueError: observations_space must be a list
<add>
<add> >>> viterbi(["valid", 123], states, start_p, trans_p, emit_p)
<add> Traceback (most recent call last):
<add> ...
<add> ValueError: observations_space must be a list of strings
<add>
<add> >>> viterbi(observations, "invalid", start_p, trans_p, emit_p)
<add> Traceback (most recent call last):
<add> ...
<add> ValueError: states_space must be a list
<add>
<add> >>> viterbi(observations, ["valid", 123], start_p, trans_p, emit_p)
<add> Traceback (most recent call last):
<add> ...
<add> ValueError: states_space must be a list of strings
<add>
<add> >>> viterbi(observations, states, "invalid", trans_p, emit_p)
<add> Traceback (most recent call last):
<add> ...
<add> ValueError: initial_probabilities must be a dict
<add>
<add> >>> viterbi(observations, states, {2:2}, trans_p, emit_p)
<add> Traceback (most recent call last):
<add> ...
<add> ValueError: initial_probabilities all keys must be strings
<add>
<add> >>> viterbi(observations, states, {"a":2}, trans_p, emit_p)
<add> Traceback (most recent call last):
<add> ...
<add> ValueError: initial_probabilities all values must be float
<add>
<add> >>> viterbi(observations, states, start_p, "invalid", emit_p)
<add> Traceback (most recent call last):
<add> ...
<add> ValueError: transition_probabilities must be a dict
<add>
<add> >>> viterbi(observations, states, start_p, {"a":2}, emit_p)
<add> Traceback (most recent call last):
<add> ...
<add> ValueError: transition_probabilities all values must be dict
<add>
<add> >>> viterbi(observations, states, start_p, {2:{2:2}}, emit_p)
<add> Traceback (most recent call last):
<add> ...
<add> ValueError: transition_probabilities all keys must be strings
<add>
<add> >>> viterbi(observations, states, start_p, {"a":{2:2}}, emit_p)
<add> Traceback (most recent call last):
<add> ...
<add> ValueError: transition_probabilities all keys must be strings
<add>
<add> >>> viterbi(observations, states, start_p, {"a":{"b":2}}, emit_p)
<add> Traceback (most recent call last):
<add> ...
<add> ValueError: transition_probabilities nested dictionary all values must be float
<add>
<add> >>> viterbi(observations, states, start_p, trans_p, "invalid")
<add> Traceback (most recent call last):
<add> ...
<add> ValueError: emission_probabilities must be a dict
<add>
<add> >>> viterbi(observations, states, start_p, trans_p, None)
<add> Traceback (most recent call last):
<add> ...
<add> ValueError: There's an empty parameter
<add>
<add> """
<add> _validation(
<add> observations_space,
<add> states_space,
<add> initial_probabilities,
<add> transition_probabilities,
<add> emission_probabilities,
<add> )
<add> # Creates data structures and fill initial step
<add> probabilities: dict = {}
<add> pointers: dict = {}
<add> for state in states_space:
<add> observation = observations_space[0]
<add> probabilities[(state, observation)] = (
<add> initial_probabilities[state] * emission_probabilities[state][observation]
<add> )
<add> pointers[(state, observation)] = None
<add>
<add> # Fills the data structure with the probabilities of
<add> # different transitions and pointers to previous states
<add> for o in range(1, len(observations_space)):
<add> observation = observations_space[o]
<add> prior_observation = observations_space[o - 1]
<add> for state in states_space:
<add> # Calculates the argmax for probability function
<add> arg_max = ""
<add> max_probability = -1
<add> for k_state in states_space:
<add> probability = (
<add> probabilities[(k_state, prior_observation)]
<add> * transition_probabilities[k_state][state]
<add> * emission_probabilities[state][observation]
<add> )
<add> if probability > max_probability:
<add> max_probability = probability
<add> arg_max = k_state
<add>
<add> # Update probabilities and pointers dicts
<add> probabilities[(state, observation)] = (
<add> probabilities[(arg_max, prior_observation)]
<add> * transition_probabilities[arg_max][state]
<add> * emission_probabilities[state][observation]
<add> )
<add>
<add> pointers[(state, observation)] = arg_max
<add>
<add> # The final observation
<add> final_observation = observations_space[len(observations_space) - 1]
<add>
<add> # argmax for given final observation
<add> arg_max = ""
<add> max_probability = -1
<add> for k_state in states_space:
<add> probability = probabilities[(k_state, final_observation)]
<add> if probability > max_probability:
<add> max_probability = probability
<add> arg_max = k_state
<add> last_state = arg_max
<add>
<add> # Process pointers backwards
<add> previous = last_state
<add> result = []
<add> for o in range(len(observations_space) - 1, -1, -1):
<add> result.append(previous)
<add> previous = pointers[previous, observations_space[o]]
<add> result.reverse()
<add>
<add> return result
<add>
<add>
<add>def _validation(
<add> observations_space: Any,
<add> states_space: Any,
<add> initial_probabilities: Any,
<add> transition_probabilities: Any,
<add> emission_probabilities: Any,
<add>) -> None:
<add> """
<add> >>> observations = ["normal", "cold", "dizzy"]
<add> >>> states = ["Healthy", "Fever"]
<add> >>> start_p = {"Healthy": 0.6, "Fever": 0.4}
<add> >>> trans_p = {
<add> ... "Healthy": {"Healthy": 0.7, "Fever": 0.3},
<add> ... "Fever": {"Healthy": 0.4, "Fever": 0.6},
<add> ... }
<add> >>> emit_p = {
<add> ... "Healthy": {"normal": 0.5, "cold": 0.4, "dizzy": 0.1},
<add> ... "Fever": {"normal": 0.1, "cold": 0.3, "dizzy": 0.6},
<add> ... }
<add> >>> _validation(observations, states, start_p, trans_p, emit_p)
<add>
<add> >>> _validation([], states, start_p, trans_p, emit_p)
<add> Traceback (most recent call last):
<add> ...
<add> ValueError: There's an empty parameter
<add> """
<add> _validate_not_empty(
<add> observations_space,
<add> states_space,
<add> initial_probabilities,
<add> transition_probabilities,
<add> emission_probabilities,
<add> )
<add> _validate_lists(observations_space, states_space)
<add> _validate_dicts(
<add> initial_probabilities, transition_probabilities, emission_probabilities
<add> )
<add>
<add>
<add>def _validate_not_empty(
<add> observations_space: Any,
<add> states_space: Any,
<add> initial_probabilities: Any,
<add> transition_probabilities: Any,
<add> emission_probabilities: Any,
<add>) -> None:
<add> """
<add> >>> _validate_not_empty(["a"], ["b"], {"c":0.5},
<add> ... {"d": {"e": 0.6}}, {"f": {"g": 0.7}})
<add>
<add> >>> _validate_not_empty(["a"], ["b"], {"c":0.5}, {}, {"f": {"g": 0.7}})
<add> Traceback (most recent call last):
<add> ...
<add> ValueError: There's an empty parameter
<add> >>> _validate_not_empty(["a"], ["b"], None, {"d": {"e": 0.6}}, {"f": {"g": 0.7}})
<add> Traceback (most recent call last):
<add> ...
<add> ValueError: There's an empty parameter
<add> """
<add> if not all(
<add> [
<add> observations_space,
<add> states_space,
<add> initial_probabilities,
<add> transition_probabilities,
<add> emission_probabilities,
<add> ]
<add> ):
<add> raise ValueError("There's an empty parameter")
<add>
<add>
<add>def _validate_lists(observations_space: Any, states_space: Any) -> None:
<add> """
<add> >>> _validate_lists(["a"], ["b"])
<add>
<add> >>> _validate_lists(1234, ["b"])
<add> Traceback (most recent call last):
<add> ...
<add> ValueError: observations_space must be a list
<add>
<add> >>> _validate_lists(["a"], [3])
<add> Traceback (most recent call last):
<add> ...
<add> ValueError: states_space must be a list of strings
<add> """
<add> _validate_list(observations_space, "observations_space")
<add> _validate_list(states_space, "states_space")
<add>
<add>
<add>def _validate_list(_object: Any, var_name: str) -> None:
<add> """
<add> >>> _validate_list(["a"], "mock_name")
<add>
<add> >>> _validate_list("a", "mock_name")
<add> Traceback (most recent call last):
<add> ...
<add> ValueError: mock_name must be a list
<add> >>> _validate_list([0.5], "mock_name")
<add> Traceback (most recent call last):
<add> ...
<add> ValueError: mock_name must be a list of strings
<add>
<add> """
<add> if not isinstance(_object, list):
<add> raise ValueError(f"{var_name} must be a list")
<add> else:
<add> for x in _object:
<add> if not isinstance(x, str):
<add> raise ValueError(f"{var_name} must be a list of strings")
<add>
<add>
<add>def _validate_dicts(
<add> initial_probabilities: Any,
<add> transition_probabilities: Any,
<add> emission_probabilities: Any,
<add>) -> None:
<add> """
<add> >>> _validate_dicts({"c":0.5}, {"d": {"e": 0.6}}, {"f": {"g": 0.7}})
<add>
<add> >>> _validate_dicts("invalid", {"d": {"e": 0.6}}, {"f": {"g": 0.7}})
<add> Traceback (most recent call last):
<add> ...
<add> ValueError: initial_probabilities must be a dict
<add> >>> _validate_dicts({"c":0.5}, {2: {"e": 0.6}}, {"f": {"g": 0.7}})
<add> Traceback (most recent call last):
<add> ...
<add> ValueError: transition_probabilities all keys must be strings
<add> >>> _validate_dicts({"c":0.5}, {"d": {"e": 0.6}}, {"f": {2: 0.7}})
<add> Traceback (most recent call last):
<add> ...
<add> ValueError: emission_probabilities all keys must be strings
<add> >>> _validate_dicts({"c":0.5}, {"d": {"e": 0.6}}, {"f": {"g": "h"}})
<add> Traceback (most recent call last):
<add> ...
<add> ValueError: emission_probabilities nested dictionary all values must be float
<add> """
<add> _validate_dict(initial_probabilities, "initial_probabilities", float)
<add> _validate_nested_dict(transition_probabilities, "transition_probabilities")
<add> _validate_nested_dict(emission_probabilities, "emission_probabilities")
<add>
<add>
<add>def _validate_nested_dict(_object: Any, var_name: str) -> None:
<add> """
<add> >>> _validate_nested_dict({"a":{"b": 0.5}}, "mock_name")
<add>
<add> >>> _validate_nested_dict("invalid", "mock_name")
<add> Traceback (most recent call last):
<add> ...
<add> ValueError: mock_name must be a dict
<add> >>> _validate_nested_dict({"a": 8}, "mock_name")
<add> Traceback (most recent call last):
<add> ...
<add> ValueError: mock_name all values must be dict
<add> >>> _validate_nested_dict({"a":{2: 0.5}}, "mock_name")
<add> Traceback (most recent call last):
<add> ...
<add> ValueError: mock_name all keys must be strings
<add> >>> _validate_nested_dict({"a":{"b": 4}}, "mock_name")
<add> Traceback (most recent call last):
<add> ...
<add> ValueError: mock_name nested dictionary all values must be float
<add> """
<add> _validate_dict(_object, var_name, dict)
<add> for x in _object.values():
<add> _validate_dict(x, var_name, float, True)
<add>
<add>
<add>def _validate_dict(
<add> _object: Any, var_name: str, value_type: type, nested: bool = False
<add>) -> None:
<add> """
<add> >>> _validate_dict({"b": 0.5}, "mock_name", float)
<add>
<add> >>> _validate_dict("invalid", "mock_name", float)
<add> Traceback (most recent call last):
<add> ...
<add> ValueError: mock_name must be a dict
<add> >>> _validate_dict({"a": 8}, "mock_name", dict)
<add> Traceback (most recent call last):
<add> ...
<add> ValueError: mock_name all values must be dict
<add> >>> _validate_dict({2: 0.5}, "mock_name",float, True)
<add> Traceback (most recent call last):
<add> ...
<add> ValueError: mock_name all keys must be strings
<add> >>> _validate_dict({"b": 4}, "mock_name", float,True)
<add> Traceback (most recent call last):
<add> ...
<add> ValueError: mock_name nested dictionary all values must be float
<add> """
<add> if not isinstance(_object, dict):
<add> raise ValueError(f"{var_name} must be a dict")
<add> if not all(isinstance(x, str) for x in _object):
<add> raise ValueError(f"{var_name} all keys must be strings")
<add> if not all(isinstance(x, value_type) for x in _object.values()):
<add> nested_text = "nested dictionary " if nested else ""
<add> raise ValueError(
<add> f"{var_name} {nested_text}all values must be {value_type.__name__}"
<add> )
<add>
<add>
<add>if __name__ == "__main__":
<add> from doctest import testmod
<add>
<add> testmod() | 1 |
Javascript | Javascript | enable concurrent apis in all experimental forks | 9123c479f4f3a21f897a923aeb33bf0499f4891f | <ide><path>packages/shared/forks/ReactFeatureFlags.native-fb.js
<ide> export const enableProfilerTimer = __PROFILE__;
<ide> export const enableSchedulerTracing = __PROFILE__;
<ide> export const enableSuspenseServerRenderer = false;
<ide> export const enableSelectiveHydration = false;
<del>export const exposeConcurrentModeAPIs = false;
<add>export const exposeConcurrentModeAPIs = __EXPERIMENTAL__;
<ide> export const warnAboutShorthandPropertyCollision = false;
<ide> export const enableSchedulerDebugging = false;
<ide> export const debugRenderPhaseSideEffectsForStrictMode = true;
<ide><path>packages/shared/forks/ReactFeatureFlags.native-oss.js
<ide> export const enableSuspenseServerRenderer = false;
<ide> export const enableSelectiveHydration = false;
<ide> export const disableJavaScriptURLs = false;
<ide> export const disableInputAttributeSyncing = false;
<del>export const exposeConcurrentModeAPIs = false;
<add>export const exposeConcurrentModeAPIs = __EXPERIMENTAL__;
<ide> export const warnAboutShorthandPropertyCollision = false;
<ide> export const enableSchedulerDebugging = false;
<ide> export const enableFlareAPI = false;
<ide><path>packages/shared/forks/ReactFeatureFlags.persistent.js
<ide> export const enableSuspenseServerRenderer = false;
<ide> export const enableSelectiveHydration = false;
<ide> export const disableJavaScriptURLs = false;
<ide> export const disableInputAttributeSyncing = false;
<del>export const exposeConcurrentModeAPIs = false;
<add>export const exposeConcurrentModeAPIs = __EXPERIMENTAL__;
<ide> export const warnAboutShorthandPropertyCollision = false;
<ide> export const enableSchedulerDebugging = false;
<ide> export const enableFlareAPI = false;
<ide><path>packages/shared/forks/ReactFeatureFlags.test-renderer.js
<ide> export const enableSuspenseServerRenderer = false;
<ide> export const enableSelectiveHydration = false;
<ide> export const disableJavaScriptURLs = false;
<ide> export const disableInputAttributeSyncing = false;
<del>export const exposeConcurrentModeAPIs = false;
<add>export const exposeConcurrentModeAPIs = __EXPERIMENTAL__;
<ide> export const warnAboutShorthandPropertyCollision = false;
<ide> export const enableSchedulerDebugging = false;
<ide> export const enableFlareAPI = false;
<ide><path>packages/shared/forks/ReactFeatureFlags.test-renderer.www.js
<ide> export const enableProfilerTimer = __PROFILE__;
<ide> export const enableSchedulerTracing = __PROFILE__;
<ide> export const enableSuspenseServerRenderer = false;
<ide> export const enableSelectiveHydration = false;
<del>export const exposeConcurrentModeAPIs = false;
<add>export const exposeConcurrentModeAPIs = __EXPERIMENTAL__;
<ide> export const enableSchedulerDebugging = false;
<ide> export const disableJavaScriptURLs = false;
<ide> export const enableFlareAPI = true;
<ide><path>packages/shared/forks/ReactFeatureFlags.www.js
<ide> export const warnAboutStringRefs = false;
<ide> export const warnAboutDefaultPropsOnFunctionComponents = false;
<ide> export const disableSchedulerTimeoutBasedOnReactExpirationTime = false;
<ide>
<del>export const exposeConcurrentModeAPIs = false;
<add>export const exposeConcurrentModeAPIs = __EXPERIMENTAL__;
<ide>
<ide> export const enableSuspenseServerRenderer = true;
<ide> | 6 |
Text | Text | reduce ci options in collaborator_guide.md | f3e078fdce38cd8526e4f7f3aa657e030f141efc | <ide><path>COLLABORATOR_GUIDE.md
<ide> All bugfixes require a test case which demonstrates the defect. The
<ide> test should *fail* before the change, and *pass* after the change.
<ide>
<ide> All pull requests that modify executable code should also include a test case
<del>and be subjected to continuous integration tests on the
<add>and must be subjected to continuous integration tests on the
<ide> [project CI server](https://ci.nodejs.org/). The pull request should have a CI
<del>status indicator if possible.
<add>status indicator.
<ide>
<ide> #### Useful CI Jobs
<ide>
<ide> is the standard CI run we do to check Pull Requests. It triggers
<ide> `node-test-commit`, which runs the `build-ci` and `test-ci` targets on all
<ide> supported platforms.
<ide>
<del>* [`node-test-linter`](https://ci.nodejs.org/job/node-test-linter/)
<del>only runs the linter targets, which is useful for changes that only affect
<del>comments or documentation.
<del>
<ide> * [`node-test-pull-request-lite`](https://ci.nodejs.org/job/node-test-pull-request-lite/)
<del>only runs the linter job, as well as the tests on LinuxONE. Should only be used
<del>for trivial changes that do not require being tested on all platforms.
<add>only runs the linter job, as well as the tests on LinuxONE, which is very fast.
<add>This is useful for changes that only affect comments or documentation.
<ide>
<ide> * [`citgm-smoker`](https://ci.nodejs.org/job/citgm-smoker/)
<ide> uses [`CitGM`](https://github.com/nodejs/citgm) to allow you to run | 1 |
Javascript | Javascript | update image api | db7b44ec8ebb9a72f13a8397dd35007e24f4e903 | <ide><path>Libraries/Image/Image.ios.js
<ide> const ImageViewManager = NativeModules.ImageViewManager;
<ide> * including network images, static resources, temporary local images, and
<ide> * images from local disk, such as the camera roll.
<ide> *
<del> * Example usage:
<add> * This exmaples shows both fetching and displaying an image from local storage as well as on from
<add> * network.
<ide> *
<add> * ```ReactNativeWebPlayer
<add> * import React, { Component } from 'react';
<add> * import { AppRegistry, View, Image } from 'react-native';
<add> *
<add> * class DisplayAnImage extends Component {
<add> * render() {
<add> * return (
<add> * <View>
<add> * <Image
<add> * source={require('./img/favicon.png')}
<add> * />
<add> * <Image
<add> * source={{uri: 'http://facebook.github.io/react/img/logo_og.png'}}
<add> * />
<add> * </View>
<add> * );
<add> * }
<add> * }
<add> *
<add> * // App registration and rendering
<add> * AppRegistry.registerComponent('DisplayAnImage', () => DisplayAnImage);
<ide> * ```
<del> * renderImages: function() {
<del> * return (
<del> * <View>
<del> * <Image
<del> * style={styles.icon}
<del> * source={require('./myIcon.png')}
<del> * />
<del> * <Image
<del> * style={styles.logo}
<del> * source={{uri: 'http://facebook.github.io/react/img/logo_og.png'}}
<del> * />
<del> * </View>
<del> * );
<del> * },
<add> *
<add> * You can also add `style` to an image:
<add> *
<add> * ```ReactNativeWebPlayer
<add> * import React, { Component } from 'react';
<add> * import { AppRegistry, View, Image, StyleSheet} from 'react-native';
<add> *
<add> * const styles = StyleSheet.create({
<add> * stretch: {
<add> * width: 50,
<add> * height: 200
<add> * }
<add> * });
<add> *
<add> *class DisplayAnImageWithStyle extends Component {
<add> * render() {
<add> * return (
<add> * <View>
<add> * <Image
<add> * style={styles.stretch}
<add> * source={require('./img/favicon.png')}
<add> * />
<add> * </View>
<add> * );
<add> * }
<add> * }
<add> *
<add> * // App registration and rendering
<add> * AppRegistry.registerComponent(
<add> * 'DisplayAnImageWithStyle',
<add> * () => DisplayAnImageWithStyle
<add> * );
<ide> * ```
<ide> */
<ide> const Image = React.createClass({
<ide> propTypes: {
<add> /**
<add> * > `ImageResizeMode` is an `Enum` for different image resizing modes, set via the
<add> * > `resizeMode` style property on `Image` components. The values are `contain`, `cover`,
<add> * > `stretch`, `center`, `repeat`.
<add> */
<ide> style: StyleSheetPropType(ImageStylePropTypes),
<ide> /**
<ide> * The image source (either a remote URL or a local file resource).
<ide> */
<ide> source: ImageSourcePropType,
<ide> /**
<ide> * A static image to display while loading the image source.
<add> *
<add> * - `uri` - a string representing the resource identifier for the image, which
<add> * should be either a local file path or the name of a static image resource
<add> * (which should be wrapped in the `require('./path/to/image.png')` function).
<add> * - `width`, `height` - can be specified if known at build time, in which case
<add> * these will be used to set the default `<Image/>` component dimensions.
<add> * - `scale` - used to indicate the scale factor of the image. Defaults to 1.0 if
<add> * unspecified, meaning that one image pixel equates to one display point / DIP.
<add> * - `number` - Opaque type returned by something like `require('./image.jpg')`.
<add> *
<ide> * @platform ios
<ide> */
<ide> defaultSource: PropTypes.oneOfType([
<add> // TODO: Tooling to support documenting these directly and having them display in the docs.
<ide> PropTypes.shape({
<del> /**
<del> * `uri` is a string representing the resource identifier for the image, which
<del> * should be either a local file path or the name of a static image resource
<del> * (which should be wrapped in the `require('./path/to/image.png')` function).
<del> */
<ide> uri: PropTypes.string,
<del> /**
<del> * `width` and `height` can be specified if known at build time, in which case
<del> * these will be used to set the default `<Image/>` component dimensions.
<del> */
<ide> width: PropTypes.number,
<ide> height: PropTypes.number,
<del> /**
<del> * `scale` is used to indicate the scale factor of the image. Defaults to 1.0 if
<del> * unspecified, meaning that one image pixel equates to one display point / DIP.
<del> */
<ide> scale: PropTypes.number,
<ide> }),
<del> // Opaque type returned by require('./image.jpg')
<ide> PropTypes.number,
<ide> ]),
<ide> /**
<ide> const Image = React.createClass({
<ide> blurRadius: PropTypes.number,
<ide> /**
<ide> * When the image is resized, the corners of the size specified
<del> * by capInsets will stay a fixed size, but the center content and borders
<add> * by `capInsets` will stay a fixed size, but the center content and borders
<ide> * of the image will be stretched. This is useful for creating resizable
<del> * rounded buttons, shadows, and other resizable assets. More info on
<del> * [Apple documentation](https://developer.apple.com/library/ios/documentation/UIKit/Reference/UIImage_Class/index.html#//apple_ref/occ/instm/UIImage/resizableImageWithCapInsets)
<add> * rounded buttons, shadows, and other resizable assets. More info in the
<add> * [official Apple documentation](https://developer.apple.com/library/ios/documentation/UIKit/Reference/UIImage_Class/index.html#//apple_ref/occ/instm/UIImage/resizableImageWithCapInsets).
<add> *
<ide> * @platform ios
<ide> */
<ide> capInsets: EdgeInsetsPropType,
<ide> /**
<ide> * Determines how to resize the image when the frame doesn't match the raw
<ide> * image dimensions.
<ide> *
<del> * 'cover': Scale the image uniformly (maintain the image's aspect ratio)
<add> * - `cover`: Scale the image uniformly (maintain the image's aspect ratio)
<ide> * so that both dimensions (width and height) of the image will be equal
<ide> * to or larger than the corresponding dimension of the view (minus padding).
<ide> *
<del> * 'contain': Scale the image uniformly (maintain the image's aspect ratio)
<add> * - `contain`: Scale the image uniformly (maintain the image's aspect ratio)
<ide> * so that both dimensions (width and height) of the image will be equal to
<ide> * or less than the corresponding dimension of the view (minus padding).
<ide> *
<del> * 'stretch': Scale width and height independently, This may change the
<add> * - `stretch`: Scale width and height independently, This may change the
<ide> * aspect ratio of the src.
<ide> *
<del> * 'repeat': Repeat the image to cover the frame of the view. The
<add> * - `repeat`: Repeat the image to cover the frame of the view. The
<ide> * image will keep it's size and aspect ratio. (iOS only)
<ide> */
<ide> resizeMode: PropTypes.oneOf(['cover', 'contain', 'stretch', 'repeat']),
<ide> const Image = React.createClass({
<ide> */
<ide> onLayout: PropTypes.func,
<ide> /**
<del> * Invoked on load start
<add> * Invoked on load start.
<add> *
<add> * e.g., `onLoadStart={(e) => this.setState({loading: true})}`
<ide> */
<ide> onLoadStart: PropTypes.func,
<ide> /**
<del> * Invoked on download progress with `{nativeEvent: {loaded, total}}`
<add> * Invoked on download progress with `{nativeEvent: {loaded, total}}`.
<ide> * @platform ios
<ide> */
<ide> onProgress: PropTypes.func,
<ide> /**
<del> * Invoked on load error with `{nativeEvent: {error}}`
<add> * Invoked on load error with `{nativeEvent: {error}}`.
<ide> * @platform ios
<ide> */
<ide> onError: PropTypes.func,
<ide> /**
<del> * Invoked when load completes successfully
<add> * Invoked when load completes successfully.
<ide> */
<ide> onLoad: PropTypes.func,
<ide> /**
<del> * Invoked when load either succeeds or fails
<add> * Invoked when load either succeeds or fails.
<ide> */
<ide> onLoadEnd: PropTypes.func,
<ide> },
<ide> const Image = React.createClass({
<ide> * does not fully load/download the image data. A proper, supported way to
<ide> * preload images will be provided as a separate API.
<ide> *
<add> * @param uri The location of the image.
<add> * @param success The function that will be called if the image was sucessfully found and width
<add> * and height retrieved.
<add> * @param failure The function that will be called if there was an error, such as failing to
<add> * to retrieve the image.
<add> *
<add> * @returns void
<add> *
<ide> * @platform ios
<ide> */
<ide> getSize: function(
<ide> const Image = React.createClass({
<ide> /**
<ide> * Prefetches a remote image for later use by downloading it to the disk
<ide> * cache
<add> *
<add> * @param url The remote location of the image.
<add> *
<add> * @return The prefetched image.
<ide> */
<ide> prefetch(url: string) {
<ide> return ImageViewManager.prefetchImage(url); | 1 |
Python | Python | validate the existence of `.pyi` stub files | 66b17fbc9666b150c71bb94f2492fd880b2641e4 | <ide><path>numpy/typing/tests/test_isfile.py
<add>import os
<add>from pathlib import Path
<add>
<add>import numpy as np
<add>from numpy.testing import assert_
<add>
<add>ROOT = Path(np.__file__).parents[0]
<add>FILES = [
<add> ROOT / "py.typed",
<add> ROOT / "__init__.pyi",
<add> ROOT / "char.pyi",
<add> ROOT / "ctypeslib.pyi",
<add> ROOT / "emath.pyi",
<add> ROOT / "rec.pyi",
<add> ROOT / "version.pyi",
<add> ROOT / "core" / "__init__.pyi",
<add> ROOT / "distutils" / "__init__.pyi",
<add> ROOT / "f2py" / "__init__.pyi",
<add> ROOT / "fft" / "__init__.pyi",
<add> ROOT / "lib" / "__init__.pyi",
<add> ROOT / "linalg" / "__init__.pyi",
<add> ROOT / "ma" / "__init__.pyi",
<add> ROOT / "matrixlib" / "__init__.pyi",
<add> ROOT / "polynomial" / "__init__.pyi",
<add> ROOT / "random" / "__init__.pyi",
<add> ROOT / "testing" / "__init__.pyi",
<add>]
<add>
<add>
<add>class TestIsFile:
<add> def test_isfile(self):
<add> """Test if all ``.pyi`` files are properly installed."""
<add> for file in FILES:
<add> assert_(os.path.isfile(file)) | 1 |
Java | Java | add perf marker for create module | 1b2d9a858b8578b9e7e1c892792d07d2b9f83ab9 | <ide><path>ReactAndroid/src/main/java/com/facebook/react/bridge/ReactMarkerConstants.java
<ide> public class ReactMarkerConstants {
<ide> "CREATE_UI_MANAGER_MODULE_CONSTANTS_START";
<ide> public static final String CREATE_UI_MANAGER_MODULE_CONSTANTS_END =
<ide> "CREATE_UI_MANAGER_MODULE_CONSTANTS_END";
<add> public static final String CREATE_MODULE_START = "CREATE_MODULE_START";
<add> public static final String CREATE_MODULE_END = "CREATE_MODULE_END";
<ide> } | 1 |
Python | Python | fix stateful lstm example | ff2f8ac69b8bcf013a42add480b662daa4cde656 | <ide><path>examples/stateful_lstm.py
<ide> def gen_cosine_amp(amp=100, period=25, x0=0, xn=50000, step=1, k=0.0001):
<ide> expected_output,
<ide> batch_size=batch_size,
<ide> verbose=1,
<del> nb_epoch=1)
<add> nb_epoch=1,
<add> shuffle=False)
<ide> model.reset_states()
<ide>
<ide> print('Predicting') | 1 |
Python | Python | add netifaces optionnal dependancy | fda1085db040655e8d678c3a96cab888bf5297a9 | <ide><path>glances/core/glances_autodiscover.py
<ide>
<ide> # Import system libs
<ide> import socket
<add>try:
<add> import netifaces
<add> netifaces_tag = True
<add>except ImportError:
<add> netifaces_tag = True
<ide> try:
<ide> from zeroconf import ServiceBrowser, ServiceInfo, Zeroconf
<ide> zeroconf_tag = True
<ide> except ImportError:
<ide> zeroconf_tag = False
<ide>
<ide> # Import Glances libs
<del>from glances.core.glances_globals import appname, version, logger
<add>from glances.core.glances_globals import appname, logger
<ide>
<ide> # Global var
<ide> zeroconf_type = "_%s._tcp." % appname
<ide>
<ide>
<ide> class AutoDiscovered(object):
<add>
<ide> """Class to manage the auto discovered servers dict"""
<ide>
<ide> def __init__(self):
<ide> def remove_server(self, name):
<ide>
<ide>
<ide> class GlancesAutoDiscoverListener(object):
<add>
<ide> """Zeroconf listener for Glances server"""
<ide>
<ide> def __init__(self):
<ide> def addService(self, zeroconf, srv_type, srv_name):
<ide>
<ide> # Add server to the global dict
<ide> self.servers.add_server(srv_name, new_server_ip, new_server_port)
<del> logger.info("New Glances server detected (%s from %s:%s)" % (srv_name, new_server_ip, new_server_port))
<add> logger.info("New Glances server detected (%s from %s:%s)" %
<add> (srv_name, new_server_ip, new_server_port))
<ide> else:
<del> logger.warning("New Glances server detected, but Zeroconf info failed to be grabbed")
<add> logger.warning(
<add> "New Glances server detected, but Zeroconf info failed to be grabbed")
<ide> return True
<ide>
<ide> def removeService(self, zeroconf, srv_type, srv_name):
<ide> # Remove the server from the list
<ide> self.servers.remove_server(srv_name)
<del> logger.info("Glances server %s removed from the autodetect list" % srv_name)
<add> logger.info(
<add> "Glances server %s removed from the autodetect list" % srv_name)
<ide>
<ide>
<ide> class GlancesAutoDiscoverServer(object):
<add>
<ide> """Implementation of the Zeroconf protocol (server side for the Glances client)"""
<ide>
<ide> def __init__(self, args=None):
<ide> if zeroconf_tag:
<ide> logger.info("Init autodiscover mode (Zeroconf protocol)")
<ide> self.zeroconf = Zeroconf()
<ide> self.listener = GlancesAutoDiscoverListener()
<del> self.browser = ServiceBrowser(self.zeroconf, zeroconf_type, self.listener)
<add> self.browser = ServiceBrowser(
<add> self.zeroconf, zeroconf_type, self.listener)
<ide> else:
<del> logger.error("Can not start autodiscover mode (Zeroconf lib is not installed)")
<add> logger.error(
<add> "Can not start autodiscover mode (Zeroconf lib is not installed)")
<ide>
<ide> def get_servers_list(self):
<ide> """Return the current server list (dict of dict)"""
<ide> def close(self):
<ide>
<ide>
<ide> class GlancesAutoDiscoverClient(object):
<add>
<ide> """Implementation of the Zeroconf protocol (client side for the Glances server)"""
<ide>
<ide> def __init__(self, hostname, args=None):
<del> if zeroconf_tag:
<del> logger.info("Announce the Glances server on the local area network")
<del> self.zeroconf = Zeroconf()
<del> self.info = ServiceInfo(zeroconf_type,
<del> hostname + '.' + zeroconf_type,
<del> address=socket.inet_aton(args.bind_address),
<del> port=args.port,
<del> weight=0,
<del> priority=0,
<del> properties={},
<del> server=hostname)
<del> self.zeroconf.registerService(self.info)
<add> if netifaces_tag:
<add> # !!! TO BE REFACTOR
<add> # OK with server: LANGUAGE=en_US.utf8 python -m glances -s -d -B 192.168.176.128
<add> # KO with server: LANGUAGE=en_US.utf8 python -m glances -s -d
<add> try:
<add> zeroconf_bind_address = socket.inet_aton(netifaces.ifaddresses(
<add> netifaces.interfaces()[1])[netifaces.AF_INET][0]['addr'])
<add> except:
<add> zeroconf_bind_address = socket.inet_aton(args.bind_address)
<add> # /!!!
<add>
<add> if zeroconf_tag:
<add> logger.info(
<add> "Announce the Glances server on the local area network (using %s IP address)" % zeroconf_bind_address)
<add> self.zeroconf = Zeroconf()
<add> self.info = ServiceInfo(zeroconf_type,
<add> hostname + '.' + zeroconf_type,
<add> address=zeroconf_bind_address,
<add> port=args.port,
<add> weight=0,
<add> priority=0,
<add> properties={},
<add> server=hostname)
<add> self.zeroconf.registerService(self.info)
<add> else:
<add> logger.error(
<add> "Can not announce Glances server on the network (Zeroconf lib is not installed)")
<ide> else:
<del> logger.error("Can not announce Glances server on the network (Zeroconf lib is not installed)")
<add> logger.error(
<add> "Can not announce Glances server on the network (Netifaces lib is not installed)")
<ide>
<ide> def close(self):
<ide> if zeroconf_tag: | 1 |
Python | Python | reuse ex_deploy_node when instatiating a new node | 101f77f06c2da6417678f9df9c0902bea5aba0fe | <ide><path>libcloud/compute/drivers/vcloud.py
<ide> def create_node(self, **kwargs):
<ide>
<ide> # Power on the VM.
<ide> if ex_deploy:
<add> res = self.connection.request(get_url_path(vapp_href))
<add> node = self._to_node(res.object)
<ide> # Retry 3 times: when instantiating large number of VMs at the same
<ide> # time some may fail on resource allocation
<ide> retry = 3
<ide> while True:
<ide> try:
<del> res = self.connection.request(
<del> '%s/power/action/powerOn' % get_url_path(vapp_href),
<del> method='POST')
<del> self._wait_for_task_completion(res.object.get('href'))
<add> self.ex_deploy_node(node, ex_force_customization)
<ide> break
<ide> except Exception:
<ide> if retry <= 0: | 1 |
Javascript | Javascript | fix extra space | 3352b240596d2eec784dda8eeaf0d8ad989ead12 | <ide><path>lib/dependencies/ImportDependency.js
<ide> class ImportDependency extends ModuleDependency {
<ide> }
<ide> }
<ide>
<del>makeSerializable(
<del> ImportDependency,
<del> "webpack/lib/dependencies/ImportDependency "
<del>);
<add>makeSerializable(ImportDependency, "webpack/lib/dependencies/ImportDependency");
<ide>
<ide> ImportDependency.Template = class ImportDependencyTemplate extends ModuleDependency.Template {
<ide> /** | 1 |
PHP | PHP | add check for response status code being 204 | c19ec3bd1351c7097d62ca60ffdfb581808e9c35 | <ide><path>src/Illuminate/Foundation/Testing/TestResponse.php
<ide> public function assertJsonValidationErrors($keys)
<ide> */
<ide> public function assertJsonMissingValidationErrors($keys = null)
<ide> {
<del> if (empty($this->getContent())) {
<add> if (empty($this->getContent()) && $this->getStatusCode() == 204) {
<ide> PHPUnit::assertTrue(true);
<ide>
<ide> return $this;
<ide><path>tests/Foundation/FoundationTestResponseTest.php
<ide> public function testAssertJsonMissingValidationErrorsWithoutArgumentCanFail()
<ide>
<ide> public function testAssertJsonMissingValidationErrorsOnAnEmptyResponse()
<ide> {
<del> $testResponse = TestResponse::fromBaseResponse(
<add> $emptyTestResponse204 = TestResponse::fromBaseResponse(
<ide> (new Response)->setContent('')
<ide> );
<add> $emptyTestResponse204->setStatusCode(204);
<add> $emptyTestResponse204->assertJsonMissingValidationErrors();
<ide>
<del> $testResponse->assertJsonMissingValidationErrors();
<add> $this->expectException(AssertionFailedError::class);
<add>
<add> $emptyTestResponseNot204 = TestResponse::fromBaseResponse(
<add> (new Response)->setContent('')
<add> );
<add> $emptyTestResponseNot204->assertJsonMissingValidationErrors();
<ide> }
<ide>
<ide> public function testMacroable() | 2 |
PHP | PHP | add replacedimensions for validator messages | a8e8b75ea7bc77919968300410b9739e7e6076f8 | <ide><path>src/Illuminate/Validation/Concerns/ReplacesAttributes.php
<ide> protected function replaceAfterOrEqual($message, $attribute, $rule, $parameters)
<ide> {
<ide> return $this->replaceBefore($message, $attribute, $rule, $parameters);
<ide> }
<add>
<add> /**
<add> * Replace all place-holders for the dimensions rule.
<add> *
<add> * @param string $message
<add> * @param string $attribute
<add> * @param string $rule
<add> * @param array $parameters
<add> * @return string
<add> */
<add> protected function replaceDimensions($message, $attribute, $rule, $parameters)
<add> {
<add> $nesteds = $this->parseNamedParameters($parameters);
<add> if (is_array($nesteds)) {
<add> foreach ($nesteds as $key => $value) {
<add> $message = str_replace(':'.$key, $value, $message);
<add> }
<add> }
<add>
<add> return $message;
<add> }
<ide> }
<ide><path>tests/Validation/ValidationValidatorTest.php
<ide> public function testClassBasedCustomReplacers()
<ide> $this->assertEquals('replaced!', $v->messages()->first('name'));
<ide> }
<ide>
<add> public function testNestedAttributesAreReplacedInDimensions()
<add> {
<add> // Knowing that demo image.gif has width = 3 and height = 2
<add> $uploadedFile = new \Symfony\Component\HttpFoundation\File\UploadedFile(__DIR__.'/fixtures/image.gif', '', null, null, null, true);
<add>
<add> $trans = $this->getIlluminateArrayTranslator();
<add> $trans->addLines(['validation.dimensions' => ':min_width :max_height :ratio'], 'en');
<add> $v = new Validator($trans, ['x' => $uploadedFile], ['x' => 'dimensions:min_width=10,max_height=20,ratio=1']);
<add> $v->messages()->setFormat(':message');
<add> $this->assertTrue($v->fails());
<add> $this->assertEquals('10 20 1', $v->messages()->first('x'));
<add>
<add> $trans = $this->getIlluminateArrayTranslator();
<add> $trans->addLines(['validation.dimensions' => ':width :height :ratio'], 'en');
<add> $v = new Validator($trans, ['x' => $uploadedFile], ['x' => 'dimensions:min_width=10,max_height=20,ratio=1']);
<add> $v->messages()->setFormat(':message');
<add> $this->assertTrue($v->fails());
<add> $this->assertEquals(':width :height 1', $v->messages()->first('x'));
<add> }
<add>
<ide> public function testAttributeNamesAreReplaced()
<ide> {
<ide> $trans = $this->getIlluminateArrayTranslator(); | 2 |
Ruby | Ruby | convert cxxstdlib test to spec | b89c0f16dd5e5d75e6e72187bf2956ce456c8090 | <ide><path>Library/Homebrew/test/cxxstdlib_spec.rb
<add>require "formula"
<add>require "cxxstdlib"
<add>
<add>describe CxxStdlib do
<add> let(:clang) { CxxStdlib.create(:libstdcxx, :clang) }
<add> let(:gcc) { CxxStdlib.create(:libstdcxx, :gcc) }
<add> let(:gcc40) { CxxStdlib.create(:libstdcxx, :gcc_4_0) }
<add> let(:gcc42) { CxxStdlib.create(:libstdcxx, :gcc_4_2) }
<add> let(:gcc48) { CxxStdlib.create(:libstdcxx, "gcc-4.8") }
<add> let(:gcc49) { CxxStdlib.create(:libstdcxx, "gcc-4.9") }
<add> let(:lcxx) { CxxStdlib.create(:libcxx, :clang) }
<add> let(:purec) { CxxStdlib.create(nil, :clang) }
<add>
<add> describe "#compatible_with?" do
<add> specify "Apple libstdcxx intercompatibility" do
<add> expect(clang).to be_compatible_with(gcc)
<add> expect(clang).to be_compatible_with(gcc42)
<add> end
<add>
<add> specify "compatibility with itself" do
<add> expect(gcc).to be_compatible_with(gcc)
<add> expect(gcc48).to be_compatible_with(gcc48)
<add> expect(clang).to be_compatible_with(clang)
<add> end
<add>
<add> specify "Apple/GNU libstdcxx incompatibility" do
<add> expect(clang).not_to be_compatible_with(gcc48)
<add> expect(gcc48).not_to be_compatible_with(clang)
<add> end
<add>
<add> specify "GNU cross-version incompatibility" do
<add> expect(gcc48).not_to be_compatible_with(gcc49)
<add> expect(gcc49).not_to be_compatible_with(gcc48)
<add> end
<add>
<add> specify "libstdcxx and libcxx incompatibility" do
<add> expect(clang).not_to be_compatible_with(lcxx)
<add> expect(lcxx).not_to be_compatible_with(clang)
<add> end
<add>
<add> specify "compatibility for non-cxx software" do
<add> expect(purec).to be_compatible_with(clang)
<add> expect(clang).to be_compatible_with(purec)
<add> expect(purec).to be_compatible_with(purec)
<add> expect(purec).to be_compatible_with(gcc48)
<add> expect(gcc48).to be_compatible_with(purec)
<add> end
<add> end
<add>
<add> describe "#apple_compiler?" do
<add> it "returns true for Apple compilers" do
<add> expect(clang).to be_an_apple_compiler
<add> expect(gcc).to be_an_apple_compiler
<add> expect(gcc42).to be_an_apple_compiler
<add> end
<add>
<add> it "returns false for non-Apple compilers" do
<add> expect(gcc48).not_to be_an_apple_compiler
<add> end
<add> end
<add>
<add> describe "#type_string" do
<add> specify "formatting" do
<add> expect(clang.type_string).to eq("libstdc++")
<add> expect(lcxx.type_string).to eq("libc++")
<add> end
<add> end
<add>end
<ide><path>Library/Homebrew/test/stdlib_test.rb
<del>require "testing_env"
<del>require "formula"
<del>require "cxxstdlib"
<del>
<del>class CxxStdlibTests < Homebrew::TestCase
<del> def setup
<del> super
<del> @clang = CxxStdlib.create(:libstdcxx, :clang)
<del> @gcc = CxxStdlib.create(:libstdcxx, :gcc)
<del> @gcc40 = CxxStdlib.create(:libstdcxx, :gcc_4_0)
<del> @gcc42 = CxxStdlib.create(:libstdcxx, :gcc_4_2)
<del> @gcc48 = CxxStdlib.create(:libstdcxx, "gcc-4.8")
<del> @gcc49 = CxxStdlib.create(:libstdcxx, "gcc-4.9")
<del> @lcxx = CxxStdlib.create(:libcxx, :clang)
<del> @purec = CxxStdlib.create(nil, :clang)
<del> end
<del>
<del> def test_apple_libstdcxx_intercompatibility
<del> assert @clang.compatible_with?(@gcc)
<del> assert @clang.compatible_with?(@gcc42)
<del> end
<del>
<del> def test_compatibility_same_compilers_and_type
<del> assert @gcc.compatible_with?(@gcc)
<del> assert @gcc48.compatible_with?(@gcc48)
<del> assert @clang.compatible_with?(@clang)
<del> end
<del>
<del> def test_apple_gnu_libstdcxx_incompatibility
<del> assert !@clang.compatible_with?(@gcc48)
<del> assert !@gcc48.compatible_with?(@clang)
<del> end
<del>
<del> def test_gnu_cross_version_incompatibility
<del> assert !@gcc48.compatible_with?(@gcc49)
<del> assert !@gcc49.compatible_with?(@gcc48)
<del> end
<del>
<del> def test_libstdcxx_libcxx_incompatibility
<del> assert !@clang.compatible_with?(@lcxx)
<del> assert !@lcxx.compatible_with?(@clang)
<del> end
<del>
<del> def test_apple_compiler_reporting
<del> assert_predicate @clang, :apple_compiler?
<del> assert_predicate @gcc, :apple_compiler?
<del> assert_predicate @gcc42, :apple_compiler?
<del> refute_predicate @gcc48, :apple_compiler?
<del> end
<del>
<del> def test_type_string_formatting
<del> assert_equal "libstdc++", @clang.type_string
<del> assert_equal "libc++", @lcxx.type_string
<del> end
<del>
<del> def test_compatibility_for_non_cxx_software
<del> assert @purec.compatible_with?(@clang)
<del> assert @clang.compatible_with?(@purec)
<del> assert @purec.compatible_with?(@purec)
<del> assert @purec.compatible_with?(@gcc48)
<del> assert @gcc48.compatible_with?(@purec)
<del> end
<del>end | 2 |
Text | Text | fix typo in github.com url. | 8430f3e5b62605e3753018dd2e8f8975b41b0a0a | <ide><path>docs/api-guide/renderers.md
<ide> Comma-separated values are a plain-text tabular data format, that can be easily
<ide> [messagepack]: https://msgpack.org/
<ide> [juanriaza]: https://github.com/juanriaza
<ide> [mjumbewu]: https://github.com/mjumbewu
<del>[flipperpa]: https://githuc.com/flipperpa
<add>[flipperpa]: https://github.com/flipperpa
<ide> [wharton]: https://github.com/wharton
<ide> [drf-renderer-xlsx]: https://github.com/wharton/drf-renderer-xlsx
<ide> [vbabiy]: https://github.com/vbabiy | 1 |
Python | Python | fix lr in callback | b55c9da0c65f2de07150424ecede4cadc8583257 | <ide><path>official/vision/image_classification/callbacks.py
<ide> def get_callbacks(model_checkpoint: bool = True,
<ide> callbacks = []
<ide> if model_checkpoint:
<ide> ckpt_full_path = os.path.join(model_dir, 'model.ckpt-{epoch:04d}')
<del> callbacks.append(tf.keras.callbacks.ModelCheckpoint(
<del> ckpt_full_path, save_weights_only=True, verbose=1))
<add> callbacks.append(
<add> tf.keras.callbacks.ModelCheckpoint(
<add> ckpt_full_path, save_weights_only=True, verbose=1))
<ide> if include_tensorboard:
<del> callbacks.append(CustomTensorBoard(
<del> log_dir=model_dir,
<del> track_lr=track_lr,
<del> initial_step=initial_step,
<del> write_images=write_model_weights))
<add> callbacks.append(
<add> CustomTensorBoard(
<add> log_dir=model_dir,
<add> track_lr=track_lr,
<add> initial_step=initial_step,
<add> write_images=write_model_weights))
<ide> if time_history:
<del> callbacks.append(keras_utils.TimeHistory(
<del> batch_size,
<del> log_steps,
<del> logdir=model_dir if include_tensorboard else None))
<add> callbacks.append(
<add> keras_utils.TimeHistory(
<add> batch_size,
<add> log_steps,
<add> logdir=model_dir if include_tensorboard else None))
<ide> return callbacks
<ide>
<ide>
<ide> class CustomTensorBoard(tf.keras.callbacks.TensorBoard):
<ide> - Global learning rate
<ide>
<ide> Attributes:
<del> log_dir: the path of the directory where to save the log files to be
<del> parsed by TensorBoard.
<add> log_dir: the path of the directory where to save the log files to be parsed
<add> by TensorBoard.
<ide> track_lr: `bool`, whether or not to track the global learning rate.
<ide> initial_step: the initial step, used for preemption recovery.
<del> **kwargs: Additional arguments for backwards compatibility. Possible key
<del> is `period`.
<add> **kwargs: Additional arguments for backwards compatibility. Possible key is
<add> `period`.
<ide> """
<add>
<ide> # TODO(b/146499062): track params, flops, log lr, l2 loss,
<ide> # classification loss
<ide>
<ide> def _calculate_metrics(self) -> MutableMapping[str, Any]:
<ide>
<ide> def _calculate_lr(self) -> int:
<ide> """Calculates the learning rate given the current step."""
<del> lr = self._get_base_optimizer().lr
<del> if callable(lr):
<del> lr = lr(self.step)
<del> return get_scalar_from_tensor(lr)
<add> return get_scalar_from_tensor(
<add> self._get_base_optimizer()._decayed_lr(var_dtype=tf.float32))
<ide>
<ide> def _get_base_optimizer(self) -> tf.keras.optimizers.Optimizer:
<ide> """Get the base optimizer used by the current model.""" | 1 |
PHP | PHP | remove problematic field | 20fb859d73c2cf9f3381668ece8f8fbd99f1522b | <ide><path>tests/TestCase/Database/QueryTest.php
<ide> public function testCastResults()
<ide> $this->loadFixtures('Profiles');
<ide> $query = new Query($this->connection);
<ide> $fields = [
<del> 'id' => 'integer',
<ide> 'user_id' => 'integer',
<ide> 'is_active' => 'boolean'
<ide> ];
<ide> public function testCastResults()
<ide> ->where(['id' => 1])
<ide> ->execute()
<ide> ->fetchAll('assoc');
<del> $this->assertSame([['id' => 1, 'user_id' => 1, 'is_active' => false, 'a' => 1]], $results);
<add> $this->assertSame([['user_id' => 1, 'is_active' => false, 'a' => 1]], $results);
<ide> }
<ide>
<ide> /** | 1 |
Python | Python | fix slow dpr test | d51302cca0eb46f32a067aedc91ffee3edc29acb | <ide><path>tests/test_modeling_dpr.py
<ide> def test_inference_no_head(self):
<ide> def test_reader_inference(self):
<ide> tokenizer = DPRReaderTokenizer.from_pretrained("facebook/dpr-reader-single-nq-base")
<ide> model = DPRReader.from_pretrained("facebook/dpr-reader-single-nq-base")
<add> model.to(torch_device)
<ide>
<ide> encoded_inputs = tokenizer(
<ide> questions="What is love ?",
<ide> def test_reader_inference(self):
<ide> padding=True,
<ide> return_tensors="pt",
<ide> )
<add> encoded_inputs.to(torch_device)
<ide>
<ide> outputs = model(**encoded_inputs)
<ide> | 1 |
Mixed | Javascript | adjust object literal newlines for lint rule | 996b85b5c2e59262dfaeb017d882ea4324371729 | <ide><path>doc/api/http2.md
<ide> will be emitted.
<ide> const body = 'hello world';
<ide> response.writeHead(200, {
<ide> 'Content-Length': Buffer.byteLength(body),
<del> 'Content-Type': 'text/plain; charset=utf-8' });
<add> 'Content-Type': 'text/plain; charset=utf-8',
<add>});
<ide> ```
<ide>
<ide> `Content-Length` is given in bytes not characters. The
<ide><path>lib/internal/async_hooks.js
<ide> const { resource_symbol, owner_symbol } = internalBinding('symbols');
<ide> // Each constant tracks how many callbacks there are for any given step of
<ide> // async execution. These are tracked so if the user didn't include callbacks
<ide> // for a given step, that step can bail out early.
<del>const { kInit, kBefore, kAfter, kDestroy, kTotals, kPromiseResolve,
<del> kCheck, kExecutionAsyncId, kAsyncIdCounter, kTriggerAsyncId,
<del> kDefaultTriggerAsyncId, kStackLength, kUsesExecutionAsyncResource
<add>const {
<add> kInit, kBefore, kAfter, kDestroy, kTotals, kPromiseResolve,
<add> kCheck, kExecutionAsyncId, kAsyncIdCounter, kTriggerAsyncId,
<add> kDefaultTriggerAsyncId, kStackLength, kUsesExecutionAsyncResource,
<ide> } = async_wrap.constants;
<ide>
<ide> const { async_id_symbol,
<ide><path>lib/internal/encoding.js
<ide> ObjectDefineProperties(
<ide> 'encode': { enumerable: true },
<ide> 'encodeInto': { enumerable: true },
<ide> 'encoding': { enumerable: true },
<del> [SymbolToStringTag]: {
<del> configurable: true,
<del> value: 'TextEncoder'
<del> } });
<add> [SymbolToStringTag]: { configurable: true, value: 'TextEncoder' },
<add> });
<ide>
<ide> const TextDecoder =
<ide> internalBinding('config').hasIntl ?
<ide><path>lib/internal/http2/core.js
<ide> const {
<ide> } = require('internal/validators');
<ide> const fsPromisesInternal = require('internal/fs/promises');
<ide> const { utcDate } = require('internal/http');
<del>const { onServerStream,
<del> Http2ServerRequest,
<del> Http2ServerResponse,
<add>const {
<add> Http2ServerRequest,
<add> Http2ServerResponse,
<add> onServerStream,
<ide> } = require('internal/http2/compat');
<ide>
<ide> const {
<ide><path>test/doctool/test-doctool-json.js
<ide> const testData = [
<ide> { 'version': 'v4.2.0',
<ide> 'pr-url': 'https://github.com/nodejs/node/pull/3276',
<ide> 'description': 'The `error` parameter can now be ' +
<del> 'an arrow function.'
<del> }
<add> 'an arrow function.' }
<ide> ]
<ide> },
<ide> desc: '<p>Describe <code>Foobar II</code> in more detail ' +
<ide><path>test/node-api/test_threadsafe_function/test.js
<ide> function testWithJSMarshaller({
<ide> quitAfter,
<ide> abort,
<ide> maxQueueSize,
<del> launchSecondary }) {
<add> launchSecondary,
<add>}) {
<ide> return new Promise((resolve) => {
<ide> const array = [];
<ide> binding[threadStarter](function testCallback(value) {
<ide><path>test/parallel/test-dns.js
<ide> assert.throws(() => {
<ide> expire: 1800,
<ide> minttl: 3333333333
<ide> },
<del> ]
<del> },
<add> ] },
<ide>
<ide> { method: 'resolve4',
<ide> options: { ttl: true },
<del> answers: [ { type: 'A', address: '1.2.3.4', ttl: 3333333333 } ]
<del> },
<add> answers: [ { type: 'A', address: '1.2.3.4', ttl: 3333333333 } ] },
<ide>
<ide> { method: 'resolve6',
<ide> options: { ttl: true },
<del> answers: [ { type: 'AAAA', address: '::42', ttl: 3333333333 } ]
<del> },
<add> answers: [ { type: 'AAAA', address: '::42', ttl: 3333333333 } ] },
<ide>
<ide> { method: 'resolveSoa',
<ide> answers: [
<ide> assert.throws(() => {
<ide> expire: 1800,
<ide> minttl: 3333333333
<ide> }
<del> ]
<del> },
<add> ] },
<ide> ];
<ide>
<ide> const server = dgram.createSocket('udp4');
<ide><path>test/parallel/test-fs-open-flags.js
<ide> const { O_APPEND = 0,
<ide> O_SYNC = 0,
<ide> O_DSYNC = 0,
<ide> O_TRUNC = 0,
<del> O_WRONLY = 0
<del>} = fs.constants;
<add> O_WRONLY = 0 } = fs.constants;
<ide>
<ide> const { stringToFlags } = require('internal/fs/utils');
<ide>
<ide><path>test/parallel/test-fs-readfile.js
<ide> tmpdir.refresh();
<ide>
<ide> const fileInfo = [
<ide> { name: path.join(tmpdir.path, `${prefix}-1K.txt`),
<del> len: 1024,
<del> },
<add> len: 1024 },
<ide> { name: path.join(tmpdir.path, `${prefix}-64K.txt`),
<del> len: 64 * 1024,
<del> },
<add> len: 64 * 1024 },
<ide> { name: path.join(tmpdir.path, `${prefix}-64KLessOne.txt`),
<del> len: (64 * 1024) - 1,
<del> },
<add> len: (64 * 1024) - 1 },
<ide> { name: path.join(tmpdir.path, `${prefix}-1M.txt`),
<del> len: 1 * 1024 * 1024,
<del> },
<add> len: 1 * 1024 * 1024 },
<ide> { name: path.join(tmpdir.path, `${prefix}-1MPlusOne.txt`),
<del> len: (1 * 1024 * 1024) + 1,
<del> },
<add> len: (1 * 1024 * 1024) + 1 },
<ide> ];
<ide>
<ide> // Populate each fileInfo (and file) with unique fill.
<ide><path>test/parallel/test-http2-max-settings.js
<ide> server.listen(0, common.mustCall(() => {
<ide> // The actual settings values do not matter.
<ide> headerTableSize: 1000,
<ide> enablePush: false,
<del> } });
<add> },
<add> });
<ide>
<ide> client.on('error', common.mustCall(() => {
<ide> server.close();
<ide><path>test/parallel/test-inspector-esm.js
<ide> async function testBreakpoint(session) {
<ide> 'params': { 'lineNumber': 7,
<ide> 'url': session.scriptURL(),
<ide> 'columnNumber': 0,
<del> 'condition': ''
<del> }
<del> },
<add> 'condition': '' } },
<ide> { 'method': 'Debugger.resume' },
<ide> ];
<ide> await session.send(commands);
<ide> const { scriptSource } = await session.send({
<ide> 'method': 'Debugger.getScriptSource',
<del> 'params': { 'scriptId': session.mainScriptId } });
<add> 'params': { 'scriptId': session.mainScriptId },
<add> });
<ide> assert(scriptSource && (scriptSource.includes(session.script())),
<ide> `Script source is wrong: ${scriptSource}`);
<ide>
<ide><path>test/parallel/test-npm-install.js
<ide> const env = { ...process.env,
<ide> NPM_CONFIG_TMP: path.join(npmSandbox, 'npm-tmp'),
<ide> NPM_CONFIG_AUDIT: false,
<ide> NPM_CONFIG_UPDATE_NOTIFIER: false,
<del> HOME: homeDir,
<del>};
<add> HOME: homeDir };
<ide>
<ide> exec(`${process.execPath} ${npmPath} install`, {
<ide> cwd: installDir,
<ide><path>test/parallel/test-path-parse-format.js
<ide> const trailingTests = [
<ide> dir: 'D:\\foo\\\\',
<ide> base: 'bar.baz',
<ide> ext: '.baz',
<del> name: 'bar'
<del> }
<add> name: 'bar' }
<ide> ]
<ide> ]
<ide> ],
<ide><path>test/parallel/test-process-env-allowed-flags-are-documented.js
<ide> for (const line of [...nodeOptionsLines, ...v8OptionsLines]) {
<ide>
<ide> // Filter out options that are conditionally present.
<ide> const conditionalOpts = [
<del> { include: common.hasCrypto,
<add> {
<add> include: common.hasCrypto,
<ide> filter: (opt) => {
<ide> return [
<ide> '--openssl-config',
<ide> const conditionalOpts = [
<ide> '--secure-heap',
<ide> '--secure-heap-min',
<ide> ].includes(opt);
<del> } },
<del> {
<add> }
<add> }, {
<ide> // We are using openssl_is_fips from the configuration because it could be
<ide> // the case that OpenSSL is FIPS compatible but fips has not been enabled
<ide> // (starting node with --enable-fips). If we use common.hasFipsCrypto
<ide> // that would only tells us if fips has been enabled, but in this case we
<ide> // want to check options which will be available regardless of whether fips
<ide> // is enabled at runtime or not.
<ide> include: process.config.variables.openssl_is_fips,
<del> filter: (opt) => opt.includes('-fips') },
<del> { include: common.hasIntl,
<del> filter: (opt) => opt === '--icu-data-dir' },
<del> { include: process.features.inspector,
<del> filter: (opt) => opt.startsWith('--inspect') || opt === '--debug-port' },
<add> filter: (opt) => opt.includes('-fips')
<add> }, {
<add> include: common.hasIntl,
<add> filter: (opt) => opt === '--icu-data-dir'
<add> }, {
<add> include: process.features.inspector,
<add> filter: (opt) => opt.startsWith('--inspect') || opt === '--debug-port'
<add> },
<ide> ];
<ide> documented.forEach((opt) => {
<ide> conditionalOpts.forEach(({ include, filter }) => {
<ide><path>test/parallel/test-readline-interface.js
<ide> function getInterface(options) {
<ide> const rli = new readline.Interface({
<ide> input: fi,
<ide> output: fi,
<del> ...options });
<add> ...options,
<add> });
<ide> return [rli, fi];
<ide> }
<ide>
<ide><path>test/parallel/test-stdin-script-child.js
<ide> const { spawn } = require('child_process');
<ide> for (const args of [[], ['-']]) {
<ide> const child = spawn(process.execPath, args, {
<ide> env: { ...process.env,
<del> NODE_DEBUG: process.argv[2]
<del> }
<add> NODE_DEBUG: process.argv[2] }
<ide> });
<ide> const wanted = `${child.pid}\n`;
<ide> let found = '';
<ide><path>test/parallel/test-tls-check-server-identity.js
<ide> const tests = [
<ide> cert: { subject: { CN: '*n.b.com' } },
<ide> error: 'Host: \n.b.com. is not cert\'s CN: *n.b.com'
<ide> },
<del> { host: 'b.a.com', cert: {
<del> subjectaltname: 'DNS:omg.com',
<del> subject: { CN: '*.a.com' } },
<add> { host: 'b.a.com',
<add> cert: {
<add> subjectaltname: 'DNS:omg.com',
<add> subject: { CN: '*.a.com' },
<add> },
<ide> error: 'Host: b.a.com. is not in the cert\'s altnames: ' +
<del> 'DNS:omg.com'
<del> },
<add> 'DNS:omg.com' },
<ide> {
<ide> host: 'b.a.com',
<ide> cert: { subject: { CN: 'b*b.a.com' } },
<ide><path>test/parallel/test-tls-client-verify.js
<ide> const testCases = [
<ide> { ok: true, key: 'agent1-key', cert: 'agent1-cert' },
<ide> { ok: false, key: 'agent2-key', cert: 'agent2-cert' },
<ide> { ok: false, key: 'agent3-key', cert: 'agent3-cert' }
<del> ]
<del> },
<add> ] },
<ide>
<ide> { ca: [],
<ide> key: 'agent2-key',
<ide> const testCases = [
<ide> { ok: false, key: 'agent1-key', cert: 'agent1-cert' },
<ide> { ok: false, key: 'agent2-key', cert: 'agent2-cert' },
<ide> { ok: false, key: 'agent3-key', cert: 'agent3-cert' }
<del> ]
<del> },
<add> ] },
<ide>
<ide> { ca: ['ca1-cert', 'ca2-cert'],
<ide> key: 'agent2-key',
<ide> const testCases = [
<ide> { ok: true, key: 'agent1-key', cert: 'agent1-cert' },
<ide> { ok: false, key: 'agent2-key', cert: 'agent2-cert' },
<ide> { ok: true, key: 'agent3-key', cert: 'agent3-cert' }
<del> ]
<del> }
<add> ] }
<ide> ];
<ide>
<ide>
<ide><path>test/parallel/test-tls-server-verify.js
<ide> const testCases =
<ide> { name: 'agent2', shouldReject: false, shouldAuth: false },
<ide> { name: 'agent3', shouldReject: false, shouldAuth: false },
<ide> { name: 'nocert', shouldReject: false, shouldAuth: false }
<del> ]
<del> },
<add> ] },
<ide>
<ide> { title: 'Allow both authed and unauthed connections with CA1',
<ide> requestCert: true,
<ide> const testCases =
<ide> { name: 'agent2', shouldReject: false, shouldAuth: false },
<ide> { name: 'agent3', shouldReject: false, shouldAuth: false },
<ide> { name: 'nocert', shouldReject: false, shouldAuth: false }
<del> ]
<del> },
<add> ] },
<ide>
<ide> { title: 'Do not request certs at connection. Do that later',
<ide> requestCert: false,
<ide> const testCases =
<ide> { name: 'agent2', shouldReject: false, shouldAuth: false },
<ide> { name: 'agent3', shouldReject: false, shouldAuth: false },
<ide> { name: 'nocert', shouldReject: false, shouldAuth: false }
<del> ]
<del> },
<add> ] },
<ide>
<ide> { title: 'Allow only authed connections with CA1',
<ide> requestCert: true,
<ide> const testCases =
<ide> { name: 'agent2', shouldReject: true },
<ide> { name: 'agent3', shouldReject: true },
<ide> { name: 'nocert', shouldReject: true }
<del> ]
<del> },
<add> ] },
<ide>
<ide> { title: 'Allow only authed connections with CA1 and CA2',
<ide> requestCert: true,
<ide> const testCases =
<ide> { name: 'agent2', shouldReject: true },
<ide> { name: 'agent3', shouldReject: false, shouldAuth: true },
<ide> { name: 'nocert', shouldReject: true }
<del> ]
<del> },
<add> ] },
<ide>
<ide>
<ide> { title: 'Allow only certs signed by CA2 but not in the CRL',
<ide> const testCases =
<ide> // Agent4 has a cert in the CRL.
<ide> { name: 'agent4', shouldReject: true, shouldAuth: false },
<ide> { name: 'nocert', shouldReject: true }
<del> ]
<del> }
<add> ] }
<ide> ];
<ide>
<ide> function filenamePEM(n) {
<ide><path>test/parallel/test-trace-events-async-hooks-dynamic.js
<ide> const proc = cp.spawnSync(
<ide> cwd: tmpdir.path,
<ide> env: { ...process.env,
<ide> 'NODE_DEBUG_NATIVE': 'tracing',
<del> 'NODE_DEBUG': 'tracing'
<del> }
<add> 'NODE_DEBUG': 'tracing' }
<ide> });
<ide>
<ide> console.log('process exit with signal:', proc.signal);
<ide><path>test/parallel/test-trace-events-async-hooks-worker.js
<ide> const proc = cp.spawnSync(
<ide> cwd: tmpdir.path,
<ide> env: { ...process.env,
<ide> 'NODE_DEBUG_NATIVE': 'tracing',
<del> 'NODE_DEBUG': 'tracing'
<del> }
<add> 'NODE_DEBUG': 'tracing' }
<ide> });
<ide>
<ide> console.log('process exit with signal:', proc.signal);
<ide><path>test/parallel/test-webcrypto-encrypt-decrypt-rsa.js
<ide> async function importVectorKey(
<ide> return { publicKey, privateKey };
<ide> }
<ide>
<del>async function testDecryption({
<del> ciphertext,
<del> algorithm,
<del> plaintext,
<del> hash,
<del> publicKeyBuffer,
<del> privateKeyBuffer }) {
<add>async function testDecryption({ ciphertext,
<add> algorithm,
<add> plaintext,
<add> hash,
<add> publicKeyBuffer,
<add> privateKeyBuffer }) {
<ide> if (ciphertext === undefined)
<ide> return;
<ide>
<ide> async function testEncryption(
<ide> encodedPlaintext);
<ide> }
<ide>
<del>async function testEncryptionLongPlaintext({
<del> algorithm,
<del> plaintext,
<del> hash,
<del> publicKeyBuffer,
<del> privateKeyBuffer }) {
<add>async function testEncryptionLongPlaintext({ algorithm,
<add> plaintext,
<add> hash,
<add> publicKeyBuffer,
<add> privateKeyBuffer }) {
<ide> const {
<ide> publicKey,
<ide> } = await importVectorKey(
<ide> async function testEncryptionLongPlaintext({
<ide> });
<ide> }
<ide>
<del>async function testEncryptionWrongKey({
<del> algorithm,
<del> plaintext,
<del> hash,
<del> publicKeyBuffer,
<del> privateKeyBuffer }) {
<add>async function testEncryptionWrongKey({ algorithm,
<add> plaintext,
<add> hash,
<add> publicKeyBuffer,
<add> privateKeyBuffer }) {
<ide> const {
<ide> privateKey,
<ide> } = await importVectorKey(
<ide> async function testEncryptionWrongKey({
<ide> });
<ide> }
<ide>
<del>async function testEncryptionBadUsage({
<del> algorithm,
<del> plaintext,
<del> hash,
<del> publicKeyBuffer,
<del> privateKeyBuffer }) {
<add>async function testEncryptionBadUsage({ algorithm,
<add> plaintext,
<add> hash,
<add> publicKeyBuffer,
<add> privateKeyBuffer }) {
<ide> const {
<ide> publicKey,
<ide> } = await importVectorKey(
<ide> async function testEncryptionBadUsage({
<ide> });
<ide> }
<ide>
<del>async function testDecryptionWrongKey({
<del> ciphertext,
<del> algorithm,
<del> hash,
<del> publicKeyBuffer,
<del> privateKeyBuffer }) {
<add>async function testDecryptionWrongKey({ ciphertext,
<add> algorithm,
<add> hash,
<add> publicKeyBuffer,
<add> privateKeyBuffer }) {
<ide> if (ciphertext === undefined)
<ide> return;
<ide>
<ide> async function testDecryptionWrongKey({
<ide> });
<ide> }
<ide>
<del>async function testDecryptionBadUsage({
<del> ciphertext,
<del> algorithm,
<del> hash,
<del> publicKeyBuffer,
<del> privateKeyBuffer }) {
<add>async function testDecryptionBadUsage({ ciphertext,
<add> algorithm,
<add> hash,
<add> publicKeyBuffer,
<add> privateKeyBuffer }) {
<ide> if (ciphertext === undefined)
<ide> return;
<ide>
<ide><path>test/parallel/test-webcrypto-encrypt-decrypt.js
<ide> const { subtle, getRandomValues } = require('crypto').webcrypto;
<ide> length: 256
<ide> }, true, ['encrypt', 'decrypt']);
<ide>
<del> const ciphertext = await subtle.encrypt({
<del> name: 'AES-CTR', counter, length: 64 }, key, buf);
<add> const ciphertext = await subtle.encrypt(
<add> { name: 'AES-CTR', counter, length: 64 }, key, buf,
<add> );
<ide>
<del> const plaintext = await subtle.decrypt({
<del> name: 'AES-CTR', counter, length: 64 }, key, ciphertext);
<add> const plaintext = await subtle.decrypt(
<add> { name: 'AES-CTR', counter, length: 64 }, key, ciphertext,
<add> );
<ide>
<ide> assert.strictEqual(
<ide> Buffer.from(plaintext).toString('hex'),
<ide> const { subtle, getRandomValues } = require('crypto').webcrypto;
<ide> length: 256
<ide> }, true, ['encrypt', 'decrypt']);
<ide>
<del> const ciphertext = await subtle.encrypt({
<del> name: 'AES-CBC', iv }, key, buf);
<add> const ciphertext = await subtle.encrypt(
<add> { name: 'AES-CBC', iv }, key, buf,
<add> );
<ide>
<del> const plaintext = await subtle.decrypt({
<del> name: 'AES-CBC', iv }, key, ciphertext);
<add> const plaintext = await subtle.decrypt(
<add> { name: 'AES-CBC', iv }, key, ciphertext,
<add> );
<ide>
<ide> assert.strictEqual(
<ide> Buffer.from(plaintext).toString('hex'),
<ide> const { subtle, getRandomValues } = require('crypto').webcrypto;
<ide> length: 256
<ide> }, true, ['encrypt', 'decrypt']);
<ide>
<del> const ciphertext = await subtle.encrypt({
<del> name: 'AES-GCM', iv }, key, buf);
<add> const ciphertext = await subtle.encrypt(
<add> { name: 'AES-GCM', iv }, key, buf,
<add> );
<ide>
<del> const plaintext = await subtle.decrypt({
<del> name: 'AES-GCM', iv }, key, ciphertext);
<add> const plaintext = await subtle.decrypt(
<add> { name: 'AES-GCM', iv }, key, ciphertext,
<add> );
<ide>
<ide> assert.strictEqual(
<ide> Buffer.from(plaintext).toString('hex'),
<ide><path>test/parallel/test-webcrypto-keygen.js
<ide> const vectors = {
<ide> 'sign',
<ide> 'verify'
<ide> ],
<del> mandatoryUsages: ['sign'] },
<add> mandatoryUsages: ['sign'],
<add> },
<ide> 'RSA-PSS': {
<ide> algorithm: {
<ide> modulusLength: 1024,
<ide><path>test/parallel/test-webcrypto-sign-verify-ecdsa.js
<ide> const { subtle } = require('crypto').webcrypto;
<ide>
<ide> const vectors = require('../fixtures/crypto/ecdsa')();
<ide>
<del>async function testVerify({
<del> name,
<del> hash,
<del> namedCurve,
<del> publicKeyBuffer,
<del> privateKeyBuffer,
<del> signature,
<del> plaintext }) {
<add>async function testVerify({ name,
<add> hash,
<add> namedCurve,
<add> publicKeyBuffer,
<add> privateKeyBuffer,
<add> signature,
<add> plaintext }) {
<ide> const [
<ide> publicKey,
<ide> noVerifyPublicKey,
<ide> async function testVerify({
<ide> });
<ide> }
<ide>
<del>async function testSign({
<del> name,
<del> hash,
<del> namedCurve,
<del> publicKeyBuffer,
<del> privateKeyBuffer,
<del> signature,
<del> plaintext }) {
<add>async function testSign({ name,
<add> hash,
<add> namedCurve,
<add> publicKeyBuffer,
<add> privateKeyBuffer,
<add> signature,
<add> plaintext }) {
<ide> const [
<ide> publicKey,
<ide> noSignPrivateKey,
<ide><path>test/parallel/test-webcrypto-sign-verify-hmac.js
<ide> const { subtle } = require('crypto').webcrypto;
<ide>
<ide> const vectors = require('../fixtures/crypto/hmac')();
<ide>
<del>async function testVerify({
<del> hash,
<del> keyBuffer,
<del> signature,
<del> plaintext }) {
<add>async function testVerify({ hash,
<add> keyBuffer,
<add> signature,
<add> plaintext }) {
<ide> const name = 'HMAC';
<ide> const [
<ide> key,
<ide> async function testVerify({
<ide> });
<ide> }
<ide>
<del>async function testSign({
<del> hash,
<del> keyBuffer,
<del> signature,
<del> plaintext }) {
<add>async function testSign({ hash,
<add> keyBuffer,
<add> signature,
<add> plaintext }) {
<ide> const name = 'HMAC';
<ide> const [
<ide> key,
<ide><path>test/parallel/test-webcrypto-sign-verify-node-dsa.js
<ide> const { subtle } = require('crypto').webcrypto;
<ide>
<ide> const dsa = require('../fixtures/crypto/dsa');
<ide>
<del>async function testVerify({
<del> algorithm,
<del> hash,
<del> publicKeyBuffer,
<del> privateKeyBuffer,
<del> signature,
<del> plaintext }) {
<add>async function testVerify({ algorithm,
<add> hash,
<add> publicKeyBuffer,
<add> privateKeyBuffer,
<add> signature,
<add> plaintext }) {
<ide> const [
<ide> publicKey,
<ide> noVerifyPublicKey,
<ide> async function testSign({
<ide> publicKeyBuffer,
<ide> privateKeyBuffer,
<ide> signature,
<del> plaintext }) {
<add> plaintext,
<add>}) {
<ide> const [
<ide> publicKey,
<ide> noSignPrivateKey,
<ide><path>test/parallel/test-webcrypto-sign-verify-rsa.js
<ide> async function testVerify({
<ide> publicKeyBuffer,
<ide> privateKeyBuffer,
<ide> signature,
<del> plaintext }) {
<add> plaintext,
<add>}) {
<ide> const [
<ide> publicKey,
<ide> noVerifyPublicKey,
<ide> async function testSign({
<ide> publicKeyBuffer,
<ide> privateKeyBuffer,
<ide> signature,
<del> plaintext }) {
<add> plaintext,
<add>}) {
<ide> const [
<ide> publicKey,
<ide> noSignPrivateKey,
<ide><path>test/parallel/test-webcrypto-wrap-unwrap.js
<ide> async function generateKeysToWrap() {
<ide> return [{
<ide> algorithm: params.algorithm,
<ide> usages: params.usages,
<del> key: keys }];
<add> key: keys,
<add> }];
<ide> }));
<ide>
<ide> return allkeys.flat();
<ide><path>test/sequential/test-inspector-break-when-eval.js
<ide> async function breakOnLine(session) {
<ide> 'params': { 'lineNumber': 9,
<ide> 'url': pathToFileURL(script).toString(),
<ide> 'columnNumber': 0,
<del> 'condition': ''
<del> }
<del> },
<add> 'condition': '' } },
<ide> { 'method': 'Runtime.evaluate',
<ide> 'params': { 'expression': 'sum()',
<ide> 'objectGroup': 'console',
<ide> async function breakOnLine(session) {
<ide> 'returnByValue': false,
<ide> 'generatePreview': true,
<ide> 'userGesture': true,
<del> 'awaitPromise': false
<del> }
<del> }
<add> 'awaitPromise': false } }
<ide> ];
<ide> session.send(commands);
<ide> await session.waitForBreakOnLine(9, pathToFileURL(script).toString());
<ide><path>test/sequential/test-inspector-port-cluster.js
<ide> function spawnPrimary({ execArgv, workers, clusterSettings = {} }) {
<ide> env: { ...process.env,
<ide> workers: JSON.stringify(workers),
<ide> clusterSettings: JSON.stringify(clusterSettings),
<del> testProcess: true
<del> },
<add> testProcess: true },
<ide> execArgv: execArgv.concat(['--expose-internals'])
<ide> }).on('exit', common.mustCall((code, signal) => {
<ide> checkExitCode(code, signal);
<ide><path>test/sequential/test-inspector.js
<ide> async function testBreakpoint(session) {
<ide> 'params': { 'lineNumber': 5,
<ide> 'url': session.scriptURL(),
<ide> 'columnNumber': 0,
<del> 'condition': ''
<del> }
<del> },
<add> 'condition': '' } },
<ide> { 'method': 'Debugger.resume' },
<ide> ];
<ide> await session.send(commands);
<ide> const { scriptSource } = await session.send({
<ide> 'method': 'Debugger.getScriptSource',
<del> 'params': { 'scriptId': session.mainScriptId } });
<add> 'params': { 'scriptId': session.mainScriptId },
<add> });
<ide> assert(scriptSource && (scriptSource.includes(session.script())),
<ide> `Script source is wrong: ${scriptSource}`);
<ide>
<ide><path>test/sequential/test-stream2-stderr-sync.js
<ide> function child2() {
<ide> const socket = new net.Socket({
<ide> fd: 2,
<ide> readable: false,
<del> writable: true });
<add> writable: true,
<add> });
<ide> socket.write('child 2\n');
<ide> socket.write('foo\n');
<ide> socket.write('bar\n'); | 33 |
PHP | PHP | unload plugins after use | e0f6112f3f1ceed25792cac8c026f26e8f08e5ec | <ide><path>tests/TestCase/Http/SessionTest.php
<ide> */
<ide> namespace Cake\Test\TestCase\Network;
<ide>
<add>use Cake\Core\Plugin;
<ide> use Cake\Http\Session;
<ide> use Cake\Http\Session\CacheSession;
<ide> use Cake\Http\Session\DatabaseSession;
<ide><path>tests/TestCase/Mailer/EmailTest.php
<ide> public function testSendRenderThemed()
<ide> $this->assertContains('Message-ID: ', $result['headers']);
<ide> $this->assertContains('To: ', $result['headers']);
<ide> $this->assertContains('/test_theme/img/test.jpg', $result['message']);
<add> Plugin::unload();
<ide> }
<ide>
<ide> /**
<ide><path>tests/TestCase/ORM/AssociationTest.php
<ide> public function testTargetPlugin()
<ide> $this->assertSame('TestPlugin.ThisAssociationName', $table->getRegistryAlias());
<ide> $this->assertSame('comments', $table->getTable());
<ide> $this->assertSame('ThisAssociationName', $table->getAlias());
<add> Plugin::unload();
<ide> }
<ide>
<ide> /**
<ide><path>tests/TestCase/ORM/Locator/TableLocatorTest.php
<ide> public function setUp()
<ide> $this->_locator = new TableLocator;
<ide> }
<ide>
<add> /**
<add> * tearDown
<add> *
<add> * @return void
<add> */
<add> public function tearDown()
<add> {
<add> Plugin::unload();
<add> parent::tearDown();
<add> }
<add>
<ide> /**
<ide> * Test config() method.
<ide> *
<ide><path>tests/TestCase/ORM/QueryRegressionTest.php
<ide> public function testPluginAssociationQueryGeneration()
<ide> $result->author->id,
<ide> 'No SQL error and author exists.'
<ide> );
<add> Plugin::unload();
<ide> }
<ide>
<ide> /**
<ide><path>tests/TestCase/ORM/ResultSetTest.php
<ide> public function testSourceOnContainAssociations()
<ide> })->first();
<ide> $this->assertEquals('TestPlugin.Comments', $result->getSource());
<ide> $this->assertEquals('TestPlugin.Authors', $result->_matchingData['Authors']->getSource());
<add> Plugin::unload();
<ide> }
<ide>
<ide> /**
<ide><path>tests/TestCase/ORM/TableTest.php
<ide> public function tearDown()
<ide> {
<ide> parent::tearDown();
<ide> $this->getTableLocator()->clear();
<add> Plugin::unload();
<ide> }
<ide>
<ide> /**
<ide><path>tests/TestCase/Routing/Middleware/AssetMiddlewareTest.php
<ide> class AssetMiddlewareTest extends TestCase
<ide> public function setUp()
<ide> {
<ide> parent::setUp();
<del> $this->loadPlugins('TestPlugin');
<del> $this->loadPlugins('Company/TestPluginThree');
<add> $this->loadPlugins(['TestPlugin', 'Company/TestPluginThree']);
<add> }
<add>
<add> /**
<add> * tearDown
<add> *
<add> * @return void
<add> */
<add> public function tearDown()
<add> {
<add> Plugin::unload();
<add> parent::tearDown();
<ide> }
<ide>
<ide> /**
<ide><path>tests/TestCase/Routing/RequestActionTraitTest.php
<ide> public function tearDown()
<ide> parent::tearDown();
<ide> DispatcherFactory::clear();
<ide> Router::reload();
<add> Plugin::unload();
<ide>
<ide> error_reporting($this->errorLevel);
<ide> }
<ide><path>tests/TestCase/Routing/RouteBuilderTest.php
<ide> public function setUp()
<ide> public function tearDown()
<ide> {
<ide> parent::tearDown();
<del> Plugin::unload('TestPlugin');
<add> Plugin::unload();
<ide> }
<ide>
<ide> /**
<ide><path>tests/TestCase/Routing/RouterTest.php
<ide> public function testMapResourcesWithExtension()
<ide> public function testMapResourcesConnectOptions()
<ide> {
<ide> $this->deprecated(function () {
<del> $this->loadPlugins('TestPlugin');
<add> Plugin::load('TestPlugin');
<ide> Router::mapResources('Posts', [
<ide> 'connectOptions' => [
<ide> 'routeClass' => 'TestPlugin.TestRoute',
<ide><path>tests/TestCase/Shell/Task/AssetsTaskTest.php
<ide> public function tearDown()
<ide> */
<ide> public function testSymlink()
<ide> {
<del> $this->loadPlugins('TestPlugin');
<del> $this->loadPlugins('Company/TestPluginThree');
<add> $this->loadPlugins(['TestPlugin', 'Company/TestPluginThree']);
<ide>
<ide> $this->Task->symlink();
<ide>
<ide> public function testForPluginWithoutWebroot()
<ide> */
<ide> public function testSymlinkingSpecifiedPlugin()
<ide> {
<del> $this->loadPlugins('TestPlugin');
<del> $this->loadPlugins('Company/TestPluginThree');
<add> $this->loadPlugins(['TestPlugin', 'Company/TestPluginThree']);
<ide>
<ide> $this->Task->symlink('TestPlugin');
<ide>
<ide> public function testSymlinkingSpecifiedPlugin()
<ide> */
<ide> public function testCopy()
<ide> {
<del> $this->loadPlugins('TestPlugin');
<del> $this->loadPlugins('Company/TestPluginThree');
<add> $this->loadPlugins(['TestPlugin', 'Company/TestPluginThree']);
<ide>
<ide> $this->Task->copy();
<ide>
<ide> public function testRemoveSymlink()
<ide> );
<ide> }
<ide>
<del> $this->loadPlugins('TestPlugin');
<del> $this->loadPlugins('Company/TestPluginThree');
<add> $this->loadPlugins(['TestPlugin', 'Company/TestPluginThree']);
<ide>
<ide> mkdir(WWW_ROOT . 'company');
<ide>
<ide> public function testRemoveSymlink()
<ide> */
<ide> public function testRemoveFolder()
<ide> {
<del> $this->loadPlugins('TestPlugin');
<del> $this->loadPlugins('Company/TestPluginThree');
<add> $this->loadPlugins(['TestPlugin', 'Company/TestPluginThree']);
<ide>
<ide> $this->Task->copy();
<ide>
<ide> public function testRemoveFolder()
<ide> */
<ide> public function testOverwrite()
<ide> {
<del> $this->loadPlugins('TestPlugin');
<del> $this->loadPlugins('Company/TestPluginThree');
<add> $this->loadPlugins(['TestPlugin', 'Company/TestPluginThree']);
<ide>
<ide> $path = WWW_ROOT . 'test_plugin';
<ide>
<ide><path>tests/TestCase/TestSuite/FixtureManagerTest.php
<ide> public function tearDown()
<ide> {
<ide> parent::tearDown();
<ide> Log::reset();
<add> Plugin::unload();
<ide> }
<ide>
<ide> /**
<ide><path>tests/TestCase/TestSuite/TestCaseTest.php
<ide> public function testGetMockForModelWithPlugin()
<ide> $TestPluginAuthors = $this->getMockForModel('TestPlugin.Authors', ['doSomething']);
<ide> $this->assertInstanceOf('TestPlugin\Model\Table\AuthorsTable', $TestPluginAuthors);
<ide> $this->assertEquals('TestPlugin\Model\Entity\Author', $TestPluginAuthors->getEntityClass());
<add> Plugin::unload();
<ide> }
<ide>
<ide> /**
<ide><path>tests/TestCase/View/CellTest.php
<ide> public function setUp()
<ide> public function tearDown()
<ide> {
<ide> parent::tearDown();
<del> Plugin::unload('TestPlugin');
<del> Plugin::unload('TestTheme');
<add> Plugin::unload();
<ide> unset($this->View);
<ide> }
<ide>
<ide><path>tests/TestCase/View/Helper/FlashHelperTest.php
<ide> */
<ide> namespace Cake\Test\TestCase\View\Helper;
<ide>
<add>use Cake\Core\Plugin;
<ide> use Cake\Http\ServerRequest;
<ide> use Cake\Http\Session;
<ide> use Cake\TestSuite\TestCase;
<ide> public function tearDown()
<ide> {
<ide> parent::tearDown();
<ide> unset($this->View, $this->Flash);
<add> Plugin::unload();
<ide> }
<ide>
<ide> /**
<ide><path>tests/TestCase/View/Helper/NumberHelperTest.php
<ide> public function setUp()
<ide> public function tearDown()
<ide> {
<ide> parent::tearDown();
<add> Plugin::unload();
<ide> static::setAppNamespace($this->_appNamespace);
<ide> unset($this->View);
<ide> }
<ide><path>tests/TestCase/View/Helper/TextHelperTest.php
<ide> public function testEngineOverride()
<ide> $this->loadPlugins('TestPlugin');
<ide> $Text = new TextHelperTestObject($this->View, ['engine' => 'TestPlugin.TestPluginEngine']);
<ide> $this->assertInstanceOf('TestPlugin\Utility\TestPluginEngine', $Text->engine());
<del> Plugin::unload('TestPlugin');
<add> Plugin::unload();
<ide> }
<ide>
<ide> /**
<ide><path>tests/TestCase/View/StringTemplateTest.php
<ide>
<ide> use Cake\TestSuite\TestCase;
<ide> use Cake\View\StringTemplate;
<add>use Cake\Core\Plugin;
<ide>
<ide> class StringTemplateTest extends TestCase
<ide> {
<ide> public function testLoadPlugin()
<ide> $this->loadPlugins('TestPlugin');
<ide> $this->assertNull($this->template->load('TestPlugin.test_templates'));
<ide> $this->assertEquals('<em>{{text}}</em>', $this->template->get('italic'));
<add> Plugin::unload();
<ide> }
<ide>
<ide> /**
<ide><path>tests/TestCase/View/Widget/WidgetLocatorTest.php
<ide> */
<ide> namespace Cake\Test\TestCase\View\Widget;
<ide>
<add>use Cake\Core\Plugin;
<ide> use Cake\TestSuite\TestCase;
<ide> use Cake\View\StringTemplate;
<ide> use Cake\View\View;
<ide> public function testAddPluginWidgetsFromConfigInConstructor()
<ide> ];
<ide> $inputs = new WidgetLocator($this->templates, $this->view, $widgets);
<ide> $this->assertInstanceOf('Cake\View\Widget\LabelWidget', $inputs->get('text'));
<add> Plugin::unload();
<ide> }
<ide>
<ide> /** | 20 |
PHP | PHP | use better variable names | 5dfc55f42e63458339699250962c68ab6d7e5388 | <ide><path>src/TestSuite/Fixture/FixtureManager.php
<ide> protected function _aliasConnections()
<ide> $map['test_' . $connection] = $connection;
<ide> }
<ide> }
<del> foreach ($map as $alias => $connection) {
<del> ConnectionManager::alias($alias, $connection);
<add> foreach ($map as $testConnection => $normal) {
<add> ConnectionManager::alias($testConnection, $normal);
<ide> }
<ide> }
<ide> | 1 |
Python | Python | fix integer division issue and tests | 6c990fbdd58e969d179e3d721d85c9f0ea3a6005 | <ide><path>numpy/linalg/linalg.py
<ide> def eig(a):
<ide> w = wr+1j*wi
<ide> v = array(vr, w.dtype)
<ide> ind = flatnonzero(wi != 0.0) # indices of complex e-vals
<del> for i in range(len(ind)/2):
<add> for i in range(len(ind)//2):
<ide> v[ind[2*i]] = vr[ind[2*i]] + 1j*vr[ind[2*i+1]]
<ide> v[ind[2*i+1]] = vr[ind[2*i]] - 1j*vr[ind[2*i+1]]
<ide> result_t = _complexType(result_t)
<ide><path>numpy/linalg/tests/test_build.py
<ide> from numpy.linalg import lapack_lite
<ide> from numpy.testing import TestCase, dec
<ide>
<add>from numpy.compat import asbytes_nested
<add>
<ide> class FindDependenciesLdd:
<ide> def __init__(self):
<ide> self.cmd = ['ldd']
<ide> class TestF77Mismatch(TestCase):
<ide> def test_lapack(self):
<ide> f = FindDependenciesLdd()
<ide> deps = f.grep_dependencies(lapack_lite.__file__,
<del> ['libg2c', 'libgfortran'])
<add> asbytes_nested(['libg2c', 'libgfortran']))
<ide> self.failIf(len(deps) > 1,
<ide> """Both g77 and gfortran runtimes linked in lapack_lite ! This is likely to
<ide> cause random crashes and wrong results. See numpy INSTALL.txt for more | 2 |
Text | Text | add v2.18.1 to changelog.md | 8222f4eafd2a1ff305cb1e465b359badb77097fb | <ide><path>CHANGELOG.md
<ide> - [#16036](https://github.com/emberjs/ember.js/pull/16036) [CLEANUP] Convert ember-metal accessors tests to new style
<ide> - [#16023](https://github.com/emberjs/ember.js/pull/16023) Make event dispatcher work without jQuery
<ide>
<add>### 2.18.1 (February 13, 2018)
<add>
<add>- [#16174](https://github.com/emberjs/ember.js/pull/16174) [BUGFIX] Enable _some_ recovery of errors thrown during render.
<add>- [#16241](https://github.com/emberjs/ember.js/pull/16241) [BUGFIX] Avoid excessively calling Glimmer AST transforms.
<add>
<ide> ### 2.18.0 (January 1, 2018)
<ide>
<ide> - [95b449](https://github.com/emberjs/ember.js/commit/95b4499b3667712a202bef834268e23867fc8842) [BUGFIX] Ensure `Ember.run.cancel` while the run loop is flushing works properly. | 1 |
Go | Go | update code from pr post rebase | a11bee44d74235ec436c1b2272dc9f718497f88c | <ide><path>networkdriver/lxc/driver.go
<ide> func setupIPTables(addr net.Addr, icc bool) error {
<ide> natArgs := []string{"POSTROUTING", "-t", "nat", "-s", addr.String(), "!", "-d", addr.String(), "-j", "MASQUERADE"}
<ide>
<ide> if !iptables.Exists(natArgs...) {
<del> if output, err := iptables.Raw(append([]string{"-A"}, natArgs...)...); err != nil {
<add> if output, err := iptables.Raw(append([]string{"-I"}, natArgs...)...); err != nil {
<ide> return fmt.Errorf("Unable to enable network bridge NAT: %s", err)
<ide> } else if len(output) != 0 {
<ide> return fmt.Errorf("Error iptables postrouting: %s", output) | 1 |
Javascript | Javascript | fix potential memory leak when unmounting | e2f094614f8e8dd5111f19690ad32cea00d407f2 | <ide><path>src/core/ReactCompositeComponent.js
<ide> var ReactCompositeComponentMixin = {
<ide>
<ide> this._defaultProps = null;
<ide>
<del> ReactComponent.Mixin.unmountComponent.call(this);
<ide> this._renderedComponent.unmountComponent();
<ide> this._renderedComponent = null;
<ide>
<add> ReactComponent.Mixin.unmountComponent.call(this);
<add>
<ide> if (this.refs) {
<ide> this.refs = null;
<ide> }
<ide><path>src/core/ReactDOMComponent.js
<ide> ReactDOMComponent.Mixin = {
<ide> * @internal
<ide> */
<ide> unmountComponent: function() {
<add> this.unmountChildren();
<ide> ReactEventEmitter.deleteAllListeners(this._rootNodeID);
<ide> ReactComponent.Mixin.unmountComponent.call(this);
<del> this.unmountChildren();
<ide> }
<ide>
<ide> };
<ide><path>src/core/__tests__/ReactCompositeComponent-test.js
<ide> var MorphingComponent;
<ide> var ChildUpdates;
<ide> var React;
<add>var ReactComponent;
<ide> var ReactCurrentOwner;
<ide> var ReactPropTypes;
<ide> var ReactTestUtils;
<ide> describe('ReactCompositeComponent', function() {
<ide>
<ide> reactComponentExpect = require('reactComponentExpect');
<ide> React = require('React');
<add> ReactComponent = require('ReactComponent');
<ide> ReactCurrentOwner = require('ReactCurrentOwner');
<ide> ReactDoNotBindDeprecated = require('ReactDoNotBindDeprecated');
<ide> ReactPropTypes = require('ReactPropTypes');
<ide> describe('ReactCompositeComponent', function() {
<ide> });
<ide> });
<ide>
<add> it('should call componentWillUnmount before unmounting', function() {
<add> var container = document.createElement('div');
<add> var innerUnmounted = false;
<add>
<add> spyOn(ReactComponent, 'unmountIDFromEnvironment').andCallThrough();
<add>
<add> var Component = React.createClass({
<add> render: function() {
<add> return <div>
<add> <Inner />
<add> </div>;
<add> }
<add> });
<add> var Inner = React.createClass({
<add> componentWillUnmount: function() {
<add> // It's important that unmountIDFromEnvironment (which clears
<add> // ReactMount's node cache) be called after any component lifecycle
<add> // methods, because a componentWillMount implementation is likely call
<add> // this.getDOMNode(), which will repopulate the node cache after it's
<add> // been cleared, causing a memory leak.
<add> expect(ReactComponent.unmountIDFromEnvironment.callCount).toBe(0);
<add> innerUnmounted = true;
<add> },
<add> render: function() {
<add> return <div />;
<add> }
<add> });
<add>
<add> React.renderComponent(<Component />, container);
<add> React.unmountComponentAtNode(container);
<add> expect(innerUnmounted).toBe(true);
<add>
<add> // <Component />, <Inner />, and both <div /> elements each call
<add> // unmountIDFromEnvironment, for a total of 4.
<add> expect(ReactComponent.unmountIDFromEnvironment.callCount).toBe(4);
<add> });
<add>
<ide> it('should detect valid CompositeComponent classes', function() {
<ide> var Component = React.createClass({
<ide> render: function() { | 3 |
Javascript | Javascript | improve coverage of lib/os.js | 35b445d089682aa08f18e383cbc7acc9e5ac956d | <ide><path>test/parallel/test-os.js
<ide> assert.strictEqual(`${os.endianness}`, os.endianness());
<ide> assert.strictEqual(`${os.tmpdir}`, os.tmpdir());
<ide> assert.strictEqual(`${os.arch}`, os.arch());
<ide> assert.strictEqual(`${os.platform}`, os.platform());
<add>assert.strictEqual(`${os.version}`, os.version());
<ide>
<ide> assert.strictEqual(+os.totalmem, os.totalmem());
<ide> | 1 |
Python | Python | add more tests for the s3 driver | 5e82e5e39e8a8beac3f106a134d14ee1ca93fa59 | <ide><path>test/storage/test_s3.py
<ide> # See the License for the specific language governing permissions and
<ide> # limitations under the License.
<ide>
<add>import os
<ide> import sys
<ide> import httplib
<ide> import unittest
<ide> from libcloud.storage.base import Container, Object
<ide> from libcloud.storage.types import ContainerDoesNotExistError
<ide> from libcloud.storage.types import ContainerIsNotEmptyError
<add>from libcloud.storage.types import ContainerDoesNotExistError
<ide> from libcloud.storage.types import InvalidContainerNameError
<add>from libcloud.storage.types import ObjectDoesNotExistError
<add>from libcloud.storage.types import ObjectHashMismatchError
<ide> from libcloud.storage.drivers.s3 import S3StorageDriver, S3USWestStorageDriver
<ide> from libcloud.storage.drivers.s3 import S3EUWestStorageDriver
<ide> from libcloud.storage.drivers.s3 import S3APSEStorageDriver
<ide> from libcloud.storage.drivers.s3 import S3APNEStorageDriver
<ide> from libcloud.storage.drivers.dummy import DummyIterator
<ide>
<del>from test import MockHttp, MockRawResponse # pylint: disable-msg=E0611
<add>from test import StorageMockHttp, MockRawResponse # pylint: disable-msg=E0611
<ide> from test.file_fixtures import StorageFileFixtures # pylint: disable-msg=E0611
<ide>
<ide> class S3Tests(unittest.TestCase):
<ide> def setUp(self):
<ide> S3MockRawResponse.type = None
<ide> self.driver = S3StorageDriver('dummy', 'dummy')
<ide>
<del> def test_invalid_credts(self):
<add> def test_invalid_credentials(self):
<ide> S3MockHttp.type = 'UNAUTHORIZED'
<ide> try:
<ide> self.driver.list_containers()
<ide> def test_list_containers_empty(self):
<ide> containers = self.driver.list_containers()
<ide> self.assertEqual(len(containers), 0)
<ide>
<del> def test_list_containers(self):
<add> def test_list_containers_success(self):
<ide> S3MockHttp.type = 'list_containers'
<ide> containers = self.driver.list_containers()
<ide> self.assertEqual(len(containers), 2)
<ide> def test_list_container_objects_empty(self):
<ide> objects = self.driver.list_container_objects(container=container)
<ide> self.assertEqual(len(objects), 0)
<ide>
<del> def test_list_container_objects(self):
<add> def test_list_container_objects_success(self):
<ide> S3MockHttp.type = None
<ide> container = Container(name='test_container', extra={},
<ide> driver=self.driver)
<ide> def test_get_container_doesnt_exist(self):
<ide> else:
<ide> self.fail('Exception was not thrown')
<ide>
<del> def test_get_container(self):
<add> def test_get_container_success(self):
<ide> S3MockHttp.type = 'list_containers'
<ide> container = self.driver.get_container(container_name='test1')
<ide> self.assertTrue(container.name, 'test1')
<ide> def test_get_object_container_doesnt_exist(self):
<ide> else:
<ide> self.fail('Exception was not thrown')
<ide>
<del> def test_get_object(self):
<add> def test_get_object_success(self):
<ide> # This method makes two requests which makes mocking the response a bit
<ide> # trickier
<ide> S3MockHttp.type = 'list_containers'
<ide> def test_create_container_already_exists(self):
<ide> else:
<ide> self.fail('Exception was not thrown')
<ide>
<del> def test_create_container(self):
<add> def test_create_container_success(self):
<ide> # success
<ide> S3MockHttp.type = None
<ide> container = self.driver.create_container(container_name='new_container')
<ide> def test_delete_container_not_empty(self):
<ide> S3MockHttp.type = None
<ide> self.assertTrue(self.driver.delete_container(container=container))
<ide>
<del> def test_delete_container(self):
<add> def test_delete_container_not_found(self):
<add> S3MockHttp.type = 'NOT_FOUND'
<add> container = Container(name='foo_bar_container', extra={}, driver=self)
<add> try:
<add> self.driver.delete_container(container=container)
<add> except ContainerDoesNotExistError:
<add> pass
<add> else:
<add> self.fail('Container does not exist but an exception was not thrown')
<add>
<add> def test_delete_container_success(self):
<ide> # success
<del> container = Container(name='new_container', extra=None, driver=self)
<ide> S3MockHttp.type = None
<add> container = Container(name='new_container', extra=None, driver=self)
<ide> self.assertTrue(self.driver.delete_container(container=container))
<ide>
<del> def test_upload_object(self):
<del> pass
<add> def test_upload_object_invalid_hash1(self):
<add> # Invalid hash is detected on the amazon side and BAD_REQUEST is
<add> # returned
<add> def upload_file(self, response, file_path, chunked=False,
<add> calculate_hash=True):
<add> return True, 'hash343hhash89h932439jsaa89', 1000
<add>
<add> S3MockRawResponse.type = 'INVALID_HASH1'
<add>
<add> old_func = S3StorageDriver._upload_file
<add> S3StorageDriver._upload_file = upload_file
<add> file_path = os.path.abspath(__file__)
<add> container = Container(name='foo_bar_container', extra={}, driver=self)
<add> object_name = 'foo_test_upload'
<add> try:
<add> self.driver.upload_object(file_path=file_path, container=container,
<add> object_name=object_name,
<add> file_hash='0cc175b9c0f1b6a831c399e269772661')
<add> except ObjectHashMismatchError:
<add> pass
<add> else:
<add> self.fail(
<add> 'Invalid hash was returned but an exception was not thrown')
<add> finally:
<add> S3StorageDriver._upload_file = old_func
<add>
<add> def test_upload_object_invalid_hash2(self):
<add> # Invalid hash is detected when comparing hash provided in the response
<add> # ETag header
<add> def upload_file(self, response, file_path, chunked=False,
<add> calculate_hash=True):
<add> return True, '0cc175b9c0f1b6a831c399e269772661', 1000
<add>
<add> S3MockRawResponse.type = 'INVALID_HASH2'
<add>
<add> old_func = S3StorageDriver._upload_file
<add> S3StorageDriver._upload_file = upload_file
<add> file_path = os.path.abspath(__file__)
<add> container = Container(name='foo_bar_container', extra={}, driver=self)
<add> object_name = 'foo_test_upload'
<add> try:
<add> self.driver.upload_object(file_path=file_path, container=container,
<add> object_name=object_name,
<add> file_hash='0cc175b9c0f1b6a831c399e269772661')
<add> except ObjectHashMismatchError:
<add> pass
<add> else:
<add> self.fail(
<add> 'Invalid hash was returned but an exception was not thrown')
<add> finally:
<add> S3StorageDriver._upload_file = old_func
<add>
<add> def test_upload_object_success(self):
<add> def upload_file(self, response, file_path, chunked=False,
<add> calculate_hash=True):
<add> return True, '0cc175b9c0f1b6a831c399e269772661', 1000
<add>
<add> old_func = S3StorageDriver._upload_file
<add> S3StorageDriver._upload_file = upload_file
<add> file_path = os.path.abspath(__file__)
<add> container = Container(name='foo_bar_container', extra={}, driver=self)
<add> object_name = 'foo_test_upload'
<add> obj = self.driver.upload_object(file_path=file_path, container=container,
<add> object_name=object_name,
<add> file_hash='0cc175b9c0f1b6a831c399e269772661')
<add> self.assertEqual(obj.name, 'foo_test_upload')
<add> self.assertEqual(obj.size, 1000)
<add> S3StorageDriver._upload_file = old_func
<ide>
<ide> def test_upload_object_via_stream(self):
<ide> try:
<ide> def test_upload_object_via_stream(self):
<ide> else:
<ide> self.fail('Exception was not thrown')
<ide>
<del> def test_delete_object(self):
<del> container = Container(name='foo_bar_container', extra={}, driver=self)
<del> result = self.driver.delete_container(container=container)
<del> self.assertTrue(result)
<del>
<del> def test_delete_container_not_found(self):
<add> def test_delete_object_not_found(self):
<ide> S3MockHttp.type = 'NOT_FOUND'
<ide> container = Container(name='foo_bar_container', extra={}, driver=self)
<add> obj = Object(name='foo_bar_object', size=1234, hash=None, extra=None,
<add> meta_data=None, container=container, driver=self.driver)
<ide> try:
<del> self.driver.delete_container(container=container)
<del> except ContainerDoesNotExistError:
<add> self.driver.delete_object(obj=obj)
<add> except ObjectDoesNotExistError:
<ide> pass
<ide> else:
<del> self.fail('Container does not exist but an exception was not thrown')
<add> self.fail('Exception was not thrown')
<add>
<add> def test_delete_object_success(self):
<add> container = Container(name='foo_bar_container', extra={}, driver=self)
<add> obj = Object(name='foo_bar_object', size=1234, hash=None, extra=None,
<add> meta_data=None, container=container, driver=self.driver)
<add>
<add> result = self.driver.delete_object(obj=obj)
<add> self.assertTrue(result)
<ide>
<ide> class S3USWestTests(S3Tests):
<ide> def setUp(self):
<ide> def setUp(self):
<ide> S3MockRawResponse.type = None
<ide> self.driver = S3APNEStorageDriver('dummy', 'dummy')
<ide>
<del>class S3MockHttp(MockHttp):
<add>class S3MockHttp(StorageMockHttp):
<ide>
<ide> fixtures = StorageFileFixtures('s3')
<ide> base_headers = {}
<ide> def _foo_bar_container_NOT_FOUND(self, method, url, body, headers):
<ide> headers,
<ide> httplib.responses[httplib.OK])
<ide>
<add> def _foo_bar_container_foo_bar_object_NOT_FOUND(self, method, url, body, headers):
<add> # test_delete_object_not_found
<add> return (httplib.NOT_FOUND,
<add> body,
<add> headers,
<add> httplib.responses[httplib.OK])
<add>
<add> def _foo_bar_container_foo_bar_object(self, method, url, body, headers):
<add> # test_delete_object
<add> return (httplib.NO_CONTENT,
<add> body,
<add> headers,
<add> httplib.responses[httplib.OK])
<add>
<ide> class S3MockRawResponse(MockRawResponse):
<ide>
<ide> fixtures = StorageFileFixtures('s3')
<ide>
<add> def _foo_bar_container_foo_test_upload_INVALID_HASH1(self, method, url, body, headers):
<add> body = ''
<add> # test_upload_object_invalid_hash1
<add> return (httplib.BAD_REQUEST,
<add> body,
<add> headers,
<add> httplib.responses[httplib.OK])
<add>
<add> def _foo_bar_container_foo_test_upload_INVALID_HASH2(self, method, url, body, headers):
<add> # test_upload_object_invalid_hash2
<add> body = ''
<add> headers = { 'etag': '"hash343hhash89h932439jsaa89"'}
<add> return (httplib.OK,
<add> body,
<add> headers,
<add> httplib.responses[httplib.OK])
<add>
<add> def _foo_bar_container_foo_test_upload(self, method, url, body, headers):
<add> # test_upload_object_success
<add> body = ''
<add> headers = { 'etag': '"0cc175b9c0f1b6a831c399e269772661"'}
<add> return (httplib.OK,
<add> body,
<add> headers,
<add> httplib.responses[httplib.OK])
<add>
<ide> if __name__ == '__main__':
<ide> sys.exit(unittest.main()) | 1 |
Text | Text | create list for commonly edited files in prs | f02e4b90a2c1cdcb0b0d20bcfa75bac8d991d66b | <ide><path>doc/guides/contributing/pull-requests.md
<ide> $ git checkout -b my-branch -t upstream/master
<ide> ### Step 3: Code
<ide>
<ide> The vast majority of Pull Requests opened against the `nodejs/node`
<del>repository includes changes to either the C/C++ code contained in the `src`
<del>directory, the JavaScript code contained in the `lib` directory, the
<del>documentation in `docs/api` or tests within the `test` directory.
<add>repository includes changes to one or more of the following:
<add> - the C/C++ code contained in the `src` directory
<add> - the JavaScript code contained in the `lib` directory
<add> - the documentation in `doc/api`
<add> - tests within the `test` directory.
<ide>
<ide> If you are modifying code, please be sure to run `make lint` from time to
<ide> time to ensure that the changes follow the Node.js code style guide. | 1 |
Text | Text | add documentation for new static 500 behavior | e9a1d8fd4237b978ccb6e9637d9f4bcddff0c526 | <ide><path>docs/advanced-features/custom-error-page.md
<ide> export default function Custom404() {
<ide>
<ide> ## 500 Page
<ide>
<del>By default Next.js provides a 500 error page that matches the default 404 page’s style. This page is not statically optimized as it allows server-side errors to be reported. This is why 404 and 500 (other errors) are separated.
<add>Server-rendering an error page for every visit adds complexity to responding to errors. To help users get responses to errors as fast as possible, Next.js provides a static 500 page by default without having to add any additional files.
<ide>
<del>### Customizing The Error Page
<add>### Customizing The 500 Page
<add>
<add>To customize the 500 page you can create a `pages/500.js` file. This file is statically generated at build time.
<add>
<add>```jsx
<add>// pages/500.js
<add>export default function Custom500() {
<add> return <h1>500 - Server-side error occurred</h1>
<add>}
<add>```
<add>
<add>> **Note**: You can use [`getStaticProps`](/docs/basic-features/data-fetching.md#getstaticprops-static-generation) inside this page if you need to fetch data at build time.
<add>
<add>### More Advanced Error Page Customizing
<ide>
<ide> 500 errors are handled both client-side and server-side by the `Error` component. If you wish to override it, define the file `pages/_error.js` and add the following code:
<ide> | 1 |
Ruby | Ruby | add doc for joins and improve includes doc | 8e7470adcbce7c278f668ea671dfbcaf089114eb | <ide><path>activerecord/lib/active_record/relation/query_methods.rb
<ide> def create_with_value
<ide> #
<ide> # allows you to access the +address+ attribute of the +User+ model without
<ide> # firing an additional query. This will often result in a
<del> # performance improvement over a simple +join+
<add> # performance improvement over a simple +join+.
<add> #
<add> # === conditions
<add> #
<add> # If you want to add conditions to your included models you'll have
<add> # to explicitly reference them. For example:
<add> #
<add> # User.includes(:posts).where('posts.name = ?', 'example')
<add> #
<add> # Will throw an error, but this will work:
<add> #
<add> # User.includes(:posts).where('posts.name = ?', 'example').references(:posts)
<ide> def includes(*args)
<ide> args.empty? ? self : spawn.includes!(*args)
<ide> end
<ide> def select!(value)
<ide> # User.group(:name)
<ide> # => SELECT "users".* FROM "users" GROUP BY name
<ide> #
<del> # Returns an array with distinct records based on the `group` attribute:
<add> # Returns an array with distinct records based on the +group+ attribute:
<ide> #
<ide> # User.select([:id, :name])
<ide> # => [#<User id: 1, name: "Oscar">, #<User id: 2, name: "Oscar">, #<User id: 3, name: "Foo">
<ide> def reorder!(*args)
<ide> self
<ide> end
<ide>
<add> # Performs a joins on +args+:
<add> #
<add> # User.joins(:posts)
<add> # => SELECT "users".* FROM "users" INNER JOIN "posts" ON "posts"."user_id" = "users"."id"
<ide> def joins(*args)
<ide> args.compact.blank? ? self : spawn.joins!(*args)
<ide> end | 1 |
Javascript | Javascript | configure react-test-renderer as a secondary | ebbd2214326ca6a5f24f46ea7eaffc6cfabbbe1c | <ide><path>packages/react-art/src/__tests__/ReactART-test.js
<ide> const Circle = require('react-art/Circle');
<ide> const Rectangle = require('react-art/Rectangle');
<ide> const Wedge = require('react-art/Wedge');
<ide>
<add>// Isolate the noop renderer
<add>jest.resetModules();
<add>const ReactNoop = require('react-noop-renderer');
<add>
<ide> let Group;
<ide> let Shape;
<ide> let Surface;
<ide> describe('ReactART', () => {
<ide> const CurrentRendererContext = React.createContext(null);
<ide>
<ide> function Yield(props) {
<del> testRenderer.unstable_yield(props.value);
<add> ReactNoop.yield(props.value);
<ide> return null;
<ide> }
<ide>
<ide> describe('ReactART', () => {
<ide>
<ide> // Using test renderer instead of the DOM renderer here because async
<ide> // testing APIs for the DOM renderer don't exist.
<del> const testRenderer = ReactTestRenderer.create(
<add> ReactNoop.render(
<ide> <CurrentRendererContext.Provider value="Test">
<ide> <Yield value="A" />
<ide> <Yield value="B" />
<ide> <LogCurrentRenderer />
<ide> <Yield value="C" />
<ide> </CurrentRendererContext.Provider>,
<del> {
<del> unstable_isAsync: true,
<del> },
<ide> );
<ide>
<del> testRenderer.unstable_flushThrough(['A']);
<add> ReactNoop.flushThrough(['A']);
<ide>
<ide> ReactDOM.render(
<ide> <Surface>
<ide> describe('ReactART', () => {
<ide> expect(ops).toEqual([null, 'ART']);
<ide>
<ide> ops = [];
<del> expect(testRenderer.unstable_flushAll()).toEqual(['B', 'C']);
<add> expect(ReactNoop.flush()).toEqual(['B', 'C']);
<ide>
<ide> expect(ops).toEqual(['Test']);
<ide> });
<ide><path>packages/react-test-renderer/src/ReactTestHostConfig.js
<ide> export function createTextInstance(
<ide> };
<ide> }
<ide>
<del>export const isPrimaryRenderer = true;
<add>export const isPrimaryRenderer = false;
<ide> // This approach enables `now` to be mocked by tests,
<ide> // Even after the reconciler has initialized and read host config values.
<ide> export const now = () => TestRendererScheduling.nowImplementation();
<ide><path>packages/react-test-renderer/src/__tests__/ReactTestRenderer-test.internal.js
<ide> const React = require('react');
<ide> const ReactTestRenderer = require('react-test-renderer');
<ide> const prettyFormat = require('pretty-format');
<ide>
<add>// Isolate noop renderer
<add>jest.resetModules();
<add>const ReactNoop = require('react-noop-renderer');
<add>
<ide> // Kind of hacky, but we nullify all the instances to test the tree structure
<ide> // with jasmine's deep equality function, and test the instances separate. We
<ide> // also delete children props because testing them is more annoying and not
<ide> describe('ReactTestRenderer', () => {
<ide> }),
<ide> );
<ide> });
<add>
<add> it('can concurrently render context with a "primary" renderer', () => {
<add> const Context = React.createContext(null);
<add> const Indirection = React.Fragment;
<add> const App = () => (
<add> <Context.Provider>
<add> <Indirection>
<add> <Context.Consumer>{() => null}</Context.Consumer>
<add> </Indirection>
<add> </Context.Provider>
<add> );
<add> ReactNoop.render(<App />);
<add> ReactNoop.flush();
<add> ReactTestRenderer.create(<App />);
<add> });
<ide> }); | 3 |
Ruby | Ruby | relocate files in share/pkgconfig too | be46fc3a48dd277977a988a63461aee2015d9cab | <ide><path>Library/Homebrew/keg_fix_install_names.rb
<ide> def script_files
<ide> def pkgconfig_files
<ide> pkgconfig_files = []
<ide>
<del> # find .pc files, which are stored in lib/pkgconfig
<del> pc_dir = self/'lib/pkgconfig'
<del> if pc_dir.directory?
<del> pc_dir.find do |pn|
<add> %w[lib share].each do |dir|
<add> pcdir = join(dir, "pkgconfig")
<add>
<add> pcdir.find do |pn|
<ide> next if pn.symlink? or pn.directory? or pn.extname != '.pc'
<ide> pkgconfig_files << pn
<del> end
<add> end if pcdir.directory?
<ide> end
<ide>
<ide> pkgconfig_files | 1 |
PHP | PHP | use interface instead of scalar check | 8a78dd5ab987d22f1aa02ca2248f17d68f41e907 | <ide><path>src/ORM/Behavior/TreeBehavior.php
<ide> */
<ide> namespace Cake\ORM\Behavior;
<ide>
<add>use Cake\Datasource\EntityInterface;
<ide> use Cake\Datasource\Exception\RecordNotFoundException;
<ide> use Cake\Event\Event;
<ide> use Cake\ORM\Behavior;
<ide> public function getLevel($entity)
<ide> {
<ide> $primaryKey = $this->_getPrimaryKey();
<ide> $id = $entity;
<del> if (!is_scalar($entity)) {
<add> if ($entity instanceof EntityInterface) {
<ide> $id = $entity->get($primaryKey);
<ide> }
<ide> $config = $this->config(); | 1 |
PHP | PHP | fix event fake dispatch method not returning | 44cccf2b27350ad41e93496ba435d1e255b9b8d6 | <ide><path>src/Illuminate/Support/Testing/Fakes/EventFake.php
<ide> public function dispatch($event, $payload = [], $halt = false)
<ide> if ($this->shouldFakeEvent($name, $payload)) {
<ide> $this->events[$name][] = func_get_args();
<ide> } else {
<del> $this->dispatcher->dispatch($event, $payload, $halt);
<add> return $this->dispatcher->dispatch($event, $payload, $halt);
<ide> }
<ide> }
<ide>
<ide><path>tests/Integration/Events/EventFakeTest.php
<ide> public function testNonFakedEventGetsProperlyDispatched()
<ide>
<ide> Event::assertNotDispatched(NonImportantEvent::class);
<ide> }
<add>
<add> public function testNonFakedHaltedEventGetsProperlyDispatchedAndReturnsResponse()
<add> {
<add> Event::fake(NonImportantEvent::class);
<add> Event::listen('test', function () {
<add> // one
<add> });
<add> Event::listen('test', function () {
<add> return 'two';
<add> });
<add> Event::listen('test', function () {
<add> $this->fail('should not be called');
<add> });
<add>
<add> $this->assertEquals('two', Event::until('test'));
<add>
<add> Event::assertNotDispatched(NonImportantEvent::class);
<add> }
<ide> }
<ide>
<ide> class Post extends Model | 2 |
Ruby | Ruby | register mailer tests for minitest's spec dsl | 0ce383db58b4718147433ca0e06d7e88efd6c184 | <ide><path>actionmailer/lib/action_mailer/test_case.rb
<ide> def initialize(name)
<ide> end
<ide>
<ide> class TestCase < ActiveSupport::TestCase
<add>
<add> # Use AM::TestCase for the base class when describing a mailer
<add> register_spec_type(self) do |desc|
<add> Class === desc && desc < ActionMailer::Base
<add> end
<add> register_spec_type(/Mailer( ?Test)?\z/i, self)
<add>
<ide> module Behavior
<ide> extend ActiveSupport::Concern
<ide>
<ide><path>actionmailer/test/spec_type_test.rb
<add>require 'abstract_unit'
<add>
<add>class NotificationMailer < ActionMailer::Base; end
<add>class Notifications < ActionMailer::Base; end
<add>
<add>class SpecTypeTest < ActiveSupport::TestCase
<add> def assert_mailer actual
<add> assert_equal ActionMailer::TestCase, actual
<add> end
<add>
<add> def refute_mailer actual
<add> refute_equal ActionMailer::TestCase, actual
<add> end
<add>
<add> def test_spec_type_resolves_for_class_constants
<add> assert_mailer MiniTest::Spec.spec_type(NotificationMailer)
<add> assert_mailer MiniTest::Spec.spec_type(Notifications)
<add> end
<add>
<add> def test_spec_type_resolves_for_matching_strings
<add> assert_mailer MiniTest::Spec.spec_type("WidgetMailer")
<add> assert_mailer MiniTest::Spec.spec_type("WidgetMailerTest")
<add> assert_mailer MiniTest::Spec.spec_type("Widget Mailer Test")
<add> # And is not case sensitive
<add> assert_mailer MiniTest::Spec.spec_type("widgetmailer")
<add> assert_mailer MiniTest::Spec.spec_type("widgetmailertest")
<add> assert_mailer MiniTest::Spec.spec_type("widget mailer test")
<add> end
<add>
<add> def test_spec_type_wont_match_non_space_characters
<add> refute_mailer MiniTest::Spec.spec_type("Widget Mailer\tTest")
<add> refute_mailer MiniTest::Spec.spec_type("Widget Mailer\rTest")
<add> refute_mailer MiniTest::Spec.spec_type("Widget Mailer\nTest")
<add> refute_mailer MiniTest::Spec.spec_type("Widget Mailer\fTest")
<add> refute_mailer MiniTest::Spec.spec_type("Widget MailerXTest")
<add> end
<add>end | 2 |
Ruby | Ruby | improve documentation for add_lib_to_load_paths! | 9e081caee74e6d08035a8835899dcc566536a871 | <ide><path>railties/lib/rails/application.rb
<ide> def method_missing(*args, &block)
<ide>
<ide> delegate :middleware, :to => :config
<ide>
<del> def add_lib_to_load_paths!
<add> # This method is called just after an application inherits from Rails::Application,
<add> # allowing the developer to load classes in lib and use them during application
<add> # configuration.
<add> #
<add> # class MyApplication < Rails::Application
<add> # require "my_backend" # in lib/my_backend
<add> # config.i18n.backend = MyBackend
<add> # end
<add> #
<add> # Notice this method takes into consideration the default root path. So if you
<add> # are changing config.root inside your application definition or having a custom
<add> # Rails application, you will need to add lib to $LOAD_PATH on your own in case
<add> # you need to load files in lib/ during the application configuration as well.
<add> def add_lib_to_load_paths! #:nodoc:
<ide> path = config.root.join('lib').to_s
<ide> $LOAD_PATH.unshift(path) if File.exists?(path)
<ide> end
<ide>
<del> def require_environment!
<add> def require_environment! #:nodoc:
<ide> environment = paths.config.environment.to_a.first
<ide> require environment if environment
<ide> end
<ide>
<del> def eager_load!
<add> def eager_load! #:nodoc:
<ide> railties.all(&:eager_load!)
<ide> super
<ide> end | 1 |
Go | Go | add static routes to the remote driver | 5c153bd0183cb8b48c5bc5ee18e33dd21435b168 | <ide><path>libnetwork/drivers/remote/driver.go
<ide> func (d *driver) Join(nid, eid types.UUID, sboxKey string, jinfo driverapi.JoinI
<ide> return errorWithRollback(fmt.Sprintf("failed to set gateway IPv6: %v", addr), d.Leave(nid, eid))
<ide> }
<ide> }
<add> if len(res.StaticRoutes) > 0 {
<add> routes, err := res.parseStaticRoutes()
<add> if err != nil {
<add> return err
<add> }
<add> for _, route := range routes {
<add> if jinfo.AddStaticRoute(route.Destination, route.RouteType, route.NextHop, route.InterfaceID) != nil {
<add> return errorWithRollback(fmt.Sprintf("failed to set static route: %v", route), d.Leave(nid, eid))
<add> }
<add> }
<add> }
<ide> if jinfo.SetHostsPath(res.HostsPath) != nil {
<ide> return errorWithRollback(fmt.Sprintf("failed to set hosts path: %s", res.HostsPath), d.Leave(nid, eid))
<ide> }
<ide><path>libnetwork/drivers/remote/driver_test.go
<ide> type testEndpoint struct {
<ide> gatewayIPv6 string
<ide> resolvConfPath string
<ide> hostsPath string
<add> nextHop string
<add> destination string
<add> routeType int
<ide> }
<ide>
<ide> func (test *testEndpoint) Interfaces() []driverapi.InterfaceInfo {
<ide> func compareIPs(t *testing.T, kind string, shouldBe string, supplied net.IP) {
<ide> }
<ide> }
<ide>
<add>func compareIPNets(t *testing.T, kind string, shouldBe string, supplied net.IPNet) {
<add> _, net, _ := net.ParseCIDR(shouldBe)
<add> if net == nil {
<add> t.Fatalf(`Invalid IP network to test against: "%s"`, shouldBe)
<add> }
<add> if !types.CompareIPNet(net, &supplied) {
<add> t.Fatalf(`%s IP networks are not equal: expected "%s", got %v`, kind, shouldBe, supplied)
<add> }
<add>}
<add>
<ide> func (test *testEndpoint) SetGateway(ipv4 net.IP) error {
<ide> compareIPs(test.t, "Gateway", test.gateway, ipv4)
<ide> return nil
<ide> func (test *testEndpoint) SetNames(src string, dst string) error {
<ide> }
<ide>
<ide> func (test *testEndpoint) AddStaticRoute(destination *net.IPNet, routeType int, nextHop net.IP, interfaceID int) error {
<del> //TODO
<add> compareIPNets(test.t, "Destination", test.destination, *destination)
<add> compareIPs(test.t, "NextHop", test.nextHop, nextHop)
<add>
<add> if test.routeType != routeType {
<add> test.t.Fatalf(`Wrong RouteType; expected "%d", got "%d"`, test.routeType, routeType)
<add> }
<add>
<add> if test.id != interfaceID {
<add> test.t.Fatalf(`Wrong InterfaceID; expected "%d", got "%d"`, test.id, interfaceID)
<add> }
<add>
<ide> return nil
<ide> }
<ide>
<ide> func TestRemoteDriver(t *testing.T) {
<ide> gatewayIPv6: "2001:DB8::1",
<ide> hostsPath: "/here/comes/the/host/path",
<ide> resolvConfPath: "/there/goes/the/resolv/conf",
<add> destination: "10.0.0.0/8",
<add> nextHop: "10.0.0.1",
<add> routeType: 1,
<ide> }
<ide>
<ide> mux := http.NewServeMux()
<ide> func TestRemoteDriver(t *testing.T) {
<ide> "DstName": ep.dst,
<ide> },
<ide> },
<add> "StaticRoutes": []map[string]interface{}{
<add> map[string]interface{}{
<add> "Destination": ep.destination,
<add> "RouteType": ep.routeType,
<add> "InterfaceID": ep.id,
<add> "NextHop": ep.nextHop,
<add> },
<add> },
<ide> }
<ide> })
<ide> handle(t, mux, "Leave", func(msg map[string]interface{}) interface{} {
<ide><path>libnetwork/drivers/remote/messages.go
<ide> package remote
<ide>
<del>import "net"
<add>import (
<add> "fmt"
<add> "net"
<add>
<add> "github.com/docker/libnetwork/types"
<add>)
<ide>
<ide> type response struct {
<ide> Err string
<ide> type endpointInterface struct {
<ide> MacAddress string
<ide> }
<ide>
<add>type staticRoute struct {
<add> Destination string
<add> RouteType int
<add> NextHop string
<add> InterfaceID int
<add>}
<add>
<ide> type createEndpointResponse struct {
<ide> response
<ide> Interfaces []*endpointInterface
<ide> type iface struct {
<ide> }
<ide>
<ide> func (r *createEndpointResponse) parseInterfaces() ([]*iface, error) {
<del> var (
<del> ifaces = make([]*iface, len(r.Interfaces))
<del> )
<add> var ifaces = make([]*iface, len(r.Interfaces))
<ide> for i, inIf := range r.Interfaces {
<ide> var err error
<ide> outIf := &iface{ID: inIf.ID}
<ide> func (r *createEndpointResponse) parseInterfaces() ([]*iface, error) {
<ide> return ifaces, nil
<ide> }
<ide>
<add>func (r *joinResponse) parseStaticRoutes() ([]*types.StaticRoute, error) {
<add> var routes = make([]*types.StaticRoute, len(r.StaticRoutes))
<add> for i, inRoute := range r.StaticRoutes {
<add> var err error
<add> outRoute := &types.StaticRoute{InterfaceID: inRoute.InterfaceID, RouteType: inRoute.RouteType}
<add>
<add> if inRoute.Destination != "" {
<add> if outRoute.Destination, err = toAddr(inRoute.Destination); err != nil {
<add> return nil, err
<add> }
<add> }
<add>
<add> if inRoute.NextHop != "" {
<add> outRoute.NextHop = net.ParseIP(inRoute.NextHop)
<add> if outRoute.NextHop == nil {
<add> return nil, fmt.Errorf("failed to parse nexthop IP %s", inRoute.NextHop)
<add> }
<add> }
<add>
<add> routes[i] = outRoute
<add> }
<add> return routes, nil
<add>}
<add>
<ide> type deleteEndpointRequest struct {
<ide> NetworkID string
<ide> EndpointID string
<ide> type joinResponse struct {
<ide> InterfaceNames []*ifaceName
<ide> Gateway string
<ide> GatewayIPv6 string
<add> StaticRoutes []*staticRoute
<ide> HostsPath string
<ide> ResolvConfPath string
<ide> } | 3 |
Mixed | Text | add check for writeable directory | 8cd6bd3fc3ee7b7eee887092a086cfb1e9c639db | <ide><path>errors/build-dir-not-writeable.md
<add># Build directory not writeable
<add>
<add>#### Why This Error Occurred
<add>
<add>The filesystem does not allow writing to the specified directory. A common cause for this error is starting a [custom server](https://github.com/zeit/next.js#custom-server-and-routing) in development mode on a production server, for example, [now.sh](https://zeit.co) which [doesn't allow you to write to the filesystem after your app is built](https://zeit.co/docs/deployment-types/node#file-system-specifications).
<add>
<add>#### Possible Ways to Fix It
<add>
<add>When using a custom server with a server file, for example called `server.js`, make sure you update the scripts key in `package.json` to:
<add>
<add>```json
<add>{
<add> "scripts": {
<add> "dev": "node server.js",
<add> "build": "next build",
<add> "start": "NODE_ENV=production node server.js"
<add> }
<add>}
<add>```
<add>
<add>and the custom server starts Next in production mode when `NODE_ENV` is `production`
<add>
<add>```js
<add>const dev = process.env.NODE_ENV !== 'production'
<add>const app = next({ dev })
<add>```
<add>
<add>### Useful Links
<add>
<add>- [Custom Server documentation + examples](https://github.com/zeit/next.js#custom-server-and-routing)
<ide><path>readme.md
<ide> export default ({ url }) =>
<ide> </ul>
<ide> </details></p>
<ide>
<del>Typically you start your next server with `next start`. It's possible, however, to start a server 100% programmatically in order to customize routes, use route patterns, etc
<add>Typically you start your next server with `next start`. It's possible, however, to start a server 100% programmatically in order to customize routes, use route patterns, etc.
<add>
<add>When using a custom server with a server file, for example called `server.js`, make sure you update the scripts key in `package.json` to:
<add>
<add>```json
<add>{
<add> "scripts": {
<add> "dev": "node server.js",
<add> "build": "next build",
<add> "start": "NODE_ENV=production node server.js"
<add> }
<add>}
<add>```
<ide>
<ide> This example makes `/a` resolve to `./pages/b`, and `/b` resolve to `./pages/a`:
<ide>
<ide><path>server/build/index.js
<ide> import md5File from 'md5-file/promise'
<ide> export default async function build (dir, conf = null) {
<ide> const buildId = uuid.v4()
<ide> const buildDir = join(tmpdir(), uuid.v4())
<add>
<add> try {
<add> await fs.access(buildDir, fs.constants.W_OK)
<add> } catch (err) {
<add> console.error(`> Failed, build directory is not writeable. https://err.sh/zeit/next.js/build-dir-not-writeable`)
<add> throw err
<add> }
<add>
<ide> const compiler = await webpack(dir, { buildId, buildDir, conf })
<ide>
<ide> try { | 3 |
Mixed | Ruby | add params.member? to mimic hash behavior | 34b6e8c0aee3fc3691e0ed8fb30de157f977e1c8 | <ide><path>actionpack/CHANGELOG.md
<add>* Add `params.member?` to mimic Hash behavior
<add>
<add> *Younes Serraj*
<add>
<ide> * `process_action.action_controller` notifications now include the following in their payloads:
<ide>
<ide> * `:request` - the `ActionDispatch::Request`
<ide><path>actionpack/lib/action_controller/metal/strong_parameters.rb
<ide> class Parameters
<ide> #
<ide> # Returns true if the given key is present in the parameters.
<ide>
<add> ##
<add> # :method: member?
<add> #
<add> # :call-seq:
<add> # member?(key)
<add> #
<add> # Returns true if the given key is present in the parameters.
<add>
<ide> ##
<ide> # :method: keys
<ide> #
<ide> class Parameters
<ide> # values()
<ide> #
<ide> # Returns a new array of the values of the parameters.
<del> delegate :keys, :key?, :has_key?, :values, :has_value?, :value?, :empty?, :include?,
<add> delegate :keys, :key?, :has_key?, :member?, :values, :has_value?, :value?, :empty?, :include?,
<ide> :as_json, :to_s, :each_key, to: :@parameters
<ide>
<ide> # By default, never raise an UnpermittedParameters exception if these
<ide><path>actionpack/test/controller/parameters/accessors_test.rb
<ide> class ParametersAccessorsTest < ActiveSupport::TestCase
<ide> assert_not @params.key?(:address)
<ide> end
<ide>
<add> test "member? returns true if the given key is present in the params" do
<add> assert @params.member?(:person)
<add> end
<add>
<add> test "member? returns false if the given key is not present in the params" do
<add> assert_not @params.member?(:address)
<add> end
<add>
<ide> test "keys returns an array of the keys of the params" do
<ide> assert_equal ["person"], @params.keys
<ide> assert_equal ["age", "name", "addresses"], @params[:person].keys | 3 |
PHP | PHP | remove package tag | 92fdf77fdbd5ad0fb29edc959a9f66d05eea662c | <ide><path>lib/Cake/Core/App.php
<ide> <?php
<ide> /**
<del> * App class
<del> *
<ide> * PHP 5
<ide> *
<ide> * CakePHP(tm) : Rapid Development Framework (http://cakephp.org) | 1 |
Ruby | Ruby | remove unecessary require | 5bda6e09156be417691a95baa2733e733500206b | <ide><path>activerecord/lib/active_record/transactions.rb
<del>require 'thread'
<del>
<ide> module ActiveRecord
<ide> # See ActiveRecord::Transactions::ClassMethods for documentation.
<ide> module Transactions | 1 |
Python | Python | add fk between xcom and task instance | 71c980a8ffb3563bf16d8a23a58de54c9e8cf556 | <ide><path>airflow/cli/commands/task_command.py
<ide> import os
<ide> import textwrap
<ide> from contextlib import contextmanager, redirect_stderr, redirect_stdout
<del>from typing import List, Optional
<add>from typing import List, Optional, Tuple, Union
<ide>
<ide> from pendulum.parsing.exceptions import ParserError
<ide> from sqlalchemy.orm.exc import NoResultFound
<ide> from airflow.models.baseoperator import BaseOperator
<ide> from airflow.models.dag import DAG
<ide> from airflow.models.dagrun import DagRun
<del>from airflow.models.xcom import IN_MEMORY_RUN_ID
<ide> from airflow.ti_deps.dep_context import DepContext
<ide> from airflow.ti_deps.dependencies_deps import SCHEDULER_QUEUED_DEPS
<add>from airflow.typing_compat import Literal
<ide> from airflow.utils import cli as cli_utils
<ide> from airflow.utils.cli import (
<ide> get_dag,
<ide> from airflow.utils.log.logging_mixin import StreamLogWriter
<ide> from airflow.utils.net import get_hostname
<ide> from airflow.utils.session import NEW_SESSION, create_session, provide_session
<add>from airflow.utils.state import DagRunState
<add>
<add>CreateIfNecessary = Union[Literal[False], Literal["db"], Literal["memory"]]
<add>
<add>
<add>def _generate_temporary_run_id() -> str:
<add> """Generate a ``run_id`` for a DAG run that will be created temporarily.
<add>
<add> This is used mostly by ``airflow task test`` to create a DAG run that will
<add> be deleted after the task is run.
<add> """
<add> return f"__airflow_temporary_run_{timezone.utcnow().isoformat()}__"
<ide>
<ide>
<ide> def _get_dag_run(
<ide> *,
<ide> dag: DAG,
<ide> exec_date_or_run_id: str,
<del> create_if_necessary: bool,
<add> create_if_necessary: CreateIfNecessary,
<ide> session: Session,
<del>) -> DagRun:
<add>) -> Tuple[DagRun, bool]:
<ide> """Try to retrieve a DAG run from a string representing either a run ID or logical date.
<ide>
<ide> This checks DAG runs like this:
<ide> def _get_dag_run(
<ide> """
<ide> dag_run = dag.get_dagrun(run_id=exec_date_or_run_id, session=session)
<ide> if dag_run:
<del> return dag_run
<add> return dag_run, False
<ide>
<ide> try:
<ide> execution_date: Optional[datetime.datetime] = timezone.parse(exec_date_or_run_id)
<ide> except (ParserError, TypeError):
<ide> execution_date = None
<ide>
<ide> try:
<del> return (
<add> dag_run = (
<ide> session.query(DagRun)
<ide> .filter(DagRun.dag_id == dag.dag_id, DagRun.execution_date == execution_date)
<ide> .one()
<ide> def _get_dag_run(
<ide> raise DagRunNotFound(
<ide> f"DagRun for {dag.dag_id} with run_id or execution_date of {exec_date_or_run_id!r} not found"
<ide> ) from None
<add> else:
<add> return dag_run, False
<ide>
<ide> if execution_date is not None:
<del> return DagRun(dag.dag_id, run_id=IN_MEMORY_RUN_ID, execution_date=execution_date)
<del> return DagRun(dag.dag_id, run_id=exec_date_or_run_id, execution_date=timezone.utcnow())
<add> dag_run_execution_date = execution_date
<add> else:
<add> dag_run_execution_date = timezone.utcnow()
<add> if create_if_necessary == "memory":
<add> dag_run = DagRun(dag.dag_id, run_id=exec_date_or_run_id, execution_date=dag_run_execution_date)
<add> return dag_run, True
<add> elif create_if_necessary == "db":
<add> dag_run = dag.create_dagrun(
<add> state=DagRunState.QUEUED,
<add> execution_date=dag_run_execution_date,
<add> run_id=_generate_temporary_run_id(),
<add> session=session,
<add> )
<add> return dag_run, True
<add> raise ValueError(f"unknown create_if_necessary value: {create_if_necessary!r}")
<ide>
<ide>
<ide> @provide_session
<ide> def _get_ti(
<ide> exec_date_or_run_id: str,
<ide> map_index: int,
<ide> *,
<del> create_if_necessary: bool = False,
<add> create_if_necessary: CreateIfNecessary = False,
<ide> session: Session = NEW_SESSION,
<del>) -> TaskInstance:
<add>) -> Tuple[TaskInstance, bool]:
<ide> """Get the task instance through DagRun.run_id, if that fails, get the TI the old way"""
<ide> if task.is_mapped:
<ide> if map_index < 0:
<ide> raise RuntimeError("No map_index passed to mapped task")
<ide> elif map_index >= 0:
<ide> raise RuntimeError("map_index passed to non-mapped task")
<del> dag_run = _get_dag_run(
<add> dag_run, dr_created = _get_dag_run(
<ide> dag=task.dag,
<ide> exec_date_or_run_id=exec_date_or_run_id,
<ide> create_if_necessary=create_if_necessary,
<ide> def _get_ti(
<ide> else:
<ide> ti = ti_or_none
<ide> ti.refresh_from_task(task)
<del> return ti
<add> return ti, dr_created
<ide>
<ide>
<ide> def _run_task_by_selected_method(args, dag: DAG, ti: TaskInstance) -> None:
<ide> def task_run(args, dag=None):
<ide> # Use DAG from parameter
<ide> pass
<ide> task = dag.get_task(task_id=args.task_id)
<del> ti = _get_ti(task, args.execution_date_or_run_id, args.map_index)
<add> ti, _ = _get_ti(task, args.execution_date_or_run_id, args.map_index)
<ide> ti.init_run_context(raw=args.raw)
<ide>
<ide> hostname = get_hostname()
<ide> def task_failed_deps(args):
<ide> """
<ide> dag = get_dag(args.subdir, args.dag_id)
<ide> task = dag.get_task(task_id=args.task_id)
<del> ti = _get_ti(task, args.execution_date_or_run_id, args.map_index)
<add> ti, _ = _get_ti(task, args.execution_date_or_run_id, args.map_index)
<ide>
<ide> dep_context = DepContext(deps=SCHEDULER_QUEUED_DEPS)
<ide> failed_deps = list(ti.get_failed_dep_statuses(dep_context=dep_context))
<ide> def task_state(args):
<ide> """
<ide> dag = get_dag(args.subdir, args.dag_id)
<ide> task = dag.get_task(task_id=args.task_id)
<del> ti = _get_ti(task, args.execution_date_or_run_id, args.map_index)
<add> ti, _ = _get_ti(task, args.execution_date_or_run_id, args.map_index)
<ide> print(ti.current_state())
<ide>
<ide>
<ide> def task_test(args, dag=None):
<ide> if task.params:
<ide> task.params.validate()
<ide>
<del> ti = _get_ti(task, args.execution_date_or_run_id, args.map_index, create_if_necessary=True)
<add> ti, dr_created = _get_ti(task, args.execution_date_or_run_id, args.map_index, create_if_necessary="db")
<ide>
<ide> try:
<ide> if args.dry_run:
<ide> def task_test(args, dag=None):
<ide> # Make sure to reset back to normal. When run for CLI this doesn't
<ide> # matter, but it does for test suite
<ide> logging.getLogger('airflow.task').propagate = False
<add> if dr_created:
<add> with create_session() as session:
<add> session.delete(ti.dag_run)
<ide>
<ide>
<ide> @cli_utils.action_cli(check_db=False)
<ide> def task_render(args):
<ide> """Renders and displays templated fields for a given task"""
<ide> dag = get_dag(args.subdir, args.dag_id)
<ide> task = dag.get_task(task_id=args.task_id)
<del> ti = _get_ti(task, args.execution_date_or_run_id, args.map_index, create_if_necessary=True)
<add> ti, _ = _get_ti(task, args.execution_date_or_run_id, args.map_index, create_if_necessary="memory")
<ide> ti.render_templates()
<ide> for attr in task.__class__.template_fields:
<ide> print(
<ide><path>airflow/migrations/versions/0102_c306b5b5ae4a_switch_xcom_table_to_use_run_id.py
<ide> def upgrade():
<ide> with op.batch_alter_table("xcom") as batch_op:
<ide> batch_op.create_primary_key("xcom_pkey", ["dag_run_id", "task_id", "map_index", "key"])
<ide> batch_op.create_index("idx_xcom_key", ["key"])
<del> batch_op.create_index("idx_xcom_ti_id", ["dag_id", "run_id", "task_id", "map_index"])
<add> batch_op.create_foreign_key(
<add> "xcom_task_instance_fkey",
<add> "task_instance",
<add> ["dag_id", "task_id", "run_id", "map_index"],
<add> ["dag_id", "task_id", "run_id", "map_index"],
<add> ondelete="CASCADE",
<add> )
<ide>
<ide>
<ide> def downgrade():
<ide><path>airflow/models/taskinstance.py
<ide> def xcom_push(
<ide> task_id=self.task_id,
<ide> dag_id=self.dag_id,
<ide> run_id=self.run_id,
<add> map_index=self.map_index,
<ide> session=session,
<ide> )
<ide>
<ide><path>airflow/models/xcom.py
<ide> from typing import TYPE_CHECKING, Any, Iterable, List, Optional, Type, Union, cast, overload
<ide>
<ide> import pendulum
<del>from sqlalchemy import Column, Index, Integer, LargeBinary, String
<add>from sqlalchemy import Column, ForeignKeyConstraint, Index, Integer, LargeBinary, String
<ide> from sqlalchemy.ext.associationproxy import association_proxy
<ide> from sqlalchemy.orm import Query, Session, reconstructor, relationship
<ide> from sqlalchemy.orm.exc import NoResultFound
<ide> MAX_XCOM_SIZE = 49344
<ide> XCOM_RETURN_KEY = 'return_value'
<ide>
<del># Stand-in value for 'airflow task test' generating a temporary in-memory DAG
<del># run without storing it in the database.
<del>IN_MEMORY_RUN_ID = "__airflow_in_memory_dagrun__"
<del>
<ide> if TYPE_CHECKING:
<ide> from airflow.models.taskinstance import TaskInstanceKey
<ide>
<ide> class BaseXCom(Base, LoggingMixin):
<ide> value = Column(LargeBinary)
<ide> timestamp = Column(UtcDateTime, default=timezone.utcnow, nullable=False)
<ide>
<add> __table_args__ = (
<add> # Ideally we should create a unique index over (key, dag_id, task_id, run_id),
<add> # but it goes over MySQL's index length limit. So we instead index 'key'
<add> # separately, and enforce uniqueness with DagRun.id instead.
<add> Index("idx_xcom_key", key),
<add> ForeignKeyConstraint(
<add> [dag_id, task_id, run_id, map_index],
<add> [
<add> "task_instance.dag_id",
<add> "task_instance.task_id",
<add> "task_instance.run_id",
<add> "task_instance.map_index",
<add> ],
<add> name="xcom_task_instance_fkey",
<add> ondelete="CASCADE",
<add> ),
<add> )
<add>
<ide> dag_run = relationship(
<ide> "DagRun",
<del> primaryjoin="""and_(
<del> BaseXCom.dag_id == foreign(DagRun.dag_id),
<del> BaseXCom.run_id == foreign(DagRun.run_id),
<del> )""",
<add> primaryjoin="BaseXCom.dag_run_id == foreign(DagRun.id)",
<ide> uselist=False,
<ide> lazy="joined",
<ide> passive_deletes="all",
<ide> )
<ide> execution_date = association_proxy("dag_run", "execution_date")
<ide>
<del> __table_args__ = (
<del> # Ideally we should create a unique index over (key, dag_id, task_id, run_id),
<del> # but it goes over MySQL's index length limit. So we instead create indexes
<del> # separately, and enforce uniqueness with DagRun.id instead.
<del> Index("idx_xcom_key", key),
<del> Index("idx_xcom_ti_id", dag_id, task_id, run_id, map_index),
<del> )
<del>
<ide> @reconstructor
<ide> def init_on_load(self):
<ide> """
<ide> def set(
<ide> )
<ide> except NoResultFound:
<ide> raise ValueError(f"DAG run not found on DAG {dag_id!r} at {execution_date}") from None
<del> elif run_id == IN_MEMORY_RUN_ID:
<del> dag_run_id = -1
<ide> else:
<ide> dag_run_id = session.query(DagRun.id).filter_by(dag_id=dag_id, run_id=run_id).scalar()
<ide> if dag_run_id is None:
<ide> def set(
<ide> cls.run_id == run_id,
<ide> cls.task_id == task_id,
<ide> cls.dag_id == dag_id,
<add> cls.map_index == map_index,
<ide> ).delete()
<ide> new = cast(Any, cls)( # Work around Mypy complaining model not defining '__init__'.
<ide> dag_run_id=dag_run_id,
<ide> def set(
<ide> run_id=run_id,
<ide> task_id=task_id,
<ide> dag_id=dag_id,
<add> map_index=map_index,
<ide> )
<ide> session.add(new)
<ide> session.flush()
<ide> def get_many(
<ide> if execution_date is not None:
<ide> query = query.filter(DagRun.execution_date <= execution_date)
<ide> else:
<del> # This returns an empty query result for IN_MEMORY_RUN_ID,
<del> # but that is impossible to implement. Sorry?
<ide> dr = session.query(DagRun.execution_date).filter(DagRun.run_id == run_id).subquery()
<ide> query = query.filter(cls.execution_date <= dr.c.execution_date)
<ide> elif execution_date is not None:
<ide><path>tests/api_connexion/endpoints/test_xcom_endpoint.py
<ide> import pytest
<ide> from parameterized import parameterized
<ide>
<del>from airflow.models import DagModel, DagRun as DR, XCom
<add>from airflow.models import DagModel, DagRun, TaskInstance, XCom
<add>from airflow.operators.dummy import DummyOperator
<ide> from airflow.security import permissions
<ide> from airflow.utils.dates import parse_execution_date
<ide> from airflow.utils.session import create_session
<ide> def test_should_respond_200(self):
<ide> execution_date = '2005-04-02T00:00:00+00:00'
<ide> xcom_key = 'test-xcom-key'
<ide> execution_date_parsed = parse_execution_date(execution_date)
<del> dag_run_id = DR.generate_run_id(DagRunType.MANUAL, execution_date_parsed)
<del> self._create_xcom_entry(dag_id, dag_run_id, execution_date_parsed, task_id, xcom_key)
<add> run_id = DagRun.generate_run_id(DagRunType.MANUAL, execution_date_parsed)
<add> self._create_xcom_entry(dag_id, run_id, execution_date_parsed, task_id, xcom_key)
<ide> response = self.client.get(
<del> f"/api/v1/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/xcomEntries/{xcom_key}",
<add> f"/api/v1/dags/{dag_id}/dagRuns/{run_id}/taskInstances/{task_id}/xcomEntries/{xcom_key}",
<ide> environ_overrides={'REMOTE_USER': "test"},
<ide> )
<ide> assert 200 == response.status_code
<ide> def test_should_raises_401_unauthenticated(self):
<ide> execution_date = '2005-04-02T00:00:00+00:00'
<ide> xcom_key = 'test-xcom-key'
<ide> execution_date_parsed = parse_execution_date(execution_date)
<del> dag_run_id = DR.generate_run_id(DagRunType.MANUAL, execution_date_parsed)
<del> self._create_xcom_entry(dag_id, dag_run_id, execution_date_parsed, task_id, xcom_key)
<add> run_id = DagRun.generate_run_id(DagRunType.MANUAL, execution_date_parsed)
<add> self._create_xcom_entry(dag_id, run_id, execution_date_parsed, task_id, xcom_key)
<ide> response = self.client.get(
<del> f"/api/v1/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/xcomEntries/{xcom_key}"
<add> f"/api/v1/dags/{dag_id}/dagRuns/{run_id}/taskInstances/{task_id}/xcomEntries/{xcom_key}"
<ide> )
<ide>
<ide> assert_401(response)
<ide> def test_should_raise_403_forbidden(self):
<ide> execution_date = '2005-04-02T00:00:00+00:00'
<ide> xcom_key = 'test-xcom-key'
<ide> execution_date_parsed = parse_execution_date(execution_date)
<del> dag_run_id = DR.generate_run_id(DagRunType.MANUAL, execution_date_parsed)
<add> run_id = DagRun.generate_run_id(DagRunType.MANUAL, execution_date_parsed)
<ide>
<del> self._create_xcom_entry(dag_id, dag_run_id, execution_date_parsed, task_id, xcom_key)
<add> self._create_xcom_entry(dag_id, run_id, execution_date_parsed, task_id, xcom_key)
<ide> response = self.client.get(
<del> f"/api/v1/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/xcomEntries/{xcom_key}",
<add> f"/api/v1/dags/{dag_id}/dagRuns/{run_id}/taskInstances/{task_id}/xcomEntries/{xcom_key}",
<ide> environ_overrides={'REMOTE_USER': "test_no_permissions"},
<ide> )
<ide> assert response.status_code == 403
<ide>
<del> def _create_xcom_entry(self, dag_id, dag_run_id, execution_date, task_id, xcom_key):
<add> def _create_xcom_entry(self, dag_id, run_id, execution_date, task_id, xcom_key):
<ide> with create_session() as session:
<del> dagrun = DR(
<add> dagrun = DagRun(
<ide> dag_id=dag_id,
<del> run_id=dag_run_id,
<add> run_id=run_id,
<ide> execution_date=execution_date,
<ide> start_date=execution_date,
<ide> run_type=DagRunType.MANUAL,
<ide> )
<ide> session.add(dagrun)
<add> ti = TaskInstance(DummyOperator(task_id=task_id), run_id=run_id)
<add> ti.dag_id = dag_id
<add> session.add(ti)
<ide> XCom.set(
<ide> key=xcom_key,
<ide> value="TEST_VALUE",
<del> run_id=dag_run_id,
<add> run_id=run_id,
<ide> task_id=task_id,
<ide> dag_id=dag_id,
<ide> )
<ide> def test_should_respond_200(self):
<ide> task_id = 'test-task-id'
<ide> execution_date = '2005-04-02T00:00:00+00:00'
<ide> execution_date_parsed = parse_execution_date(execution_date)
<del> dag_run_id = DR.generate_run_id(DagRunType.MANUAL, execution_date_parsed)
<add> run_id = DagRun.generate_run_id(DagRunType.MANUAL, execution_date_parsed)
<ide>
<del> self._create_xcom_entries(dag_id, dag_run_id, execution_date_parsed, task_id)
<add> self._create_xcom_entries(dag_id, run_id, execution_date_parsed, task_id)
<ide> response = self.client.get(
<del> f"/api/v1/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/xcomEntries",
<add> f"/api/v1/dags/{dag_id}/dagRuns/{run_id}/taskInstances/{task_id}/xcomEntries",
<ide> environ_overrides={'REMOTE_USER': "test"},
<ide> )
<ide>
<ide> def test_should_respond_200_with_tilde_and_access_to_all_dags(self):
<ide> task_id_1 = 'test-task-id-1'
<ide> execution_date = '2005-04-02T00:00:00+00:00'
<ide> execution_date_parsed = parse_execution_date(execution_date)
<del> dag_run_id_1 = DR.generate_run_id(DagRunType.MANUAL, execution_date_parsed)
<del> self._create_xcom_entries(dag_id_1, dag_run_id_1, execution_date_parsed, task_id_1)
<add> run_id_1 = DagRun.generate_run_id(DagRunType.MANUAL, execution_date_parsed)
<add> self._create_xcom_entries(dag_id_1, run_id_1, execution_date_parsed, task_id_1)
<ide>
<ide> dag_id_2 = 'test-dag-id-2'
<ide> task_id_2 = 'test-task-id-2'
<del> dag_run_id_2 = DR.generate_run_id(DagRunType.MANUAL, execution_date_parsed)
<del> self._create_xcom_entries(dag_id_2, dag_run_id_2, execution_date_parsed, task_id_2)
<add> run_id_2 = DagRun.generate_run_id(DagRunType.MANUAL, execution_date_parsed)
<add> self._create_xcom_entries(dag_id_2, run_id_2, execution_date_parsed, task_id_2)
<ide>
<ide> response = self.client.get(
<ide> "/api/v1/dags/~/dagRuns/~/taskInstances/~/xcomEntries",
<ide> def test_should_respond_200_with_tilde_and_granular_dag_access(self):
<ide> task_id_1 = 'test-task-id-1'
<ide> execution_date = '2005-04-02T00:00:00+00:00'
<ide> execution_date_parsed = parse_execution_date(execution_date)
<del> dag_run_id_1 = DR.generate_run_id(DagRunType.MANUAL, execution_date_parsed)
<add> dag_run_id_1 = DagRun.generate_run_id(DagRunType.MANUAL, execution_date_parsed)
<ide> self._create_xcom_entries(dag_id_1, dag_run_id_1, execution_date_parsed, task_id_1)
<ide>
<ide> dag_id_2 = 'test-dag-id-2'
<ide> task_id_2 = 'test-task-id-2'
<del> dag_run_id_2 = DR.generate_run_id(DagRunType.MANUAL, execution_date_parsed)
<del> self._create_xcom_entries(dag_id_2, dag_run_id_2, execution_date_parsed, task_id_2)
<add> run_id_2 = DagRun.generate_run_id(DagRunType.MANUAL, execution_date_parsed)
<add> self._create_xcom_entries(dag_id_2, run_id_2, execution_date_parsed, task_id_2)
<ide> self._create_invalid_xcom_entries(execution_date_parsed)
<ide> response = self.client.get(
<ide> "/api/v1/dags/~/dagRuns/~/taskInstances/~/xcomEntries",
<ide> def test_should_raises_401_unauthenticated(self):
<ide> task_id = 'test-task-id'
<ide> execution_date = '2005-04-02T00:00:00+00:00'
<ide> execution_date_parsed = parse_execution_date(execution_date)
<del> dag_run_id = DR.generate_run_id(DagRunType.MANUAL, execution_date_parsed)
<del> self._create_xcom_entries(dag_id, dag_run_id, execution_date_parsed, task_id)
<add> run_id = DagRun.generate_run_id(DagRunType.MANUAL, execution_date_parsed)
<add> self._create_xcom_entries(dag_id, run_id, execution_date_parsed, task_id)
<ide>
<ide> response = self.client.get(
<del> f"/api/v1/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/xcomEntries"
<add> f"/api/v1/dags/{dag_id}/dagRuns/{run_id}/taskInstances/{task_id}/xcomEntries"
<ide> )
<ide>
<ide> assert_401(response)
<ide>
<del> def _create_xcom_entries(self, dag_id, dag_run_id, execution_date, task_id):
<add> def _create_xcom_entries(self, dag_id, run_id, execution_date, task_id):
<ide> with create_session() as session:
<ide> dag = DagModel(dag_id=dag_id)
<ide> session.add(dag)
<del>
<del> dagrun = DR(
<add> dagrun = DagRun(
<ide> dag_id=dag_id,
<del> run_id=dag_run_id,
<add> run_id=run_id,
<ide> execution_date=execution_date,
<ide> start_date=execution_date,
<ide> run_type=DagRunType.MANUAL,
<ide> )
<ide> session.add(dagrun)
<add> ti = TaskInstance(DummyOperator(task_id=task_id), run_id=run_id)
<add> ti.dag_id = dag_id
<add> session.add(ti)
<add>
<ide> for i in [1, 2]:
<ide> XCom.set(
<ide> key=f'test-xcom-key-{i}',
<ide> value="TEST",
<del> run_id=dag_run_id,
<add> run_id=run_id,
<ide> task_id=task_id,
<ide> dag_id=dag_id,
<ide> )
<ide> def _create_invalid_xcom_entries(self, execution_date):
<ide> with create_session() as session:
<ide> dag = DagModel(dag_id="invalid_dag")
<ide> session.add(dag)
<del> dagrun = DR(
<add> dagrun = DagRun(
<ide> dag_id="invalid_dag",
<ide> run_id="invalid_run_id",
<ide> execution_date=execution_date + timedelta(days=1),
<ide> start_date=execution_date,
<ide> run_type=DagRunType.MANUAL,
<ide> )
<ide> session.add(dagrun)
<del> dagrun1 = DR(
<add> dagrun1 = DagRun(
<ide> dag_id="invalid_dag",
<ide> run_id="not_this_run_id",
<ide> execution_date=execution_date,
<ide> start_date=execution_date,
<ide> run_type=DagRunType.MANUAL,
<ide> )
<ide> session.add(dagrun1)
<add> ti = TaskInstance(DummyOperator(task_id="invalid_task"), run_id="not_this_run_id")
<add> ti.dag_id = "invalid_dag"
<add> session.add(ti)
<ide> for i in [1, 2]:
<ide> XCom.set(
<ide> key=f'invalid-xcom-key-{i}',
<ide> def setup_method(self):
<ide> self.task_id = 'test-task-id'
<ide> self.execution_date = '2005-04-02T00:00:00+00:00'
<ide> self.execution_date_parsed = parse_execution_date(self.execution_date)
<del> self.dag_run_id = DR.generate_run_id(DagRunType.MANUAL, self.execution_date_parsed)
<add> self.run_id = DagRun.generate_run_id(DagRunType.MANUAL, self.execution_date_parsed)
<ide>
<ide> @parameterized.expand(
<ide> [
<ide> def setup_method(self):
<ide> def test_handle_limit_offset(self, query_params, expected_xcom_ids):
<ide> url = "/api/v1/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/xcomEntries?{query_params}"
<ide> url = url.format(
<del> dag_id=self.dag_id, dag_run_id=self.dag_run_id, task_id=self.task_id, query_params=query_params
<add> dag_id=self.dag_id, dag_run_id=self.run_id, task_id=self.task_id, query_params=query_params
<ide> )
<ide> with create_session() as session:
<del> dagrun = DR(
<add> dagrun = DagRun(
<ide> dag_id=self.dag_id,
<del> run_id=self.dag_run_id,
<add> run_id=self.run_id,
<ide> execution_date=self.execution_date_parsed,
<ide> start_date=self.execution_date_parsed,
<ide> run_type=DagRunType.MANUAL,
<ide> )
<ide> session.add(dagrun)
<add> ti = TaskInstance(DummyOperator(task_id=self.task_id), run_id=self.run_id)
<add> ti.dag_id = self.dag_id
<add> session.add(ti)
<ide>
<ide> with create_session() as session:
<ide> for i in range(1, 11):
<ide> xcom = XCom(
<ide> dag_run_id=dagrun.id,
<ide> key=f"TEST_XCOM_KEY{i}",
<ide> value=b"null",
<del> run_id=self.dag_run_id,
<add> run_id=self.run_id,
<ide> task_id=self.task_id,
<ide> dag_id=self.dag_id,
<ide> timestamp=self.execution_date_parsed,
<ide><path>tests/models/test_taskinstance.py
<ide> def test_are_dependents_done(
<ide> session.flush()
<ide> assert ti.are_dependents_done(session) == expected_are_dependents_done
<ide>
<del> def test_xcom_pull(self, create_task_instance):
<del> """
<del> Test xcom_pull, using different filtering methods.
<del> """
<del> ti1 = create_task_instance(
<del> dag_id='test_xcom',
<del> task_id='test_xcom_1',
<del> start_date=timezone.datetime(2016, 6, 1, 0, 0, 0),
<del> )
<add> def test_xcom_pull(self, dag_maker):
<add> """Test xcom_pull, using different filtering methods."""
<add> with dag_maker(dag_id="test_xcom") as dag:
<add> task_1 = DummyOperator(task_id="test_xcom_1")
<add> task_2 = DummyOperator(task_id="test_xcom_2")
<add>
<add> dagrun = dag_maker.create_dagrun(start_date=timezone.datetime(2016, 6, 1, 0, 0, 0))
<add> ti1 = dagrun.get_task_instance(task_1.task_id)
<ide>
<ide> # Push a value
<ide> ti1.xcom_push(key='foo', value='bar')
<ide> def test_xcom_pull(self, create_task_instance):
<ide> XCom.set(
<ide> key='foo',
<ide> value='baz',
<del> task_id='test_xcom_2',
<del> dag_id=ti1.dag_id,
<del> execution_date=ti1.execution_date,
<add> task_id=task_2.task_id,
<add> dag_id=dag.dag_id,
<add> execution_date=dagrun.execution_date,
<ide> )
<ide>
<ide> # Pull with no arguments
<ide><path>tests/models/test_xcom.py
<ide> from unittest.mock import MagicMock
<ide>
<ide> import pytest
<add>from sqlalchemy.orm import Session
<ide>
<ide> from airflow.configuration import conf
<ide> from airflow.models.dagrun import DagRun, DagRunType
<del>from airflow.models.taskinstance import TaskInstanceKey
<del>from airflow.models.xcom import IN_MEMORY_RUN_ID, XCOM_RETURN_KEY, BaseXCom, XCom, resolve_xcom_backend
<add>from airflow.models.taskinstance import TaskInstance, TaskInstanceKey
<add>from airflow.models.xcom import XCOM_RETURN_KEY, BaseXCom, XCom, resolve_xcom_backend
<add>from airflow.operators.dummy import DummyOperator
<ide> from airflow.settings import json
<ide> from airflow.utils import timezone
<ide> from airflow.utils.session import create_session
<ide> def reset_db():
<ide>
<ide>
<ide> @pytest.fixture()
<del>def dag_run_factory(request, session):
<del> def func(dag_id, execution_date):
<add>def task_instance_factory(request, session: Session):
<add> def func(*, dag_id, task_id, execution_date):
<add> run_id = DagRun.generate_run_id(DagRunType.SCHEDULED, execution_date)
<ide> run = DagRun(
<ide> dag_id=dag_id,
<ide> run_type=DagRunType.SCHEDULED,
<del> run_id=DagRun.generate_run_id(DagRunType.SCHEDULED, execution_date),
<add> run_id=run_id,
<ide> execution_date=execution_date,
<ide> )
<ide> session.add(run)
<add> ti = TaskInstance(DummyOperator(task_id=task_id), run_id=run_id)
<add> ti.dag_id = dag_id
<add> session.add(ti)
<ide> session.commit()
<ide>
<del> def delete_dagrun():
<del> session.query(DagRun).filter(DagRun.id == run.id).delete()
<add> def cleanup_database():
<add> # This should also clear task instances by cascading.
<add> session.query(DagRun).filter_by(id=run.id).delete()
<ide> session.commit()
<ide>
<del> request.addfinalizer(delete_dagrun)
<del> return run
<add> request.addfinalizer(cleanup_database)
<add> return ti
<ide>
<del> yield func
<add> return func
<add>
<add>
<add>@pytest.fixture()
<add>def task_instance(task_instance_factory):
<add> return task_instance_factory(
<add> dag_id="dag",
<add> task_id="task_1",
<add> execution_date=timezone.datetime(2021, 12, 3, 4, 56),
<add> )
<ide>
<ide>
<ide> @pytest.fixture()
<del>def dag_run(dag_run_factory):
<del> return dag_run_factory(dag_id="dag", execution_date=timezone.datetime(2021, 12, 3, 4, 56))
<add>def task_instances(session, task_instance):
<add> ti2 = TaskInstance(DummyOperator(task_id="task_2"), run_id=task_instance.run_id)
<add> ti2.dag_id = task_instance.dag_id
<add> session.add(ti2)
<add> session.commit()
<add> return task_instance, ti2 # ti2 will be cleaned up automatically with the DAG run.
<ide>
<ide>
<ide> class TestXCom:
<ide> def test_resolve_xcom_class_fallback_to_basexcom_no_config(self):
<ide> assert issubclass(cls, BaseXCom)
<ide> assert cls.serialize_value([1]) == b"[1]"
<ide>
<del> def test_xcom_deserialize_with_json_to_pickle_switch(self, dag_run, session):
<add> def test_xcom_deserialize_with_json_to_pickle_switch(self, task_instance, session):
<ide> ti_key = TaskInstanceKey(
<del> dag_id=dag_run.dag_id,
<del> task_id="test_task3",
<del> run_id=dag_run.run_id,
<add> dag_id=task_instance.dag_id,
<add> task_id=task_instance.task_id,
<add> run_id=task_instance.run_id,
<ide> )
<ide> with conf_vars({("core", "enable_xcom_pickling"): "False"}):
<ide> XCom.set(
<ide> key="xcom_test3",
<ide> value={"key": "value"},
<del> dag_id=dag_run.dag_id,
<del> task_id="test_task3",
<del> run_id=dag_run.run_id,
<add> dag_id=task_instance.dag_id,
<add> task_id=task_instance.task_id,
<add> run_id=task_instance.run_id,
<ide> session=session,
<ide> )
<ide> with conf_vars({("core", "enable_xcom_pickling"): "True"}):
<ide> ret_value = XCom.get_value(key="xcom_test3", ti_key=ti_key, session=session)
<ide> assert ret_value == {"key": "value"}
<ide>
<del> def test_xcom_deserialize_with_pickle_to_json_switch(self, dag_run, session):
<add> def test_xcom_deserialize_with_pickle_to_json_switch(self, task_instance, session):
<ide> with conf_vars({("core", "enable_xcom_pickling"): "True"}):
<ide> XCom.set(
<ide> key="xcom_test3",
<ide> value={"key": "value"},
<del> dag_id=dag_run.dag_id,
<del> task_id="test_task3",
<del> run_id=dag_run.run_id,
<add> dag_id=task_instance.dag_id,
<add> task_id=task_instance.task_id,
<add> run_id=task_instance.run_id,
<ide> session=session,
<ide> )
<ide> with conf_vars({("core", "enable_xcom_pickling"): "False"}):
<ide> ret_value = XCom.get_one(
<ide> key="xcom_test3",
<del> dag_id=dag_run.dag_id,
<del> task_id="test_task3",
<del> run_id=dag_run.run_id,
<add> dag_id=task_instance.dag_id,
<add> task_id=task_instance.task_id,
<add> run_id=task_instance.run_id,
<ide> session=session,
<ide> )
<ide> assert ret_value == {"key": "value"}
<ide>
<ide> @conf_vars({("core", "xcom_enable_pickling"): "False"})
<del> def test_xcom_disable_pickle_type_fail_on_non_json(self, dag_run, session):
<add> def test_xcom_disable_pickle_type_fail_on_non_json(self, task_instance, session):
<ide> class PickleRce:
<ide> def __reduce__(self):
<ide> return os.system, ("ls -alt",)
<ide> def __reduce__(self):
<ide> XCom.set(
<ide> key="xcom_test3",
<ide> value=PickleRce(),
<del> dag_id=dag_run.dag_id,
<del> task_id="test_task3",
<del> run_id=dag_run.run_id,
<add> dag_id=task_instance.dag_id,
<add> task_id=task_instance.task_id,
<add> run_id=task_instance.run_id,
<ide> session=session,
<ide> )
<ide>
<ide> def test_xcom_init_on_load_uses_orm_deserialize_value(self, mock_orm_deserialize
<ide> mock_orm_deserialize.assert_called_once_with()
<ide>
<ide> @conf_vars({("core", "xcom_backend"): "tests.models.test_xcom.CustomXCom"})
<del> def test_get_one_custom_backend_no_use_orm_deserialize_value(self, dag_run, session):
<add> def test_get_one_custom_backend_no_use_orm_deserialize_value(self, task_instance, session):
<ide> """Test that XCom.get_one does not call orm_deserialize_value"""
<ide> XCom = resolve_xcom_backend()
<ide> XCom.set(
<ide> key=XCOM_RETURN_KEY,
<ide> value={"key": "value"},
<del> dag_id=dag_run.dag_id,
<del> task_id="test_task",
<del> run_id=dag_run.run_id,
<add> dag_id=task_instance.dag_id,
<add> task_id=task_instance.task_id,
<add> run_id=task_instance.run_id,
<ide> session=session,
<ide> )
<ide>
<ide> value = XCom.get_one(
<del> dag_id=dag_run.dag_id,
<del> task_id="test_task",
<del> run_id=dag_run.run_id,
<add> dag_id=task_instance.dag_id,
<add> task_id=task_instance.task_id,
<add> run_id=task_instance.run_id,
<ide> session=session,
<ide> )
<ide> assert value == {"key": "value"}
<ide> XCom.orm_deserialize_value.assert_not_called()
<ide>
<ide> @conf_vars({("core", "enable_xcom_pickling"): 'False'})
<ide> @mock.patch('airflow.models.xcom.conf.getimport')
<del> def test_set_serialize_call_old_signature(self, get_import, session):
<add> def test_set_serialize_call_old_signature(self, get_import, task_instance):
<ide> """
<ide> When XCom.serialize_value takes only param ``value``, other kwargs should be ignored.
<ide> """
<ide> def serialize_value(value, **kwargs):
<ide>
<ide> get_import.return_value = OldSignatureXCom
<ide>
<del> kwargs = dict(
<del> value={"my_xcom_key": "my_xcom_value"},
<add> XCom = resolve_xcom_backend()
<add> XCom.set(
<ide> key=XCOM_RETURN_KEY,
<del> dag_id="test_dag",
<del> task_id="test_task",
<del> run_id=IN_MEMORY_RUN_ID,
<add> value={"my_xcom_key": "my_xcom_value"},
<add> dag_id=task_instance.dag_id,
<add> task_id=task_instance.task_id,
<add> run_id=task_instance.run_id,
<ide> )
<del>
<del> XCom = resolve_xcom_backend()
<del> XCom.set(**kwargs)
<del> serialize_watcher.assert_called_once_with(value=kwargs['value'])
<add> serialize_watcher.assert_called_once_with(value={"my_xcom_key": "my_xcom_value"})
<ide>
<ide> @conf_vars({("core", "enable_xcom_pickling"): 'False'})
<ide> @mock.patch('airflow.models.xcom.conf.getimport')
<del> def test_set_serialize_call_current_signature(self, get_import, session):
<add> def test_set_serialize_call_current_signature(self, get_import, task_instance):
<ide> """
<ide> When XCom.serialize_value includes params execution_date, key, dag_id, task_id and run_id,
<ide> then XCom.set should pass all of them.
<ide> def serialize_value(
<ide>
<ide> get_import.return_value = CurrentSignatureXCom
<ide>
<del> kwargs = dict(
<add> XCom = resolve_xcom_backend()
<add> XCom.set(
<add> key=XCOM_RETURN_KEY,
<ide> value={"my_xcom_key": "my_xcom_value"},
<add> dag_id=task_instance.dag_id,
<add> task_id=task_instance.task_id,
<add> run_id=task_instance.run_id,
<add> map_index=-1,
<add> )
<add> serialize_watcher.assert_called_once_with(
<ide> key=XCOM_RETURN_KEY,
<del> dag_id="test_dag",
<del> task_id="test_task",
<del> run_id=IN_MEMORY_RUN_ID,
<add> value={"my_xcom_key": "my_xcom_value"},
<add> dag_id=task_instance.dag_id,
<add> task_id=task_instance.task_id,
<add> run_id=task_instance.run_id,
<ide> map_index=-1,
<ide> )
<del> expected = {**kwargs, 'run_id': '__airflow_in_memory_dagrun__'}
<del> XCom = resolve_xcom_backend()
<del> XCom.set(**kwargs)
<del> serialize_watcher.assert_called_once_with(**expected)
<ide>
<ide>
<ide> @pytest.fixture(
<ide> def setup_xcom_pickling(request):
<ide>
<ide> @pytest.fixture()
<ide> def push_simple_json_xcom(session):
<del> def func(*, dag_run: DagRun, task_id: str, key: str, value):
<add> def func(*, ti: TaskInstance, key: str, value):
<ide> return XCom.set(
<ide> key=key,
<ide> value=value,
<del> dag_id=dag_run.dag_id,
<del> task_id=task_id,
<del> run_id=dag_run.run_id,
<add> dag_id=ti.dag_id,
<add> task_id=ti.task_id,
<add> run_id=ti.run_id,
<ide> session=session,
<ide> )
<ide>
<ide> def func(*, dag_run: DagRun, task_id: str, key: str, value):
<ide> @pytest.mark.usefixtures("setup_xcom_pickling")
<ide> class TestXComGet:
<ide> @pytest.fixture()
<del> def setup_for_xcom_get_one(self, dag_run, push_simple_json_xcom):
<del> push_simple_json_xcom(dag_run=dag_run, task_id="task_id_1", key="xcom_1", value={"key": "value"})
<add> def setup_for_xcom_get_one(self, task_instance, push_simple_json_xcom):
<add> push_simple_json_xcom(ti=task_instance, key="xcom_1", value={"key": "value"})
<ide>
<ide> @pytest.mark.usefixtures("setup_for_xcom_get_one")
<del> def test_xcom_get_one(self, session, dag_run):
<add> def test_xcom_get_one(self, session, task_instance):
<ide> stored_value = XCom.get_one(
<ide> key="xcom_1",
<del> dag_id=dag_run.dag_id,
<del> task_id="task_id_1",
<del> run_id=dag_run.run_id,
<add> dag_id=task_instance.dag_id,
<add> task_id=task_instance.task_id,
<add> run_id=task_instance.run_id,
<ide> session=session,
<ide> )
<ide> assert stored_value == {"key": "value"}
<ide>
<ide> @pytest.mark.usefixtures("setup_for_xcom_get_one")
<del> def test_xcom_get_one_with_execution_date(self, session, dag_run):
<add> def test_xcom_get_one_with_execution_date(self, session, task_instance):
<ide> with pytest.deprecated_call():
<ide> stored_value = XCom.get_one(
<ide> key="xcom_1",
<del> dag_id=dag_run.dag_id,
<del> task_id="task_id_1",
<del> execution_date=dag_run.logical_date,
<add> dag_id=task_instance.dag_id,
<add> task_id=task_instance.task_id,
<add> execution_date=task_instance.execution_date,
<ide> session=session,
<ide> )
<ide> assert stored_value == {"key": "value"}
<ide>
<ide> @pytest.fixture()
<del> def dag_runs_for_xcom_get_one_from_prior_date(self, dag_run_factory, push_simple_json_xcom):
<add> def tis_for_xcom_get_one_from_prior_date(self, task_instance_factory, push_simple_json_xcom):
<ide> date1 = timezone.datetime(2021, 12, 3, 4, 56)
<del> dr1 = dag_run_factory(dag_id="dag", execution_date=date1)
<del> dr2 = dag_run_factory(dag_id="dag", execution_date=date1 + datetime.timedelta(days=1))
<add> ti1 = task_instance_factory(dag_id="dag", execution_date=date1, task_id="task_1")
<add> ti2 = task_instance_factory(
<add> dag_id="dag",
<add> execution_date=date1 + datetime.timedelta(days=1),
<add> task_id="task_1",
<add> )
<ide>
<ide> # The earlier run pushes an XCom, but not the later run, but the later
<ide> # run can get this earlier XCom with ``include_prior_dates``.
<del> push_simple_json_xcom(dag_run=dr1, task_id="task_1", key="xcom_1", value={"key": "value"})
<add> push_simple_json_xcom(ti=ti1, key="xcom_1", value={"key": "value"})
<ide>
<del> return dr1, dr2
<add> return ti1, ti2
<ide>
<del> def test_xcom_get_one_from_prior_date(self, session, dag_runs_for_xcom_get_one_from_prior_date):
<del> _, dr2 = dag_runs_for_xcom_get_one_from_prior_date
<add> def test_xcom_get_one_from_prior_date(self, session, tis_for_xcom_get_one_from_prior_date):
<add> _, ti2 = tis_for_xcom_get_one_from_prior_date
<ide> retrieved_value = XCom.get_one(
<del> run_id=dr2.run_id,
<add> run_id=ti2.run_id,
<ide> key="xcom_1",
<ide> task_id="task_1",
<ide> dag_id="dag",
<ide> def test_xcom_get_one_from_prior_date(self, session, dag_runs_for_xcom_get_one_f
<ide> def test_xcom_get_one_from_prior_with_execution_date(
<ide> self,
<ide> session,
<del> dag_runs_for_xcom_get_one_from_prior_date,
<add> tis_for_xcom_get_one_from_prior_date,
<ide> ):
<del> _, dr2 = dag_runs_for_xcom_get_one_from_prior_date
<add> _, ti2 = tis_for_xcom_get_one_from_prior_date
<ide> with pytest.deprecated_call():
<ide> retrieved_value = XCom.get_one(
<del> execution_date=dr2.execution_date,
<add> execution_date=ti2.execution_date,
<ide> key="xcom_1",
<ide> task_id="task_1",
<ide> dag_id="dag",
<ide> def test_xcom_get_one_from_prior_with_execution_date(
<ide> assert retrieved_value == {"key": "value"}
<ide>
<ide> @pytest.fixture()
<del> def setup_for_xcom_get_many_single_argument_value(self, dag_run, push_simple_json_xcom):
<del> push_simple_json_xcom(dag_run=dag_run, task_id="task_id_1", key="xcom_1", value={"key": "value"})
<add> def setup_for_xcom_get_many_single_argument_value(self, task_instance, push_simple_json_xcom):
<add> push_simple_json_xcom(ti=task_instance, key="xcom_1", value={"key": "value"})
<ide>
<ide> @pytest.mark.usefixtures("setup_for_xcom_get_many_single_argument_value")
<del> def test_xcom_get_many_single_argument_value(self, session, dag_run):
<add> def test_xcom_get_many_single_argument_value(self, session, task_instance):
<ide> stored_xcoms = XCom.get_many(
<ide> key="xcom_1",
<del> dag_ids=dag_run.dag_id,
<del> task_ids="task_id_1",
<del> run_id=dag_run.run_id,
<add> dag_ids=task_instance.dag_id,
<add> task_ids=task_instance.task_id,
<add> run_id=task_instance.run_id,
<ide> session=session,
<ide> ).all()
<ide> assert len(stored_xcoms) == 1
<ide> assert stored_xcoms[0].key == "xcom_1"
<ide> assert stored_xcoms[0].value == {"key": "value"}
<ide>
<ide> @pytest.mark.usefixtures("setup_for_xcom_get_many_single_argument_value")
<del> def test_xcom_get_many_single_argument_value_with_execution_date(self, session, dag_run):
<add> def test_xcom_get_many_single_argument_value_with_execution_date(self, session, task_instance):
<ide> with pytest.deprecated_call():
<ide> stored_xcoms = XCom.get_many(
<del> execution_date=dag_run.logical_date,
<add> execution_date=task_instance.execution_date,
<ide> key="xcom_1",
<del> dag_ids=dag_run.dag_id,
<del> task_ids="task_id_1",
<add> dag_ids=task_instance.dag_id,
<add> task_ids=task_instance.task_id,
<ide> session=session,
<ide> ).all()
<ide> assert len(stored_xcoms) == 1
<ide> assert stored_xcoms[0].key == "xcom_1"
<ide> assert stored_xcoms[0].value == {"key": "value"}
<ide>
<ide> @pytest.fixture()
<del> def setup_for_xcom_get_many_multiple_tasks(self, dag_run, push_simple_json_xcom):
<del> push_simple_json_xcom(dag_run=dag_run, key="xcom_1", value={"key1": "value1"}, task_id="task_id_1")
<del> push_simple_json_xcom(dag_run=dag_run, key="xcom_1", value={"key2": "value2"}, task_id="task_id_2")
<add> def setup_for_xcom_get_many_multiple_tasks(self, task_instances, push_simple_json_xcom):
<add> ti1, ti2 = task_instances
<add> push_simple_json_xcom(ti=ti1, key="xcom_1", value={"key1": "value1"})
<add> push_simple_json_xcom(ti=ti2, key="xcom_1", value={"key2": "value2"})
<ide>
<ide> @pytest.mark.usefixtures("setup_for_xcom_get_many_multiple_tasks")
<del> def test_xcom_get_many_multiple_tasks(self, session, dag_run):
<add> def test_xcom_get_many_multiple_tasks(self, session, task_instance):
<ide> stored_xcoms = XCom.get_many(
<ide> key="xcom_1",
<del> dag_ids=dag_run.dag_id,
<del> task_ids=["task_id_1", "task_id_2"],
<del> run_id=dag_run.run_id,
<add> dag_ids=task_instance.dag_id,
<add> task_ids=["task_1", "task_2"],
<add> run_id=task_instance.run_id,
<ide> session=session,
<ide> )
<ide> sorted_values = [x.value for x in sorted(stored_xcoms, key=operator.attrgetter("task_id"))]
<ide> assert sorted_values == [{"key1": "value1"}, {"key2": "value2"}]
<ide>
<ide> @pytest.mark.usefixtures("setup_for_xcom_get_many_multiple_tasks")
<del> def test_xcom_get_many_multiple_tasks_with_execution_date(self, session, dag_run):
<add> def test_xcom_get_many_multiple_tasks_with_execution_date(self, session, task_instance):
<ide> with pytest.deprecated_call():
<ide> stored_xcoms = XCom.get_many(
<del> execution_date=dag_run.logical_date,
<add> execution_date=task_instance.execution_date,
<ide> key="xcom_1",
<del> dag_ids=dag_run.dag_id,
<del> task_ids=["task_id_1", "task_id_2"],
<add> dag_ids=task_instance.dag_id,
<add> task_ids=["task_1", "task_2"],
<ide> session=session,
<ide> )
<ide> sorted_values = [x.value for x in sorted(stored_xcoms, key=operator.attrgetter("task_id"))]
<ide> assert sorted_values == [{"key1": "value1"}, {"key2": "value2"}]
<ide>
<ide> @pytest.fixture()
<del> def dag_runs_for_xcom_get_many_from_prior_dates(self, dag_run_factory, push_simple_json_xcom):
<add> def tis_for_xcom_get_many_from_prior_dates(self, task_instance_factory, push_simple_json_xcom):
<ide> date1 = timezone.datetime(2021, 12, 3, 4, 56)
<ide> date2 = date1 + datetime.timedelta(days=1)
<del> dr1 = dag_run_factory(dag_id="dag", execution_date=date1)
<del> dr2 = dag_run_factory(dag_id="dag", execution_date=date2)
<del> push_simple_json_xcom(dag_run=dr1, task_id="task_1", key="xcom_1", value={"key1": "value1"})
<del> push_simple_json_xcom(dag_run=dr2, task_id="task_1", key="xcom_1", value={"key2": "value2"})
<del> return dr1, dr2
<del>
<del> def test_xcom_get_many_from_prior_dates(self, session, dag_runs_for_xcom_get_many_from_prior_dates):
<del> dr1, dr2 = dag_runs_for_xcom_get_many_from_prior_dates
<add> ti1 = task_instance_factory(dag_id="dag", task_id="task_1", execution_date=date1)
<add> ti2 = task_instance_factory(dag_id="dag", task_id="task_1", execution_date=date2)
<add> push_simple_json_xcom(ti=ti1, key="xcom_1", value={"key1": "value1"})
<add> push_simple_json_xcom(ti=ti2, key="xcom_1", value={"key2": "value2"})
<add> return ti1, ti2
<add>
<add> def test_xcom_get_many_from_prior_dates(self, session, tis_for_xcom_get_many_from_prior_dates):
<add> ti1, ti2 = tis_for_xcom_get_many_from_prior_dates
<ide> stored_xcoms = XCom.get_many(
<del> run_id=dr2.run_id,
<add> run_id=ti2.run_id,
<ide> key="xcom_1",
<ide> dag_ids="dag",
<ide> task_ids="task_1",
<ide> def test_xcom_get_many_from_prior_dates(self, session, dag_runs_for_xcom_get_man
<ide>
<ide> # The retrieved XComs should be ordered by logical date, latest first.
<ide> assert [x.value for x in stored_xcoms] == [{"key2": "value2"}, {"key1": "value1"}]
<del> assert [x.execution_date for x in stored_xcoms] == [dr2.logical_date, dr1.logical_date]
<add> assert [x.execution_date for x in stored_xcoms] == [ti2.execution_date, ti1.execution_date]
<ide>
<ide> def test_xcom_get_many_from_prior_dates_with_execution_date(
<ide> self,
<ide> session,
<del> dag_runs_for_xcom_get_many_from_prior_dates,
<add> tis_for_xcom_get_many_from_prior_dates,
<ide> ):
<del> dr1, dr2 = dag_runs_for_xcom_get_many_from_prior_dates
<add> ti1, ti2 = tis_for_xcom_get_many_from_prior_dates
<ide> with pytest.deprecated_call():
<ide> stored_xcoms = XCom.get_many(
<del> execution_date=dr2.execution_date,
<add> execution_date=ti2.execution_date,
<ide> key="xcom_1",
<ide> dag_ids="dag",
<ide> task_ids="task_1",
<ide> def test_xcom_get_many_from_prior_dates_with_execution_date(
<ide>
<ide> # The retrieved XComs should be ordered by logical date, latest first.
<ide> assert [x.value for x in stored_xcoms] == [{"key2": "value2"}, {"key1": "value1"}]
<del> assert [x.execution_date for x in stored_xcoms] == [dr2.logical_date, dr1.logical_date]
<add> assert [x.execution_date for x in stored_xcoms] == [ti2.execution_date, ti1.execution_date]
<ide>
<ide>
<ide> @pytest.mark.usefixtures("setup_xcom_pickling")
<ide> class TestXComSet:
<del> def test_xcom_set(self, session, dag_run):
<add> def test_xcom_set(self, session, task_instance):
<ide> XCom.set(
<ide> key="xcom_1",
<ide> value={"key": "value"},
<del> dag_id=dag_run.dag_id,
<del> task_id="task_1",
<del> run_id=dag_run.run_id,
<add> dag_id=task_instance.dag_id,
<add> task_id=task_instance.task_id,
<add> run_id=task_instance.run_id,
<ide> session=session,
<ide> )
<ide> stored_xcoms = session.query(XCom).all()
<ide> assert stored_xcoms[0].key == "xcom_1"
<ide> assert stored_xcoms[0].value == {"key": "value"}
<ide> assert stored_xcoms[0].dag_id == "dag"
<ide> assert stored_xcoms[0].task_id == "task_1"
<del> assert stored_xcoms[0].execution_date == dag_run.logical_date
<add> assert stored_xcoms[0].execution_date == task_instance.execution_date
<ide>
<del> def test_xcom_set_with_execution_date(self, session, dag_run):
<add> def test_xcom_set_with_execution_date(self, session, task_instance):
<ide> with pytest.deprecated_call():
<ide> XCom.set(
<ide> key="xcom_1",
<ide> value={"key": "value"},
<del> dag_id=dag_run.dag_id,
<del> task_id="task_1",
<del> execution_date=dag_run.execution_date,
<add> dag_id=task_instance.dag_id,
<add> task_id=task_instance.task_id,
<add> execution_date=task_instance.execution_date,
<ide> session=session,
<ide> )
<ide> stored_xcoms = session.query(XCom).all()
<ide> assert stored_xcoms[0].key == "xcom_1"
<ide> assert stored_xcoms[0].value == {"key": "value"}
<ide> assert stored_xcoms[0].dag_id == "dag"
<ide> assert stored_xcoms[0].task_id == "task_1"
<del> assert stored_xcoms[0].execution_date == dag_run.logical_date
<add> assert stored_xcoms[0].execution_date == task_instance.execution_date
<ide>
<ide> @pytest.fixture()
<del> def setup_for_xcom_set_again_replace(self, dag_run, push_simple_json_xcom):
<del> push_simple_json_xcom(dag_run=dag_run, task_id="task_1", key="xcom_1", value={"key1": "value1"})
<add> def setup_for_xcom_set_again_replace(self, task_instance, push_simple_json_xcom):
<add> push_simple_json_xcom(ti=task_instance, key="xcom_1", value={"key1": "value1"})
<ide>
<ide> @pytest.mark.usefixtures("setup_for_xcom_set_again_replace")
<del> def test_xcom_set_again_replace(self, session, dag_run):
<add> def test_xcom_set_again_replace(self, session, task_instance):
<ide> assert session.query(XCom).one().value == {"key1": "value1"}
<ide> XCom.set(
<ide> key="xcom_1",
<ide> value={"key2": "value2"},
<del> dag_id=dag_run.dag_id,
<add> dag_id=task_instance.dag_id,
<ide> task_id="task_1",
<del> run_id=dag_run.run_id,
<add> run_id=task_instance.run_id,
<ide> session=session,
<ide> )
<ide> assert session.query(XCom).one().value == {"key2": "value2"}
<ide>
<ide> @pytest.mark.usefixtures("setup_for_xcom_set_again_replace")
<del> def test_xcom_set_again_replace_with_execution_date(self, session, dag_run):
<add> def test_xcom_set_again_replace_with_execution_date(self, session, task_instance):
<ide> assert session.query(XCom).one().value == {"key1": "value1"}
<ide> with pytest.deprecated_call():
<ide> XCom.set(
<ide> key="xcom_1",
<ide> value={"key2": "value2"},
<del> dag_id=dag_run.dag_id,
<add> dag_id=task_instance.dag_id,
<ide> task_id="task_1",
<del> execution_date=dag_run.logical_date,
<add> execution_date=task_instance.execution_date,
<ide> session=session,
<ide> )
<ide> assert session.query(XCom).one().value == {"key2": "value2"}
<ide> def test_xcom_set_again_replace_with_execution_date(self, session, dag_run):
<ide> @pytest.mark.usefixtures("setup_xcom_pickling")
<ide> class TestXComClear:
<ide> @pytest.fixture()
<del> def setup_for_xcom_clear(self, dag_run, push_simple_json_xcom):
<del> push_simple_json_xcom(dag_run=dag_run, task_id="task_1", key="xcom_1", value={"key": "value"})
<add> def setup_for_xcom_clear(self, task_instance, push_simple_json_xcom):
<add> push_simple_json_xcom(ti=task_instance, key="xcom_1", value={"key": "value"})
<ide>
<ide> @pytest.mark.usefixtures("setup_for_xcom_clear")
<del> def test_xcom_clear(self, session, dag_run):
<add> def test_xcom_clear(self, session, task_instance):
<ide> assert session.query(XCom).count() == 1
<ide> XCom.clear(
<del> dag_id=dag_run.dag_id,
<del> task_id="task_1",
<del> run_id=dag_run.run_id,
<add> dag_id=task_instance.dag_id,
<add> task_id=task_instance.task_id,
<add> run_id=task_instance.run_id,
<ide> session=session,
<ide> )
<ide> assert session.query(XCom).count() == 0
<ide>
<ide> @pytest.mark.usefixtures("setup_for_xcom_clear")
<del> def test_xcom_clear_with_execution_date(self, session, dag_run):
<add> def test_xcom_clear_with_execution_date(self, session, task_instance):
<ide> assert session.query(XCom).count() == 1
<ide> with pytest.deprecated_call():
<ide> XCom.clear(
<del> dag_id=dag_run.dag_id,
<del> task_id="task_1",
<del> execution_date=dag_run.execution_date,
<add> dag_id=task_instance.dag_id,
<add> task_id=task_instance.task_id,
<add> execution_date=task_instance.execution_date,
<ide> session=session,
<ide> )
<ide> assert session.query(XCom).count() == 0
<ide>
<ide> @pytest.mark.usefixtures("setup_for_xcom_clear")
<del> def test_xcom_clear_different_run(self, session, dag_run):
<add> def test_xcom_clear_different_run(self, session, task_instance):
<ide> XCom.clear(
<del> dag_id=dag_run.dag_id,
<del> task_id="task_1",
<add> dag_id=task_instance.dag_id,
<add> task_id=task_instance.task_id,
<ide> run_id="different_run",
<ide> session=session,
<ide> )
<ide> assert session.query(XCom).count() == 1
<ide>
<ide> @pytest.mark.usefixtures("setup_for_xcom_clear")
<del> def test_xcom_clear_different_execution_date(self, session, dag_run):
<add> def test_xcom_clear_different_execution_date(self, session, task_instance):
<ide> with pytest.deprecated_call():
<ide> XCom.clear(
<del> dag_id=dag_run.dag_id,
<del> task_id="task_1",
<add> dag_id=task_instance.dag_id,
<add> task_id=task_instance.task_id,
<ide> execution_date=timezone.utcnow(),
<ide> session=session,
<ide> )
<ide><path>tests/providers/cncf/kubernetes/operators/test_kubernetes_pod.py
<ide> def setup(self, dag_maker):
<ide> self.await_pod_completion_patch.stop()
<ide> self.client_patch.stop()
<ide>
<del> def run_pod(self, operator, map_index: int = -1) -> k8s.V1Pod:
<add> def run_pod(self, operator: KubernetesPodOperator, map_index: int = -1) -> k8s.V1Pod:
<ide> with self.dag_maker(dag_id='dag') as dag:
<ide> operator.dag = dag
<ide>
<ide> def run_pod(self, operator, map_index: int = -1) -> k8s.V1Pod:
<ide> ti.map_index = map_index
<ide> self.dag_run = dr
<ide> context = ti.get_template_context(session=self.dag_maker.session)
<add> self.dag_maker.session.commit() # So 'execute' can read dr and ti.
<ide>
<ide> remote_pod_mock = MagicMock()
<ide> remote_pod_mock.status.phase = 'Succeeded' | 8 |
Javascript | Javascript | move the case 'latin1' | fc3105511793863ef3ab15ed779f89249bf20204 | <ide><path>lib/internal/util.js
<ide> exports.normalizeEncoding = function normalizeEncoding(enc) {
<ide> case 'utf-8':
<ide> return 'utf8';
<ide> case 'ucs2':
<del> case 'utf16le':
<ide> case 'ucs-2':
<add> case 'utf16le':
<ide> case 'utf-16le':
<ide> return 'utf16le';
<add> case 'latin1':
<ide> case 'binary':
<ide> return 'latin1';
<ide> case 'base64':
<ide> case 'ascii':
<del> case 'latin1':
<ide> case 'hex':
<ide> return enc;
<ide> default: | 1 |
Javascript | Javascript | add missing space in landing page [ci skip] | 9cee3f702a429c5a6f69cb134c1ee197653bc830 | <ide><path>website/src/widgets/landing.js
<ide> const Landing = ({ data }) => {
<ide> <H2>Benchmarks</H2>
<ide> <p>
<ide> In 2015, independent researchers from Emory University and Yahoo! Labs
<del> showed that spaCy offered the
<add> showed that spaCy offered the{' '}
<ide> <strong>fastest syntactic parser in the world</strong> and that its accuracy
<ide> was <strong>within 1% of the best</strong> available (
<ide> <Link to="https://aclweb.org/anthology/P/P15/P15-1038.pdf"> | 1 |
PHP | PHP | add additional entropy to postlink() | df8f5e8f84e1c37cc9d039c2971ef1bfa6ef3496 | <ide><path>lib/Cake/View/Helper/FormHelper.php
<ide> public function postLink($title, $url = null, $options = array(), $confirmMessag
<ide> unset($options['confirm']);
<ide> }
<ide>
<del> $formName = uniqid('post_');
<add> $formName = str_replace('.', '', uniqid('post_', true));
<ide> $formUrl = $this->url($url);
<ide> $formOptions = array(
<ide> 'name' => $formName, | 1 |
Python | Python | add ssl support for smtp | 1d40d2b55663a3cadad446a146cef4ff588f7472 | <ide><path>airflow/configuration.py
<ide> class AirflowConfigException(Exception):
<ide> },
<ide> 'smtp': {
<ide> 'smtp_starttls': True,
<add> 'smtp_ssl': False,
<ide> 'smtp_user': '',
<ide> 'smtp_password': '',
<ide> },
<ide> class AirflowConfigException(Exception):
<ide> # server here
<ide> smtp_host = localhost
<ide> smtp_starttls = True
<add>smtp_ssl = False
<ide> smtp_user = airflow
<ide> smtp_port = 25
<ide> smtp_password = airflow
<ide><path>airflow/utils.py
<ide> def send_MIME_email(e_from, e_to, mime_msg, dryrun=False):
<ide> SMTP_USER = configuration.get('smtp', 'SMTP_USER')
<ide> SMTP_PASSWORD = configuration.get('smtp', 'SMTP_PASSWORD')
<ide> SMTP_STARTTLS = configuration.getboolean('smtp', 'SMTP_STARTTLS')
<add> SMTP_SSL = configuration.getboolean('smtp', 'SMTP_SSL')
<ide>
<ide> if not dryrun:
<del> s = smtplib.SMTP(SMTP_HOST, SMTP_PORT)
<add> s = smtplib.SMTP_SSL(SMTP_HOST, SMTP_PORT) if SMTP_SSL else smtplib.SMTP(SMTP_HOST, SMTP_PORT)
<ide> if SMTP_STARTTLS:
<ide> s.starttls()
<ide> if SMTP_USER and SMTP_PASSWORD: | 2 |
Javascript | Javascript | use non-breaking space in ru_ru example | 45502ac0ec6bd6a1bb43cf0cfddaf3e6c1674fa1 | <ide><path>src/locale/ru-RU.js
<ide> import "locale";
<ide>
<ide> d3.locale.ru_RU = d3.locale({
<ide> decimal: ",",
<del> thousands: " ",
<add> thousands: "\xa0",
<ide> grouping: [3, 3],
<ide> currency: ["", " руб."],
<ide> dateTime: "%A, %e %B %Y г. %X",
<ide><path>test/locale/locale-test.js
<ide> suite.addBatch({
<ide> },
<ide> "formats numbers": function(format) {
<ide> var f = format(",.2f");
<del> assert.equal(f(12345.67), "12 345,67");
<add> assert.equal(f(12345.67), "12 345,67");
<ide> },
<ide> "formats currencies": function(format) {
<ide> var f = format("$,.2f");
<del> assert.equal(f(12345.67), "12 345,67 руб.");
<add> assert.equal(f(12345.67), "12 345,67 руб.");
<ide> }
<ide> },
<ide> | 2 |
Python | Python | revise imdb conv1d example | 6899a9db16e04505b8a17770e7fb068a3c586347 | <ide><path>examples/imdb_conv.py
<ide> from keras.layers.convolutional import Convolution1D, MaxPooling1D
<ide> from keras.datasets import imdb
<ide>
<add>'''
<add> This example demonstrates the use of Convolution1D
<add> for text classification.
<add>
<add> Run on GPU: THEANO_FLAGS=mode=FAST_RUN,device=gpu,floatX=float32 python imdb_cnn.py
<add>
<add> Get to 0.8330 test accuracy after 3 epochs. 100s/epoch on K520 GPU.
<add>'''
<add>
<ide> # set parameters:
<ide> max_features = 5000
<ide> maxlen = 100
<del>batch_size = 10
<add>batch_size = 32
<ide> embedding_dims = 100
<ide> nb_filters = 250
<ide> filter_length = 3
<ide> hidden_dims = 250
<del>nb_epochs = 10
<add>nb_epoch = 3
<ide>
<ide> print("Loading data...")
<ide> (X_train, y_train), (X_test, y_test) = imdb.load_data(nb_words=max_features,
<ide> # we start off with an efficient embedding layer which maps
<ide> # our vocab indices into embedding_dims dimensions
<ide> model.add(Embedding(max_features, embedding_dims))
<add>model.add(Dropout(0.25))
<ide>
<ide> # we add a Convolution1D, which will learn nb_filters
<ide> # word group filters of size filter_length:
<ide> model.add(Flatten())
<ide>
<ide> # Computing the output shape of a conv layer can be tricky;
<del># for an good tutorial, see: http://cs231n.github.io/convolutional-networks/
<del>output_size = nb_filters * (((maxlen - filter_length)/1)+1)/2
<add># for a good tutorial, see: http://cs231n.github.io/convolutional-networks/
<add>output_size = nb_filters * (((maxlen - filter_length) / 1) + 1) / 2
<ide>
<ide> # We add a vanilla hidden layer:
<ide> model.add(Dense(output_size, hidden_dims))
<del>model.add(Dropout(0.5))
<add>model.add(Dropout(0.25))
<ide> model.add(Activation('relu'))
<ide>
<ide> # We project onto a single unit output layer, and squash it with a sigmoid:
<ide> model.add(Dense(hidden_dims, 1))
<ide> model.add(Activation('sigmoid'))
<ide>
<del># we use some mild clipping to protect our gradients from vanishing:
<del>rms = RMSprop(clipnorm=0.1)
<del>model.compile(loss='binary_crossentropy', optimizer=rms, class_mode="binary")
<del>
<del># The performance of this toy model seems comparable to that of the LSTM in imdb_lstm.py,
<del># reaching an acceptable 83.16% on the test data at epoch 10.
<del># One epoch took around 268 seconds on a Quadro K600.
<del>print("Train...")
<del>for e in range(nb_epochs):
<del> print(">>> Epoch ", e+1)
<del> model.fit(X_train, y_train, batch_size=batch_size, nb_epoch=1, show_accuracy=True)
<del> score, acc = model.evaluate(X_test, y_test, batch_size=batch_size, show_accuracy=True)
<del> print('\t- Test loss:', score)
<del> print('\t- Test accuracy:', acc)
<add>model.compile(loss='binary_crossentropy', optimizer='rmsprop', class_mode="binary")
<add>model.fit(X_train, y_train, batch_size=batch_size, nb_epoch=nb_epoch, show_accuracy=True, validation_data=(X_test, y_test)) | 1 |
Python | Python | fix bart tests on gpu | 4bf5042240d33286460b83f3dbf9be77500faab3 | <ide><path>src/transformers/modeling_bart.py
<ide> def _resize_final_logits_bias(self, new_num_tokens: int, old_num_tokens: int) ->
<ide> if new_num_tokens <= old_num_tokens:
<ide> new_bias = self.final_logits_bias[:, :new_num_tokens]
<ide> else:
<del> extra_bias = torch.zeros((1, new_num_tokens - old_num_tokens))
<add> extra_bias = torch.zeros((1, new_num_tokens - old_num_tokens), device=self.final_logits_bias.device)
<ide> new_bias = torch.cat([self.final_logits_bias, extra_bias], dim=1)
<ide> self.register_buffer("final_logits_bias", new_bias)
<ide>
<ide><path>tests/test_modeling_bart.py
<ide> def test_positional_emb_weights_against_marian(self):
<ide> # test that forward pass is just a lookup, there is no ignore padding logic
<ide> input_ids = torch.tensor([[4, 10, pad, pad, pad]], dtype=torch.long, device=torch_device)
<ide> no_cache_pad_zero = emb1(input_ids)
<del> self.assertTrue(torch.allclose(torch.Tensor(self.desired_weights), no_cache_pad_zero[:3, :5], atol=1e-3))
<add> self.assertTrue(
<add> torch.allclose(
<add> torch.tensor(self.desired_weights, device=torch_device), no_cache_pad_zero[:3, :5], atol=1e-3
<add> )
<add> ) | 2 |
Javascript | Javascript | apply the review comment to error message | 5f997d3169f1b0d270eea10da9d1cf98ddfc1bcc | <ide><path>examples/js/loaders/GLTFLoader.js
<ide> THREE.GLTFLoader = ( function () {
<ide>
<ide> loader.load( resolveURL( bufferDef.uri, options.path ), resolve, undefined, function () {
<ide>
<del> reject( new Error( 'THREE.GLTFLoader: Failed to load Buffer "' + bufferDef.uri + '".' ) );
<add> reject( new Error( 'THREE.GLTFLoader: Failed to load buffer "' + bufferDef.uri + '".' ) );
<ide>
<ide> } );
<ide> | 1 |
Ruby | Ruby | remove warnings from findermethods | 424b2019830ea4c08e86ba9ff9600aa23a54cb4f | <ide><path>activerecord/lib/active_record/relation/finder_methods.rb
<ide> def forty_two!
<ide> # Person.offset(3).third_to_last # returns the third-to-last object from OFFSET 3
<ide> # Person.where(["user_name = :u", { u: user_name }]).third_to_last
<ide> def third_to_last
<del> find_nth -3
<add> find_nth(-3)
<ide> end
<ide>
<ide> # Same as #third_to_last but raises ActiveRecord::RecordNotFound if no record
<ide> # is found.
<ide> def third_to_last!
<del> find_nth! -3
<add> find_nth!(-3)
<ide> end
<ide>
<ide> # Find the second-to-last record.
<ide> def third_to_last!
<ide> # Person.offset(3).second_to_last # returns the second-to-last object from OFFSET 3
<ide> # Person.where(["user_name = :u", { u: user_name }]).second_to_last
<ide> def second_to_last
<del> find_nth -2
<add> find_nth(-2)
<ide> end
<ide>
<ide> # Same as #second_to_last but raises ActiveRecord::RecordNotFound if no record
<ide> # is found.
<ide> def second_to_last!
<del> find_nth! -2
<add> find_nth!(-2)
<ide> end
<ide>
<ide> # Returns true if a record exists in the table that matches the +id+ or | 1 |
Go | Go | remove symlinks on layer removal for overlay2 | e6f2e7646c813e8225b0bc16d3a0c13c76a9cd97 | <ide><path>daemon/graphdriver/overlay2/overlay.go
<ide> func (d *Driver) getLowerDirs(id string) ([]string, error) {
<ide>
<ide> // Remove cleans the directories that are created for this id.
<ide> func (d *Driver) Remove(id string) error {
<del> if err := os.RemoveAll(d.dir(id)); err != nil && !os.IsNotExist(err) {
<add> dir := d.dir(id)
<add> lid, err := ioutil.ReadFile(path.Join(dir, "link"))
<add> if err == nil {
<add> if err := os.RemoveAll(path.Join(d.home, linkDir, string(lid))); err != nil {
<add> logrus.Debugf("Failed to remove link: %v", err)
<add> }
<add> }
<add>
<add> if err := os.RemoveAll(dir); err != nil && !os.IsNotExist(err) {
<ide> return err
<ide> }
<ide> return nil | 1 |
Text | Text | use dom for link element tests | 50e6afcc144c5cc59ee5c673a720a5f4761b1dc4 | <ide><path>curriculum/challenges/english/14-responsive-web-design-22/learn-css-transforms-by-building-a-penguin/619665c9abd72906f3ad30f9.md
<ide> assert.isAtLeast(title?.textContent?.length, 1);
<ide> Your code should have a `link` element.
<ide>
<ide> ```js
<del>assert(/<link/.test(code))
<add>assert.exists(document.querySelector('link'));
<ide> ```
<ide>
<ide> Your `link` element should be within your `head` element.
<ide>
<ide> ```js
<del>assert(code.match(/<head>[\w\W\s]*<link[\w\W\s]*\/>[\w\W\s]*<\/head>/i))
<add>assert(code.match(/<head>[\w\W\s]*<link[\w\W\s]*\/?>[\w\W\s]*<\/head>/i));
<ide> ```
<ide>
<ide> Your `link` element should have a `rel` attribute with the value `stylesheet`.
<ide>
<ide> ```js
<del>assert.match(code, /<link[\s\S]*?rel=('|"|`)stylesheet\1/)
<add>const link_element = document.querySelector('link');
<add>const rel = link_element.getAttribute("rel");
<add>assert.equal(rel, "stylesheet");
<ide> ```
<ide>
<ide> Your `link` element should have an `href` attribute with the value `styles.css`.
<ide>
<ide> ```js
<del>assert.match(code, /<link[\s\S]*?href=('|"|`)(\.\/)?styles\.css\1/)
<add>const link = document.querySelector('link');
<add>assert.equal(link.dataset.href, "styles.css");
<ide> ```
<ide>
<ide> # --seed-- | 1 |
Ruby | Ruby | pass explicit sort to handle apfs | 795c7170e3e8135557b91e477e48cde98a714643 | <ide><path>Library/Homebrew/cmd/leaves.rb
<ide> module Homebrew
<ide> module_function
<ide>
<ide> def leaves
<del> installed = Formula.installed
<add> installed = Formula.installed.sort
<ide> deps_of_installed = Set.new
<ide>
<ide> installed.each do |f| | 1 |
Go | Go | add container join priority option to endpoint | a93d08aef58ba8f4e57c9e03304bee256b34c699 | <ide><path>libnetwork/controller.go
<ide> import (
<ide> "github.com/docker/libnetwork/datastore"
<ide> "github.com/docker/libnetwork/driverapi"
<ide> "github.com/docker/libnetwork/hostdiscovery"
<del> "github.com/docker/libnetwork/sandbox"
<ide> "github.com/docker/libnetwork/types"
<ide> "github.com/docker/swarm/pkg/store"
<ide> )
<ide> type NetworkController interface {
<ide> // When the function returns true, the walk will stop.
<ide> type NetworkWalker func(nw Network) bool
<ide>
<del>type sandboxData struct {
<del> sandbox sandbox.Sandbox
<del> refCnt int
<del>}
<del>
<ide> type networkTable map[types.UUID]*network
<ide> type endpointTable map[types.UUID]*endpoint
<ide> type sandboxTable map[string]*sandboxData
<ide> func (c *controller) NetworkByID(id string) (Network, error) {
<ide> return nil, ErrNoSuchNetwork(id)
<ide> }
<ide>
<del>func (c *controller) sandboxAdd(key string, create bool) (sandbox.Sandbox, error) {
<del> c.Lock()
<del> defer c.Unlock()
<del>
<del> sData, ok := c.sandboxes[key]
<del> if !ok {
<del> sb, err := sandbox.NewSandbox(key, create)
<del> if err != nil {
<del> return nil, err
<del> }
<del>
<del> sData = &sandboxData{sandbox: sb, refCnt: 1}
<del> c.sandboxes[key] = sData
<del> return sData.sandbox, nil
<del> }
<del>
<del> sData.refCnt++
<del> return sData.sandbox, nil
<del>}
<del>
<del>func (c *controller) sandboxRm(key string) {
<del> c.Lock()
<del> defer c.Unlock()
<del>
<del> sData := c.sandboxes[key]
<del> sData.refCnt--
<del>
<del> if sData.refCnt == 0 {
<del> sData.sandbox.Destroy()
<del> delete(c.sandboxes, key)
<del> }
<del>}
<del>
<del>func (c *controller) sandboxGet(key string) sandbox.Sandbox {
<del> c.Lock()
<del> defer c.Unlock()
<del>
<del> sData, ok := c.sandboxes[key]
<del> if !ok {
<del> return nil
<del> }
<del>
<del> return sData.sandbox
<del>}
<del>
<ide> func (c *controller) loadDriver(networkType string) (driverapi.Driver, error) {
<ide> // Plugins pkg performs lazy loading of plugins that acts as remote drivers.
<ide> // As per the design, this Get call will result in remote driver discovery if there is a corresponding plugin available.
<ide><path>libnetwork/endpoint.go
<ide> import (
<ide> "path/filepath"
<ide> "sync"
<ide>
<del> "github.com/Sirupsen/logrus"
<ide> "github.com/docker/docker/pkg/ioutils"
<ide> "github.com/docker/libnetwork/etchosts"
<ide> "github.com/docker/libnetwork/netlabel"
<ide> type containerConfig struct {
<ide> resolvConfPathConfig
<ide> generic map[string]interface{}
<ide> useDefaultSandBox bool
<add> prio int // higher the value, more the priority
<ide> }
<ide>
<ide> type extraHost struct {
<ide> type endpoint struct {
<ide> name string
<ide> id types.UUID
<ide> network *network
<del> sandboxInfo *sandbox.Info
<ide> iFaces []*endpointInterface
<ide> joinInfo *endpointJoinInfo
<ide> container *containerInfo
<ide> func (ep *endpoint) Join(containerID string, options ...EndpointOption) error {
<ide> container := ep.container
<ide> network := ep.network
<ide> epid := ep.id
<del> joinInfo := ep.joinInfo
<del> ifaces := ep.iFaces
<ide>
<ide> ep.Unlock()
<ide> defer func() {
<ide> func (ep *endpoint) Join(containerID string, options ...EndpointOption) error {
<ide> return err
<ide> }
<ide>
<del> sb, err := ctrlr.sandboxAdd(sboxKey, !container.config.useDefaultSandBox)
<add> sb, err := ctrlr.sandboxAdd(sboxKey, !container.config.useDefaultSandBox, ep)
<ide> if err != nil {
<ide> return err
<ide> }
<ide> defer func() {
<ide> if err != nil {
<del> ctrlr.sandboxRm(sboxKey)
<add> ctrlr.sandboxRm(sboxKey, ep)
<ide> }
<ide> }()
<ide>
<del> for _, i := range ifaces {
<del> iface := &sandbox.Interface{
<del> SrcName: i.srcName,
<del> DstName: i.dstPrefix,
<del> Address: &i.addr,
<del> Routes: i.routes,
<del> }
<del> if i.addrv6.IP.To16() != nil {
<del> iface.AddressIPv6 = &i.addrv6
<del> }
<del> err = sb.AddInterface(iface)
<del> if err != nil {
<del> return err
<del> }
<del> }
<del> // Set up non-interface routes.
<del> for _, r := range ep.joinInfo.StaticRoutes {
<del> err = sb.AddStaticRoute(r)
<del> if err != nil {
<del> return err
<del> }
<del> }
<del>
<del> err = sb.SetGateway(joinInfo.gw)
<del> if err != nil {
<del> return err
<del> }
<del>
<del> err = sb.SetGatewayIPv6(joinInfo.gw6)
<del> if err != nil {
<del> return err
<del> }
<del>
<ide> container.data.SandboxKey = sb.Key()
<ide>
<ide> return nil
<ide> func (ep *endpoint) Leave(containerID string, options ...EndpointOption) error {
<ide>
<ide> err = driver.Leave(n.id, ep.id)
<ide>
<del> sb := ctrlr.sandboxGet(container.data.SandboxKey)
<del> for _, i := range sb.Interfaces() {
<del> // Only remove the interfaces owned by this endpoint from the sandbox.
<del> if ep.hasInterface(i.SrcName) {
<del> err = sb.RemoveInterface(i)
<del> if err != nil {
<del> logrus.Debugf("Remove interface failed: %v", err)
<del> }
<del> }
<del> }
<del>
<del> // Remove non-interface routes.
<del> for _, r := range ep.joinInfo.StaticRoutes {
<del> err = sb.RemoveStaticRoute(r)
<del> if err != nil {
<del> logrus.Debugf("Remove route failed: %v", err)
<del> }
<del> }
<del>
<del> ctrlr.sandboxRm(container.data.SandboxKey)
<add> ctrlr.sandboxRm(container.data.SandboxKey, ep)
<ide>
<ide> return err
<ide> }
<ide> func EndpointOptionGeneric(generic map[string]interface{}) EndpointOption {
<ide> }
<ide> }
<ide>
<add>// JoinOptionPriority function returns an option setter for priority option to
<add>// be passed to endpoint Join method.
<add>func JoinOptionPriority(prio int) EndpointOption {
<add> return func(ep *endpoint) {
<add> ep.container.config.prio = prio
<add> }
<add>}
<add>
<ide> // JoinOptionHostname function returns an option setter for hostname option to
<ide> // be passed to endpoint Join method.
<ide> func JoinOptionHostname(name string) EndpointOption {
<ide><path>libnetwork/sandboxdata.go
<add>package libnetwork
<add>
<add>import (
<add> "container/heap"
<add> "sync"
<add>
<add> "github.com/Sirupsen/logrus"
<add> "github.com/docker/libnetwork/sandbox"
<add>)
<add>
<add>type epHeap []*endpoint
<add>
<add>type sandboxData struct {
<add> sbox sandbox.Sandbox
<add> refCnt int
<add> endpoints epHeap
<add> sync.Mutex
<add>}
<add>
<add>func (eh epHeap) Len() int { return len(eh) }
<add>
<add>func (eh epHeap) Less(i, j int) bool {
<add> eh[i].Lock()
<add> eh[j].Lock()
<add> defer eh[j].Unlock()
<add> defer eh[i].Unlock()
<add>
<add> if eh[i].container.config.prio == eh[j].container.config.prio {
<add> return eh[i].network.Name() < eh[j].network.Name()
<add> }
<add>
<add> return eh[i].container.config.prio > eh[j].container.config.prio
<add>}
<add>
<add>func (eh epHeap) Swap(i, j int) { eh[i], eh[j] = eh[j], eh[i] }
<add>
<add>func (eh *epHeap) Push(x interface{}) {
<add> *eh = append(*eh, x.(*endpoint))
<add>}
<add>
<add>func (eh *epHeap) Pop() interface{} {
<add> old := *eh
<add> n := len(old)
<add> x := old[n-1]
<add> *eh = old[0 : n-1]
<add> return x
<add>}
<add>
<add>func (s *sandboxData) updateGateway(ep *endpoint) error {
<add> sb := s.sandbox()
<add> if err := sb.UnsetGateway(); err != nil {
<add> return err
<add> }
<add>
<add> if err := sb.UnsetGatewayIPv6(); err != nil {
<add> return err
<add> }
<add>
<add> if ep == nil {
<add> return nil
<add> }
<add>
<add> ep.Lock()
<add> joinInfo := ep.joinInfo
<add> ep.Unlock()
<add>
<add> if err := sb.SetGateway(joinInfo.gw); err != nil {
<add> return err
<add> }
<add>
<add> if err := sb.SetGatewayIPv6(joinInfo.gw6); err != nil {
<add> return err
<add> }
<add>
<add> return nil
<add>}
<add>
<add>func (s *sandboxData) addEndpoint(ep *endpoint) error {
<add> ep.Lock()
<add> joinInfo := ep.joinInfo
<add> ifaces := ep.iFaces
<add> ep.Unlock()
<add>
<add> sb := s.sandbox()
<add> for _, i := range ifaces {
<add> iface := &sandbox.Interface{
<add> SrcName: i.srcName,
<add> DstName: i.dstPrefix,
<add> Address: &i.addr,
<add> Routes: i.routes,
<add> }
<add> if i.addrv6.IP.To16() != nil {
<add> iface.AddressIPv6 = &i.addrv6
<add> }
<add>
<add> if err := sb.AddInterface(iface); err != nil {
<add> return err
<add> }
<add> }
<add>
<add> if joinInfo != nil {
<add> // Set up non-interface routes.
<add> for _, r := range ep.joinInfo.StaticRoutes {
<add> if err := sb.AddStaticRoute(r); err != nil {
<add> return err
<add> }
<add> }
<add> }
<add>
<add> s.Lock()
<add> heap.Push(&s.endpoints, ep)
<add> highEp := s.endpoints[0]
<add> s.Unlock()
<add>
<add> if ep == highEp {
<add> if err := s.updateGateway(ep); err != nil {
<add> return err
<add> }
<add> }
<add>
<add> s.Lock()
<add> s.refCnt++
<add> s.Unlock()
<add>
<add> return nil
<add>}
<add>
<add>func (s *sandboxData) rmEndpoint(ep *endpoint) int {
<add> ep.Lock()
<add> joinInfo := ep.joinInfo
<add> ep.Unlock()
<add>
<add> sb := s.sandbox()
<add> for _, i := range sb.Interfaces() {
<add> // Only remove the interfaces owned by this endpoint from the sandbox.
<add> if ep.hasInterface(i.SrcName) {
<add> if err := sb.RemoveInterface(i); err != nil {
<add> logrus.Debugf("Remove interface failed: %v", err)
<add> }
<add> }
<add> }
<add>
<add> // Remove non-interface routes.
<add> for _, r := range joinInfo.StaticRoutes {
<add> if err := sb.RemoveStaticRoute(r); err != nil {
<add> logrus.Debugf("Remove route failed: %v", err)
<add> }
<add> }
<add>
<add> // We don't check if s.endpoints is empty here because
<add> // it should never be empty during a rmEndpoint call and
<add> // if it is we will rightfully panic here
<add> s.Lock()
<add> highEpBefore := s.endpoints[0]
<add> var (
<add> i int
<add> e *endpoint
<add> )
<add> for i, e = range s.endpoints {
<add> if e == ep {
<add> break
<add> }
<add> }
<add> heap.Remove(&s.endpoints, i)
<add> var highEpAfter *endpoint
<add> if len(s.endpoints) > 0 {
<add> highEpAfter = s.endpoints[0]
<add> }
<add>
<add> s.Unlock()
<add>
<add> if highEpBefore != highEpAfter {
<add> s.updateGateway(highEpAfter)
<add> }
<add>
<add> s.Lock()
<add> s.refCnt--
<add> refCnt := s.refCnt
<add> s.Unlock()
<add>
<add> if refCnt == 0 {
<add> s.sandbox().Destroy()
<add> }
<add>
<add> return refCnt
<add>}
<add>
<add>func (s *sandboxData) sandbox() sandbox.Sandbox {
<add> s.Lock()
<add> defer s.Unlock()
<add>
<add> return s.sbox
<add>}
<add>
<add>func (c *controller) sandboxAdd(key string, create bool, ep *endpoint) (sandbox.Sandbox, error) {
<add> c.Lock()
<add> sData, ok := c.sandboxes[key]
<add> c.Unlock()
<add>
<add> if !ok {
<add> sb, err := sandbox.NewSandbox(key, create)
<add> if err != nil {
<add> return nil, err
<add> }
<add>
<add> sData = &sandboxData{
<add> sbox: sb,
<add> endpoints: epHeap{},
<add> }
<add>
<add> heap.Init(&sData.endpoints)
<add> c.Lock()
<add> c.sandboxes[key] = sData
<add> c.Unlock()
<add> }
<add>
<add> if err := sData.addEndpoint(ep); err != nil {
<add> return nil, err
<add> }
<add>
<add> return sData.sandbox(), nil
<add>}
<add>
<add>func (c *controller) sandboxRm(key string, ep *endpoint) {
<add> c.Lock()
<add> sData := c.sandboxes[key]
<add> c.Unlock()
<add>
<add> if sData.rmEndpoint(ep) == 0 {
<add> c.Lock()
<add> delete(c.sandboxes, key)
<add> c.Unlock()
<add> }
<add>}
<add>
<add>func (c *controller) sandboxGet(key string) sandbox.Sandbox {
<add> c.Lock()
<add> sData, ok := c.sandboxes[key]
<add> c.Unlock()
<add>
<add> if !ok {
<add> return nil
<add> }
<add>
<add> return sData.sandbox()
<add>}
<ide><path>libnetwork/sandboxdata_test.go
<add>package libnetwork
<add>
<add>import "testing"
<add>
<add>func createEmptyCtrlr() *controller {
<add> return &controller{sandboxes: sandboxTable{}}
<add>}
<add>
<add>func createEmptyEndpoint() *endpoint {
<add> return &endpoint{
<add> container: &containerInfo{},
<add> joinInfo: &endpointJoinInfo{},
<add> iFaces: []*endpointInterface{},
<add> }
<add>}
<add>
<add>func TestSandboxAddEmpty(t *testing.T) {
<add> ctrlr := createEmptyCtrlr()
<add> ep := createEmptyEndpoint()
<add>
<add> if _, err := ctrlr.sandboxAdd("sandbox1", true, ep); err != nil {
<add> t.Fatal(err)
<add> }
<add>
<add> if ctrlr.sandboxes["sandbox1"].refCnt != 1 {
<add> t.Fatalf("Unexpected sandbox ref count. Expected 1, got %d",
<add> ctrlr.sandboxes["sandbox1"].refCnt)
<add> }
<add>
<add> ctrlr.sandboxRm("sandbox1", ep)
<add> if len(ctrlr.sandboxes) != 0 {
<add> t.Fatalf("controller sandboxes is not empty. len = %d", len(ctrlr.sandboxes))
<add> }
<add>}
<add>
<add>func TestSandboxAddMultiPrio(t *testing.T) {
<add> ctrlr := createEmptyCtrlr()
<add> ep1 := createEmptyEndpoint()
<add> ep2 := createEmptyEndpoint()
<add> ep3 := createEmptyEndpoint()
<add>
<add> ep1.container.config.prio = 1
<add> ep2.container.config.prio = 2
<add> ep3.container.config.prio = 3
<add>
<add> if _, err := ctrlr.sandboxAdd("sandbox1", true, ep1); err != nil {
<add> t.Fatal(err)
<add> }
<add>
<add> if _, err := ctrlr.sandboxAdd("sandbox1", true, ep2); err != nil {
<add> t.Fatal(err)
<add> }
<add>
<add> if _, err := ctrlr.sandboxAdd("sandbox1", true, ep3); err != nil {
<add> t.Fatal(err)
<add> }
<add>
<add> if ctrlr.sandboxes["sandbox1"].refCnt != 3 {
<add> t.Fatalf("Unexpected sandbox ref count. Expected 3, got %d",
<add> ctrlr.sandboxes["sandbox1"].refCnt)
<add> }
<add>
<add> if ctrlr.sandboxes["sandbox1"].endpoints[0] != ep3 {
<add> t.Fatal("Expected ep3 to be at the top of the heap. But did not find ep3 at the top of the heap")
<add> }
<add>
<add> ctrlr.sandboxRm("sandbox1", ep3)
<add>
<add> if ctrlr.sandboxes["sandbox1"].endpoints[0] != ep2 {
<add> t.Fatal("Expected ep2 to be at the top of the heap after removing ep3. But did not find ep2 at the top of the heap")
<add> }
<add>
<add> ctrlr.sandboxRm("sandbox1", ep2)
<add>
<add> if ctrlr.sandboxes["sandbox1"].endpoints[0] != ep1 {
<add> t.Fatal("Expected ep1 to be at the top of the heap after removing ep2. But did not find ep1 at the top of the heap")
<add> }
<add>
<add> // Re-add ep3 back
<add> if _, err := ctrlr.sandboxAdd("sandbox1", true, ep3); err != nil {
<add> t.Fatal(err)
<add> }
<add>
<add> if ctrlr.sandboxes["sandbox1"].endpoints[0] != ep3 {
<add> t.Fatal("Expected ep3 to be at the top of the heap after adding ep3 back. But did not find ep3 at the top of the heap")
<add> }
<add>
<add> ctrlr.sandboxRm("sandbox1", ep3)
<add> ctrlr.sandboxRm("sandbox1", ep1)
<add> if len(ctrlr.sandboxes) != 0 {
<add> t.Fatalf("controller sandboxes is not empty. len = %d", len(ctrlr.sandboxes))
<add> }
<add>}
<add>
<add>func TestSandboxAddSamePrio(t *testing.T) {
<add> ctrlr := createEmptyCtrlr()
<add> ep1 := createEmptyEndpoint()
<add> ep2 := createEmptyEndpoint()
<add>
<add> ep1.network = &network{name: "aaa"}
<add> ep2.network = &network{name: "bbb"}
<add>
<add> if _, err := ctrlr.sandboxAdd("sandbox1", true, ep1); err != nil {
<add> t.Fatal(err)
<add> }
<add>
<add> if _, err := ctrlr.sandboxAdd("sandbox1", true, ep2); err != nil {
<add> t.Fatal(err)
<add> }
<add>
<add> if ctrlr.sandboxes["sandbox1"].refCnt != 2 {
<add> t.Fatalf("Unexpected sandbox ref count. Expected 2, got %d",
<add> ctrlr.sandboxes["sandbox1"].refCnt)
<add> }
<add>
<add> if ctrlr.sandboxes["sandbox1"].endpoints[0] != ep1 {
<add> t.Fatal("Expected ep1 to be at the top of the heap. But did not find ep1 at the top of the heap")
<add> }
<add>
<add> ctrlr.sandboxRm("sandbox1", ep1)
<add>
<add> if ctrlr.sandboxes["sandbox1"].endpoints[0] != ep2 {
<add> t.Fatal("Expected ep2 to be at the top of the heap after removing ep3. But did not find ep2 at the top of the heap")
<add> }
<add>
<add> ctrlr.sandboxRm("sandbox1", ep2)
<add> if len(ctrlr.sandboxes) != 0 {
<add> t.Fatalf("controller sandboxes is not empty. len = %d", len(ctrlr.sandboxes))
<add> }
<add>} | 4 |
Python | Python | change custom message for clarity | 3d25dadbf36930439f6cb5bfa81166c0ddcf0b38 | <ide><path>tests/test_permissions.py
<ide> def has_permission(self, request, view):
<ide>
<ide>
<ide> class BasicPermWithDetail(permissions.BasePermission):
<del> message = 'Custom: You cannot post to this resource'
<add> message = 'Custom: You cannot access this resource'
<ide>
<ide> def has_permission(self, request, view):
<ide> return False
<ide> def has_object_permission(self, request, view, obj):
<ide>
<ide>
<ide> class BasicObjectPermWithDetail(permissions.BasePermission):
<del> message = 'Custom: You cannot post to this resource'
<add> message = 'Custom: You cannot access this resource'
<ide>
<ide> def has_object_permission(self, request, view, obj):
<ide> return False
<ide> def setUp(self):
<ide> User.objects.create_user('username', 'username@example.com', 'password')
<ide> credentials = basic_auth_header('username', 'password')
<ide> self.request = factory.get('/1', format='json', HTTP_AUTHORIZATION=credentials)
<del> self.custom_message = 'Custom: You cannot post to this resource'
<add> self.custom_message = 'Custom: You cannot access this resource'
<ide>
<ide> def test_permission_denied(self):
<ide> response = denied_view(self.request, pk=1) | 1 |
Mixed | Javascript | enable running tests under workers | 229dca3dee552f448dc9026237625ed58e8acfdc | <ide><path>test/async-hooks/init-hooks.js
<ide> 'use strict';
<ide> // Flags: --expose-gc
<ide>
<del>require('../common');
<add>const common = require('../common');
<ide> const assert = require('assert');
<ide> const async_hooks = require('async_hooks');
<ide> const util = require('util');
<ide> class ActivityCollector {
<ide> const stub = { uid, type: 'Unknown', handleIsObject: true };
<ide> this._activities.set(uid, stub);
<ide> return stub;
<add> } else if (!common.isMainThread) {
<add> // Worker threads start main script execution inside of an AsyncWrap
<add> // callback, so we don't yield errors for these.
<add> return null;
<ide> } else {
<ide> const err = new Error(`Found a handle whose ${hook}` +
<ide> ' hook was invoked but not its init hook');
<ide><path>test/async-hooks/test-crypto-pbkdf2.js
<ide> const common = require('../common');
<ide> if (!common.hasCrypto)
<ide> common.skip('missing crypto');
<add>if (!common.isMainThread)
<add> common.skip('Worker bootstrapping works differently -> different async IDs');
<ide>
<ide> const assert = require('assert');
<ide> const tick = require('./tick');
<ide><path>test/async-hooks/test-crypto-randomBytes.js
<ide> const common = require('../common');
<ide> if (!common.hasCrypto)
<ide> common.skip('missing crypto');
<add>if (!common.isMainThread)
<add> common.skip('Worker bootstrapping works differently -> different async IDs');
<ide>
<ide> const assert = require('assert');
<ide> const tick = require('./tick');
<ide><path>test/async-hooks/test-emit-before-after.js
<ide> const spawnSync = require('child_process').spawnSync;
<ide> const async_hooks = require('internal/async_hooks');
<ide> const initHooks = require('./init-hooks');
<ide>
<add>if (!common.isMainThread)
<add> common.skip('Worker bootstrapping works differently -> different async IDs');
<add>
<ide> switch (process.argv[2]) {
<ide> case 'test_invalid_async_id':
<ide> async_hooks.emitBefore(-2, 1);
<ide><path>test/async-hooks/test-enable-disable.js
<ide> const assert = require('assert');
<ide> const tick = require('./tick');
<ide> const initHooks = require('./init-hooks');
<ide> const { checkInvocations } = require('./hook-checks');
<add>
<add>if (!common.isMainThread)
<add> common.skip('Worker bootstrapping works differently -> different timing');
<add>
<ide> // Include "Unknown"s because hook2 will not be able to identify
<ide> // the type of the first Immediate since it will miss its `init` invocation.
<ide> const types = [ 'Immediate', 'Unknown' ];
<ide><path>test/async-hooks/test-fseventwrap.js
<ide> 'use strict';
<del>require('../common');
<add>const common = require('../common');
<ide>
<ide> const assert = require('assert');
<ide> const initHooks = require('./init-hooks');
<ide> const tick = require('./tick');
<ide> const { checkInvocations } = require('./hook-checks');
<ide> const fs = require('fs');
<ide>
<add>if (!common.isMainThread)
<add> common.skip('Worker bootstrapping works differently -> different async IDs');
<add>
<ide> const hooks = initHooks();
<ide>
<ide> hooks.enable();
<ide><path>test/async-hooks/test-fsreqwrap-readFile.js
<ide> const initHooks = require('./init-hooks');
<ide> const { checkInvocations } = require('./hook-checks');
<ide> const fs = require('fs');
<ide>
<add>if (!common.isMainThread)
<add> common.skip('Worker bootstrapping works differently -> different async IDs');
<add>
<ide> const hooks = initHooks();
<ide>
<ide> hooks.enable();
<ide><path>test/async-hooks/test-getaddrinforeqwrap.js
<ide> const initHooks = require('./init-hooks');
<ide> const { checkInvocations } = require('./hook-checks');
<ide> const dns = require('dns');
<ide>
<add>if (!common.isMainThread)
<add> common.skip('Worker bootstrapping works differently -> different async IDs');
<add>
<ide> const hooks = initHooks();
<ide>
<ide> hooks.enable();
<ide><path>test/async-hooks/test-getnameinforeqwrap.js
<ide> const initHooks = require('./init-hooks');
<ide> const { checkInvocations } = require('./hook-checks');
<ide> const dns = require('dns');
<ide>
<add>if (!common.isMainThread)
<add> common.skip('Worker bootstrapping works differently -> different async IDs');
<add>
<ide> const hooks = initHooks();
<ide>
<ide> hooks.enable();
<ide><path>test/async-hooks/test-graph.signal.js
<ide> 'use strict';
<ide>
<ide> const common = require('../common');
<del>if (common.isWindows) {
<add>if (common.isWindows)
<ide> common.skip('no signals on Windows');
<del>}
<add>if (!common.isMainThread)
<add> common.skip('No signal handling available in Workers');
<ide>
<ide> const initHooks = require('./init-hooks');
<ide> const verifyGraph = require('./verify-graph');
<ide><path>test/async-hooks/test-no-assert-when-disabled.js
<ide> 'use strict';
<ide> // Flags: --no-force-async-hooks-checks --expose-internals
<del>require('../common');
<add>const common = require('../common');
<add>
<add>if (!common.isMainThread)
<add> common.skip('Workers don\'t inherit per-env state like the check flag');
<ide>
<ide> const async_hooks = require('internal/async_hooks');
<ide>
<ide><path>test/async-hooks/test-pipewrap.js
<ide> const initHooks = require('./init-hooks');
<ide> const { checkInvocations } = require('./hook-checks');
<ide> const spawn = require('child_process').spawn;
<ide>
<add>if (!common.isMainThread)
<add> common.skip('Worker bootstrapping works differently -> different async IDs');
<add>
<ide> const hooks = initHooks();
<ide>
<ide> hooks.enable();
<ide><path>test/async-hooks/test-promise.chain-promise-before-init-hooks.js
<ide> const common = require('../common');
<ide> const assert = require('assert');
<ide> const initHooks = require('./init-hooks');
<ide> const { checkInvocations } = require('./hook-checks');
<add>
<add>if (!common.isMainThread)
<add> common.skip('Worker bootstrapping works differently -> different async IDs');
<add>
<ide> common.crashOnUnhandledRejection();
<ide>
<ide> const p = new Promise(common.mustCall(function executor(resolve, reject) {
<ide><path>test/async-hooks/test-promise.js
<ide> const assert = require('assert');
<ide> const initHooks = require('./init-hooks');
<ide> const { checkInvocations } = require('./hook-checks');
<ide>
<add>if (!common.isMainThread)
<add> common.skip('Worker bootstrapping works differently -> different async IDs');
<add>
<ide> common.crashOnUnhandledRejection();
<ide>
<ide> const hooks = initHooks();
<ide><path>test/async-hooks/test-signalwrap.js
<ide> const common = require('../common');
<ide>
<ide> if (common.isWindows)
<ide> common.skip('no signals in Windows');
<add>if (!common.isMainThread)
<add> common.skip('No signal handling available in Workers');
<ide>
<ide> const assert = require('assert');
<ide> const initHooks = require('./init-hooks');
<ide><path>test/async-hooks/test-statwatcher.js
<ide> 'use strict';
<ide>
<del>require('../common');
<add>const common = require('../common');
<ide> const commonPath = require.resolve('../common');
<ide> const assert = require('assert');
<ide> const initHooks = require('./init-hooks');
<ide> const { checkInvocations } = require('./hook-checks');
<ide> const fs = require('fs');
<ide>
<add>if (!common.isMainThread)
<add> common.skip('Worker bootstrapping works differently -> different async IDs');
<add>
<ide> const hooks = initHooks();
<ide> hooks.enable();
<ide>
<ide><path>test/common/index.js
<ide> Object.defineProperty(exports, 'PORT', {
<ide> enumerable: true
<ide> });
<ide>
<add>exports.isMainThread = (() => {
<add> try {
<add> return require('worker').isMainThread;
<add> } catch {
<add> // Worker module not enabled → only a single main thread exists.
<add> return true;
<add> }
<add>})();
<ide>
<ide> exports.isWindows = process.platform === 'win32';
<ide> exports.isWOW64 = exports.isWindows &&
<ide> exports.skipIfInspectorDisabled = function skipIfInspectorDisabled() {
<ide> if (process.config.variables.v8_enable_inspector === 0) {
<ide> exports.skip('V8 inspector is disabled');
<ide> }
<add> if (!exports.isMainThread) {
<add> // TODO(addaleax): Fix me.
<add> exports.skip('V8 inspector is not available in Workers');
<add> }
<ide> };
<ide>
<ide> exports.skipIf32Bits = function skipIf32Bits() {
<ide><path>test/known_issues/test-stdin-is-always-net.socket.js
<ide> const proc = spawn(
<ide> // To double-check this test, set stdio to 'pipe' and uncomment the line below.
<ide> // proc.stderr.pipe(process.stderr);
<ide> proc.on('exit', common.mustCall(function(exitCode) {
<del> process.exitCode = exitCode;
<add> assert.strictEqual(exitCode, 0);
<ide> }));
<ide><path>test/parallel/test-async-hooks-disable-during-promise.js
<ide> const common = require('../common');
<ide> const async_hooks = require('async_hooks');
<ide> common.crashOnUnhandledRejection();
<ide>
<add>if (!common.isMainThread)
<add> common.skip('Worker bootstrapping works differently -> different AsyncWraps');
<add>
<ide> const hook = async_hooks.createHook({
<ide> init: common.mustCall(2),
<ide> before: common.mustCall(1),
<ide><path>test/parallel/test-async-hooks-disable-gc-tracking.js
<ide> const common = require('../common');
<ide> const async_hooks = require('async_hooks');
<ide>
<ide> const hook = async_hooks.createHook({
<del> destroy: common.mustCall(1) // only 1 immediate is destroyed
<add> destroy: common.mustCallAtLeast(1) // only 1 immediate is destroyed
<ide> }).enable();
<ide>
<ide> new async_hooks.AsyncResource('foobar', { requireManualDestroy: true });
<ide><path>test/parallel/test-async-hooks-enable-during-promise.js
<ide> common.crashOnUnhandledRejection();
<ide> Promise.resolve(1).then(common.mustCall(() => {
<ide> async_hooks.createHook({
<ide> init: common.mustCall(),
<del> before: common.mustCall(),
<del> after: common.mustCall(2)
<add> before: common.mustCallAtLeast(),
<add> after: common.mustCallAtLeast(2)
<ide> }).enable();
<ide>
<ide> process.nextTick(common.mustCall());
<ide><path>test/parallel/test-async-hooks-prevent-double-destroy.js
<ide> const common = require('../common');
<ide> const async_hooks = require('async_hooks');
<ide>
<ide> const hook = async_hooks.createHook({
<del> destroy: common.mustCall(2) // 1 immediate + manual destroy
<add> destroy: common.mustCallAtLeast(2) // 1 immediate + manual destroy
<ide> }).enable();
<ide>
<ide> {
<ide><path>test/parallel/test-async-hooks-promise-triggerid.js
<ide> const common = require('../common');
<ide> const assert = require('assert');
<ide> const async_hooks = require('async_hooks');
<ide>
<add>if (!common.isMainThread)
<add> common.skip('Worker bootstrapping works differently -> different async IDs');
<add>
<ide> common.crashOnUnhandledRejection();
<ide>
<ide> const promiseAsyncIds = [];
<ide><path>test/parallel/test-async-hooks-promise.js
<ide> const common = require('../common');
<ide> const assert = require('assert');
<ide> const async_hooks = require('async_hooks');
<ide>
<add>if (!common.isMainThread)
<add> common.skip('Worker bootstrapping works differently -> different async IDs');
<add>
<ide> const initCalls = [];
<ide> const resolveCalls = [];
<ide>
<ide><path>test/parallel/test-async-hooks-top-level-clearimmediate.js
<ide> const common = require('../common');
<ide> const assert = require('assert');
<ide> const async_hooks = require('async_hooks');
<ide>
<add>if (!common.isMainThread)
<add> common.skip('Worker bootstrapping works differently -> different async IDs');
<add>
<ide> let seenId, seenResource;
<ide>
<ide> async_hooks.createHook({
<ide><path>test/parallel/test-async-wrap-promise-after-enabled.js
<ide> const common = require('../common');
<ide> const assert = require('assert');
<ide>
<add>if (!common.isMainThread)
<add> common.skip('Worker bootstrapping works differently -> different timing');
<add>
<ide> const async_hooks = require('async_hooks');
<ide>
<ide> const seenEvents = [];
<ide><path>test/parallel/test-child-process-custom-fds.js
<ide> const assert = require('assert');
<ide> const internalCp = require('internal/child_process');
<ide> const oldSpawnSync = internalCp.spawnSync;
<ide>
<add>if (!common.isMainThread)
<add> common.skip('stdio is not associated with file descriptors in Workers');
<add>
<ide> // Verify that customFds is used if stdio is not provided.
<ide> {
<ide> const msg = 'child_process: options.customFds option is deprecated. ' +
<ide><path>test/parallel/test-child-process-http-socket-leak.js
<ide> const common = require('../common');
<ide> const assert = require('assert');
<ide> const { fork } = require('child_process');
<ide> const http = require('http');
<del>const { kTimeout } = require('internal/timers');
<ide>
<ide> if (process.argv[2] === 'child') {
<ide> process.once('message', (req, socket) => {
<ide> if (process.argv[2] === 'child') {
<ide> return;
<ide> }
<ide>
<add>const { kTimeout } = require('internal/timers');
<add>
<ide> let child;
<ide> let socket;
<ide>
<ide><path>test/parallel/test-child-process-validate-stdio.js
<ide> common.expectsError(() => _validateStdio(stdio2, true),
<ide> { code: 'ERR_IPC_SYNC_FORK', type: Error }
<ide> );
<ide>
<del>{
<add>
<add>if (common.isMainThread) {
<ide> const stdio3 = [process.stdin, process.stdout, process.stderr];
<ide> const result = _validateStdio(stdio3, false);
<ide> assert.deepStrictEqual(result, {
<ide> common.expectsError(() => _validateStdio(stdio2, true),
<ide> ipc: undefined,
<ide> ipcFd: undefined
<ide> });
<add>} else {
<add> common.printSkipMessage(
<add> 'stdio is not associated with file descriptors in Workers');
<ide> }
<ide><path>test/parallel/test-cli-eval.js
<ide> const path = require('path');
<ide> const fixtures = require('../common/fixtures');
<ide> const nodejs = `"${process.execPath}"`;
<ide>
<add>if (!common.isMainThread)
<add> common.skip('process.chdir is not available in Workers');
<add>
<ide> if (process.argv.length > 2) {
<ide> console.log(process.argv.slice(2).join(' '));
<ide> process.exit(0);
<ide><path>test/parallel/test-cli-node-options-disallowed.js
<ide> const common = require('../common');
<ide> if (process.config.variables.node_without_node_options)
<ide> common.skip('missing NODE_OPTIONS support');
<add>if (!common.isMainThread)
<add> common.skip('process.chdir is not available in Workers');
<ide>
<ide> // Test options specified by env variable.
<ide>
<ide><path>test/parallel/test-cli-node-options.js
<ide> const common = require('../common');
<ide> if (process.config.variables.node_without_node_options)
<ide> common.skip('missing NODE_OPTIONS support');
<add>if (!common.isMainThread)
<add> common.skip('process.chdir is not available in Workers');
<ide>
<ide> // Test options specified by env variable.
<ide>
<ide><path>test/parallel/test-cluster-net-listen-relative-path.js
<ide> const tmpdir = require('../common/tmpdir');
<ide> if (common.isWindows)
<ide> common.skip('On Windows named pipes live in their own ' +
<ide> 'filesystem and don\'t have a ~100 byte limit');
<add>if (!common.isMainThread)
<add> common.skip('process.chdir is not available in Workers');
<ide>
<ide> // Choose a socket name such that the absolute path would exceed 100 bytes.
<ide> const socketDir = './unix-socket-dir';
<ide><path>test/parallel/test-console.js
<ide> const util = require('util');
<ide> assert.ok(process.stdout.writable);
<ide> assert.ok(process.stderr.writable);
<ide> // Support legacy API
<del>assert.strictEqual(typeof process.stdout.fd, 'number');
<del>assert.strictEqual(typeof process.stderr.fd, 'number');
<del>
<add>if (common.isMainThread) {
<add> assert.strictEqual(typeof process.stdout.fd, 'number');
<add> assert.strictEqual(typeof process.stderr.fd, 'number');
<add>}
<ide> process.once('warning', common.mustCall((warning) => {
<ide> assert(/no such label/.test(warning.message));
<ide> }));
<ide><path>test/parallel/test-cwd-enoent-preload.js
<ide> const common = require('../common');
<ide> // Fails with EINVAL on SmartOS, EBUSY on Windows, EBUSY on AIX.
<ide> if (common.isSunOS || common.isWindows || common.isAIX)
<ide> common.skip('cannot rmdir current working directory');
<add>if (!common.isMainThread)
<add> common.skip('process.chdir is not available in Workers');
<ide>
<ide> const assert = require('assert');
<ide> const fs = require('fs');
<ide><path>test/parallel/test-cwd-enoent-repl.js
<ide> const common = require('../common');
<ide> // Fails with EINVAL on SmartOS, EBUSY on Windows, EBUSY on AIX.
<ide> if (common.isSunOS || common.isWindows || common.isAIX)
<ide> common.skip('cannot rmdir current working directory');
<add>if (!common.isMainThread)
<add> common.skip('process.chdir is not available in Workers');
<ide>
<ide> const assert = require('assert');
<ide> const fs = require('fs');
<ide><path>test/parallel/test-cwd-enoent.js
<ide> const common = require('../common');
<ide> // Fails with EINVAL on SmartOS, EBUSY on Windows, EBUSY on AIX.
<ide> if (common.isSunOS || common.isWindows || common.isAIX)
<ide> common.skip('cannot rmdir current working directory');
<add>if (!common.isMainThread)
<add> common.skip('process.chdir is not available in Workers');
<ide>
<ide> const assert = require('assert');
<ide> const fs = require('fs');
<ide><path>test/parallel/test-debug-args.js
<ide> 'use strict';
<ide> // Flags: --debug-code
<ide>
<del>require('../common');
<add>const common = require('../common');
<ide> const assert = require('assert');
<ide>
<add>if (!common.isMainThread)
<add> common.skip('execArgv does not affect Workers');
<add>
<ide> assert(process.execArgv.includes('--debug-code'));
<ide><path>test/parallel/test-fs-realpath.js
<ide> const common = require('../common');
<ide> const fixtures = require('../common/fixtures');
<ide> const tmpdir = require('../common/tmpdir');
<ide>
<add>if (!common.isMainThread)
<add> common.skip('process.chdir is not available in Workers');
<add>
<ide> const assert = require('assert');
<ide> const fs = require('fs');
<ide> const path = require('path');
<ide><path>test/parallel/test-fs-write-file-sync.js
<ide> let openCount = 0;
<ide> let mode;
<ide> let content;
<ide>
<add>if (!common.isMainThread)
<add> common.skip('process.umask is not available in Workers');
<add>
<ide> // Need to hijack fs.open/close to make sure that things
<ide> // get closed once they're opened.
<ide> fs._openSync = fs.openSync;
<ide><path>test/parallel/test-module-cjs-helpers.js
<ide> 'use strict';
<del>// Flags: --expose-internals
<add>// Flags: --expose-internals --experimental-worker
<ide>
<ide> require('../common');
<ide> const assert = require('assert');
<ide> const { builtinLibs } = require('internal/modules/cjs/helpers');
<ide>
<ide> const hasInspector = process.config.variables.v8_enable_inspector === 1;
<ide>
<del>const expectedLibs = hasInspector ? 33 : 32;
<add>const expectedLibs = hasInspector ? 34 : 33;
<ide> assert.strictEqual(builtinLibs.length, expectedLibs);
<ide><path>test/parallel/test-preload.js
<ide> const fixtures = require('../common/fixtures');
<ide> // Refs: https://github.com/nodejs/node/pull/2253
<ide> if (common.isSunOS)
<ide> common.skip('unreliable on SunOS');
<add>if (!common.isMainThread)
<add> common.skip('process.chdir is not available in Workers');
<ide>
<ide> const assert = require('assert');
<ide> const childProcess = require('child_process');
<ide><path>test/parallel/test-process-chdir-errormessage.js
<ide> 'use strict';
<ide>
<del>const { expectsError } = require('../common');
<add>const common = require('../common');
<add>if (!common.isMainThread)
<add> common.skip('process.chdir is not available in Workers');
<ide>
<del>expectsError(
<add>common.expectsError(
<ide> () => {
<ide> process.chdir('does-not-exist');
<ide> },
<ide><path>test/parallel/test-process-chdir.js
<ide> const assert = require('assert');
<ide> const fs = require('fs');
<ide> const path = require('path');
<ide>
<add>if (!common.isMainThread)
<add> common.skip('process.chdir is not available in Workers');
<add>
<ide> const tmpdir = require('../common/tmpdir');
<ide>
<ide> process.chdir('..');
<ide><path>test/parallel/test-process-euid-egid.js
<ide> const common = require('../common');
<ide> const assert = require('assert');
<ide>
<del>if (common.isWindows) {
<del> assert.strictEqual(process.geteuid, undefined);
<del> assert.strictEqual(process.getegid, undefined);
<add>if (common.isWindows || !common.isMainThread) {
<add> if (common.isMainThread) {
<add> assert.strictEqual(process.geteuid, undefined);
<add> assert.strictEqual(process.getegid, undefined);
<add> }
<ide> assert.strictEqual(process.seteuid, undefined);
<ide> assert.strictEqual(process.setegid, undefined);
<ide> return;
<ide><path>test/parallel/test-process-exit-handler.js
<ide> 'use strict';
<del>require('../common');
<add>const common = require('../common');
<add>
<add>if (!common.isMainThread)
<add> common.skip('execArgv does not affect Workers');
<ide>
<ide> // This test ensures that no asynchronous operations are performed in the 'exit'
<ide> // handler.
<ide> // https://github.com/nodejs/node/issues/12322
<ide>
<ide> process.on('exit', () => {
<del> setTimeout(process.abort, 0); // Should not run.
<add> setTimeout(() => process.abort(), 0); // Should not run.
<ide> for (const start = Date.now(); Date.now() - start < 10;);
<ide> });
<ide><path>test/parallel/test-process-fatal-exception-tick.js
<ide> const common = require('../common');
<ide> const assert = require('assert');
<ide>
<add>if (!common.isMainThread)
<add> common.skip('Error handling timing is different in Workers');
<add>
<ide> // If a process encounters an uncaughtException, it should schedule
<ide> // processing of nextTicks on the next Immediates cycle but not
<ide> // before all Immediates are handled
<ide><path>test/parallel/test-process-uid-gid.js
<ide> const common = require('../common');
<ide>
<ide> const assert = require('assert');
<ide>
<del>if (common.isWindows) {
<del> // uid/gid functions are POSIX only
<del> assert.strictEqual(process.getuid, undefined);
<add>if (common.isWindows || !common.isMainThread) {
<add> // uid/gid functions are POSIX only, setters are main-thread only.
<add> if (common.isMainThread) {
<add> assert.strictEqual(process.getuid, undefined);
<add> assert.strictEqual(process.getgid, undefined);
<add> }
<ide> assert.strictEqual(process.setuid, undefined);
<del> assert.strictEqual(process.getgid, undefined);
<ide> assert.strictEqual(process.setgid, undefined);
<ide> return;
<ide> }
<ide><path>test/parallel/test-process-umask-mask.js
<ide> const common = require('../common');
<ide> const assert = require('assert');
<ide>
<add>if (!common.isMainThread)
<add> common.skip('process.umask is not available in Workers');
<add>
<ide> let mask;
<ide>
<ide> if (common.isWindows) {
<ide><path>test/parallel/test-process-umask.js
<ide> 'use strict';
<ide> const common = require('../common');
<ide> const assert = require('assert');
<add>if (!common.isMainThread)
<add> common.skip('process.umask is not available in Workers');
<ide>
<ide> // Note in Windows one can only set the "user" bits.
<ide> let mask;
<ide><path>test/parallel/test-repl-require.js
<ide> const fixtures = require('../common/fixtures');
<ide> const assert = require('assert');
<ide> const net = require('net');
<ide>
<add>if (!common.isMainThread)
<add> common.skip('process.chdir is not available in Workers');
<add>
<ide> process.chdir(fixtures.fixturesDir);
<ide> const repl = require('repl');
<ide>
<ide><path>test/parallel/test-repl-sigint-nested-eval.js
<ide> if (common.isWindows) {
<ide> // No way to send CTRL_C_EVENT to processes from JS right now.
<ide> common.skip('platform not supported');
<ide> }
<add>if (!common.isMainThread)
<add> common.skip('No signal handling available in Workers');
<ide>
<ide> const assert = require('assert');
<ide> const spawn = require('child_process').spawn;
<ide><path>test/parallel/test-repl-sigint.js
<ide> if (common.isWindows) {
<ide> // No way to send CTRL_C_EVENT to processes from JS right now.
<ide> common.skip('platform not supported');
<ide> }
<add>if (!common.isMainThread)
<add> common.skip('No signal handling available in Workers');
<ide>
<ide> const assert = require('assert');
<ide> const spawn = require('child_process').spawn;
<ide><path>test/parallel/test-repl-tab-complete.js
<ide> const assert = require('assert');
<ide> const fixtures = require('../common/fixtures');
<ide> const hasInspector = process.config.variables.v8_enable_inspector === 1;
<ide>
<add>if (!common.isMainThread)
<add> common.skip('process.chdir is not available in Workers');
<add>
<ide> // We have to change the directory to ../fixtures before requiring repl
<ide> // in order to make the tests for completion of node_modules work properly
<ide> // since repl modifies module.paths.
<ide><path>test/parallel/test-require-symlink.js
<ide> const common = require('../common');
<ide>
<ide> if (!common.canCreateSymLink())
<ide> common.skip('insufficient privileges');
<add>if (!common.isMainThread)
<add> common.skip('process.chdir is not available in Workers');
<ide>
<ide> const assert = require('assert');
<ide> const { spawn } = require('child_process');
<ide><path>test/parallel/test-setproctitle.js
<ide> const common = require('../common');
<ide> // FIXME add sunos support
<ide> if (common.isSunOS)
<ide> common.skip(`Unsupported platform [${process.platform}]`);
<add>if (!common.isMainThread)
<add> common.skip('Setting the process title from Workers is not supported');
<ide>
<ide> const assert = require('assert');
<ide> const exec = require('child_process').exec;
<ide><path>test/parallel/test-signal-args.js
<ide> const common = require('../common');
<ide> const assert = require('assert');
<ide>
<del>if (common.isWindows) {
<add>if (common.isWindows)
<ide> common.skip('Sending signals with process.kill is not supported on Windows');
<del>}
<add>if (!common.isMainThread)
<add> common.skip('No signal handling available in Workers');
<ide>
<ide> process.once('SIGINT', common.mustCall((signal) => {
<ide> assert.strictEqual(signal, 'SIGINT');
<ide><path>test/parallel/test-signal-handler.js
<ide> const common = require('../common');
<ide>
<ide> if (common.isWindows)
<ide> common.skip('SIGUSR1 and SIGHUP signals are not supported');
<add>if (!common.isMainThread)
<add> common.skip('Signal handling in Workers is not supported');
<ide>
<ide> console.log(`process.pid: ${process.pid}`);
<ide>
<ide><path>test/parallel/test-stdio-pipe-access.js
<ide> 'use strict';
<del>require('../common');
<add>const common = require('../common');
<add>if (!common.isMainThread)
<add> common.skip('Workers don’t have process-like stdio');
<ide>
<ide> // Test if Node handles acessing process.stdin if it is a redirected
<ide> // pipe without deadlocking
<ide><path>test/parallel/test-stdio-pipe-redirect.js
<ide> 'use strict';
<del>require('../common');
<add>const common = require('../common');
<add>if (!common.isMainThread)
<add> common.skip('Workers don’t have process-like stdio');
<ide>
<ide> // Test if Node handles redirecting one child process stdout to another
<ide> // process stdin without crashing.
<ide><path>test/parallel/test-timers-immediate-unref-nested-once.js
<add>'use strict';
<add>
<add>const common = require('../common');
<add>
<add>// This immediate should not execute as it was unrefed
<add>// and nothing else is keeping the event loop alive
<add>setImmediate(() => {
<add> setImmediate(common.mustNotCall()).unref();
<add>});
<ide><path>test/parallel/test-timers-immediate-unref-simple.js
<ide>
<ide> const common = require('../common');
<ide>
<add>if (!common.isMainThread) {
<add> // Note that test-timers-immediate-unref-nested-once works instead.
<add> common.skip('Worker bootstrapping works differently -> different timing');
<add>}
<add>
<ide> // This immediate should not execute as it was unrefed
<ide> // and nothing else is keeping the event loop alive
<ide> setImmediate(common.mustNotCall()).unref();
<ide><path>test/parallel/test-trace-events-all.js
<ide> const assert = require('assert');
<ide> const cp = require('child_process');
<ide> const fs = require('fs');
<ide>
<add>if (!common.isMainThread)
<add> common.skip('process.chdir is not available in Workers');
<add>
<ide> const CODE =
<ide> 'setTimeout(() => { for (var i = 0; i < 100000; i++) { "test" + i } }, 1)';
<ide> const FILE_NAME = 'node_trace.1.log';
<ide><path>test/parallel/test-trace-events-api.js
<ide> const common = require('../common');
<ide>
<ide> if (!common.hasTracing)
<ide> common.skip('missing trace events');
<add>if (!common.isMainThread)
<add> common.skip('process.chdir is not available in Workers');
<ide>
<ide> const assert = require('assert');
<ide> const cp = require('child_process');
<ide><path>test/parallel/test-trace-events-async-hooks.js
<ide> const cp = require('child_process');
<ide> const fs = require('fs');
<ide> const util = require('util');
<ide>
<add>if (!common.isMainThread)
<add> common.skip('process.chdir is not available in Workers');
<add>
<ide> const CODE =
<ide> 'setTimeout(() => { for (var i = 0; i < 100000; i++) { "test" + i } }, 1)';
<ide> const FILE_NAME = 'node_trace.1.log';
<ide><path>test/parallel/test-trace-events-binding.js
<ide> const assert = require('assert');
<ide> const cp = require('child_process');
<ide> const fs = require('fs');
<ide>
<add>if (!common.isMainThread)
<add> common.skip('process.chdir is not available in Workers');
<add>
<ide> const CODE = `
<ide> process.binding("trace_events").emit(
<ide> 'b'.charCodeAt(0), 'custom',
<ide><path>test/parallel/test-trace-events-bootstrap.js
<ide> const path = require('path');
<ide> const fs = require('fs');
<ide> const tmpdir = require('../common/tmpdir');
<ide>
<add>if (!common.isMainThread)
<add> common.skip('process.chdir is not available in Workers');
<add>
<ide> const names = [
<ide> 'environment',
<ide> 'nodeStart',
<ide><path>test/parallel/test-trace-events-category-used.js
<ide> const common = require('../common');
<ide> const assert = require('assert');
<ide> const cp = require('child_process');
<ide>
<add>if (!common.isMainThread)
<add> common.skip('process.chdir is not available in Workers');
<add>
<ide> const CODE = `console.log(
<ide> process.binding("trace_events").categoryGroupEnabled("custom")
<ide> );`;
<ide><path>test/parallel/test-trace-events-file-pattern.js
<ide> const assert = require('assert');
<ide> const cp = require('child_process');
<ide> const fs = require('fs');
<ide>
<add>if (!common.isMainThread)
<add> common.skip('process.chdir is not available in Workers');
<add>
<ide> tmpdir.refresh();
<ide> process.chdir(tmpdir.path);
<ide>
<ide><path>test/parallel/test-trace-events-fs-sync.js
<ide> const assert = require('assert');
<ide> const cp = require('child_process');
<ide> const fs = require('fs');
<ide>
<add>if (!common.isMainThread)
<add> common.skip('process.chdir is not available in Workers');
<add>
<ide> const tests = new Array();
<ide> const traceFile = 'node_trace.1.log';
<ide>
<ide><path>test/parallel/test-trace-events-metadata.js
<ide> const assert = require('assert');
<ide> const cp = require('child_process');
<ide> const fs = require('fs');
<ide>
<add>if (!common.isMainThread)
<add> common.skip('process.chdir is not available in Workers');
<add>
<ide> const CODE =
<ide> 'setTimeout(() => { for (var i = 0; i < 100000; i++) { "test" + i } }, 1)';
<ide> const FILE_NAME = 'node_trace.1.log';
<ide><path>test/parallel/test-trace-events-none.js
<ide> const assert = require('assert');
<ide> const cp = require('child_process');
<ide> const fs = require('fs');
<ide>
<add>if (!common.isMainThread)
<add> common.skip('process.chdir is not available in Workers');
<add>
<ide> const CODE =
<ide> 'setTimeout(() => { for (var i = 0; i < 100000; i++) { "test" + i } }, 1)';
<ide> const FILE_NAME = 'node_trace.1.log';
<ide><path>test/parallel/test-trace-events-perf.js
<ide> const path = require('path');
<ide> const fs = require('fs');
<ide> const tmpdir = require('../common/tmpdir');
<ide>
<add>if (!common.isMainThread)
<add> common.skip('process.chdir is not available in Workers');
<add>
<ide> if (process.argv[2] === 'child') {
<ide> const { performance } = require('perf_hooks');
<ide>
<ide><path>test/parallel/test-trace-events-process-exit.js
<ide> const assert = require('assert');
<ide> const cp = require('child_process');
<ide> const fs = require('fs');
<ide>
<add>if (!common.isMainThread)
<add> common.skip('process.chdir is not available in Workers');
<add>
<ide> const tmpdir = require('../common/tmpdir');
<ide>
<ide> const FILE_NAME = 'node_trace.1.log';
<ide><path>test/parallel/test-trace-events-v8.js
<ide> const assert = require('assert');
<ide> const cp = require('child_process');
<ide> const fs = require('fs');
<ide>
<add>if (!common.isMainThread)
<add> common.skip('process.chdir is not available in Workers');
<add>
<ide> const CODE =
<ide> 'setTimeout(() => { for (var i = 0; i < 100000; i++) { "test" + i } }, 1)';
<ide> const FILE_NAME = 'node_trace.1.log';
<ide><path>test/parallel/test-trace-events-vm.js
<ide> const path = require('path');
<ide> const fs = require('fs');
<ide> const tmpdir = require('../common/tmpdir');
<ide>
<add>if (!common.isMainThread)
<add> common.skip('process.chdir is not available in Workers');
<add>
<ide> const names = [
<ide> 'ContextifyScript::New',
<ide> 'RunInThisContext',
<ide><path>test/sequential/test-async-wrap-getasyncid.js
<ide> common.crashOnUnhandledRejection();
<ide> delete providers.STREAMPIPE;
<ide> delete providers.MESSAGEPORT;
<ide> delete providers.WORKER;
<add> if (!common.isMainThread)
<add> delete providers.INSPECTORJSBINDING;
<ide>
<ide> const objKeys = Object.keys(providers);
<ide> if (objKeys.length > 0)
<ide> if (common.hasCrypto) { // eslint-disable-line node-core/crypto-check
<ide> testInitialized(req, 'SendWrap');
<ide> }
<ide>
<del>if (process.config.variables.v8_enable_inspector !== 0) {
<add>if (process.config.variables.v8_enable_inspector !== 0 &&
<add> common.isMainThread) {
<ide> const binding = process.binding('inspector');
<ide> const handle = new binding.Connection(() => {});
<ide> testInitialized(handle, 'Connection');
<ide><path>test/sequential/test-buffer-creation-regression.js
<ide> try {
<ide> } catch (e) {
<ide> if (e instanceof RangeError && acceptableOOMErrors.includes(e.message))
<ide> common.skip(`Unable to allocate ${size} bytes for ArrayBuffer`);
<del>
<ide> throw e;
<ide> }
<ide>
<ide><path>test/sequential/test-fs-watch.js
<ide> const path = require('path');
<ide>
<ide> const tmpdir = require('../common/tmpdir');
<ide>
<add>if (!common.isMainThread)
<add> common.skip('process.chdir is not available in Workers');
<add>
<ide> const expectFilePath = common.isWindows ||
<ide> common.isLinux ||
<ide> common.isOSX ||
<ide><path>test/sequential/test-init.js
<ide> const assert = require('assert');
<ide> const child = require('child_process');
<ide> const fixtures = require('../common/fixtures');
<ide>
<add>if (!common.isMainThread)
<add> common.skip('process.chdir is not available in Workers');
<add>
<ide> if (process.env.TEST_INIT) {
<ide> return process.stdout.write('Loaded successfully!');
<ide> }
<ide><path>test/sequential/test-inspector-overwrite-config.js
<ide>
<ide> const common = require('../common');
<ide> const assert = require('assert');
<add>
<add>if (!common.isMainThread)
<add> common.skip('--require does not work with Workers');
<add>
<ide> const inspector = require('inspector');
<ide> const msg = 'Test inspector logging';
<ide> let asserted = false;
<ide><path>test/sequential/test-performance.js
<ide> const common = require('../common');
<ide> const assert = require('assert');
<ide> const { performance } = require('perf_hooks');
<ide>
<add>if (!common.isMainThread)
<add> common.skip('bootstrapping workers works differently');
<add>
<ide> assert(performance);
<ide> assert(performance.nodeTiming);
<ide> assert.strictEqual(typeof performance.timeOrigin, 'number');
<ide><path>tools/run-worker.js
<add>'use strict';
<add>if (typeof require === 'undefined') {
<add> console.log('1..0 # Skipped: Not being run as CommonJS');
<add> process.exit(0);
<add>}
<add>
<add>const path = require('path');
<add>const { Worker } = require('worker');
<add>
<add>new Worker(path.resolve(process.cwd(), process.argv[2]))
<add> .on('exit', (code) => process.exitCode = code);
<ide><path>tools/test.py
<ide> def BuildOptions():
<ide> help="Expect test cases to fail", default=False, action="store_true")
<ide> result.add_option("--valgrind", help="Run tests through valgrind",
<ide> default=False, action="store_true")
<add> result.add_option("--worker", help="Run parallel tests inside a worker context",
<add> default=False, action="store_true")
<ide> result.add_option("--check-deopts", help="Check tests for permanent deoptimizations",
<ide> default=False, action="store_true")
<ide> result.add_option("--cat", help="Print the source of the tests",
<ide> def Main():
<ide> options.node_args.append("--always-opt")
<ide> options.progress = "deopts"
<ide>
<add> if options.worker:
<add> run_worker = join(workspace, "tools", "run-worker.js")
<add> options.node_args.append('--experimental-worker')
<add> options.node_args.append(run_worker)
<add>
<ide> shell = abspath(options.shell)
<ide> buildspace = dirname(shell)
<ide> | 84 |
Ruby | Ruby | add optional junit support | b1f00c57573bcb852f97cdbd863ba664635e9ab2 | <ide><path>Library/Contributions/cmd/brew-test-bot.rb
<ide> # --keep-logs: Write and keep log files under ./brewbot/
<ide> # --cleanup: Clean the Homebrew directory. Very dangerous. Use with care.
<ide> # --skip-setup: Don't check the local system is setup correctly.
<add># --junit: Generate a JUnit XML test results file.
<ide>
<ide> require 'formula'
<ide> require 'utils'
<ide> require 'date'
<add>require 'erb'
<ide>
<ide> HOMEBREW_CONTRIBUTED_CMDS = HOMEBREW_REPOSITORY + "Library/Contributions/cmd/"
<ide>
<ide> class Step
<del> attr_reader :command, :repository
<add> attr_reader :command, :repository, :name
<ide> attr_accessor :status
<ide>
<ide> def initialize test, command
<ide> def check_results
<ide> status == :passed
<ide> end
<ide>
<del> def self.run argument
<del> test = new argument
<del> test.cleanup_before
<del> test.download
<del> test.setup unless ARGV.include? "--skip-setup"
<del> test.formulae.each do |formula|
<del> test.formula formula
<add> def run
<add> cleanup_before
<add> download
<add> setup unless ARGV.include? "--skip-setup"
<add> formulae.each do |f|
<add> formula(f)
<ide> end
<del> test.homebrew if test.core_changed
<del> test.cleanup_after
<del> test.check_results
<add> homebrew if core_changed
<add> cleanup_after
<add> check_results
<ide> end
<ide> end
<ide>
<ide> if Pathname.pwd == HOMEBREW_PREFIX and ARGV.include? "--cleanup"
<ide> odie 'cannot use --cleanup from HOMEBREW_PREFIX as it will delete all output.'
<ide> end
<ide>
<add>tests = []
<ide> any_errors = false
<ide> if ARGV.named.empty?
<ide> # With no arguments just build the most recent commit.
<del> any_errors = Test.run 'HEAD'
<add> test = Test.new('HEAD')
<add> any_errors = test.run
<add> tests << test
<ide> else
<del> ARGV.named.each { |argument| any_errors = Test.run(argument) or any_errors }
<add> ARGV.named.each do |argument|
<add> test = Test.new(argument)
<add> any_errors = test.run or any_errors
<add> tests << test
<add> end
<ide> end
<add>
<add>if ARGV.include? "--junit"
<add> xml_erb = HOMEBREW_CONTRIBUTED_CMDS + "brew-test-bot.xml.erb"
<add> erb = ERB.new IO.read xml_erb
<add> open("brew-test-bot.xml", "w") do |xml|
<add> xml.write erb.result binding
<add> end
<add>end
<add>
<ide> exit any_errors ? 0 : 1 | 1 |
Ruby | Ruby | use transform_values in a few more places | 7af59e16a255da5aee2863af0eefaa9f0cfd857d | <ide><path>activemodel/lib/active_model/errors.rb
<ide> def as_json(options = nil)
<ide> # person.errors.to_hash # => {:name=>["cannot be nil"]}
<ide> # person.errors.to_hash(true) # => {:name=>["name cannot be nil"]}
<ide> def to_hash(full_messages = false)
<del> hash = {}
<ide> message_method = full_messages ? :full_message : :message
<del> group_by_attribute.each do |attribute, errors|
<del> hash[attribute] = errors.map(&message_method)
<add> group_by_attribute.transform_values do |errors|
<add> errors.map(&message_method)
<ide> end
<del> hash
<ide> end
<ide>
<ide> def to_h
<ide> def messages
<ide> # Updating this hash would still update errors state for backward
<ide> # compatibility, but this behavior is deprecated.
<ide> def details
<del> hash = {}
<del> group_by_attribute.each do |attribute, errors|
<del> hash[attribute] = errors.map(&:detail)
<add> hash = group_by_attribute.transform_values do |errors|
<add> errors.map(&:detail)
<ide> end
<ide> DeprecationHandlingDetailsHash.new(hash)
<ide> end
<ide><path>activesupport/lib/active_support/json/decoding.rb
<ide> def convert_dates_from(data)
<ide> when Array
<ide> data.map! { |d| convert_dates_from(d) }
<ide> when Hash
<del> data.each do |key, value|
<del> data[key] = convert_dates_from(value)
<add> data.transform_values! do |value|
<add> convert_dates_from(value)
<ide> end
<ide> else
<ide> data | 2 |
Text | Text | make some translation improvements | 6bfabcb748f464666ea7f366faf9f2431b1e865c | <ide><path>guide/russian/swift/index.md
<ide> ---
<ide> title: Swift
<del>localeTitle: стриж
<add>localeTitle: Swift
<ide> ---
<ide> 
<ide>
<del># Что такое Свифт?
<add># Что такое Swift?
<ide>
<ide> Swift - это [открытый](https://en.wikipedia.org/wiki/Open-source_software) язык программирования общего назначения, разработанный Apple Inc. Они описывают его как:
<ide>
<del>> Swift - мощный и интуитивно понятный язык программирования для macOS, iOS, watchOS и tvOS. Написание кода Swift является интерактивным и забавным, синтаксис является сжатым, но выразительным, а Swift включает в себя современные функции, которые разработчики любят. Быстрый код безопасен по дизайну, но также производит программное обеспечение, которое работает молниеносно. 1
<add>> Swift - мощный и интуитивно понятный язык программирования для macOS, iOS, watchOS и tvOS. Написание кода на Swift является интерактивным и забавным, синтаксис является сжатым, но выразительным, а Swift включает в себя современные функции, которые любят разработчики. Быстрый код безопасен по дизайну, но также производит программное обеспечение, которое работает молниеносно. 1
<ide>
<ide> Хотите попробовать Swift прямо сейчас? [Repl.it](https://repl.it/languages/swift) предлагает онлайн-цикл Read-Eval-Print для Swift. У вас не будет доступа к UIKit или другим API, которые обычно используются, но дайте ему шанс!
<ide>
<del># основы
<add># Основы
<ide>
<ide> Чтобы объявить переменную в Swift, просто используйте var, за которым следует имя вашей переменной.
<ide>
<ide> let x = 6
<ide> let boole = true
<ide> ```
<ide>
<del>Чтобы распечатать что-либо на стандартный вывод, просто используйте print () и поместите свой вывод в круглые скобки.
<add>Чтобы вывести что-либо стандартным методом, просто используйте print() и поместите свой вывод в круглые скобки.
<ide>
<ide> ```Swift
<ide> let x = "World"
<ide> let x = "World"
<ide>
<ide> # Документация
<ide>
<del>Свифт сильно задокументирован. Имейте в виду, что кодирование Swift включает не только используя язык, но также и многие API. Лучший способ узнать Swift - сделать проекта или приложения, независимо от того, насколько малы!
<add>Свифт сильно задокументирован. Имейте в виду, что программирование на Swift включает не только сам язык, но также и многие API. Лучший способ узнать Swift - сделать проект или приложение, независимо от того, насколько они будут малы!
<ide>
<ide> * [Исходный код](https://github.com/apple/swift)
<ide>
<del>* [Разработка приложений iOS (Swift)](https://developer.apple.com/library/content/referencelibrary/GettingStarted/DevelopiOSAppsSwift/) : хотите сделать iOS Apps? Это отличное место для начала.
<add>* [Разработка приложений iOS (Swift)](https://developer.apple.com/library/content/referencelibrary/GettingStarted/DevelopiOSAppsSwift/) : хотите сделать приложение для iOS? Это отличное место для начала.
<ide>
<ide> * [Language Guide](https://developer.apple.com/library/content/documentation/Swift/Conceptual/Swift_Programming_Language/) : содержит обзор практически каждой функции в Swift. Если вы запутались при чтении чужого кода, этот документ может вам помочь.
<ide>
<ide>
<ide> # Хотите узнать больше?
<ide>
<ide> * [RayWenderlich.com](https://www.raywenderlich.com/) : Есть много отличных учебников для Swift и iOS.
<add>
<ide> * [Взлом с Swift](https://www.hackingwithswift.com/read) : полный учебник Swift, который позволяет вам начинать до продвинутых, используя практические проекты.
<ide>
<del>### источники
<add>### Источники
<ide>
<del>1. «Swift 4 - мощный язык программирования, который также легко усваивается». Apple, [developer.apple.com/swift](https://developer.apple.com/swift/) , доступ к 31 октября 2017 года.
<ide>\ No newline at end of file
<add>1. «Swift 4 - мощный язык программирования, который также легко усваивается». Apple, [developer.apple.com/swift](https://developer.apple.com/swift/). | 1 |
Python | Python | change paramsdict to a mutablemapping subclass | 4ef1f9a0461c44e6de563386abfb6f8c2314dbb1 | <ide><path>airflow/models/param.py
<ide> # KIND, either express or implied. See the License for the
<ide> # specific language governing permissions and limitations
<ide> # under the License.
<del>from typing import Any, Dict, Optional
<add>from typing import Any, Dict, ItemsView, MutableMapping, Optional, ValuesView
<ide>
<ide> import jsonschema
<ide> from jsonschema import FormatChecker
<ide> class Param:
<ide> __NO_VALUE_SENTINEL = NoValueSentinel()
<ide> CLASS_IDENTIFIER = '__class'
<ide>
<del> def __init__(self, default: Any = __NO_VALUE_SENTINEL, description: str = None, **kwargs):
<add> def __init__(self, default: Any = __NO_VALUE_SENTINEL, description: Optional[str] = None, **kwargs):
<ide> self.value = default
<ide> self.description = description
<ide> self.schema = kwargs.pop('schema') if 'schema' in kwargs else kwargs
<ide> def has_value(self) -> bool:
<ide> return not isinstance(self.value, NoValueSentinel)
<ide>
<ide>
<del>class ParamsDict(dict):
<add>class ParamsDict(MutableMapping[str, Any]):
<ide> """
<ide> Class to hold all params for dags or tasks. All the keys are strictly string and values
<ide> are converted into Param's object if they are not already. This class is to replace param's
<ide> dictionary implicitly and ideally not needed to be used directly.
<ide> """
<ide>
<add> __slots__ = ['__dict', 'suppress_exception']
<add>
<ide> def __init__(self, dict_obj: Optional[Dict] = None, suppress_exception: bool = False):
<ide> """
<del> Init override for ParamsDict
<ide> :param dict_obj: A dict or dict like object to init ParamsDict
<ide> :type dict_obj: Optional[dict]
<ide> :param suppress_exception: Flag to suppress value exceptions while initializing the ParamsDict
<ide> def __init__(self, dict_obj: Optional[Dict] = None, suppress_exception: bool = F
<ide> params_dict[k] = Param(v)
<ide> else:
<ide> params_dict[k] = v
<del> super().__init__(params_dict)
<add> self.__dict = params_dict
<ide> self.suppress_exception = suppress_exception
<ide>
<add> def __contains__(self, o: object) -> bool:
<add> return o in self.__dict
<add>
<add> def __len__(self) -> int:
<add> return len(self.__dict)
<add>
<add> def __delitem__(self, v: str) -> None:
<add> del self.__dict[v]
<add>
<add> def __iter__(self):
<add> return iter(self.__dict)
<add>
<ide> def __setitem__(self, key: str, value: Any) -> None:
<ide> """
<ide> Override for dictionary's ``setitem`` method. This method make sure that all values are of
<ide> def __setitem__(self, key: str, value: Any) -> None:
<ide> """
<ide> if isinstance(value, Param):
<ide> param = value
<del> elif key in self:
<del> param = dict.__getitem__(self, key)
<del> param.resolve(value=value, suppress_exception=self.suppress_exception)
<add> elif key in self.__dict:
<add> param = self.__dict[key]
<add> try:
<add> param.resolve(value=value, suppress_exception=self.suppress_exception)
<add> except ValueError as ve:
<add> raise ValueError(f'Invalid input for param {key}: {ve}') from None
<ide> else:
<ide> # if the key isn't there already and if the value isn't of Param type create a new Param object
<ide> param = Param(value)
<ide>
<del> super().__setitem__(key, param)
<add> self.__dict[key] = param
<ide>
<ide> def __getitem__(self, key: str) -> Any:
<ide> """
<ide> def __getitem__(self, key: str) -> Any:
<ide> :param key: The key to fetch
<ide> :type key: str
<ide> """
<del> param = super().__getitem__(key)
<add> param = self.__dict[key]
<ide> return param.resolve(suppress_exception=self.suppress_exception)
<ide>
<add> def get_param(self, key: str) -> Param:
<add> """Get the internal :class:`.Param` object for this key"""
<add> return self.__dict[key]
<add>
<add> def items(self):
<add> return ItemsView(self.__dict)
<add>
<add> def values(self):
<add> return ValuesView(self.__dict)
<add>
<add> def update(self, *args, **kwargs) -> None:
<add> if len(args) == 1 and not kwargs and isinstance(args[0], ParamsDict):
<add> return super().update(args[0].__dict)
<add> super().update(*args, **kwargs)
<add>
<ide> def dump(self) -> dict:
<ide> """Dumps the ParamsDict object as a dictionary, while suppressing exceptions"""
<ide> return {k: v.resolve(suppress_exception=True) for k, v in self.items()}
<ide>
<del> def update(self, other_dict: dict) -> None:
<del> """
<del> Override for dictionary's update method.
<del> :param other_dict: A dict type object which needs to be merged in the ParamsDict object
<del> :type other_dict: dict
<del> """
<del> try:
<del> for k, v in other_dict.items():
<del> self.__setitem__(k, v)
<del> except ValueError as ve:
<del> raise ValueError(f'Invalid input for param {k}: {ve}') from None
<del>
<ide> def validate(self) -> dict:
<ide> """Validates & returns all the Params object stored in the dictionary"""
<ide> resolved_dict = {}
<ide> try:
<del> for k, v in dict.items(self):
<add> for k, v in self.items():
<ide> resolved_dict[k] = v.resolve(suppress_exception=self.suppress_exception)
<ide> except ValueError as ve:
<ide> raise ValueError(f'Invalid input for param {k}: {ve}') from None
<ide><path>tests/models/test_dag.py
<ide> from airflow.models import DAG, DagModel, DagRun, DagTag, TaskFail, TaskInstance as TI
<ide> from airflow.models.baseoperator import BaseOperator
<ide> from airflow.models.dag import dag as dag_decorator
<del>from airflow.models.param import DagParam, Param
<add>from airflow.models.param import DagParam, Param, ParamsDict
<ide> from airflow.operators.bash import BashOperator
<ide> from airflow.operators.dummy import DummyOperator
<ide> from airflow.operators.subdag import SubDagOperator
<ide> def test_params_not_passed_is_empty_dict(self):
<ide> """
<ide> dag = models.DAG('test-dag')
<ide>
<del> assert isinstance(dag.params, dict)
<add> assert isinstance(dag.params, ParamsDict)
<ide> assert 0 == len(dag.params)
<ide>
<ide> def test_params_passed_and_params_in_default_args_no_override(self):
<ide><path>tests/models/test_param.py
<ide> def test_dump(self):
<ide> assert dump['schema'] == {'type': 'string', 'minLength': 2}
<ide>
<ide>
<del>class TestParamsDict(unittest.TestCase):
<add>class TestParamsDict:
<ide> def test_params_dict(self):
<ide> # Init with a simple dictionary
<ide> pd = ParamsDict(dict_obj={'key': 'value'})
<del> assert pd.get('key').__class__ == Param
<add> assert isinstance(pd.get_param('key'), Param)
<ide> assert pd['key'] == 'value'
<ide> assert pd.suppress_exception is False
<ide>
<ide> # Init with a dict which contains Param objects
<ide> pd2 = ParamsDict({'key': Param('value', type='string')}, suppress_exception=True)
<del> assert pd2.get('key').__class__ == Param
<add> assert isinstance(pd2.get_param('key'), Param)
<ide> assert pd2['key'] == 'value'
<ide> assert pd2.suppress_exception is True
<ide>
<ide> # Init with another object of another ParamsDict
<ide> pd3 = ParamsDict(pd2)
<del> assert pd3.get('key').__class__ == Param
<add> assert isinstance(pd3.get_param('key'), Param)
<ide> assert pd3['key'] == 'value'
<ide> assert pd3.suppress_exception is False # as it's not a deepcopy of pd2
<ide>
<ide> def test_params_dict(self):
<ide> assert pd3.dump() == {'key': 'value'}
<ide>
<ide> # Validate the ParamsDict
<del> pd.validate()
<add> plain_dict = pd.validate()
<add> assert type(plain_dict) == dict
<ide> pd2.validate()
<ide> pd3.validate()
<ide>
<ide> # Update the ParamsDict
<del> with pytest.raises(ValueError):
<add> with pytest.raises(ValueError, match=r'Invalid input for param key: 1 is not'):
<ide> pd3['key'] = 1
<ide>
<ide> # Should not raise an error as suppress_exception is True
<ide> pd2['key'] = 1
<ide> pd2.validate()
<ide>
<add> def test_update(self):
<add> pd = ParamsDict({'key': Param('value', type='string')})
<add>
<add> pd.update({'key': 'a'})
<add> internal_value = pd.get_param('key')
<add> assert isinstance(internal_value, Param)
<add> with pytest.raises(ValueError, match=r'Invalid input for param key: 1 is not'):
<add> pd.update({'key': 1})
<add>
<ide>
<ide> class TestDagParamRuntime:
<ide> VALUE = 42
<ide><path>tests/serialization/test_dag_serialization.py
<ide> def test_full_param_roundtrip(self, param):
<ide> dag = SerializedDAG.from_dict(serialized)
<ide>
<ide> assert dag.params["my_param"] == param.value
<del> observed_param = dict.get(dag.params, 'my_param')
<add> observed_param = dag.params.get_param('my_param')
<ide> assert isinstance(observed_param, Param)
<ide> assert observed_param.description == param.description
<ide> assert observed_param.schema == param.schema
<ide> def test_params_upgrade(self):
<ide> dag = SerializedDAG.from_dict(serialized)
<ide>
<ide> assert dag.params["none"] is None
<del> assert isinstance(dict.__getitem__(dag.params, "none"), Param)
<add> assert isinstance(dag.params.get_param("none"), Param)
<ide> assert dag.params["str"] == "str"
<ide>
<ide> def test_params_serialize_default_2_2_0(self):
<ide> def test_params_serialize_default_2_2_0(self):
<ide> SerializedDAG.validate_schema(serialized)
<ide> dag = SerializedDAG.from_dict(serialized)
<ide>
<del> assert isinstance(dict.__getitem__(dag.params, "str"), Param)
<add> assert isinstance(dag.params.get_param("str"), Param)
<ide> assert dag.params["str"] == "str"
<ide>
<ide> def test_params_serialize_default(self):
<ide> def test_params_serialize_default(self):
<ide> dag = SerializedDAG.from_dict(serialized)
<ide>
<ide> assert dag.params["my_param"] == "a string value"
<del> param = dict.get(dag.params, 'my_param')
<add> param = dag.params.get_param('my_param')
<ide> assert isinstance(param, Param)
<ide> assert param.description == 'hello'
<ide> assert param.schema == {'type': 'string'} | 4 |
Javascript | Javascript | add compat trailers, adjust multi-headers | 641646463ddac7f306730a216675428e2bbe4dbf | <ide><path>lib/internal/http2/compat.js
<ide> const kStream = Symbol('stream');
<ide> const kRequest = Symbol('request');
<ide> const kResponse = Symbol('response');
<ide> const kHeaders = Symbol('headers');
<add>const kRawHeaders = Symbol('rawHeaders');
<ide> const kTrailers = Symbol('trailers');
<add>const kRawTrailers = Symbol('rawTrailers');
<ide>
<ide> let statusMessageWarned = false;
<ide>
<ide> function isPseudoHeader(name) {
<ide> }
<ide> }
<ide>
<add>function statusMessageWarn() {
<add> if (statusMessageWarned === false) {
<add> process.emitWarning(
<add> 'Status message is not supported by HTTP/2 (RFC7540 8.1.2.4)',
<add> 'UnsupportedWarning'
<add> );
<add> statusMessageWarned = true;
<add> }
<add>}
<add>
<ide> function onStreamData(chunk) {
<ide> const request = this[kRequest];
<ide> if (!request.push(chunk))
<ide> this.pause();
<ide> }
<ide>
<add>function onStreamTrailers(trailers, flags, rawTrailers) {
<add> const request = this[kRequest];
<add> Object.assign(request[kTrailers], trailers);
<add> request[kRawTrailers].push(...rawTrailers);
<add>}
<add>
<ide> function onStreamEnd() {
<ide> // Cause the request stream to end as well.
<ide> const request = this[kRequest];
<ide> function onAborted(hadError, code) {
<ide> }
<ide>
<ide> class Http2ServerRequest extends Readable {
<del> constructor(stream, headers, options) {
<add> constructor(stream, headers, options, rawHeaders) {
<ide> super(options);
<ide> this[kState] = {
<ide> statusCode: null,
<ide> closed: false,
<ide> closedCode: constants.NGHTTP2_NO_ERROR
<ide> };
<ide> this[kHeaders] = headers;
<add> this[kRawHeaders] = rawHeaders;
<add> this[kTrailers] = {};
<add> this[kRawTrailers] = [];
<ide> this[kStream] = stream;
<ide> stream[kRequest] = this;
<ide>
<ide> // Pause the stream..
<ide> stream.pause();
<ide> stream.on('data', onStreamData);
<add> stream.on('trailers', onStreamTrailers);
<ide> stream.on('end', onStreamEnd);
<ide> stream.on('error', onStreamError);
<ide> stream.on('close', onStreamClosedRequest);
<ide> class Http2ServerRequest extends Readable {
<ide> }
<ide>
<ide> get rawHeaders() {
<del> const headers = this[kHeaders];
<del> if (headers === undefined)
<del> return [];
<del> const tuples = Object.entries(headers);
<del> const flattened = Array.prototype.concat.apply([], tuples);
<del> return flattened.map(String);
<add> return this[kRawHeaders];
<ide> }
<ide>
<ide> get trailers() {
<ide> return this[kTrailers];
<ide> }
<ide>
<add> get rawTrailers() {
<add> return this[kRawTrailers];
<add> }
<add>
<ide> get httpVersionMajor() {
<ide> return 2;
<ide> }
<ide> class Http2ServerResponse extends Stream {
<ide> }
<ide>
<ide> get statusMessage() {
<del> if (statusMessageWarned === false) {
<del> process.emitWarning(
<del> 'Status message is not supported by HTTP/2 (RFC7540 8.1.2.4)',
<del> 'UnsupportedWarning'
<del> );
<del> statusMessageWarned = true;
<del> }
<add> statusMessageWarn();
<ide>
<ide> return '';
<ide> }
<ide>
<add> set statusMessage(msg) {
<add> statusMessageWarn();
<add> }
<add>
<ide> flushHeaders() {
<ide> if (this[kStream].headersSent === false)
<ide> this[kBeginSend]();
<ide> }
<ide>
<ide> writeHead(statusCode, statusMessage, headers) {
<del> if (typeof statusMessage === 'string' && statusMessageWarned === false) {
<del> process.emitWarning(
<del> 'Status message is not supported by HTTP/2 (RFC7540 8.1.2.4)',
<del> 'UnsupportedWarning'
<del> );
<del> statusMessageWarned = true;
<add> if (typeof statusMessage === 'string') {
<add> statusMessageWarn();
<ide> }
<add>
<ide> if (headers === undefined && typeof statusMessage === 'object') {
<ide> headers = statusMessage;
<ide> }
<ide> class Http2ServerResponse extends Stream {
<ide> }
<ide> }
<ide>
<del>function onServerStream(stream, headers, flags) {
<add>function onServerStream(stream, headers, flags, rawHeaders) {
<ide> const server = this;
<del> const request = new Http2ServerRequest(stream, headers);
<add> const request = new Http2ServerRequest(stream, headers, undefined,
<add> rawHeaders);
<ide> const response = new Http2ServerResponse(stream);
<ide>
<ide> // Check for the CONNECT method
<ide><path>lib/internal/http2/core.js
<ide> function onSessionHeaders(id, cat, flags, headers) {
<ide> 'report this as a bug in Node.js');
<ide> }
<ide> streams.set(id, stream);
<del> process.nextTick(emit.bind(owner, 'stream', stream, obj, flags));
<add> process.nextTick(emit.bind(owner, 'stream', stream, obj, flags, headers));
<ide> } else {
<ide> let event;
<ide> let status;
<ide> function onSessionHeaders(id, cat, flags, headers) {
<ide> 'report this as a bug in Node.js');
<ide> }
<ide> debug(`[${sessionName(owner[kType])}] emitting stream '${event}' event`);
<del> process.nextTick(emit.bind(stream, event, obj, flags));
<add> process.nextTick(emit.bind(stream, event, obj, flags, headers));
<add> }
<add> if (endOfStream) {
<add> stream.push(null);
<ide> }
<ide> }
<ide>
<ide> function socketOnTimeout() {
<ide>
<ide> // Handles the on('stream') event for a session and forwards
<ide> // it on to the server object.
<del>function sessionOnStream(stream, headers, flags) {
<add>function sessionOnStream(stream, headers, flags, rawHeaders) {
<ide> debug(`[${sessionName(this[kType])}] emit server stream event`);
<del> this[kServer].emit('stream', stream, headers, flags);
<add> this[kServer].emit('stream', stream, headers, flags, rawHeaders);
<ide> }
<ide>
<ide> function sessionOnPriority(stream, parent, weight, exclusive) {
<ide><path>lib/internal/http2/util.js
<ide> const {
<ide> HTTP2_HEADER_RANGE,
<ide> HTTP2_HEADER_REFERER,
<ide> HTTP2_HEADER_RETRY_AFTER,
<add> HTTP2_HEADER_SET_COOKIE,
<ide> HTTP2_HEADER_USER_AGENT,
<ide>
<ide> HTTP2_HEADER_CONNECTION,
<ide> function toHeaderObject(headers) {
<ide> if (existing === undefined) {
<ide> obj[name] = value;
<ide> } else if (!kSingleValueHeaders.has(name)) {
<del> if (name === HTTP2_HEADER_COOKIE) {
<del> // https://tools.ietf.org/html/rfc7540#section-8.1.2.5
<del> // "...If there are multiple Cookie header fields after decompression,
<del> // these MUST be concatenated into a single octet string using the
<del> // two-octet delimiter of 0x3B, 0x20 (the ASCII string "; ") before
<del> // being passed into a non-HTTP/2 context."
<del> obj[name] = `${existing}; ${value}`;
<del> } else {
<del> if (Array.isArray(existing))
<del> existing.push(value);
<del> else
<del> obj[name] = [existing, value];
<add> switch (name) {
<add> case HTTP2_HEADER_COOKIE:
<add> // https://tools.ietf.org/html/rfc7540#section-8.1.2.5
<add> // "...If there are multiple Cookie header fields after decompression,
<add> // these MUST be concatenated into a single octet string using the
<add> // two-octet delimiter of 0x3B, 0x20 (the ASCII string "; ") before
<add> // being passed into a non-HTTP/2 context."
<add> obj[name] = `${existing}; ${value}`;
<add> break;
<add> case HTTP2_HEADER_SET_COOKIE:
<add> // https://tools.ietf.org/html/rfc7230#section-3.2.2
<add> // "Note: In practice, the "Set-Cookie" header field ([RFC6265]) often
<add> // appears multiple times in a response message and does not use the
<add> // list syntax, violating the above requirements on multiple header
<add> // fields with the same name. Since it cannot be combined into a
<add> // single field-value, recipients ought to handle "Set-Cookie" as a
<add> // special case while processing header fields."
<add> if (Array.isArray(existing))
<add> existing.push(value);
<add> else
<add> obj[name] = [existing, value];
<add> break;
<add> default:
<add> // https://tools.ietf.org/html/rfc7230#section-3.2.2
<add> // "A recipient MAY combine multiple header fields with the same field
<add> // name into one "field-name: field-value" pair, without changing the
<add> // semantics of the message, by appending each subsequent field value
<add> // to the combined field value in order, separated by a comma."
<add> obj[name] = `${existing}, ${value}`;
<add> break;
<ide> }
<ide> }
<ide> }
<ide><path>test/parallel/test-http2-compat-serverrequest-end.js
<add>// Flags: --expose-http2
<add>'use strict';
<add>
<add>const common = require('../common');
<add>if (!common.hasCrypto)
<add> common.skip('missing crypto');
<add>const h2 = require('http2');
<add>
<add>// Http2ServerRequest should always end readable stream
<add>// even on GET requests with no body
<add>
<add>const server = h2.createServer();
<add>server.listen(0, common.mustCall(function() {
<add> const port = server.address().port;
<add> server.once('request', common.mustCall(function(request, response) {
<add> request.on('data', () => {});
<add> request.on('end', common.mustCall(() => {
<add> response.on('finish', common.mustCall(function() {
<add> server.close();
<add> }));
<add> response.end();
<add> }));
<add> }));
<add>
<add> const url = `http://localhost:${port}`;
<add> const client = h2.connect(url, common.mustCall(function() {
<add> const headers = {
<add> ':path': '/foobar',
<add> ':method': 'GET',
<add> ':scheme': 'http',
<add> ':authority': `localhost:${port}`
<add> };
<add> const request = client.request(headers);
<add> request.resume();
<add> request.on('end', common.mustCall(function() {
<add> client.destroy();
<add> }));
<add> request.end();
<add> }));
<add>}));
<ide><path>test/parallel/test-http2-compat-serverrequest-trailers.js
<add>// Flags: --expose-http2
<add>'use strict';
<add>
<add>const common = require('../common');
<add>if (!common.hasCrypto)
<add> common.skip('missing crypto');
<add>const assert = require('assert');
<add>const h2 = require('http2');
<add>
<add>// Http2ServerRequest should have getter for trailers & rawTrailers
<add>
<add>const expectedTrailers = {
<add> 'x-foo': 'xOxOxOx, OxOxOxO, xOxOxOx, OxOxOxO',
<add> 'x-foo-test': 'test, test'
<add>};
<add>
<add>const server = h2.createServer();
<add>server.listen(0, common.mustCall(function() {
<add> const port = server.address().port;
<add> server.once('request', common.mustCall(function(request, response) {
<add> let data = '';
<add> request.setEncoding('utf8');
<add> request.on('data', common.mustCall((chunk) => data += chunk));
<add> request.on('end', common.mustCall(() => {
<add> const trailers = request.trailers;
<add> for (const [name, value] of Object.entries(expectedTrailers)) {
<add> assert.strictEqual(trailers[name], value);
<add> }
<add> assert.deepStrictEqual([
<add> 'x-foo',
<add> 'xOxOxOx',
<add> 'x-foo',
<add> 'OxOxOxO',
<add> 'x-foo',
<add> 'xOxOxOx',
<add> 'x-foo',
<add> 'OxOxOxO',
<add> 'x-foo-test',
<add> 'test, test'
<add> ], request.rawTrailers);
<add> assert.strictEqual(data, 'test\ntest');
<add> response.end();
<add> }));
<add> }));
<add>
<add> const url = `http://localhost:${port}`;
<add> const client = h2.connect(url, common.mustCall(function() {
<add> const headers = {
<add> ':path': '/foobar',
<add> ':method': 'POST',
<add> ':scheme': 'http',
<add> ':authority': `localhost:${port}`
<add> };
<add> const request = client.request(headers, {
<add> getTrailers(trailers) {
<add> trailers['x-fOo'] = 'xOxOxOx';
<add> trailers['x-foO'] = 'OxOxOxO';
<add> trailers['X-fOo'] = 'xOxOxOx';
<add> trailers['X-foO'] = 'OxOxOxO';
<add> trailers['x-foo-test'] = 'test, test';
<add> }
<add> });
<add> request.resume();
<add> request.on('end', common.mustCall(function() {
<add> server.close();
<add> client.destroy();
<add> }));
<add> request.write('test\n');
<add> request.end('test');
<add> }));
<add>}));
<ide><path>test/parallel/test-http2-compat-serverresponse-statusmessage-property-set.js
<add>// Flags: --expose-http2
<add>'use strict';
<add>
<add>const common = require('../common');
<add>if (!common.hasCrypto)
<add> common.skip('missing crypto');
<add>const assert = require('assert');
<add>const h2 = require('http2');
<add>
<add>// Http2ServerResponse.statusMessage should warn
<add>
<add>const unsupportedWarned = common.mustCall(1);
<add>process.on('warning', ({ name, message }) => {
<add> const expectedMessage =
<add> 'Status message is not supported by HTTP/2 (RFC7540 8.1.2.4)';
<add> if (name === 'UnsupportedWarning' && message === expectedMessage)
<add> unsupportedWarned();
<add>});
<add>
<add>const server = h2.createServer();
<add>server.listen(0, common.mustCall(function() {
<add> const port = server.address().port;
<add> server.once('request', common.mustCall(function(request, response) {
<add> response.on('finish', common.mustCall(function() {
<add> response.statusMessage = 'test';
<add> response.statusMessage = 'test'; // only warn once
<add> assert.strictEqual(response.statusMessage, ''); // no change
<add> server.close();
<add> }));
<add> response.end();
<add> }));
<add>
<add> const url = `http://localhost:${port}`;
<add> const client = h2.connect(url, common.mustCall(function() {
<add> const headers = {
<add> ':path': '/',
<add> ':method': 'GET',
<add> ':scheme': 'http',
<add> ':authority': `localhost:${port}`
<add> };
<add> const request = client.request(headers);
<add> request.on('response', common.mustCall(function(headers) {
<add> assert.strictEqual(headers[':status'], 200);
<add> }, 1));
<add> request.on('end', common.mustCall(function() {
<add> client.destroy();
<add> }));
<add> request.end();
<add> request.resume();
<add> }));
<add>}));
<ide><path>test/parallel/test-http2-compat-serverresponse-statusmessage-property.js
<ide> server.listen(0, common.mustCall(function() {
<ide> server.once('request', common.mustCall(function(request, response) {
<ide> response.on('finish', common.mustCall(function() {
<ide> assert.strictEqual(response.statusMessage, '');
<add> assert.strictEqual(response.statusMessage, ''); // only warn once
<ide> server.close();
<ide> }));
<ide> response.end();
<ide><path>test/parallel/test-http2-cookies.js
<ide> server.on('stream', common.mustCall(onStream));
<ide>
<ide> function onStream(stream, headers, flags) {
<ide>
<del> assert(Array.isArray(headers.abc));
<del> assert.strictEqual(headers.abc.length, 3);
<del> assert.strictEqual(headers.abc[0], '1');
<del> assert.strictEqual(headers.abc[1], '2');
<del> assert.strictEqual(headers.abc[2], '3');
<add> assert.strictEqual(typeof headers.abc, 'string');
<add> assert.strictEqual(headers.abc, '1, 2, 3');
<ide> assert.strictEqual(typeof headers.cookie, 'string');
<ide> assert.strictEqual(headers.cookie, 'a=b; c=d; e=f');
<ide>
<ide><path>test/parallel/test-http2-multiheaders-raw.js
<add>// Flags: --expose-http2
<add>'use strict';
<add>
<add>const common = require('../common');
<add>if (!common.hasCrypto)
<add> common.skip('missing crypto');
<add>const assert = require('assert');
<add>const http2 = require('http2');
<add>
<add>const server = http2.createServer();
<add>
<add>const src = Object.create(null);
<add>src['www-authenticate'] = 'foo';
<add>src['WWW-Authenticate'] = 'bar';
<add>src['WWW-AUTHENTICATE'] = 'baz';
<add>src['test'] = 'foo, bar, baz';
<add>
<add>server.on('stream', common.mustCall((stream, headers, flags, rawHeaders) => {
<add> const expected = [
<add> ':path',
<add> '/',
<add> ':scheme',
<add> 'http',
<add> ':authority',
<add> `localhost:${server.address().port}`,
<add> ':method',
<add> 'GET',
<add> 'www-authenticate',
<add> 'foo',
<add> 'www-authenticate',
<add> 'bar',
<add> 'www-authenticate',
<add> 'baz',
<add> 'test',
<add> 'foo, bar, baz'
<add> ];
<add>
<add> assert.deepStrictEqual(expected, rawHeaders);
<add> stream.respond(src);
<add> stream.end();
<add>}));
<add>
<add>server.listen(0, common.mustCall(() => {
<add> const client = http2.connect(`http://localhost:${server.address().port}`);
<add> const req = client.request(src);
<add> req.on('streamClosed', common.mustCall(() => {
<add> server.close();
<add> client.destroy();
<add> }));
<add>}));
<ide><path>test/parallel/test-http2-multiheaders.js
<ide> src['__Proto__'] = 'baz';
<ide>
<ide> function checkHeaders(headers) {
<ide> assert.deepStrictEqual(headers['accept'],
<del> [ 'abc', 'def', 'ghijklmnop' ]);
<add> 'abc, def, ghijklmnop');
<ide> assert.deepStrictEqual(headers['www-authenticate'],
<del> [ 'foo', 'bar', 'baz' ]);
<add> 'foo, bar, baz');
<ide> assert.deepStrictEqual(headers['proxy-authenticate'],
<del> [ 'foo', 'bar', 'baz' ]);
<del> assert.deepStrictEqual(headers['x-foo'], [ 'foo', 'bar', 'baz' ]);
<del> assert.deepStrictEqual(headers['constructor'], [ 'foo', 'bar', 'baz' ]);
<add> 'foo, bar, baz');
<add> assert.deepStrictEqual(headers['x-foo'], 'foo, bar, baz');
<add> assert.deepStrictEqual(headers['constructor'], 'foo, bar, baz');
<ide> // eslint-disable-next-line no-proto
<del> assert.deepStrictEqual(headers['__proto__'], [ 'foo', 'bar', 'baz' ]);
<add> assert.deepStrictEqual(headers['__proto__'], 'foo, bar, baz');
<ide> }
<ide>
<ide> server.on('stream', common.mustCall((stream, headers) => { | 10 |
Text | Text | clarify model used in djangomodelpermissions | dca4d7c027be123f4634ed458cf2af4ef055c432 | <ide><path>docs/api-guide/permissions.md
<ide> This permission is suitable if you want to your API to allow read permissions to
<ide>
<ide> ## DjangoModelPermissions
<ide>
<del>This permission class ties into Django's standard `django.contrib.auth` [model permissions][contribauth]. This permission must only be applied to views that have a `.queryset` property or `get_queryset()` method. Authorization will only be granted if the user *is authenticated* and has the *relevant model permissions* assigned.
<add>This permission class ties into Django's standard `django.contrib.auth` [model permissions][contribauth]. This permission must only be applied to views that have a `.queryset` property or `get_queryset()` method. Authorization will only be granted if the user *is authenticated* and has the *relevant model permissions* assigned. The appropriate model is determined by checking `get_queryset().model` or `queryset.model`.
<ide>
<ide> * `POST` requests require the user to have the `add` permission on the model.
<ide> * `PUT` and `PATCH` requests require the user to have the `change` permission on the model. | 1 |
Text | Text | fix broken links in contributing.md | 850f7841f7e132460be44b9a9c5e5581368c296b | <ide><path>CONTRIBUTING.md
<ide>
<ide> Want to hack on Docker? Awesome! We have a contributor's guide that explains
<ide> [setting up a Docker development environment and the contribution
<del>process](https://docs.docker.com/project/who-written-for/).
<add>process](https://docs.docker.com/opensource/project/who-written-for/).
<ide>
<ide> 
<ide>
<ide> anybody starts working on it.
<ide> We are always thrilled to receive pull requests. We do our best to process them
<ide> quickly. If your pull request is not accepted on the first try,
<ide> don't get discouraged! Our contributor's guide explains [the review process we
<del>use for simple changes](https://docs.docker.com/project/make-a-contribution/).
<add>use for simple changes](https://docs.docker.com/opensource/workflow/make-a-contribution/).
<ide>
<ide> ### Design and cleanup proposals
<ide>
<ide> You can propose new designs for existing Docker features. You can also design
<ide> entirely new features. We really appreciate contributors who want to refactor or
<ide> otherwise cleanup our project. For information on making these types of
<ide> contributions, see [the advanced contribution
<del>section](https://docs.docker.com/project/advanced-contributing/) in the
<del>contributors guide.
<add>section](https://docs.docker.com/opensource/workflow/advanced-contributing/) in
<add>the contributors guide.
<ide>
<ide> We try hard to keep Docker lean and focused. Docker can't do everything for
<ide> everybody. This means that we might decide against incorporating a new feature.
<ide> However, there might be a way to implement that feature *on top of* Docker.
<ide> IRC is a rich chat protocol but it can overwhelm new users. You can search
<ide> <a href="https://botbot.me/freenode/docker/#" target="_blank">our chat archives</a>.
<ide> </p>
<del> Read our <a href="https://docs.docker.com/project/get-help/#irc-quickstart" target="_blank">IRC quickstart guide</a> for an easy way to get started.
<add> Read our <a href="https://docs.docker.com/opensource/get-help/#irc-quickstart" target="_blank">IRC quickstart guide</a> for an easy way to get started.
<ide> </td>
<ide> </tr>
<ide> <tr>
<ide> Fork the repository and make changes on your fork in a feature branch:
<ide>
<ide> Submit unit tests for your changes. Go has a great test framework built in; use
<ide> it! Take a look at existing tests for inspiration. [Run the full test
<del>suite](https://docs.docker.com/project/test-and-docs/) on your branch before
<add>suite](https://docs.docker.com/opensource/project/test-and-docs/) on your branch before
<ide> submitting a pull request.
<ide>
<ide> Update the documentation when creating or modifying features. Test your
<ide> documentation changes for clarity, concision, and correctness, as well as a
<ide> clean documentation build. See our contributors guide for [our style
<del>guide](https://docs.docker.com/project/doc-style) and instructions on [building
<del>the documentation](https://docs.docker.com/project/test-and-docs/#build-and-test-the-documentation).
<add>guide](https://docs.docker.com/opensource/doc-style) and instructions on [building
<add>the documentation](https://docs.docker.com/opensource/project/test-and-docs/#build-and-test-the-documentation).
<ide>
<ide> Write clean code. Universally formatted code promotes ease of writing, reading,
<ide> and maintenance. Always run `gofmt -s -w file.go` on each changed file before
<ide> high majority of submissions should have a single commit, so if in doubt: squash
<ide> down to one.
<ide>
<ide> After every commit, [make sure the test suite passes]
<del>(https://docs.docker.com/project/test-and-docs/). Include documentation
<add>(https://docs.docker.com/opensource/project/test-and-docs/). Include documentation
<ide> changes in the same pull request so that a revert would remove all traces of
<ide> the feature or fix.
<ide> | 1 |
Ruby | Ruby | fix regression introduced in pull request 8812 | c692774bbae528d83925d04e1c3cebb73e3f8ae5 | <ide><path>actionpack/lib/action_dispatch/middleware/static.rb
<ide> class FileHandler
<ide> def initialize(root, cache_control)
<ide> @root = root.chomp('/')
<ide> @compiled_root = /^#{Regexp.escape(root)}/
<del> @file_server = ::Rack::File.new(@root, 'Cache-Control' => cache_control)
<add> headers = cache_control && { 'Cache-Control' => cache_control }
<add> @file_server = ::Rack::File.new(@root, headers)
<ide> end
<ide>
<ide> def match?(path) | 1 |
PHP | PHP | fix macros method | 41e066e2eec5e496b48abfa94c7d50e681c9d0a7 | <ide><path>laravel/form.php
<ide> protected static function id($name, $attributes)
<ide> */
<ide> public static function __callStatic($method, $parameters)
<ide> {
<del> if (isset(static::$inputs[$method]))
<add> if (isset(static::$macros[$method]))
<ide> {
<del> return call_user_func_array(static::$inputs[$method], $parameters);
<add> return call_user_func_array(static::$macros[$method], $parameters);
<ide> }
<ide>
<ide> throw new \Exception("Method [$method] does not exist."); | 1 |
Go | Go | remove some intermediate vars, use struct-literals | 0eb7b49a17088f7495a3353510b4f744a864af47 | <ide><path>daemon/inspect.go
<ide> func (daemon *Daemon) containerInspect120(name string) (*v1p20.ContainerJSON, er
<ide> return nil, err
<ide> }
<ide>
<del> mountPoints := ctr.GetMountPoints()
<del> config := &v1p20.ContainerConfig{
<del> Config: ctr.Config,
<del> MacAddress: ctr.Config.MacAddress,
<del> NetworkDisabled: ctr.Config.NetworkDisabled,
<del> ExposedPorts: ctr.Config.ExposedPorts,
<del> VolumeDriver: ctr.HostConfig.VolumeDriver,
<del> }
<del> networkSettings := daemon.getBackwardsCompatibleNetworkSettings(ctr.NetworkSettings)
<del>
<ide> return &v1p20.ContainerJSON{
<ide> ContainerJSONBase: base,
<del> Mounts: mountPoints,
<del> Config: config,
<del> NetworkSettings: networkSettings,
<add> Mounts: ctr.GetMountPoints(),
<add> Config: &v1p20.ContainerConfig{
<add> Config: ctr.Config,
<add> MacAddress: ctr.Config.MacAddress,
<add> NetworkDisabled: ctr.Config.NetworkDisabled,
<add> ExposedPorts: ctr.Config.ExposedPorts,
<add> VolumeDriver: ctr.HostConfig.VolumeDriver,
<add> },
<add> NetworkSettings: daemon.getBackwardsCompatibleNetworkSettings(ctr.NetworkSettings),
<ide> }, nil
<ide> }
<ide>
<ide><path>daemon/inspect_linux.go
<ide> func (daemon *Daemon) containerInspectPre120(name string) (*v1p19.ContainerJSON,
<ide> volumesRW[m.Destination] = m.RW
<ide> }
<ide>
<del> config := &v1p19.ContainerConfig{
<del> Config: ctr.Config,
<del> MacAddress: ctr.Config.MacAddress,
<del> NetworkDisabled: ctr.Config.NetworkDisabled,
<del> ExposedPorts: ctr.Config.ExposedPorts,
<del> VolumeDriver: ctr.HostConfig.VolumeDriver,
<del> Memory: ctr.HostConfig.Memory,
<del> MemorySwap: ctr.HostConfig.MemorySwap,
<del> CPUShares: ctr.HostConfig.CPUShares,
<del> CPUSet: ctr.HostConfig.CpusetCpus,
<del> }
<del> networkSettings := daemon.getBackwardsCompatibleNetworkSettings(ctr.NetworkSettings)
<del>
<ide> return &v1p19.ContainerJSON{
<ide> ContainerJSONBase: base,
<ide> Volumes: volumes,
<ide> VolumesRW: volumesRW,
<del> Config: config,
<del> NetworkSettings: networkSettings,
<add> Config: &v1p19.ContainerConfig{
<add> Config: ctr.Config,
<add> MacAddress: ctr.Config.MacAddress,
<add> NetworkDisabled: ctr.Config.NetworkDisabled,
<add> ExposedPorts: ctr.Config.ExposedPorts,
<add> VolumeDriver: ctr.HostConfig.VolumeDriver,
<add> Memory: ctr.HostConfig.Memory,
<add> MemorySwap: ctr.HostConfig.MemorySwap,
<add> CPUShares: ctr.HostConfig.CPUShares,
<add> CPUSet: ctr.HostConfig.CpusetCpus,
<add> },
<add> NetworkSettings: daemon.getBackwardsCompatibleNetworkSettings(ctr.NetworkSettings),
<ide> }, nil
<ide> }
<ide> | 2 |
Ruby | Ruby | enforce https on *.bintray.com urls | f6946cd9f644cb9686bf5541e11e463c0e8195af | <ide><path>Library/Homebrew/cmd/audit.rb
<ide> def audit_urls
<ide> problem "Fossies urls should be https://, not http (url is #{p})."
<ide> when %r[^http://mirrors\.kernel\.org/]
<ide> problem "mirrors.kernel urls should be https://, not http (url is #{p})."
<add> when %r[^http://[^/]*\.bintray\.com/]
<add> problem "Bintray urls should be https://, not http (url is #{p})."
<ide> when %r[^http://tools\.ietf\.org/]
<ide> problem "ietf urls should be https://, not http (url is #{p})."
<ide> end | 1 |
Python | Python | update error handling and docstring | bbb94b37c61d090e307da16519390839513dfc52 | <ide><path>spacy/errors.py
<ide> class Errors:
<ide> "issue tracker: http://github.com/explosion/spaCy/issues")
<ide>
<ide> # TODO: fix numbering after merging develop into master
<del> E890 = ("Can not add the alias '{alias}' to the Knowledge base. "
<add> E886 = ("Can't replace {name} -> {tok2vec} listeners: path '{path}' not "
<add> "found in config for component '{name}'.")
<add> E887 = ("Can't replace {name} -> {tok2vec} listeners: the paths to replace "
<add> "({paths}) don't match the available listeners in the model ({n_listeners}).")
<add> E888 = ("Can't replace listeners for '{name}' ({pipe}): invalid upstream "
<add> "component that doesn't seem to support listeners. Expected Tok2Vec "
<add> "or Transformer component. If you didn't call nlp.replace_listeners "
<add> "manually, this is likely a bug in spaCy.")
<add> E889 = ("Can't replace listeners of component '{name}' because it's not "
<add> "in the pipeline. Available components: {opts}. If you didn't call "
<add> "nlp.replace_listeners manually, this is likely a bug in spaCy.")
<add> E890 = ("Cannot add the alias '{alias}' to the Knowledge base. "
<ide> "Each alias should be a meaningful string.")
<ide> E891 = ("Alias '{alias}' could not be added to the Knowledge base. "
<ide> "This is likely a bug in spaCy.")
<ide><path>spacy/language.py
<ide> def replace_listeners(
<ide> tok2vec_name: str,
<ide> pipe_name: str,
<ide> listeners: Iterable[str] = SimpleFrozenList(),
<del> ):
<add> ) -> None:
<add> """Find listener layers (connecting to a token-to-vector embedding
<add> component) of a given pipeline component model and replace
<add> them with a standalone copy of the token-to-vector layer. This can be
<add> useful when training a pipeline with components sourced from an existing
<add> pipeline: if multiple components (e.g. tagger, parser, NER) listen to
<add> the same tok2vec component, but some of them are frozen and not updated,
<add> their performance may degrade significally as the tok2vec component is
<add> updated with new data. To prevent this, listeners can be replaced with
<add> a standalone tok2vec layer that is owned by the component and doesn't
<add> change if the component isn't updated.
<add>
<add> tok2vec_name (str): Name of the token-to-vector component, typically
<add> "tok2vec" or "transformer".
<add> pipe_name (str): Name of pipeline component to replace listeners for.
<add> listeners (Iterable[str]): The paths to the listeners, relative to the
<add> component config, e.g. ["model.tok2vec"]. Typically, implementations
<add> will only connect to one tok2vec component, [model.tok2vec], but in
<add> theory, custom models can use multiple listeners. The value here can
<add> either be an empty list to not replace any listeners, or a complete
<add> (!) list of the paths to all listener layers used by the model.
<add>
<add> DOCS: https://nightly.spacy.io/api/language#replace_listeners
<add> """
<ide> if tok2vec_name not in self.pipe_names:
<del> raise ValueError # TODO:
<add> err = Errors.E889.format(name=tok2vec_name, opts=", ".join(self.pipe_names))
<add> raise ValueError(err)
<ide> if pipe_name not in self.pipe_names:
<del> raise ValueError # TODO:
<add> err = Errors.E889.format(name=pipe_name, opts=", ".join(self.pipe_names))
<add> raise ValueError(err)
<ide> tok2vec = self.get_pipe(tok2vec_name)
<ide> tok2vec_cfg = self.get_pipe_config(tok2vec_name)
<ide> if (
<ide> not hasattr(tok2vec, "model")
<ide> or not hasattr(tok2vec, "listener_map")
<ide> or "model" not in tok2vec_cfg
<ide> ):
<del> raise ValueError # TODO: likely bug in spaCy if this happens
<add> raise ValueError(Errors.E888.format(name=tok2vec_name, pipe=type(tok2vec)))
<ide> pipe_listeners = tok2vec.listener_map.get(pipe_name, [])
<ide> pipe_cfg = self._pipe_configs[pipe_name]
<ide> if listeners:
<ide> def replace_listeners(
<ide> # The number of listeners defined in the component model doesn't
<ide> # match the listeners to replace, so we won't be able to update
<ide> # the nodes and generate a matching config
<del> raise ValueError(f"{listeners}, {pipe_listeners}") # TODO:
<add> err = Errors.E887.format(
<add> name=pipe_name,
<add> tok2vec=tok2vec_name,
<add> paths=listeners,
<add> n_listeners=len(pipe_listeners),
<add> )
<add> raise ValueError(err)
<ide> pipe = self.get_pipe(pipe_name)
<ide> # Go over the listener layers and replace them
<ide> for listener in pipe_listeners:
<ide> def replace_listeners(
<ide> try:
<ide> util.dot_to_object(pipe_cfg, listener_path)
<ide> except KeyError:
<del> raise ValueError # TODO:
<add> err = Errors.E886.format(
<add> name=pipe_name, tok2vec=tok2vec_name, path=listener_path
<add> )
<add> raise ValueError(err)
<ide> util.set_dot_to_object(pipe_cfg, listener_path, tok2vec_cfg["model"])
<ide>
<ide> def create_pipe_from_source( | 2 |
Python | Python | remove unused conftest | 3f3a46722c5ef89fe1964012b0abe04be8880e3b | <ide><path>spacy/tests/spans/conftest.py
<del>import pytest
<del>from spacy.en import English
<del>import os
<del>
<del>
<del>@pytest.fixture(scope="session")
<del>def en_nlp():
<del> return English() | 1 |
Ruby | Ruby | fix my typo | 694c9ed6c383f9b0d7e510f9e9b0a5491636e032 | <ide><path>activerecord/lib/active_record/nested_attributes.rb
<ide> class TooManyRecords < ActiveRecordError
<ide> # { title: 'Bar' } ])
<ide> #
<ide> # The keys of the hash which is the value for +:posts_attributes+ are
<del> # ignores in this case.
<add> # ignored in this case.
<ide> # However, it is not allowed to use +'id'+ or +:id+ for one of
<ide> # such keys, otherwise the hash will be wrapped in an array and
<ide> # interpreted as an attribute hash for a single post. | 1 |
Javascript | Javascript | use tabs instead of spaces | f7528916a29f5d8cd3d19285a67cd84ea9dcfe91 | <ide><path>test/unit/manipulation.js
<ide> test( "insertAfter, insertBefore, etc do not work when destination is original e
<ide> });
<ide>
<ide> test( "Index for function argument should be received (#13094)", 2, function() {
<del> var i = 0;
<add> var i = 0;
<ide>
<ide> jQuery("<div/><div/>").before(function( index ) {
<ide> equal( index, i++, "Index should be correct" ); | 1 |
PHP | PHP | add name alias to routeregistrar | f353d7586430ef0f06c8d4aeac100da63f510b76 | <ide><path>src/Illuminate/Routing/RouteRegistrar.php
<ide> class RouteRegistrar
<ide> 'get', 'post', 'put', 'patch', 'delete', 'options', 'any',
<ide> ];
<ide>
<add> /**
<add> * The attributes that are aliased.
<add> *
<add> * @var array
<add> */
<add> protected $aliases = [
<add> 'name' => 'as',
<add> ];
<add>
<ide> /**
<ide> * Create a new route registrar instance.
<ide> *
<ide> public function __construct(Router $router)
<ide> */
<ide> public function attribute($key, $value)
<ide> {
<del> $this->attributes[$key] = $value;
<add> $this->attributes[array_get($this->aliases, $key, $key)] = $value;
<ide>
<ide> return $this;
<ide> }
<ide><path>tests/Routing/RouteRegistrarTest.php
<ide> public function testCanRegisterGroupWithMiddleware()
<ide> $this->seeMiddleware('group-middleware');
<ide> }
<ide>
<del>
<ide> public function testCanRegisterGroupWithNamespace()
<ide> {
<ide> $this->router->namespace('App\Http\Controllers')->group(function ($router) { | 2 |
Python | Python | finalize optimizer fixes | 6fc8660a5148c1693e2c02874ffdb6a7d8f1a2c8 | <ide><path>keras/optimizers.py
<ide> def clip_norm(g, c, n):
<ide> g = T.switch(T.ge(n, c), g*c/n, g)
<ide> return g
<ide>
<add>def kl_divergence(p, p_hat):
<add> return p_hat - p + p*T.log(p/p_hat)
<add>
<ide> class Optimizer(object):
<ide> def get_updates(self, params, grads):
<ide> raise NotImplementedError
<ide>
<ide> def get_gradients(self, cost, params, regularizers):
<del>
<ide> grads = T.grad(cost, params)
<ide>
<ide> if hasattr(self, 'clipnorm') and self.clipnorm > 0:
<ide> def get_gradients(self, cost, params, regularizers):
<ide> class SGD(Optimizer):
<ide>
<ide> def __init__(self, lr=0.01, momentum=0., decay=0., nesterov=False, *args, **kwargs):
<add> self.__dict__.update(kwargs)
<ide> self.__dict__.update(locals())
<ide> self.iterations = shared_scalar(0)
<ide>
<ide> def get_updates(self, params, regularizers, constraints, cost):
<ide> grads = self.get_gradients(cost, params, regularizers)
<del>
<ide> lr = self.lr * (1.0 / (1.0 + self.decay * self.iterations))
<ide> updates = [(self.iterations, self.iterations+1.)]
<ide>
<del> for p, g in zip(params, grads):
<add> for p, g, c in zip(params, grads, constraints):
<ide> m = shared_zeros(p.get_value().shape) # momentum
<ide> v = self.momentum * m - lr * g # velocity
<ide> updates.append((m, v))
<ide> def get_updates(self, params, regularizers, constraints, cost):
<ide> class RMSprop(Optimizer):
<ide>
<ide> def __init__(self, lr=0.001, rho=0.9, epsilon=1e-6, *args, **kwargs):
<add> self.__dict__.update(kwargs)
<ide> self.__dict__.update(locals())
<ide>
<ide> def get_updates(self, params, regularizers, constraints, cost):
<ide> grads = self.get_gradients(cost, params, regularizers)
<del>
<ide> accumulators = [shared_zeros(p.get_value().shape) for p in params]
<ide> updates = []
<ide>
<del> for p, g, a in zip(params, grads, accumulators):
<add> for p, g, a, c in zip(params, grads, accumulators, constraints):
<ide> new_a = self.rho * a + (1 - self.rho) * g ** 2 # update accumulator
<ide> updates.append((a, new_a))
<ide>
<ide> new_p = p - self.lr * g / T.sqrt(new_a + self.epsilon)
<del>
<ide> updates.append((p, c(new_p))) # apply constraints
<ide>
<ide> return updates
<ide> def get_updates(self, params, regularizers, constraints, cost):
<ide> class Adagrad(Optimizer):
<ide>
<ide> def __init__(self, lr=0.01, epsilon=1e-6, *args, **kwargs):
<add> self.__dict__.update(kwargs)
<ide> self.__dict__.update(locals())
<ide>
<ide> def get_updates(self, params, regularizers, constraints, cost):
<ide> grads = self.get_gradients(cost, params, regularizers)
<del>
<ide> accumulators = [shared_zeros(p.get_value().shape) for p in params]
<ide> updates = []
<ide>
<del> for p, g, a in zip(params, grads, accumulators):
<add> for p, g, a, c in zip(params, grads, accumulators, constraints):
<ide> new_a = a + g ** 2 # update accumulator
<ide> updates.append((a, new_a))
<ide>
<ide> class Adadelta(Optimizer):
<ide> Reference: http://arxiv.org/abs/1212.5701
<ide> '''
<ide> def __init__(self, lr=1.0, rho=0.95, epsilon=1e-6, *args, **kwargs):
<add> self.__dict__.update(kwargs)
<ide> self.__dict__.update(locals())
<ide>
<ide> def get_updates(self, params, regularizers, constraints, cost):
<ide> grads = self.get_gradients(cost, params, regularizers)
<del>
<ide> accumulators = [shared_zeros(p.get_value().shape) for p in params]
<ide> delta_accumulators = [shared_zeros(p.get_value().shape) for p in params]
<ide> updates = []
<ide>
<del> for p, g, a, d_a in zip(params, grads, accumulators, delta_accumulators):
<add> for p, g, a, d_a, c in zip(params, grads, accumulators, delta_accumulators, constraints):
<ide> new_a = self.rho * a + (1 - self.rho) * g ** 2 # update accumulator
<ide> updates.append((a, new_a))
<ide>
<ide> def get_updates(self, params, regularizers, constraints, cost):
<ide> class Adam(Optimizer):
<ide> '''
<ide> Reference: http://arxiv.org/abs/1412.6980
<add>
<ide> Default parameters follow those provided in the original paper
<add>
<ide> lambda is renamed kappa.
<ide> '''
<ide> def __init__(self, lr=0.001, beta_1=0.9, beta_2=0.999, epsilon=1e-8, kappa=1-1e-8, *args, **kwargs):
<add> self.__dict__.update(kwargs)
<ide> self.__dict__.update(locals())
<ide> self.iterations = shared_scalar(0)
<ide>
<ide> def get_updates(self, params, regularizers, constraints, cost):
<ide> # the update below seems missing from the paper, but is obviously required
<ide> beta_2_t = self.beta_2 * (self.kappa**i)
<ide>
<del> for p, g in zip(params, grads):
<add> for p, g, c in zip(params, grads, constraints):
<ide> m = theano.shared(p.get_value() * 0.) # zero init of moment
<ide> v = theano.shared(p.get_value() * 0.) # zero init of velocity
<ide>
<ide> def get_updates(self, params, regularizers, constraints, cost):
<ide> v_b_t = v_t / (1 - beta_2_t)
<ide>
<ide> p_t = p - self.lr * m_b_t / (T.sqrt(v_b_t) + self.epsilon)
<del>
<add>
<ide> updates.append((m, m_t))
<ide> updates.append((v, v_t))
<del>
<ide> updates.append((p, c(p_t))) # apply constraints
<ide> return updates
<ide> | 1 |
Ruby | Ruby | increase timeout for integration tests | 8298f95677c939f42299ba4c42d1bd89a38c6179 | <ide><path>Library/Homebrew/test/spec_helper.rb
<ide> if ENV["CI"]
<ide> config.verbose_retry = true
<ide> config.display_try_failure_messages = true
<del> config.default_retry_count = 2
<add>
<add> config.around(:each, :integration_test) do |example|
<add> example.metadata[:timeout] ||= 120
<add> example.run
<add> end
<ide>
<ide> config.around(:each, :needs_network) do |example|
<ide> example.metadata[:timeout] ||= 120 | 1 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.