diff --git a/credentials.json b/credentials.json
new file mode 100644
index 0000000000000000000000000000000000000000..691cae41c8e79a69cc3dacb279d3e5345d469ebc
--- /dev/null
+++ b/credentials.json
@@ -0,0 +1,13 @@
+{
+ "type": "service_account",
+ "project_id": "adr-demo1",
+ "private_key_id": "REDACTED_ROTATE_THIS_KEY",
+ "private_key": "-----BEGIN PRIVATE KEY-----\nREDACTED_DO_NOT_COMMIT_SERVICE_ACCOUNT_KEYS\n-----END PRIVATE KEY-----\n",
+ "client_email": "adrgooglesheet@adr-demo1.iam.gserviceaccount.com",
+ "client_id": "114059569575086599132",
+ "auth_uri": "https://accounts.google.com/o/oauth2/auth",
+ "token_uri": "https://oauth2.googleapis.com/token",
+ "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
+ "client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/adrgooglesheet%40adr-demo1.iam.gserviceaccount.com",
+ "universe_domain": "googleapis.com"
+}
diff --git a/dropbox_plans/2301 BMW job1 external.pdf b/dropbox_plans/2301 BMW job1 external.pdf
new file mode 100644
index 0000000000000000000000000000000000000000..faef0b11e8f2f6c1567f861da9934f19a9fa28be
Binary files /dev/null and b/dropbox_plans/2301 BMW job1 external.pdf differ
diff --git a/dropbox_plans/2301 BMW job1 foundation.pdf b/dropbox_plans/2301 BMW job1 foundation.pdf
new file mode 100644
index 0000000000000000000000000000000000000000..cfc1378e435bb75e4ea6896294c4e82ce699a790
Binary files /dev/null and b/dropbox_plans/2301 BMW job1 foundation.pdf differ
diff --git a/dropbox_plans/2301 BMW job1 pc2.pdf b/dropbox_plans/2301 BMW job1 pc2.pdf
new file mode 100644
index 0000000000000000000000000000000000000000..fd4c17741770a45ee05723ce7383e91a7a6457b6
Binary files /dev/null and b/dropbox_plans/2301 BMW job1 pc2.pdf differ
diff --git a/dropbox_plans/2301 BMW job1 pc3.pdf b/dropbox_plans/2301 BMW job1 pc3.pdf
new file mode 100644
index 0000000000000000000000000000000000000000..faef0b11e8f2f6c1567f861da9934f19a9fa28be
Binary files /dev/null and b/dropbox_plans/2301 BMW job1 pc3.pdf differ
diff --git a/dropbox_plans/2301 BMW job1 piles.pdf b/dropbox_plans/2301 BMW job1 piles.pdf
new file mode 100644
index 0000000000000000000000000000000000000000..fd4c17741770a45ee05723ce7383e91a7a6457b6
Binary files /dev/null and b/dropbox_plans/2301 BMW job1 piles.pdf differ
diff --git a/dropbox_plans/2301 BMW job2 external.pdf b/dropbox_plans/2301 BMW job2 external.pdf
new file mode 100644
index 0000000000000000000000000000000000000000..e5c473a10ffbf6919154830a133b1f34767fb45e
Binary files /dev/null and b/dropbox_plans/2301 BMW job2 external.pdf differ
diff --git a/dropbox_plans/2301 BMW job2 interior.pdf b/dropbox_plans/2301 BMW job2 interior.pdf
new file mode 100644
index 0000000000000000000000000000000000000000..de57b90c6025b12f6bbe26fd68af7d3a521808a0
Binary files /dev/null and b/dropbox_plans/2301 BMW job2 interior.pdf differ
diff --git a/dropbox_plans/2301 BMW job2 pc5.pdf b/dropbox_plans/2301 BMW job2 pc5.pdf
new file mode 100644
index 0000000000000000000000000000000000000000..e5c473a10ffbf6919154830a133b1f34767fb45e
Binary files /dev/null and b/dropbox_plans/2301 BMW job2 pc5.pdf differ
diff --git a/dropbox_plans/2301 BMW job2 pc7.pdf b/dropbox_plans/2301 BMW job2 pc7.pdf
new file mode 100644
index 0000000000000000000000000000000000000000..de57b90c6025b12f6bbe26fd68af7d3a521808a0
Binary files /dev/null and b/dropbox_plans/2301 BMW job2 pc7.pdf differ
diff --git a/dropbox_plans/2301BMWfoundation.pdf b/dropbox_plans/2301BMWfoundation.pdf
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/dropbox_plans/2301BMWpc2.pdf b/dropbox_plans/2301BMWpc2.pdf
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/dropbox_plans/2301BMWpc3.pdf b/dropbox_plans/2301BMWpc3.pdf
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/dropbox_plans/BMW job1 23-HRW-01-BG-DR-S-110_P.02_FN.pdf b/dropbox_plans/BMW job1 23-HRW-01-BG-DR-S-110_P.02_FN.pdf
new file mode 100644
index 0000000000000000000000000000000000000000..60dfb7dd20a90955b64415b3269ad794e8c9bce7
Binary files /dev/null and b/dropbox_plans/BMW job1 23-HRW-01-BG-DR-S-110_P.02_FN.pdf differ
diff --git a/dropbox_plans/foundation.pdf b/dropbox_plans/foundation.pdf
new file mode 100644
index 0000000000000000000000000000000000000000..cfc1378e435bb75e4ea6896294c4e82ce699a790
Binary files /dev/null and b/dropbox_plans/foundation.pdf differ
diff --git a/dropbox_plans/foundation2023.pdf b/dropbox_plans/foundation2023.pdf
new file mode 100644
index 0000000000000000000000000000000000000000..cfc1378e435bb75e4ea6896294c4e82ce699a790
Binary files /dev/null and b/dropbox_plans/foundation2023.pdf differ
diff --git a/dropbox_plans/pc2-2023.pdf b/dropbox_plans/pc2-2023.pdf
new file mode 100644
index 0000000000000000000000000000000000000000..fd4c17741770a45ee05723ce7383e91a7a6457b6
Binary files /dev/null and b/dropbox_plans/pc2-2023.pdf differ
diff --git a/dropbox_plans/pc2.pdf b/dropbox_plans/pc2.pdf
new file mode 100644
index 0000000000000000000000000000000000000000..fd4c17741770a45ee05723ce7383e91a7a6457b6
Binary files /dev/null and b/dropbox_plans/pc2.pdf differ
diff --git a/dropbox_plans/pc3-2023.pdf b/dropbox_plans/pc3-2023.pdf
new file mode 100644
index 0000000000000000000000000000000000000000..faef0b11e8f2f6c1567f861da9934f19a9fa28be
Binary files /dev/null and b/dropbox_plans/pc3-2023.pdf differ
diff --git a/dropbox_plans/pc3.pdf b/dropbox_plans/pc3.pdf
new file mode 100644
index 0000000000000000000000000000000000000000..faef0b11e8f2f6c1567f861da9934f19a9fa28be
Binary files /dev/null and b/dropbox_plans/pc3.pdf differ
diff --git a/dropbox_plans/pc5 2023.pdf b/dropbox_plans/pc5 2023.pdf
new file mode 100644
index 0000000000000000000000000000000000000000..e5c473a10ffbf6919154830a133b1f34767fb45e
Binary files /dev/null and b/dropbox_plans/pc5 2023.pdf differ
diff --git a/dropbox_plans/pc5.pdf b/dropbox_plans/pc5.pdf
new file mode 100644
index 0000000000000000000000000000000000000000..e5c473a10ffbf6919154830a133b1f34767fb45e
Binary files /dev/null and b/dropbox_plans/pc5.pdf differ
diff --git a/dropbox_plans/pc7 2023.pdf b/dropbox_plans/pc7 2023.pdf
new file mode 100644
index 0000000000000000000000000000000000000000..de57b90c6025b12f6bbe26fd68af7d3a521808a0
Binary files /dev/null and b/dropbox_plans/pc7 2023.pdf differ
diff --git a/dropbox_plans/pc7.pdf b/dropbox_plans/pc7.pdf
new file mode 100644
index 0000000000000000000000000000000000000000..de57b90c6025b12f6bbe26fd68af7d3a521808a0
Binary files /dev/null and b/dropbox_plans/pc7.pdf differ
diff --git a/dropbox_plans/piles_origCountSummary.csv b/dropbox_plans/piles_origCountSummary.csv
new file mode 100644
index 0000000000000000000000000000000000000000..77df10231ef15aa5d63bbff9219258fb594b235e
--- /dev/null
+++ b/dropbox_plans/piles_origCountSummary.csv
@@ -0,0 +1,2 @@
+Plan_Name,Matched items,Sensitivity
+piles_orig.pdf,8,70
diff --git a/node_modules/node-fetch/LICENSE.md b/node_modules/node-fetch/LICENSE.md
new file mode 100644
index 0000000000000000000000000000000000000000..660ffecb58b02f27a562c193c71f14b7b573fc96
--- /dev/null
+++ b/node_modules/node-fetch/LICENSE.md
@@ -0,0 +1,22 @@
+The MIT License (MIT)
+
+Copyright (c) 2016 David Frank
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+
diff --git a/node_modules/node-fetch/README.md b/node_modules/node-fetch/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..4f87a59a0638185f29cc0e0251311a48fe1a443d
--- /dev/null
+++ b/node_modules/node-fetch/README.md
@@ -0,0 +1,633 @@
+node-fetch
+==========
+
+[![npm version][npm-image]][npm-url]
+[![build status][travis-image]][travis-url]
+[![coverage status][codecov-image]][codecov-url]
+[![install size][install-size-image]][install-size-url]
+[![Discord][discord-image]][discord-url]
+
+A light-weight module that brings `window.fetch` to Node.js
+
+(We are looking for [v2 maintainers and collaborators](https://github.com/bitinn/node-fetch/issues/567))
+
+[![Backers][opencollective-image]][opencollective-url]
+
+
+
+- [Motivation](#motivation)
+- [Features](#features)
+- [Difference from client-side fetch](#difference-from-client-side-fetch)
+- [Installation](#installation)
+- [Loading and configuring the module](#loading-and-configuring-the-module)
+- [Common Usage](#common-usage)
+ - [Plain text or HTML](#plain-text-or-html)
+ - [JSON](#json)
+ - [Simple Post](#simple-post)
+ - [Post with JSON](#post-with-json)
+ - [Post with form parameters](#post-with-form-parameters)
+ - [Handling exceptions](#handling-exceptions)
+ - [Handling client and server errors](#handling-client-and-server-errors)
+- [Advanced Usage](#advanced-usage)
+ - [Streams](#streams)
+ - [Buffer](#buffer)
+ - [Accessing Headers and other Meta data](#accessing-headers-and-other-meta-data)
+ - [Extract Set-Cookie Header](#extract-set-cookie-header)
+ - [Post data using a file stream](#post-data-using-a-file-stream)
+ - [Post with form-data (detect multipart)](#post-with-form-data-detect-multipart)
+ - [Request cancellation with AbortSignal](#request-cancellation-with-abortsignal)
+- [API](#api)
+ - [fetch(url[, options])](#fetchurl-options)
+ - [Options](#options)
+ - [Class: Request](#class-request)
+ - [Class: Response](#class-response)
+ - [Class: Headers](#class-headers)
+ - [Interface: Body](#interface-body)
+ - [Class: FetchError](#class-fetcherror)
+- [License](#license)
+- [Acknowledgement](#acknowledgement)
+
+
+
+## Motivation
+
+Instead of implementing `XMLHttpRequest` in Node.js to run browser-specific [Fetch polyfill](https://github.com/github/fetch), why not go from native `http` to `fetch` API directly? Hence, `node-fetch`, minimal code for a `window.fetch` compatible API on Node.js runtime.
+
+See Matt Andrews' [isomorphic-fetch](https://github.com/matthew-andrews/isomorphic-fetch) or Leonardo Quixada's [cross-fetch](https://github.com/lquixada/cross-fetch) for isomorphic usage (exports `node-fetch` for server-side, `whatwg-fetch` for client-side).
+
+## Features
+
+- Stay consistent with `window.fetch` API.
+- Make conscious trade-off when following [WHATWG fetch spec][whatwg-fetch] and [stream spec](https://streams.spec.whatwg.org/) implementation details, document known differences.
+- Use native promise but allow substituting it with [insert your favorite promise library].
+- Use native Node streams for body on both request and response.
+- Decode content encoding (gzip/deflate) properly and convert string output (such as `res.text()` and `res.json()`) to UTF-8 automatically.
+- Useful extensions such as timeout, redirect limit, response size limit, [explicit errors](ERROR-HANDLING.md) for troubleshooting.
+
+## Difference from client-side fetch
+
+- See [Known Differences](LIMITS.md) for details.
+- If you happen to use a missing feature that `window.fetch` offers, feel free to open an issue.
+- Pull requests are welcomed too!
+
+## Installation
+
+Current stable release (`2.x`)
+
+```sh
+$ npm install node-fetch
+```
+
+## Loading and configuring the module
+We suggest you load the module via `require` until the stabilization of ES modules in node:
+```js
+const fetch = require('node-fetch');
+```
+
+If you are using a Promise library other than native, set it through `fetch.Promise`:
+```js
+const Bluebird = require('bluebird');
+
+fetch.Promise = Bluebird;
+```
+
+## Common Usage
+
+NOTE: The documentation below is up-to-date with `2.x` releases; see the [`1.x` readme](https://github.com/bitinn/node-fetch/blob/1.x/README.md), [changelog](https://github.com/bitinn/node-fetch/blob/1.x/CHANGELOG.md) and [2.x upgrade guide](UPGRADE-GUIDE.md) for the differences.
+
+#### Plain text or HTML
+```js
+fetch('https://github.com/')
+ .then(res => res.text())
+ .then(body => console.log(body));
+```
+
+#### JSON
+
+```js
+
+fetch('https://api.github.com/users/github')
+ .then(res => res.json())
+ .then(json => console.log(json));
+```
+
+#### Simple Post
+```js
+fetch('https://httpbin.org/post', { method: 'POST', body: 'a=1' })
+ .then(res => res.json()) // expecting a json response
+ .then(json => console.log(json));
+```
+
+#### Post with JSON
+
+```js
+const body = { a: 1 };
+
+fetch('https://httpbin.org/post', {
+ method: 'post',
+ body: JSON.stringify(body),
+ headers: { 'Content-Type': 'application/json' },
+ })
+ .then(res => res.json())
+ .then(json => console.log(json));
+```
+
+#### Post with form parameters
+`URLSearchParams` is available in Node.js as of v7.5.0. See [official documentation](https://nodejs.org/api/url.html#url_class_urlsearchparams) for more usage methods.
+
+NOTE: The `Content-Type` header is only set automatically to `x-www-form-urlencoded` when an instance of `URLSearchParams` is given as such:
+
+```js
+const { URLSearchParams } = require('url');
+
+const params = new URLSearchParams();
+params.append('a', 1);
+
+fetch('https://httpbin.org/post', { method: 'POST', body: params })
+ .then(res => res.json())
+ .then(json => console.log(json));
+```
+
+#### Handling exceptions
+NOTE: 3xx-5xx responses are *NOT* exceptions and should be handled in `then()`; see the next section for more information.
+
+Adding a catch to the fetch promise chain will catch *all* exceptions, such as errors originating from node core libraries, network errors and operational errors, which are instances of FetchError. See the [error handling document](ERROR-HANDLING.md) for more details.
+
+```js
+fetch('https://domain.invalid/')
+ .catch(err => console.error(err));
+```
+
+#### Handling client and server errors
+It is common to create a helper function to check that the response contains no client (4xx) or server (5xx) error responses:
+
+```js
+function checkStatus(res) {
+ if (res.ok) { // res.status >= 200 && res.status < 300
+ return res;
+ } else {
+    throw new MyCustomError(res.statusText);
+ }
+}
+
+fetch('https://httpbin.org/status/400')
+ .then(checkStatus)
+ .then(res => console.log('will not get here...'))
+```
+
+## Advanced Usage
+
+#### Streams
+The "Node.js way" is to use streams when possible:
+
+```js
+fetch('https://assets-cdn.github.com/images/modules/logos_page/Octocat.png')
+ .then(res => {
+ const dest = fs.createWriteStream('./octocat.png');
+ res.body.pipe(dest);
+ });
+```
+
+In Node.js 14 you can also use async iterators to read `body`; however, be careful to catch
+errors -- the longer a response runs, the more likely it is to encounter an error.
+
+```js
+const fetch = require('node-fetch');
+const response = await fetch('https://httpbin.org/stream/3');
+try {
+ for await (const chunk of response.body) {
+ console.dir(JSON.parse(chunk.toString()));
+ }
+} catch (err) {
+ console.error(err.stack);
+}
+```
+
+In Node.js 12 you can also use async iterators to read `body`; however, async iterators with streams
+did not mature until Node.js 14, so you need to do some extra work to ensure you handle errors
+directly from the stream and wait on the response to fully close.
+
+```js
+const fetch = require('node-fetch');
+const read = async body => {
+ let error;
+ body.on('error', err => {
+ error = err;
+ });
+ for await (const chunk of body) {
+ console.dir(JSON.parse(chunk.toString()));
+ }
+ return new Promise((resolve, reject) => {
+ body.on('close', () => {
+ error ? reject(error) : resolve();
+ });
+ });
+};
+try {
+ const response = await fetch('https://httpbin.org/stream/3');
+ await read(response.body);
+} catch (err) {
+ console.error(err.stack);
+}
+```
+
+#### Buffer
+If you prefer to cache binary data in full, use buffer(). (NOTE: `buffer()` is a `node-fetch`-only API)
+
+```js
+const fileType = require('file-type');
+
+fetch('https://assets-cdn.github.com/images/modules/logos_page/Octocat.png')
+ .then(res => res.buffer())
+ .then(buffer => fileType(buffer))
+ .then(type => { /* ... */ });
+```
+
+#### Accessing Headers and other Meta data
+```js
+fetch('https://github.com/')
+ .then(res => {
+ console.log(res.ok);
+ console.log(res.status);
+ console.log(res.statusText);
+ console.log(res.headers.raw());
+ console.log(res.headers.get('content-type'));
+ });
+```
+
+#### Extract Set-Cookie Header
+
+Unlike browsers, you can access raw `Set-Cookie` headers manually using `Headers.raw()`. This is a `node-fetch` only API.
+
+```js
+fetch(url).then(res => {
+ // returns an array of values, instead of a string of comma-separated values
+ console.log(res.headers.raw()['set-cookie']);
+});
+```
+
+#### Post data using a file stream
+
+```js
+const { createReadStream } = require('fs');
+
+const stream = createReadStream('input.txt');
+
+fetch('https://httpbin.org/post', { method: 'POST', body: stream })
+ .then(res => res.json())
+ .then(json => console.log(json));
+```
+
+#### Post with form-data (detect multipart)
+
+```js
+const FormData = require('form-data');
+
+const form = new FormData();
+form.append('a', 1);
+
+fetch('https://httpbin.org/post', { method: 'POST', body: form })
+ .then(res => res.json())
+ .then(json => console.log(json));
+
+// OR, using custom headers
+// NOTE: getHeaders() is non-standard API
+
+const form = new FormData();
+form.append('a', 1);
+
+const options = {
+ method: 'POST',
+ body: form,
+ headers: form.getHeaders()
+}
+
+fetch('https://httpbin.org/post', options)
+ .then(res => res.json())
+ .then(json => console.log(json));
+```
+
+#### Request cancellation with AbortSignal
+
+> NOTE: You may cancel streamed requests only on Node >= v8.0.0
+
+You may cancel requests with `AbortController`. A suggested implementation is [`abort-controller`](https://www.npmjs.com/package/abort-controller).
+
+An example of timing out a request after 150ms could be achieved as the following:
+
+```js
+import AbortController from 'abort-controller';
+
+const controller = new AbortController();
+const timeout = setTimeout(
+ () => { controller.abort(); },
+ 150,
+);
+
+fetch(url, { signal: controller.signal })
+ .then(res => res.json())
+ .then(
+ data => {
+ useData(data)
+ },
+ err => {
+ if (err.name === 'AbortError') {
+ // request was aborted
+ }
+ },
+ )
+ .finally(() => {
+ clearTimeout(timeout);
+ });
+```
+
+See [test cases](https://github.com/bitinn/node-fetch/blob/master/test/test.js) for more examples.
+
+
+## API
+
+### fetch(url[, options])
+
+- `url` A string representing the URL for fetching
+- `options` [Options](#fetch-options) for the HTTP(S) request
+- Returns: Promise<[Response](#class-response)>
+
+Perform an HTTP(S) fetch.
+
+`url` should be an absolute url, such as `https://example.com/`. A path-relative URL (`/file/under/root`) or protocol-relative URL (`//can-be-http-or-https.com/`) will result in a rejected `Promise`.
+
+
+### Options
+
+The default values are shown after each option key.
+
+```js
+{
+ // These properties are part of the Fetch Standard
+ method: 'GET',
+ headers: {}, // request headers. format is the identical to that accepted by the Headers constructor (see below)
+ body: null, // request body. can be null, a string, a Buffer, a Blob, or a Node.js Readable stream
+ redirect: 'follow', // set to `manual` to extract redirect headers, `error` to reject redirect
+ signal: null, // pass an instance of AbortSignal to optionally abort requests
+
+ // The following properties are node-fetch extensions
+ follow: 20, // maximum redirect count. 0 to not follow redirect
+ timeout: 0, // req/res timeout in ms, it resets on redirect. 0 to disable (OS limit applies). Signal is recommended instead.
+ compress: true, // support gzip/deflate content encoding. false to disable
+ size: 0, // maximum response body size in bytes. 0 to disable
+ agent: null // http(s).Agent instance or function that returns an instance (see below)
+}
+```
+
+##### Default Headers
+
+If no values are set, the following request headers will be sent automatically:
+
+Header | Value
+------------------- | --------------------------------------------------------
+`Accept-Encoding` | `gzip,deflate` _(when `options.compress === true`)_
+`Accept` | `*/*`
+`Connection` | `close` _(when no `options.agent` is present)_
+`Content-Length` | _(automatically calculated, if possible)_
+`Transfer-Encoding` | `chunked` _(when `req.body` is a stream)_
+`User-Agent` | `node-fetch/1.0 (+https://github.com/bitinn/node-fetch)`
+
+Note: when `body` is a `Stream`, `Content-Length` is not set automatically.
+
+##### Custom Agent
+
+The `agent` option allows you to specify networking related options which are out of the scope of Fetch, including and not limited to the following:
+
+- Support self-signed certificate
+- Use only IPv4 or IPv6
+- Custom DNS Lookup
+
+See [`http.Agent`](https://nodejs.org/api/http.html#http_new_agent_options) for more information.
+
+In addition, the `agent` option accepts a function that returns `http`(s)`.Agent` instance given current [URL](https://nodejs.org/api/url.html), this is useful during a redirection chain across HTTP and HTTPS protocol.
+
+```js
+const httpAgent = new http.Agent({
+ keepAlive: true
+});
+const httpsAgent = new https.Agent({
+ keepAlive: true
+});
+
+const options = {
+ agent: function (_parsedURL) {
+ if (_parsedURL.protocol == 'http:') {
+ return httpAgent;
+ } else {
+ return httpsAgent;
+ }
+ }
+}
+```
+
+
+### Class: Request
+
+An HTTP(S) request containing information about URL, method, headers, and the body. This class implements the [Body](#iface-body) interface.
+
+Due to the nature of Node.js, the following properties are not implemented at this moment:
+
+- `type`
+- `destination`
+- `referrer`
+- `referrerPolicy`
+- `mode`
+- `credentials`
+- `cache`
+- `integrity`
+- `keepalive`
+
+The following node-fetch extension properties are provided:
+
+- `follow`
+- `compress`
+- `counter`
+- `agent`
+
+See [options](#fetch-options) for exact meaning of these extensions.
+
+#### new Request(input[, options])
+
+*(spec-compliant)*
+
+- `input` A string representing a URL, or another `Request` (which will be cloned)
+- `options` [Options](#fetch-options) for the HTTP(S) request
+
+Constructs a new `Request` object. The constructor is identical to that in the [browser](https://developer.mozilla.org/en-US/docs/Web/API/Request/Request).
+
+In most cases, directly `fetch(url, options)` is simpler than creating a `Request` object.
+
+
+### Class: Response
+
+An HTTP(S) response. This class implements the [Body](#iface-body) interface.
+
+The following properties are not implemented in node-fetch at this moment:
+
+- `Response.error()`
+- `Response.redirect()`
+- `type`
+- `trailer`
+
+#### new Response([body[, options]])
+
+*(spec-compliant)*
+
+- `body` A `String` or [`Readable` stream][node-readable]
+- `options` A [`ResponseInit`][response-init] options dictionary
+
+Constructs a new `Response` object. The constructor is identical to that in the [browser](https://developer.mozilla.org/en-US/docs/Web/API/Response/Response).
+
+Because Node.js does not implement service workers (for which this class was designed), one rarely has to construct a `Response` directly.
+
+#### response.ok
+
+*(spec-compliant)*
+
+Convenience property representing if the request ended normally. Will evaluate to true if the response status was greater than or equal to 200 but smaller than 300.
+
+#### response.redirected
+
+*(spec-compliant)*
+
+Convenience property representing if the request has been redirected at least once. Will evaluate to true if the internal redirect counter is greater than 0.
+
+
+### Class: Headers
+
+This class allows manipulating and iterating over a set of HTTP headers. All methods specified in the [Fetch Standard][whatwg-fetch] are implemented.
+
+#### new Headers([init])
+
+*(spec-compliant)*
+
+- `init` Optional argument to pre-fill the `Headers` object
+
+Construct a new `Headers` object. `init` can be either `null`, a `Headers` object, a key-value map object or any iterable object.
+
+```js
+// Example adapted from https://fetch.spec.whatwg.org/#example-headers-class
+
+const meta = {
+ 'Content-Type': 'text/xml',
+ 'Breaking-Bad': '<3'
+};
+const headers = new Headers(meta);
+
+// The above is equivalent to
+const meta = [
+ [ 'Content-Type', 'text/xml' ],
+ [ 'Breaking-Bad', '<3' ]
+];
+const headers = new Headers(meta);
+
+// You can in fact use any iterable objects, like a Map or even another Headers
+const meta = new Map();
+meta.set('Content-Type', 'text/xml');
+meta.set('Breaking-Bad', '<3');
+const headers = new Headers(meta);
+const copyOfHeaders = new Headers(headers);
+```
+
+
+### Interface: Body
+
+`Body` is an abstract interface with methods that are applicable to both `Request` and `Response` classes.
+
+The following methods are not yet implemented in node-fetch at this moment:
+
+- `formData()`
+
+#### body.body
+
+*(deviation from spec)*
+
+* Node.js [`Readable` stream][node-readable]
+
+Data are encapsulated in the `Body` object. Note that while the [Fetch Standard][whatwg-fetch] requires the property to always be a WHATWG `ReadableStream`, in node-fetch it is a Node.js [`Readable` stream][node-readable].
+
+#### body.bodyUsed
+
+*(spec-compliant)*
+
+* `Boolean`
+
+A boolean property for if this body has been consumed. Per the specs, a consumed body cannot be used again.
+
+#### body.arrayBuffer()
+#### body.blob()
+#### body.json()
+#### body.text()
+
+*(spec-compliant)*
+
+* Returns: Promise
+
+Consume the body and return a promise that will resolve to one of these formats.
+
+#### body.buffer()
+
+*(node-fetch extension)*
+
+* Returns: Promise<Buffer>
+
+Consume the body and return a promise that will resolve to a Buffer.
+
+#### body.textConverted()
+
+*(node-fetch extension)*
+
+* Returns: Promise<String>
+
+Identical to `body.text()`, except instead of always converting to UTF-8, encoding sniffing will be performed and text converted to UTF-8 if possible.
+
+(This API requires an optional dependency of the npm package [encoding](https://www.npmjs.com/package/encoding), which you need to install manually. `webpack` users may see [a warning message](https://github.com/bitinn/node-fetch/issues/412#issuecomment-379007792) due to this optional dependency.)
+
+
+### Class: FetchError
+
+*(node-fetch extension)*
+
+An operational error in the fetching process. See [ERROR-HANDLING.md][] for more info.
+
+
+### Class: AbortError
+
+*(node-fetch extension)*
+
+An Error thrown when the request is aborted in response to an `AbortSignal`'s `abort` event. It has a `name` property of `AbortError`. See [ERROR-HANDLING.md][] for more info.
+
+## Acknowledgement
+
+Thanks to [github/fetch](https://github.com/github/fetch) for providing a solid implementation reference.
+
+`node-fetch` v1 was maintained by [@bitinn](https://github.com/bitinn); v2 was maintained by [@TimothyGu](https://github.com/timothygu), [@bitinn](https://github.com/bitinn) and [@jimmywarting](https://github.com/jimmywarting); v2 readme is written by [@jkantr](https://github.com/jkantr).
+
+## License
+
+MIT
+
+[npm-image]: https://flat.badgen.net/npm/v/node-fetch
+[npm-url]: https://www.npmjs.com/package/node-fetch
+[travis-image]: https://flat.badgen.net/travis/bitinn/node-fetch
+[travis-url]: https://travis-ci.org/bitinn/node-fetch
+[codecov-image]: https://flat.badgen.net/codecov/c/github/bitinn/node-fetch/master
+[codecov-url]: https://codecov.io/gh/bitinn/node-fetch
+[install-size-image]: https://flat.badgen.net/packagephobia/install/node-fetch
+[install-size-url]: https://packagephobia.now.sh/result?p=node-fetch
+[discord-image]: https://img.shields.io/discord/619915844268326952?color=%237289DA&label=Discord&style=flat-square
+[discord-url]: https://discord.gg/Zxbndcm
+[opencollective-image]: https://opencollective.com/node-fetch/backers.svg
+[opencollective-url]: https://opencollective.com/node-fetch
+[whatwg-fetch]: https://fetch.spec.whatwg.org/
+[response-init]: https://fetch.spec.whatwg.org/#responseinit
+[node-readable]: https://nodejs.org/api/stream.html#stream_readable_streams
+[mdn-headers]: https://developer.mozilla.org/en-US/docs/Web/API/Headers
+[LIMITS.md]: https://github.com/bitinn/node-fetch/blob/master/LIMITS.md
+[ERROR-HANDLING.md]: https://github.com/bitinn/node-fetch/blob/master/ERROR-HANDLING.md
+[UPGRADE-GUIDE.md]: https://github.com/bitinn/node-fetch/blob/master/UPGRADE-GUIDE.md
diff --git a/node_modules/node-fetch/browser.js b/node_modules/node-fetch/browser.js
new file mode 100644
index 0000000000000000000000000000000000000000..ee86265ae3adab9f03cf10f486384735417320e6
--- /dev/null
+++ b/node_modules/node-fetch/browser.js
@@ -0,0 +1,25 @@
+"use strict";
+
+// ref: https://github.com/tc39/proposal-global
+var getGlobal = function () {
+ // the only reliable means to get the global object is
+ // `Function('return this')()`
+ // However, this causes CSP violations in Chrome apps.
+ if (typeof self !== 'undefined') { return self; }
+ if (typeof window !== 'undefined') { return window; }
+ if (typeof global !== 'undefined') { return global; }
+ throw new Error('unable to locate global object');
+}
+
+var globalObject = getGlobal();
+
+module.exports = exports = globalObject.fetch;
+
+// Needed for TypeScript and Webpack.
+if (globalObject.fetch) {
+ exports.default = globalObject.fetch.bind(globalObject);
+}
+
+exports.Headers = globalObject.Headers;
+exports.Request = globalObject.Request;
+exports.Response = globalObject.Response;
diff --git a/node_modules/node-fetch/lib/index.es.js b/node_modules/node-fetch/lib/index.es.js
new file mode 100644
index 0000000000000000000000000000000000000000..79d717be9ae56d455f7f5c7890178bc4bb65da9c
--- /dev/null
+++ b/node_modules/node-fetch/lib/index.es.js
@@ -0,0 +1,1778 @@
+process.emitWarning("The .es.js file is deprecated. Use .mjs instead.");
+
+import Stream from 'stream';
+import http from 'http';
+import Url from 'url';
+import whatwgUrl from 'whatwg-url';
+import https from 'https';
+import zlib from 'zlib';
+
+// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js
+
+// fix for "Readable" isn't a named export issue
+const Readable = Stream.Readable;
+
+const BUFFER = Symbol('buffer');
+const TYPE = Symbol('type');
+
+class Blob {
+ constructor() {
+ this[TYPE] = '';
+
+ const blobParts = arguments[0];
+ const options = arguments[1];
+
+ const buffers = [];
+ let size = 0;
+
+ if (blobParts) {
+ const a = blobParts;
+ const length = Number(a.length);
+ for (let i = 0; i < length; i++) {
+ const element = a[i];
+ let buffer;
+ if (element instanceof Buffer) {
+ buffer = element;
+ } else if (ArrayBuffer.isView(element)) {
+ buffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength);
+ } else if (element instanceof ArrayBuffer) {
+ buffer = Buffer.from(element);
+ } else if (element instanceof Blob) {
+ buffer = element[BUFFER];
+ } else {
+ buffer = Buffer.from(typeof element === 'string' ? element : String(element));
+ }
+ size += buffer.length;
+ buffers.push(buffer);
+ }
+ }
+
+ this[BUFFER] = Buffer.concat(buffers);
+
+ let type = options && options.type !== undefined && String(options.type).toLowerCase();
+ if (type && !/[^\u0020-\u007E]/.test(type)) {
+ this[TYPE] = type;
+ }
+ }
+ get size() {
+ return this[BUFFER].length;
+ }
+ get type() {
+ return this[TYPE];
+ }
+ text() {
+ return Promise.resolve(this[BUFFER].toString());
+ }
+ arrayBuffer() {
+ const buf = this[BUFFER];
+ const ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);
+ return Promise.resolve(ab);
+ }
+ stream() {
+ const readable = new Readable();
+ readable._read = function () {};
+ readable.push(this[BUFFER]);
+ readable.push(null);
+ return readable;
+ }
+ toString() {
+ return '[object Blob]';
+ }
+ slice() {
+ const size = this.size;
+
+ const start = arguments[0];
+ const end = arguments[1];
+ let relativeStart, relativeEnd;
+ if (start === undefined) {
+ relativeStart = 0;
+ } else if (start < 0) {
+ relativeStart = Math.max(size + start, 0);
+ } else {
+ relativeStart = Math.min(start, size);
+ }
+ if (end === undefined) {
+ relativeEnd = size;
+ } else if (end < 0) {
+ relativeEnd = Math.max(size + end, 0);
+ } else {
+ relativeEnd = Math.min(end, size);
+ }
+ const span = Math.max(relativeEnd - relativeStart, 0);
+
+ const buffer = this[BUFFER];
+ const slicedBuffer = buffer.slice(relativeStart, relativeStart + span);
+ const blob = new Blob([], { type: arguments[2] });
+ blob[BUFFER] = slicedBuffer;
+ return blob;
+ }
+}
+
+Object.defineProperties(Blob.prototype, {
+ size: { enumerable: true },
+ type: { enumerable: true },
+ slice: { enumerable: true }
+});
+
+Object.defineProperty(Blob.prototype, Symbol.toStringTag, {
+ value: 'Blob',
+ writable: false,
+ enumerable: false,
+ configurable: true
+});
+
+/**
+ * fetch-error.js
+ *
+ * FetchError interface for operational errors
+ */
+
+/**
+ * Create FetchError instance
+ *
+ * @param String message Error message for human
+ * @param String type Error type for machine
+ * @param String systemError For Node.js system error
+ * @return FetchError
+ */
+function FetchError(message, type, systemError) {
+ Error.call(this, message);
+
+ this.message = message;
+ this.type = type;
+
+ // when err.type is `system`, err.code contains system error code
+ if (systemError) {
+ this.code = this.errno = systemError.code;
+ }
+
+ // hide custom error implementation details from end-users
+ Error.captureStackTrace(this, this.constructor);
+}
+
+FetchError.prototype = Object.create(Error.prototype);
+FetchError.prototype.constructor = FetchError;
+FetchError.prototype.name = 'FetchError';
+
+let convert;
+try {
+ convert = require('encoding').convert;
+} catch (e) {}
+
+const INTERNALS = Symbol('Body internals');
+
+// fix an issue where "PassThrough" isn't a named export for node <10
+const PassThrough = Stream.PassThrough;
+
+/**
+ * Body mixin
+ *
+ * Ref: https://fetch.spec.whatwg.org/#body
+ *
+ * @param Stream body Readable stream
+ * @param Object opts Response options
+ * @return Void
+ */
+function Body(body) {
+ var _this = this;
+
+ var _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {},
+ _ref$size = _ref.size;
+
+ let size = _ref$size === undefined ? 0 : _ref$size;
+ var _ref$timeout = _ref.timeout;
+ let timeout = _ref$timeout === undefined ? 0 : _ref$timeout;
+
+ if (body == null) {
+ // body is undefined or null
+ body = null;
+ } else if (isURLSearchParams(body)) {
+ // body is a URLSearchParams
+ body = Buffer.from(body.toString());
+ } else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') {
+ // body is ArrayBuffer
+ body = Buffer.from(body);
+ } else if (ArrayBuffer.isView(body)) {
+ // body is ArrayBufferView
+ body = Buffer.from(body.buffer, body.byteOffset, body.byteLength);
+ } else if (body instanceof Stream) ; else {
+ // none of the above
+ // coerce to string then buffer
+ body = Buffer.from(String(body));
+ }
+ this[INTERNALS] = {
+ body,
+ disturbed: false,
+ error: null
+ };
+ this.size = size;
+ this.timeout = timeout;
+
+ if (body instanceof Stream) {
+ body.on('error', function (err) {
+ const error = err.name === 'AbortError' ? err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err);
+ _this[INTERNALS].error = error;
+ });
+ }
+}
+
+Body.prototype = {
+ get body() {
+ return this[INTERNALS].body;
+ },
+
+ get bodyUsed() {
+ return this[INTERNALS].disturbed;
+ },
+
+ /**
+ * Decode response as ArrayBuffer
+ *
+ * @return Promise
+ */
+ arrayBuffer() {
+ return consumeBody.call(this).then(function (buf) {
+ return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);
+ });
+ },
+
+ /**
+ * Return raw response as Blob
+ *
+ * @return Promise
+ */
+ blob() {
+ let ct = this.headers && this.headers.get('content-type') || '';
+ return consumeBody.call(this).then(function (buf) {
+ return Object.assign(
+ // Prevent copying
+ new Blob([], {
+ type: ct.toLowerCase()
+ }), {
+ [BUFFER]: buf
+ });
+ });
+ },
+
+ /**
+ * Decode response as json
+ *
+ * @return Promise
+ */
+ json() {
+ var _this2 = this;
+
+ return consumeBody.call(this).then(function (buffer) {
+ try {
+ return JSON.parse(buffer.toString());
+ } catch (err) {
+ return Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json'));
+ }
+ });
+ },
+
+ /**
+ * Decode response as text
+ *
+ * @return Promise
+ */
+ text() {
+ return consumeBody.call(this).then(function (buffer) {
+ return buffer.toString();
+ });
+ },
+
+ /**
+ * Decode response as buffer (non-spec api)
+ *
+ * @return Promise
+ */
+ buffer() {
+ return consumeBody.call(this);
+ },
+
+ /**
+ * Decode response as text, while automatically detecting the encoding and
+ * trying to decode to UTF-8 (non-spec api)
+ *
+ * @return Promise
+ */
+ textConverted() {
+ var _this3 = this;
+
+ return consumeBody.call(this).then(function (buffer) {
+ return convertBody(buffer, _this3.headers);
+ });
+ }
+};
+
+// In browsers, all properties are enumerable.
+Object.defineProperties(Body.prototype, {
+ body: { enumerable: true },
+ bodyUsed: { enumerable: true },
+ arrayBuffer: { enumerable: true },
+ blob: { enumerable: true },
+ json: { enumerable: true },
+ text: { enumerable: true }
+});
+
+Body.mixIn = function (proto) {
+ for (const name of Object.getOwnPropertyNames(Body.prototype)) {
+ // istanbul ignore else: future proof
+ if (!(name in proto)) {
+ const desc = Object.getOwnPropertyDescriptor(Body.prototype, name);
+ Object.defineProperty(proto, name, desc);
+ }
+ }
+};
+
+/**
+ * Consume and convert an entire Body to a Buffer.
+ *
+ * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body
+ *
+ * @return Promise
+ */
+function consumeBody() {
+ var _this4 = this;
+
+ if (this[INTERNALS].disturbed) {
+ return Body.Promise.reject(new TypeError(`body used already for: ${this.url}`));
+ }
+
+ this[INTERNALS].disturbed = true;
+
+ if (this[INTERNALS].error) {
+ return Body.Promise.reject(this[INTERNALS].error);
+ }
+
+ let body = this.body;
+
+ // body is null
+ if (body === null) {
+ return Body.Promise.resolve(Buffer.alloc(0));
+ }
+
+ // body is blob
+ if (isBlob(body)) {
+ body = body.stream();
+ }
+
+ // body is buffer
+ if (Buffer.isBuffer(body)) {
+ return Body.Promise.resolve(body);
+ }
+
+ // istanbul ignore if: should never happen
+ if (!(body instanceof Stream)) {
+ return Body.Promise.resolve(Buffer.alloc(0));
+ }
+
+ // body is stream
+ // get ready to actually consume the body
+ let accum = [];
+ let accumBytes = 0;
+ let abort = false;
+
+ return new Body.Promise(function (resolve, reject) {
+ let resTimeout;
+
+ // allow timeout on slow response body
+ if (_this4.timeout) {
+ resTimeout = setTimeout(function () {
+ abort = true;
+ reject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout'));
+ }, _this4.timeout);
+ }
+
+ // handle stream errors
+ body.on('error', function (err) {
+ if (err.name === 'AbortError') {
+ // if the request was aborted, reject with this Error
+ abort = true;
+ reject(err);
+ } else {
+ // other errors, such as incorrect content-encoding
+ reject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err));
+ }
+ });
+
+ body.on('data', function (chunk) {
+ if (abort || chunk === null) {
+ return;
+ }
+
+ if (_this4.size && accumBytes + chunk.length > _this4.size) {
+ abort = true;
+ reject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size'));
+ return;
+ }
+
+ accumBytes += chunk.length;
+ accum.push(chunk);
+ });
+
+ body.on('end', function () {
+ if (abort) {
+ return;
+ }
+
+ clearTimeout(resTimeout);
+
+ try {
+ resolve(Buffer.concat(accum, accumBytes));
+ } catch (err) {
+ // handle streams that have accumulated too much data (issue #414)
+ reject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err));
+ }
+ });
+ });
+}
+
+/**
+ * Detect buffer encoding and convert to target encoding
+ * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding
+ *
+ * @param Buffer buffer Incoming buffer
+ * @param String encoding Target encoding
+ * @return String
+ */
+function convertBody(buffer, headers) {
+ if (typeof convert !== 'function') {
+ throw new Error('The package `encoding` must be installed to use the textConverted() function');
+ }
+
+ const ct = headers.get('content-type');
+ let charset = 'utf-8';
+ let res, str;
+
+ // header
+ if (ct) {
+ res = /charset=([^;]*)/i.exec(ct);
+ }
+
+ // no charset in content type, peek at response body for at most 1024 bytes
+ str = buffer.slice(0, 1024).toString();
+
+ // html5
+ if (!res && str) {
+ res = / 0 && arguments[0] !== undefined ? arguments[0] : undefined;
+
+ this[MAP] = Object.create(null);
+
+ if (init instanceof Headers) {
+ const rawHeaders = init.raw();
+ const headerNames = Object.keys(rawHeaders);
+
+ for (const headerName of headerNames) {
+ for (const value of rawHeaders[headerName]) {
+ this.append(headerName, value);
+ }
+ }
+
+ return;
+ }
+
+ // We don't worry about converting prop to ByteString here as append()
+ // will handle it.
+ if (init == null) ; else if (typeof init === 'object') {
+ const method = init[Symbol.iterator];
+ if (method != null) {
+ if (typeof method !== 'function') {
+ throw new TypeError('Header pairs must be iterable');
+ }
+
+ // sequence>
+ // Note: per spec we have to first exhaust the lists then process them
+ const pairs = [];
+ for (const pair of init) {
+ if (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') {
+ throw new TypeError('Each header pair must be iterable');
+ }
+ pairs.push(Array.from(pair));
+ }
+
+ for (const pair of pairs) {
+ if (pair.length !== 2) {
+ throw new TypeError('Each header pair must be a name/value tuple');
+ }
+ this.append(pair[0], pair[1]);
+ }
+ } else {
+ // record
+ for (const key of Object.keys(init)) {
+ const value = init[key];
+ this.append(key, value);
+ }
+ }
+ } else {
+ throw new TypeError('Provided initializer must be an object');
+ }
+ }
+
+ /**
+ * Return combined header value given name
+ *
+ * @param String name Header name
+ * @return Mixed
+ */
+ get(name) {
+ name = `${name}`;
+ validateName(name);
+ const key = find(this[MAP], name);
+ if (key === undefined) {
+ return null;
+ }
+
+ return this[MAP][key].join(', ');
+ }
+
+ /**
+ * Iterate over all headers
+ *
+ * @param Function callback Executed for each item with parameters (value, name, thisArg)
+ * @param Boolean thisArg `this` context for callback function
+ * @return Void
+ */
+ forEach(callback) {
+ let thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined;
+
+ let pairs = getHeaders(this);
+ let i = 0;
+ while (i < pairs.length) {
+ var _pairs$i = pairs[i];
+ const name = _pairs$i[0],
+ value = _pairs$i[1];
+
+ callback.call(thisArg, value, name, this);
+ pairs = getHeaders(this);
+ i++;
+ }
+ }
+
+ /**
+ * Overwrite header values given name
+ *
+ * @param String name Header name
+ * @param String value Header value
+ * @return Void
+ */
+ set(name, value) {
+ name = `${name}`;
+ value = `${value}`;
+ validateName(name);
+ validateValue(value);
+ const key = find(this[MAP], name);
+ this[MAP][key !== undefined ? key : name] = [value];
+ }
+
+ /**
+ * Append a value onto existing header
+ *
+ * @param String name Header name
+ * @param String value Header value
+ * @return Void
+ */
+ append(name, value) {
+ name = `${name}`;
+ value = `${value}`;
+ validateName(name);
+ validateValue(value);
+ const key = find(this[MAP], name);
+ if (key !== undefined) {
+ this[MAP][key].push(value);
+ } else {
+ this[MAP][name] = [value];
+ }
+ }
+
+ /**
+ * Check for header name existence
+ *
+ * @param String name Header name
+ * @return Boolean
+ */
+ has(name) {
+ name = `${name}`;
+ validateName(name);
+ return find(this[MAP], name) !== undefined;
+ }
+
+ /**
+ * Delete all header values given name
+ *
+ * @param String name Header name
+ * @return Void
+ */
+ delete(name) {
+ name = `${name}`;
+ validateName(name);
+ const key = find(this[MAP], name);
+ if (key !== undefined) {
+ delete this[MAP][key];
+ }
+ }
+
+ /**
+ * Return raw headers (non-spec api)
+ *
+ * @return Object
+ */
+ raw() {
+ return this[MAP];
+ }
+
+ /**
+ * Get an iterator on keys.
+ *
+ * @return Iterator
+ */
+ keys() {
+ return createHeadersIterator(this, 'key');
+ }
+
+ /**
+ * Get an iterator on values.
+ *
+ * @return Iterator
+ */
+ values() {
+ return createHeadersIterator(this, 'value');
+ }
+
+ /**
+ * Get an iterator on entries.
+ *
+ * This is the default iterator of the Headers object.
+ *
+ * @return Iterator
+ */
+ [Symbol.iterator]() {
+ return createHeadersIterator(this, 'key+value');
+ }
+}
+Headers.prototype.entries = Headers.prototype[Symbol.iterator];
+
+Object.defineProperty(Headers.prototype, Symbol.toStringTag, {
+ value: 'Headers',
+ writable: false,
+ enumerable: false,
+ configurable: true
+});
+
+Object.defineProperties(Headers.prototype, {
+ get: { enumerable: true },
+ forEach: { enumerable: true },
+ set: { enumerable: true },
+ append: { enumerable: true },
+ has: { enumerable: true },
+ delete: { enumerable: true },
+ keys: { enumerable: true },
+ values: { enumerable: true },
+ entries: { enumerable: true }
+});
+
+function getHeaders(headers) {
+ let kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value';
+
+ const keys = Object.keys(headers[MAP]).sort();
+ return keys.map(kind === 'key' ? function (k) {
+ return k.toLowerCase();
+ } : kind === 'value' ? function (k) {
+ return headers[MAP][k].join(', ');
+ } : function (k) {
+ return [k.toLowerCase(), headers[MAP][k].join(', ')];
+ });
+}
+
+const INTERNAL = Symbol('internal');
+
+function createHeadersIterator(target, kind) {
+ const iterator = Object.create(HeadersIteratorPrototype);
+ iterator[INTERNAL] = {
+ target,
+ kind,
+ index: 0
+ };
+ return iterator;
+}
+
+const HeadersIteratorPrototype = Object.setPrototypeOf({
+ next() {
+ // istanbul ignore if
+ if (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) {
+ throw new TypeError('Value of `this` is not a HeadersIterator');
+ }
+
+ var _INTERNAL = this[INTERNAL];
+ const target = _INTERNAL.target,
+ kind = _INTERNAL.kind,
+ index = _INTERNAL.index;
+
+ const values = getHeaders(target, kind);
+ const len = values.length;
+ if (index >= len) {
+ return {
+ value: undefined,
+ done: true
+ };
+ }
+
+ this[INTERNAL].index = index + 1;
+
+ return {
+ value: values[index],
+ done: false
+ };
+ }
+}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]())));
+
+Object.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, {
+ value: 'HeadersIterator',
+ writable: false,
+ enumerable: false,
+ configurable: true
+});
+
+/**
+ * Export the Headers object in a form that Node.js can consume.
+ *
+ * @param Headers headers
+ * @return Object
+ */
+function exportNodeCompatibleHeaders(headers) {
+ const obj = Object.assign({ __proto__: null }, headers[MAP]);
+
+ // http.request() only supports string as Host header. This hack makes
+ // specifying custom Host header possible.
+ const hostHeaderKey = find(headers[MAP], 'Host');
+ if (hostHeaderKey !== undefined) {
+ obj[hostHeaderKey] = obj[hostHeaderKey][0];
+ }
+
+ return obj;
+}
+
+/**
+ * Create a Headers object from an object of headers, ignoring those that do
+ * not conform to HTTP grammar productions.
+ *
+ * @param Object obj Object of headers
+ * @return Headers
+ */
+function createHeadersLenient(obj) {
+ const headers = new Headers();
+ for (const name of Object.keys(obj)) {
+ if (invalidTokenRegex.test(name)) {
+ continue;
+ }
+ if (Array.isArray(obj[name])) {
+ for (const val of obj[name]) {
+ if (invalidHeaderCharRegex.test(val)) {
+ continue;
+ }
+ if (headers[MAP][name] === undefined) {
+ headers[MAP][name] = [val];
+ } else {
+ headers[MAP][name].push(val);
+ }
+ }
+ } else if (!invalidHeaderCharRegex.test(obj[name])) {
+ headers[MAP][name] = [obj[name]];
+ }
+ }
+ return headers;
+}
+
+const INTERNALS$1 = Symbol('Response internals');
+
+// fix an issue where "STATUS_CODES" aren't a named export for node <10
+const STATUS_CODES = http.STATUS_CODES;
+
+/**
+ * Response class
+ *
+ * @param Stream body Readable stream
+ * @param Object opts Response options
+ * @return Void
+ */
+class Response {
+ constructor() {
+ let body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null;
+ let opts = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
+
+ Body.call(this, body, opts);
+
+ const status = opts.status || 200;
+ const headers = new Headers(opts.headers);
+
+ if (body != null && !headers.has('Content-Type')) {
+ const contentType = extractContentType(body);
+ if (contentType) {
+ headers.append('Content-Type', contentType);
+ }
+ }
+
+ this[INTERNALS$1] = {
+ url: opts.url,
+ status,
+ statusText: opts.statusText || STATUS_CODES[status],
+ headers,
+ counter: opts.counter
+ };
+ }
+
+ get url() {
+ return this[INTERNALS$1].url || '';
+ }
+
+ get status() {
+ return this[INTERNALS$1].status;
+ }
+
+ /**
+ * Convenience property representing if the request ended normally
+ */
+ get ok() {
+ return this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300;
+ }
+
+ get redirected() {
+ return this[INTERNALS$1].counter > 0;
+ }
+
+ get statusText() {
+ return this[INTERNALS$1].statusText;
+ }
+
+ get headers() {
+ return this[INTERNALS$1].headers;
+ }
+
+ /**
+ * Clone this response
+ *
+ * @return Response
+ */
+ clone() {
+ return new Response(clone(this), {
+ url: this.url,
+ status: this.status,
+ statusText: this.statusText,
+ headers: this.headers,
+ ok: this.ok,
+ redirected: this.redirected
+ });
+ }
+}
+
+Body.mixIn(Response.prototype);
+
+Object.defineProperties(Response.prototype, {
+ url: { enumerable: true },
+ status: { enumerable: true },
+ ok: { enumerable: true },
+ redirected: { enumerable: true },
+ statusText: { enumerable: true },
+ headers: { enumerable: true },
+ clone: { enumerable: true }
+});
+
+Object.defineProperty(Response.prototype, Symbol.toStringTag, {
+ value: 'Response',
+ writable: false,
+ enumerable: false,
+ configurable: true
+});
+
+const INTERNALS$2 = Symbol('Request internals');
+const URL = Url.URL || whatwgUrl.URL;
+
+// fix an issue where "format", "parse" aren't a named export for node <10
+const parse_url = Url.parse;
+const format_url = Url.format;
+
+/**
+ * Wrapper around `new URL` to handle arbitrary URLs
+ *
+ * @param {string} urlStr
+ * @return {void}
+ */
+function parseURL(urlStr) {
+ /*
+ Check whether the URL is absolute or not
+ Scheme: https://tools.ietf.org/html/rfc3986#section-3.1
+ Absolute URL: https://tools.ietf.org/html/rfc3986#section-4.3
+ */
+ if (/^[a-zA-Z][a-zA-Z\d+\-.]*:/.exec(urlStr)) {
+ urlStr = new URL(urlStr).toString();
+ }
+
+ // Fallback to old implementation for arbitrary URLs
+ return parse_url(urlStr);
+}
+
+const streamDestructionSupported = 'destroy' in Stream.Readable.prototype;
+
+/**
+ * Check if a value is an instance of Request.
+ *
+ * @param Mixed input
+ * @return Boolean
+ */
+function isRequest(input) {
+ return typeof input === 'object' && typeof input[INTERNALS$2] === 'object';
+}
+
+function isAbortSignal(signal) {
+ const proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal);
+ return !!(proto && proto.constructor.name === 'AbortSignal');
+}
+
+/**
+ * Request class
+ *
+ * @param Mixed input Url or Request instance
+ * @param Object init Custom options
+ * @return Void
+ */
+class Request {
+ constructor(input) {
+ let init = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
+
+ let parsedURL;
+
+ // normalize input
+ if (!isRequest(input)) {
+ if (input && input.href) {
+ // in order to support Node.js' Url objects; though WHATWG's URL objects
+ // will fall into this branch also (since their `toString()` will return
+ // `href` property anyway)
+ parsedURL = parseURL(input.href);
+ } else {
+ // coerce input to a string before attempting to parse
+ parsedURL = parseURL(`${input}`);
+ }
+ input = {};
+ } else {
+ parsedURL = parseURL(input.url);
+ }
+
+ let method = init.method || input.method || 'GET';
+ method = method.toUpperCase();
+
+ if ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) {
+ throw new TypeError('Request with GET/HEAD method cannot have body');
+ }
+
+ let inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? clone(input) : null;
+
+ Body.call(this, inputBody, {
+ timeout: init.timeout || input.timeout || 0,
+ size: init.size || input.size || 0
+ });
+
+ const headers = new Headers(init.headers || input.headers || {});
+
+ if (inputBody != null && !headers.has('Content-Type')) {
+ const contentType = extractContentType(inputBody);
+ if (contentType) {
+ headers.append('Content-Type', contentType);
+ }
+ }
+
+ let signal = isRequest(input) ? input.signal : null;
+ if ('signal' in init) signal = init.signal;
+
+ if (signal != null && !isAbortSignal(signal)) {
+ throw new TypeError('Expected signal to be an instanceof AbortSignal');
+ }
+
+ this[INTERNALS$2] = {
+ method,
+ redirect: init.redirect || input.redirect || 'follow',
+ headers,
+ parsedURL,
+ signal
+ };
+
+ // node-fetch-only options
+ this.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20;
+ this.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? input.compress : true;
+ this.counter = init.counter || input.counter || 0;
+ this.agent = init.agent || input.agent;
+ }
+
+ get method() {
+ return this[INTERNALS$2].method;
+ }
+
+ get url() {
+ return format_url(this[INTERNALS$2].parsedURL);
+ }
+
+ get headers() {
+ return this[INTERNALS$2].headers;
+ }
+
+ get redirect() {
+ return this[INTERNALS$2].redirect;
+ }
+
+ get signal() {
+ return this[INTERNALS$2].signal;
+ }
+
+ /**
+ * Clone this request
+ *
+ * @return Request
+ */
+ clone() {
+ return new Request(this);
+ }
+}
+
+Body.mixIn(Request.prototype);
+
+Object.defineProperty(Request.prototype, Symbol.toStringTag, {
+ value: 'Request',
+ writable: false,
+ enumerable: false,
+ configurable: true
+});
+
+Object.defineProperties(Request.prototype, {
+ method: { enumerable: true },
+ url: { enumerable: true },
+ headers: { enumerable: true },
+ redirect: { enumerable: true },
+ clone: { enumerable: true },
+ signal: { enumerable: true }
+});
+
+/**
+ * Convert a Request to Node.js http request options.
+ *
+ * @param Request A Request instance
+ * @return Object The options object to be passed to http.request
+ */
+function getNodeRequestOptions(request) {
+ const parsedURL = request[INTERNALS$2].parsedURL;
+ const headers = new Headers(request[INTERNALS$2].headers);
+
+ // fetch step 1.3
+ if (!headers.has('Accept')) {
+ headers.set('Accept', '*/*');
+ }
+
+ // Basic fetch
+ if (!parsedURL.protocol || !parsedURL.hostname) {
+ throw new TypeError('Only absolute URLs are supported');
+ }
+
+ if (!/^https?:$/.test(parsedURL.protocol)) {
+ throw new TypeError('Only HTTP(S) protocols are supported');
+ }
+
+ if (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) {
+ throw new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8');
+ }
+
+ // HTTP-network-or-cache fetch steps 2.4-2.7
+ let contentLengthValue = null;
+ if (request.body == null && /^(POST|PUT)$/i.test(request.method)) {
+ contentLengthValue = '0';
+ }
+ if (request.body != null) {
+ const totalBytes = getTotalBytes(request);
+ if (typeof totalBytes === 'number') {
+ contentLengthValue = String(totalBytes);
+ }
+ }
+ if (contentLengthValue) {
+ headers.set('Content-Length', contentLengthValue);
+ }
+
+ // HTTP-network-or-cache fetch step 2.11
+ if (!headers.has('User-Agent')) {
+ headers.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)');
+ }
+
+ // HTTP-network-or-cache fetch step 2.15
+ if (request.compress && !headers.has('Accept-Encoding')) {
+ headers.set('Accept-Encoding', 'gzip,deflate');
+ }
+
+ let agent = request.agent;
+ if (typeof agent === 'function') {
+ agent = agent(parsedURL);
+ }
+
+ if (!headers.has('Connection') && !agent) {
+ headers.set('Connection', 'close');
+ }
+
+ // HTTP-network fetch step 4.2
+ // chunked encoding is handled by Node.js
+
+ return Object.assign({}, parsedURL, {
+ method: request.method,
+ headers: exportNodeCompatibleHeaders(headers),
+ agent
+ });
+}
+
+/**
+ * abort-error.js
+ *
+ * AbortError interface for cancelled requests
+ */
+
+/**
+ * Create AbortError instance
+ *
+ * @param String message Error message for human
+ * @return AbortError
+ */
+function AbortError(message) {
+ Error.call(this, message);
+
+ this.type = 'aborted';
+ this.message = message;
+
+ // hide custom error implementation details from end-users
+ Error.captureStackTrace(this, this.constructor);
+}
+
+AbortError.prototype = Object.create(Error.prototype);
+AbortError.prototype.constructor = AbortError;
+AbortError.prototype.name = 'AbortError';
+
+const URL$1 = Url.URL || whatwgUrl.URL;
+
+// fix an issue where "PassThrough", "resolve" aren't a named export for node <10
+const PassThrough$1 = Stream.PassThrough;
+
+const isDomainOrSubdomain = function isDomainOrSubdomain(destination, original) {
+ const orig = new URL$1(original).hostname;
+ const dest = new URL$1(destination).hostname;
+
+ return orig === dest || orig[orig.length - dest.length - 1] === '.' && orig.endsWith(dest);
+};
+
+/**
+ * isSameProtocol reports whether the two provided URLs use the same protocol.
+ *
+ * Both domains must already be in canonical form.
+ * @param {string|URL} original
+ * @param {string|URL} destination
+ */
+const isSameProtocol = function isSameProtocol(destination, original) {
+ const orig = new URL$1(original).protocol;
+ const dest = new URL$1(destination).protocol;
+
+ return orig === dest;
+};
+
+/**
+ * Fetch function
+ *
+ * @param Mixed url Absolute url or Request instance
+ * @param Object opts Fetch options
+ * @return Promise
+ */
+function fetch(url, opts) {
+
+ // allow custom promise
+ if (!fetch.Promise) {
+ throw new Error('native promise missing, set fetch.Promise to your favorite alternative');
+ }
+
+ Body.Promise = fetch.Promise;
+
+ // wrap http.request into fetch
+ return new fetch.Promise(function (resolve, reject) {
+ // build request object
+ const request = new Request(url, opts);
+ const options = getNodeRequestOptions(request);
+
+ const send = (options.protocol === 'https:' ? https : http).request;
+ const signal = request.signal;
+
+ let response = null;
+
+ const abort = function abort() {
+ let error = new AbortError('The user aborted a request.');
+ reject(error);
+ if (request.body && request.body instanceof Stream.Readable) {
+ destroyStream(request.body, error);
+ }
+ if (!response || !response.body) return;
+ response.body.emit('error', error);
+ };
+
+ if (signal && signal.aborted) {
+ abort();
+ return;
+ }
+
+ const abortAndFinalize = function abortAndFinalize() {
+ abort();
+ finalize();
+ };
+
+ // send request
+ const req = send(options);
+ let reqTimeout;
+
+ if (signal) {
+ signal.addEventListener('abort', abortAndFinalize);
+ }
+
+ function finalize() {
+ req.abort();
+ if (signal) signal.removeEventListener('abort', abortAndFinalize);
+ clearTimeout(reqTimeout);
+ }
+
+ if (request.timeout) {
+ req.once('socket', function (socket) {
+ reqTimeout = setTimeout(function () {
+ reject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout'));
+ finalize();
+ }, request.timeout);
+ });
+ }
+
+ req.on('error', function (err) {
+ reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err));
+
+ if (response && response.body) {
+ destroyStream(response.body, err);
+ }
+
+ finalize();
+ });
+
+ fixResponseChunkedTransferBadEnding(req, function (err) {
+ if (signal && signal.aborted) {
+ return;
+ }
+
+ if (response && response.body) {
+ destroyStream(response.body, err);
+ }
+ });
+
+ /* c8 ignore next 18 */
+ if (parseInt(process.version.substring(1)) < 14) {
+ // Before Node.js 14, pipeline() does not fully support async iterators and does not always
+ // properly handle when the socket close/end events are out of order.
+ req.on('socket', function (s) {
+ s.addListener('close', function (hadError) {
+ // if a data listener is still present we didn't end cleanly
+ const hasDataListener = s.listenerCount('data') > 0;
+
+ // if end happened before close but the socket didn't emit an error, do it now
+ if (response && hasDataListener && !hadError && !(signal && signal.aborted)) {
+ const err = new Error('Premature close');
+ err.code = 'ERR_STREAM_PREMATURE_CLOSE';
+ response.body.emit('error', err);
+ }
+ });
+ });
+ }
+
+ req.on('response', function (res) {
+ clearTimeout(reqTimeout);
+
+ const headers = createHeadersLenient(res.headers);
+
+ // HTTP fetch step 5
+ if (fetch.isRedirect(res.statusCode)) {
+ // HTTP fetch step 5.2
+ const location = headers.get('Location');
+
+ // HTTP fetch step 5.3
+ let locationURL = null;
+ try {
+ locationURL = location === null ? null : new URL$1(location, request.url).toString();
+ } catch (err) {
+ // error here can only be invalid URL in Location: header
+ // do not throw when options.redirect == manual
+ // let the user extract the errorneous redirect URL
+ if (request.redirect !== 'manual') {
+ reject(new FetchError(`uri requested responds with an invalid redirect URL: ${location}`, 'invalid-redirect'));
+ finalize();
+ return;
+ }
+ }
+
+ // HTTP fetch step 5.5
+ switch (request.redirect) {
+ case 'error':
+ reject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request.url}`, 'no-redirect'));
+ finalize();
+ return;
+ case 'manual':
+ // node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL.
+ if (locationURL !== null) {
+ // handle corrupted header
+ try {
+ headers.set('Location', locationURL);
+ } catch (err) {
+ // istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request
+ reject(err);
+ }
+ }
+ break;
+ case 'follow':
+ // HTTP-redirect fetch step 2
+ if (locationURL === null) {
+ break;
+ }
+
+ // HTTP-redirect fetch step 5
+ if (request.counter >= request.follow) {
+ reject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect'));
+ finalize();
+ return;
+ }
+
+ // HTTP-redirect fetch step 6 (counter increment)
+ // Create a new Request object.
+ const requestOpts = {
+ headers: new Headers(request.headers),
+ follow: request.follow,
+ counter: request.counter + 1,
+ agent: request.agent,
+ compress: request.compress,
+ method: request.method,
+ body: request.body,
+ signal: request.signal,
+ timeout: request.timeout,
+ size: request.size
+ };
+
+ if (!isDomainOrSubdomain(request.url, locationURL) || !isSameProtocol(request.url, locationURL)) {
+ for (const name of ['authorization', 'www-authenticate', 'cookie', 'cookie2']) {
+ requestOpts.headers.delete(name);
+ }
+ }
+
+ // HTTP-redirect fetch step 9
+ if (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) {
+ reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect'));
+ finalize();
+ return;
+ }
+
+ // HTTP-redirect fetch step 11
+ if (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') {
+ requestOpts.method = 'GET';
+ requestOpts.body = undefined;
+ requestOpts.headers.delete('content-length');
+ }
+
+ // HTTP-redirect fetch step 15
+ resolve(fetch(new Request(locationURL, requestOpts)));
+ finalize();
+ return;
+ }
+ }
+
+ // prepare response
+ res.once('end', function () {
+ if (signal) signal.removeEventListener('abort', abortAndFinalize);
+ });
+ let body = res.pipe(new PassThrough$1());
+
+ const response_options = {
+ url: request.url,
+ status: res.statusCode,
+ statusText: res.statusMessage,
+ headers: headers,
+ size: request.size,
+ timeout: request.timeout,
+ counter: request.counter
+ };
+
+ // HTTP-network fetch step 12.1.1.3
+ const codings = headers.get('Content-Encoding');
+
+ // HTTP-network fetch step 12.1.1.4: handle content codings
+
+ // in following scenarios we ignore compression support
+ // 1. compression support is disabled
+ // 2. HEAD request
+ // 3. no Content-Encoding header
+ // 4. no content response (204)
+ // 5. content not modified response (304)
+ if (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) {
+ response = new Response(body, response_options);
+ resolve(response);
+ return;
+ }
+
+ // For Node v6+
+ // Be less strict when decoding compressed responses, since sometimes
+ // servers send slightly invalid responses that are still accepted
+ // by common browsers.
+ // Always using Z_SYNC_FLUSH is what cURL does.
+ const zlibOptions = {
+ flush: zlib.Z_SYNC_FLUSH,
+ finishFlush: zlib.Z_SYNC_FLUSH
+ };
+
+ // for gzip
+ if (codings == 'gzip' || codings == 'x-gzip') {
+ body = body.pipe(zlib.createGunzip(zlibOptions));
+ response = new Response(body, response_options);
+ resolve(response);
+ return;
+ }
+
+ // for deflate
+ if (codings == 'deflate' || codings == 'x-deflate') {
+ // handle the infamous raw deflate response from old servers
+ // a hack for old IIS and Apache servers
+ const raw = res.pipe(new PassThrough$1());
+ raw.once('data', function (chunk) {
+ // see http://stackoverflow.com/questions/37519828
+ if ((chunk[0] & 0x0F) === 0x08) {
+ body = body.pipe(zlib.createInflate());
+ } else {
+ body = body.pipe(zlib.createInflateRaw());
+ }
+ response = new Response(body, response_options);
+ resolve(response);
+ });
+ raw.on('end', function () {
+ // some old IIS servers return zero-length OK deflate responses, so 'data' is never emitted.
+ if (!response) {
+ response = new Response(body, response_options);
+ resolve(response);
+ }
+ });
+ return;
+ }
+
+ // for br
+ if (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') {
+ body = body.pipe(zlib.createBrotliDecompress());
+ response = new Response(body, response_options);
+ resolve(response);
+ return;
+ }
+
+ // otherwise, use response as-is
+ response = new Response(body, response_options);
+ resolve(response);
+ });
+
+ writeToStream(req, request);
+ });
+}
// Work around servers that drop the connection before sending the final
// chunk of a chunked transfer: detect the bad ending and report it as an
// ERR_STREAM_PREMATURE_CLOSE error via `errorCallback`.
function fixResponseChunkedTransferBadEnding(request, errorCallback) {
  // Remember the socket backing this request so the close handler can
  // inspect its listeners later.
  let currentSocket;

  request.on('socket', (s) => {
    currentSocket = s;
  });

  request.on('response', (incoming) => {
    const headers = incoming.headers;
    const isChunked = headers['transfer-encoding'] === 'chunked';

    // Only chunked responses with no explicit length can end "badly".
    if (!isChunked || headers['content-length']) {
      return;
    }

    incoming.once('close', (hadError) => {
      // A 'data' listener still attached to the socket means the body
      // never terminated cleanly.
      const stillReading = currentSocket.listenerCount('data') > 0;
      if (!stillReading || hadError) {
        return;
      }

      const prematureClose = new Error('Premature close');
      prematureClose.code = 'ERR_STREAM_PREMATURE_CLOSE';
      errorCallback(prematureClose);
    });
  });
}
+
// Tear down a stream with an error, falling back to emit+end on very old
// Node.js versions whose streams lack destroy().
function destroyStream(stream, err) {
  if (stream.destroy) {
    stream.destroy(err);
    return;
  }
  // node < 8: no destroy(); surface the error manually, then end the stream.
  stream.emit('error', err);
  stream.end();
}
+
/**
 * Redirect code matching
 *
 * @param Number code Status code
 * @return Boolean
 */
fetch.isRedirect = function (code) {
  // The five HTTP status codes that trigger redirect handling.
  switch (code) {
    case 301:
    case 302:
    case 303:
    case 307:
    case 308:
      return true;
    default:
      return false;
  }
};
+
// expose the Promise implementation used internally so callers can inject
// their own (e.g. bluebird) by overwriting fetch.Promise
fetch.Promise = global.Promise;

export default fetch;
export { Headers, Request, Response, FetchError };
diff --git a/node_modules/node-fetch/lib/index.js b/node_modules/node-fetch/lib/index.js
new file mode 100644
index 0000000000000000000000000000000000000000..337d6e52e5fc62c26f1f46ac115505fa8e327b5c
--- /dev/null
+++ b/node_modules/node-fetch/lib/index.js
@@ -0,0 +1,1787 @@
+'use strict';
+
+Object.defineProperty(exports, '__esModule', { value: true });
+
// Unwrap a transpiled ES-module namespace object: prefer its `default`
// export when present, otherwise return the value unchanged (plain CJS).
function _interopDefault(ex) {
  const looksLikeNamespace = ex && typeof ex === 'object' && 'default' in ex;
  return looksLikeNamespace ? ex['default'] : ex;
}
+
+var Stream = _interopDefault(require('stream'));
+var http = _interopDefault(require('http'));
+var Url = _interopDefault(require('url'));
+var whatwgUrl = _interopDefault(require('whatwg-url'));
+var https = _interopDefault(require('https'));
+var zlib = _interopDefault(require('zlib'));
+
+// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js
+
+// fix for "Readable" isn't a named export issue
+const Readable = Stream.Readable;
+
+const BUFFER = Symbol('buffer');
+const TYPE = Symbol('type');
+
class Blob {
  // Build the blob from an array-like of parts (Buffer, TypedArray,
  // ArrayBuffer, Blob, or stringifiable values); all parts are gathered
  // into one concatenated Buffer stored under the private BUFFER symbol.
  constructor() {
    this[TYPE] = '';

    const blobParts = arguments[0];
    const options = arguments[1];

    const buffers = [];
    let size = 0;

    if (blobParts) {
      const a = blobParts;
      const length = Number(a.length);
      for (let i = 0; i < length; i++) {
        const element = a[i];
        let buffer;
        if (element instanceof Buffer) {
          buffer = element;
        } else if (ArrayBuffer.isView(element)) {
          // wrap the view's underlying memory without copying
          buffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength);
        } else if (element instanceof ArrayBuffer) {
          buffer = Buffer.from(element);
        } else if (element instanceof Blob) {
          buffer = element[BUFFER];
        } else {
          // anything else is coerced to a string, then to bytes
          buffer = Buffer.from(typeof element === 'string' ? element : String(element));
        }
        size += buffer.length;
        buffers.push(buffer);
      }
    }

    this[BUFFER] = Buffer.concat(buffers);

    // the MIME type is lowercased and must contain only printable ASCII;
    // anything else leaves the type as ''
    let type = options && options.type !== undefined && String(options.type).toLowerCase();
    if (type && !/[^\u0020-\u007E]/.test(type)) {
      this[TYPE] = type;
    }
  }
  // Total byte length of the blob's data.
  get size() {
    return this[BUFFER].length;
  }
  // MIME type supplied at construction ('' when absent or invalid).
  get type() {
    return this[TYPE];
  }
  // Resolve with the data decoded as UTF-8 text.
  text() {
    return Promise.resolve(this[BUFFER].toString());
  }
  // Resolve with a copy of the data as an ArrayBuffer.
  arrayBuffer() {
    const buf = this[BUFFER];
    const ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);
    return Promise.resolve(ab);
  }
  // Expose the data as a Readable stream: one chunk, then EOF.
  stream() {
    const readable = new Readable();
    readable._read = function () {};
    readable.push(this[BUFFER]);
    readable.push(null);
    return readable;
  }
  toString() {
    return '[object Blob]';
  }
  // Spec-style slice with negative-index support. The returned Blob shares
  // memory with this one (Buffer#slice does not copy).
  slice() {
    const size = this.size;

    const start = arguments[0];
    const end = arguments[1];
    let relativeStart, relativeEnd;
    if (start === undefined) {
      relativeStart = 0;
    } else if (start < 0) {
      relativeStart = Math.max(size + start, 0);
    } else {
      relativeStart = Math.min(start, size);
    }
    if (end === undefined) {
      relativeEnd = size;
    } else if (end < 0) {
      relativeEnd = Math.max(size + end, 0);
    } else {
      relativeEnd = Math.min(end, size);
    }
    const span = Math.max(relativeEnd - relativeStart, 0);

    const buffer = this[BUFFER];
    const slicedBuffer = buffer.slice(relativeStart, relativeStart + span);
    const blob = new Blob([], { type: arguments[2] });
    blob[BUFFER] = slicedBuffer;
    return blob;
  }
}
+
// Mirror browser behavior: these members are enumerable on instances.
Object.defineProperties(Blob.prototype, {
  size: { enumerable: true },
  type: { enumerable: true },
  slice: { enumerable: true }
});

// Make Object.prototype.toString.call(blob) report '[object Blob]'.
Object.defineProperty(Blob.prototype, Symbol.toStringTag, {
  value: 'Blob',
  writable: false,
  enumerable: false,
  configurable: true
});
+
/**
 * fetch-error.js
 *
 * FetchError interface for operational errors
 */

/**
 * Create FetchError instance
 *
 * @param String message Error message for human
 * @param String type Error type for machine
 * @param String systemError For Node.js system error
 * @return FetchError
 */
function FetchError(message, type, systemError) {
  Error.call(this, message);

  this.message = message;
  this.type = type;

  // `system` errors carry the originating Node.js error code on both the
  // standard `code` property and the legacy `errno` alias
  if (systemError) {
    this.errno = systemError.code;
    this.code = this.errno;
  }

  // drop this constructor frame from the captured stack trace so callers
  // see where the error was raised, not how it was built
  Error.captureStackTrace(this, this.constructor);
}

// Chain off Error so `instanceof Error` holds for FetchError instances.
const fetchErrorProto = Object.create(Error.prototype);
fetchErrorProto.constructor = FetchError;
fetchErrorProto.name = 'FetchError';
FetchError.prototype = fetchErrorProto;
+
+let convert;
+try {
+ convert = require('encoding').convert;
+} catch (e) {}
+
+const INTERNALS = Symbol('Body internals');
+
+// fix an issue where "PassThrough" isn't a named export for node <10
+const PassThrough = Stream.PassThrough;
+
/**
 * Body mixin
 *
 * Ref: https://fetch.spec.whatwg.org/#body
 *
 * @param Stream body Readable stream
 * @param Object opts Response options
 * @return Void
 */
function Body(body) {
  var _this = this;

  // transpiled form of destructuring `{ size = 0, timeout = 0 } = {}`
  var _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {},
      _ref$size = _ref.size;

  let size = _ref$size === undefined ? 0 : _ref$size;
  var _ref$timeout = _ref.timeout;
  let timeout = _ref$timeout === undefined ? 0 : _ref$timeout;

  // normalize the many accepted body shapes down to: null, Blob, Buffer,
  // or Stream (order matters; empty branches deliberately keep the value)
  if (body == null) {
    // body is undefined or null
    body = null;
  } else if (isURLSearchParams(body)) {
    // body is a URLSearchParams
    body = Buffer.from(body.toString());
  } else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') {
    // body is ArrayBuffer
    body = Buffer.from(body);
  } else if (ArrayBuffer.isView(body)) {
    // body is ArrayBufferView
    body = Buffer.from(body.buffer, body.byteOffset, body.byteLength);
  } else if (body instanceof Stream) ; else {
    // none of the above
    // coerce to string then buffer
    body = Buffer.from(String(body));
  }
  this[INTERNALS] = {
    body,
    disturbed: false, // flipped once the body has been consumed
    error: null // sticky stream error, reported on later consumption
  };
  this.size = size;
  this.timeout = timeout;

  if (body instanceof Stream) {
    // record stream errors so a later consumeBody() call rejects with them
    body.on('error', function (err) {
      const error = err.name === 'AbortError' ? err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err);
      _this[INTERNALS].error = error;
    });
  }
}
+
Body.prototype = {
  // Normalized body: null, Blob, Buffer, or Stream (see Body constructor).
  get body() {
    return this[INTERNALS].body;
  },

  // True once the body has been consumed by any of the readers below.
  get bodyUsed() {
    return this[INTERNALS].disturbed;
  },

  /**
   * Decode response as ArrayBuffer
   *
   * @return Promise
   */
  arrayBuffer() {
    return consumeBody.call(this).then(function (buf) {
      return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);
    });
  },

  /**
   * Return raw response as Blob
   *
   * @return Promise
   */
  blob() {
    let ct = this.headers && this.headers.get('content-type') || '';
    return consumeBody.call(this).then(function (buf) {
      // build an empty Blob, then install the buffer directly under the
      // private BUFFER symbol to avoid an extra copy
      return Object.assign(
      // Prevent copying
      new Blob([], {
        type: ct.toLowerCase()
      }), {
        [BUFFER]: buf
      });
    });
  },

  /**
   * Decode response as json
   *
   * @return Promise rejects with FetchError('invalid-json') on parse failure
   */
  json() {
    var _this2 = this;

    return consumeBody.call(this).then(function (buffer) {
      try {
        return JSON.parse(buffer.toString());
      } catch (err) {
        return Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json'));
      }
    });
  },

  /**
   * Decode response as text
   *
   * @return Promise
   */
  text() {
    return consumeBody.call(this).then(function (buffer) {
      return buffer.toString();
    });
  },

  /**
   * Decode response as buffer (non-spec api)
   *
   * @return Promise
   */
  buffer() {
    return consumeBody.call(this);
  },

  /**
   * Decode response as text, while automatically detecting the encoding and
   * trying to decode to UTF-8 (non-spec api)
   *
   * @return Promise
   */
  textConverted() {
    var _this3 = this;

    return consumeBody.call(this).then(function (buffer) {
      return convertBody(buffer, _this3.headers);
    });
  }
};
+
// In browsers, all properties are enumerable.
Object.defineProperties(Body.prototype, {
  body: { enumerable: true },
  bodyUsed: { enumerable: true },
  arrayBuffer: { enumerable: true },
  blob: { enumerable: true },
  json: { enumerable: true },
  text: { enumerable: true }
});

// Copy Body's API onto another prototype (Request/Response) without
// overwriting members the target already defines.
Body.mixIn = function (proto) {
  for (const name of Object.getOwnPropertyNames(Body.prototype)) {
    // istanbul ignore else: future proof
    if (!(name in proto)) {
      const desc = Object.getOwnPropertyDescriptor(Body.prototype, name);
      Object.defineProperty(proto, name, desc);
    }
  }
};
+
/**
 * Consume and convert an entire Body to a Buffer.
 *
 * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body
 *
 * @return Promise resolves to a Buffer; rejects with TypeError on reuse,
 *   or FetchError ('body-timeout' | 'max-size' | 'system') on failure
 */
function consumeBody() {
  var _this4 = this;

  // a body may only be consumed once
  if (this[INTERNALS].disturbed) {
    return Body.Promise.reject(new TypeError(`body used already for: ${this.url}`));
  }

  this[INTERNALS].disturbed = true;

  // surface any stream error recorded earlier by the Body constructor
  if (this[INTERNALS].error) {
    return Body.Promise.reject(this[INTERNALS].error);
  }

  let body = this.body;

  // body is null
  if (body === null) {
    return Body.Promise.resolve(Buffer.alloc(0));
  }

  // body is blob: read it through its stream form below
  if (isBlob(body)) {
    body = body.stream();
  }

  // body is buffer
  if (Buffer.isBuffer(body)) {
    return Body.Promise.resolve(body);
  }

  // istanbul ignore if: should never happen
  if (!(body instanceof Stream)) {
    return Body.Promise.resolve(Buffer.alloc(0));
  }

  // body is stream
  // get ready to actually consume the body
  let accum = [];
  let accumBytes = 0;
  let abort = false;

  return new Body.Promise(function (resolve, reject) {
    let resTimeout;

    // allow timeout on slow response body
    if (_this4.timeout) {
      resTimeout = setTimeout(function () {
        abort = true;
        reject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout'));
      }, _this4.timeout);
    }

    // handle stream errors
    body.on('error', function (err) {
      if (err.name === 'AbortError') {
        // if the request was aborted, reject with this Error
        abort = true;
        reject(err);
      } else {
        // other errors, such as incorrect content-encoding
        reject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err));
      }
    });

    body.on('data', function (chunk) {
      // `abort` stays set after a rejection so late chunks are ignored
      if (abort || chunk === null) {
        return;
      }

      // enforce the caller-supplied maximum body size
      if (_this4.size && accumBytes + chunk.length > _this4.size) {
        abort = true;
        reject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size'));
        return;
      }

      accumBytes += chunk.length;
      accum.push(chunk);
    });

    body.on('end', function () {
      if (abort) {
        return;
      }

      clearTimeout(resTimeout);

      try {
        resolve(Buffer.concat(accum, accumBytes));
      } catch (err) {
        // handle streams that have accumulated too much data (issue #414)
        reject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err));
      }
    });
  });
}
+
+/**
+ * Detect buffer encoding and convert to target encoding
+ * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding
+ *
+ * @param Buffer buffer Incoming buffer
+ * @param String encoding Target encoding
+ * @return String
+ */
+function convertBody(buffer, headers) {
+ if (typeof convert !== 'function') {
+ throw new Error('The package `encoding` must be installed to use the textConverted() function');
+ }
+
+ const ct = headers.get('content-type');
+ let charset = 'utf-8';
+ let res, str;
+
+ // header
+ if (ct) {
+ res = /charset=([^;]*)/i.exec(ct);
+ }
+
+ // no charset in content type, peek at response body for at most 1024 bytes
+ str = buffer.slice(0, 1024).toString();
+
+ // html5
+ if (!res && str) {
+ res = / 0 && arguments[0] !== undefined ? arguments[0] : undefined;
+
+ this[MAP] = Object.create(null);
+
+ if (init instanceof Headers) {
+ const rawHeaders = init.raw();
+ const headerNames = Object.keys(rawHeaders);
+
+ for (const headerName of headerNames) {
+ for (const value of rawHeaders[headerName]) {
+ this.append(headerName, value);
+ }
+ }
+
+ return;
+ }
+
+ // We don't worry about converting prop to ByteString here as append()
+ // will handle it.
+ if (init == null) ; else if (typeof init === 'object') {
+ const method = init[Symbol.iterator];
+ if (method != null) {
+ if (typeof method !== 'function') {
+ throw new TypeError('Header pairs must be iterable');
+ }
+
+ // sequence>
+ // Note: per spec we have to first exhaust the lists then process them
+ const pairs = [];
+ for (const pair of init) {
+ if (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') {
+ throw new TypeError('Each header pair must be iterable');
+ }
+ pairs.push(Array.from(pair));
+ }
+
+ for (const pair of pairs) {
+ if (pair.length !== 2) {
+ throw new TypeError('Each header pair must be a name/value tuple');
+ }
+ this.append(pair[0], pair[1]);
+ }
+ } else {
+ // record
+ for (const key of Object.keys(init)) {
+ const value = init[key];
+ this.append(key, value);
+ }
+ }
+ } else {
+ throw new TypeError('Provided initializer must be an object');
+ }
+ }
+
+ /**
+ * Return combined header value given name
+ *
+ * @param String name Header name
+ * @return Mixed
+ */
+ get(name) {
+ name = `${name}`;
+ validateName(name);
+ const key = find(this[MAP], name);
+ if (key === undefined) {
+ return null;
+ }
+
+ return this[MAP][key].join(', ');
+ }
+
/**
 * Iterate over all headers
 *
 * @param Function callback Executed for each item with parameters (value, name, thisArg)
 * @param Boolean thisArg `this` context for callback function
 * @return Void
 */
forEach(callback) {
  let thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined;

  let pairs = getHeaders(this);
  let i = 0;
  while (i < pairs.length) {
    var _pairs$i = pairs[i];
    const name = _pairs$i[0],
          value = _pairs$i[1];

    callback.call(thisArg, value, name, this);
    // re-read the header list after every callback so mutations made by
    // the callback are observed by subsequent iterations (live semantics)
    pairs = getHeaders(this);
    i++;
  }
}
+
+ /**
+ * Overwrite header values given name
+ *
+ * @param String name Header name
+ * @param String value Header value
+ * @return Void
+ */
+ set(name, value) {
+ name = `${name}`;
+ value = `${value}`;
+ validateName(name);
+ validateValue(value);
+ const key = find(this[MAP], name);
+ this[MAP][key !== undefined ? key : name] = [value];
+ }
+
+ /**
+ * Append a value onto existing header
+ *
+ * @param String name Header name
+ * @param String value Header value
+ * @return Void
+ */
+ append(name, value) {
+ name = `${name}`;
+ value = `${value}`;
+ validateName(name);
+ validateValue(value);
+ const key = find(this[MAP], name);
+ if (key !== undefined) {
+ this[MAP][key].push(value);
+ } else {
+ this[MAP][name] = [value];
+ }
+ }
+
+ /**
+ * Check for header name existence
+ *
+ * @param String name Header name
+ * @return Boolean
+ */
+ has(name) {
+ name = `${name}`;
+ validateName(name);
+ return find(this[MAP], name) !== undefined;
+ }
+
+ /**
+ * Delete all header values given name
+ *
+ * @param String name Header name
+ * @return Void
+ */
+ delete(name) {
+ name = `${name}`;
+ validateName(name);
+ const key = find(this[MAP], name);
+ if (key !== undefined) {
+ delete this[MAP][key];
+ }
+ }
+
/**
 * Return raw headers (non-spec api)
 *
 * NOTE: returns the live internal map (keys keep their original casing,
 * values are arrays); mutating it mutates this Headers instance.
 *
 * @return Object
 */
raw() {
  return this[MAP];
}
+
/**
 * Get an iterator on keys.
 *
 * Names are yielded lowercased, in sorted order.
 *
 * @return Iterator
 */
keys() {
  return createHeadersIterator(this, 'key');
}
+
/**
 * Get an iterator on values.
 *
 * Multi-valued headers are yielded as one comma-joined string.
 *
 * @return Iterator
 */
values() {
  return createHeadersIterator(this, 'value');
}
+
/**
 * Get an iterator on entries.
 *
 * This is the default iterator of the Headers object.
 * Yields [lowercased name, joined value] pairs in sorted order.
 *
 * @return Iterator
 */
[Symbol.iterator]() {
  return createHeadersIterator(this, 'key+value');
}
+}
// entries() is the same live iterator as the default [Symbol.iterator].
Headers.prototype.entries = Headers.prototype[Symbol.iterator];

// Make Object.prototype.toString.call(headers) report '[object Headers]'.
Object.defineProperty(Headers.prototype, Symbol.toStringTag, {
  value: 'Headers',
  writable: false,
  enumerable: false,
  configurable: true
});

// In browsers, these members are enumerable.
Object.defineProperties(Headers.prototype, {
  get: { enumerable: true },
  forEach: { enumerable: true },
  set: { enumerable: true },
  append: { enumerable: true },
  has: { enumerable: true },
  delete: { enumerable: true },
  keys: { enumerable: true },
  values: { enumerable: true },
  entries: { enumerable: true }
});
+
// Produce a sorted snapshot of a Headers' contents in the requested shape:
// lowercased names ('key'), joined values ('value'), or [name, value]
// pairs (default 'key+value').
function getHeaders(headers) {
  const kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value';

  const map = headers[MAP];
  const sortedKeys = Object.keys(map).sort();

  const project = function (k) {
    if (kind === 'key') {
      return k.toLowerCase();
    }
    if (kind === 'value') {
      return map[k].join(', ');
    }
    return [k.toLowerCase(), map[k].join(', ')];
  };

  return sortedKeys.map(project);
}
+
const INTERNAL = Symbol('internal');

// Build a Headers iterator of the given kind ('key' | 'value' |
// 'key+value'), storing its cursor state under the INTERNAL symbol.
function createHeadersIterator(target, kind) {
  const iterator = Object.create(HeadersIteratorPrototype);
  iterator[INTERNAL] = { target, kind, index: 0 };
  return iterator;
}
+
// Shared prototype for Headers iterators; chained off %IteratorPrototype%
// so instances work with for..of, spread, etc.
const HeadersIteratorPrototype = Object.setPrototypeOf({
  next() {
    // istanbul ignore if
    if (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) {
      throw new TypeError('Value of `this` is not a HeadersIterator');
    }

    var _INTERNAL = this[INTERNAL];
    const target = _INTERNAL.target,
          kind = _INTERNAL.kind,
          index = _INTERNAL.index;

    // the snapshot is recomputed on every call, so mutation of the Headers
    // between next() calls is reflected mid-iteration
    const values = getHeaders(target, kind);
    const len = values.length;
    if (index >= len) {
      return {
        value: undefined,
        done: true
      };
    }

    this[INTERNAL].index = index + 1;

    return {
      value: values[index],
      done: false
    };
  }
}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]())));

// Make Object.prototype.toString report '[object HeadersIterator]'.
Object.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, {
  value: 'HeadersIterator',
  writable: false,
  enumerable: false,
  configurable: true
});
+
/**
 * Export the Headers object in a form that Node.js can consume.
 *
 * @param Headers headers
 * @return Object plain null-prototype object of name -> value-array
 */
function exportNodeCompatibleHeaders(headers) {
  const raw = headers[MAP];

  // shallow copy with a null prototype so header names can never collide
  // with Object.prototype members
  const obj = Object.assign({ __proto__: null }, raw);

  // http.request() only supports string as Host header. This hack makes
  // specifying custom Host header possible.
  const hostHeaderKey = find(raw, 'Host');
  if (hostHeaderKey !== undefined) {
    obj[hostHeaderKey] = raw[hostHeaderKey][0];
  }

  return obj;
}
+
/**
 * Create a Headers object from an object of headers, ignoring those that do
 * not conform to HTTP grammar productions.
 *
 * @param Object obj Object of headers
 * @return Headers
 */
function createHeadersLenient(obj) {
  const headers = new Headers();
  for (const name of Object.keys(obj)) {
    // drop header names containing invalid token characters
    if (invalidTokenRegex.test(name)) {
      continue;
    }
    if (Array.isArray(obj[name])) {
      for (const val of obj[name]) {
        // drop individual invalid values but keep the valid ones
        if (invalidHeaderCharRegex.test(val)) {
          continue;
        }
        // write directly into the internal map to skip strict re-validation
        if (headers[MAP][name] === undefined) {
          headers[MAP][name] = [val];
        } else {
          headers[MAP][name].push(val);
        }
      }
    } else if (!invalidHeaderCharRegex.test(obj[name])) {
      headers[MAP][name] = [obj[name]];
    }
  }
  return headers;
}
+
const INTERNALS$1 = Symbol('Response internals');

// fix an issue where "STATUS_CODES" aren't a named export for node <10
const STATUS_CODES = http.STATUS_CODES;

/**
 * Response class
 *
 * @param Stream body Readable stream
 * @param Object opts Response options
 * @return Void
 */
class Response {
  constructor() {
    let body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null;
    let opts = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};

    // Body is a mixin (see Body.mixIn below), so its constructor is
    // applied manually here
    Body.call(this, body, opts);

    const status = opts.status || 200;
    const headers = new Headers(opts.headers);

    // infer Content-Type from the body when the caller didn't provide one
    if (body != null && !headers.has('Content-Type')) {
      const contentType = extractContentType(body);
      if (contentType) {
        headers.append('Content-Type', contentType);
      }
    }

    this[INTERNALS$1] = {
      url: opts.url,
      status,
      statusText: opts.statusText || STATUS_CODES[status],
      headers,
      counter: opts.counter // redirect count, exposed via `redirected`
    };
  }

  get url() {
    return this[INTERNALS$1].url || '';
  }

  get status() {
    return this[INTERNALS$1].status;
  }

  /**
   * Convenience property representing if the request ended normally
   */
  get ok() {
    return this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300;
  }

  get redirected() {
    return this[INTERNALS$1].counter > 0;
  }

  get statusText() {
    return this[INTERNALS$1].statusText;
  }

  get headers() {
    return this[INTERNALS$1].headers;
  }

  /**
   * Clone this response
   *
   * @return Response
   */
  clone() {
    return new Response(clone(this), {
      url: this.url,
      status: this.status,
      statusText: this.statusText,
      headers: this.headers,
      ok: this.ok,
      redirected: this.redirected
    });
  }
}
+
// Give Response the shared Body API (json(), text(), buffer(), ...).
Body.mixIn(Response.prototype);

// In browsers, these members are enumerable.
Object.defineProperties(Response.prototype, {
  url: { enumerable: true },
  status: { enumerable: true },
  ok: { enumerable: true },
  redirected: { enumerable: true },
  statusText: { enumerable: true },
  headers: { enumerable: true },
  clone: { enumerable: true }
});

// Make Object.prototype.toString.call(res) report '[object Response]'.
Object.defineProperty(Response.prototype, Symbol.toStringTag, {
  value: 'Response',
  writable: false,
  enumerable: false,
  configurable: true
});
+
+const INTERNALS$2 = Symbol('Request internals');
+const URL = Url.URL || whatwgUrl.URL;
+
+// fix an issue where "format", "parse" aren't a named export for node <10
+const parse_url = Url.parse;
+const format_url = Url.format;
+
/**
 * Wrapper around `new URL` to handle arbitrary URLs
 *
 * @param {string} urlStr
 * @return {Object} legacy Url object from url.parse()
 */
function parseURL(urlStr) {
  /*
   Check whether the URL is absolute or not
   Scheme: https://tools.ietf.org/html/rfc3986#section-3.1
   Absolute URL: https://tools.ietf.org/html/rfc3986#section-4.3
  */
  const hasScheme = /^[a-zA-Z][a-zA-Z\d+\-.]*:/.exec(urlStr);

  // Absolute URLs are normalized through the WHATWG URL parser first, then
  // handed to the legacy parser so callers always get a Url object back.
  const normalized = hasScheme ? new URL(urlStr).toString() : urlStr;
  return parse_url(normalized);
}
+
+const streamDestructionSupported = 'destroy' in Stream.Readable.prototype;
+
/**
 * Check if a value is an instance of Request.
 *
 * @param Mixed input
 * @return Boolean
 */
function isRequest(input) {
  // Guard against null explicitly: `typeof null === 'object'`, so without
  // the check, reading a property off null would throw a TypeError here
  // instead of returning false (e.g. for `new Request(null)`).
  return input !== null && typeof input === 'object' && typeof input[INTERNALS$2] === 'object';
}
+
// Duck-type check for AbortSignal: accept any object whose direct
// prototype was produced by a constructor named AbortSignal (works across
// realms and with polyfills).
function isAbortSignal(signal) {
  if (!signal || typeof signal !== 'object') {
    return false;
  }
  const proto = Object.getPrototypeOf(signal);
  return Boolean(proto && proto.constructor.name === 'AbortSignal');
}
+
/**
 * Request class
 *
 * @param Mixed input Url or Request instance
 * @param Object init Custom options
 * @return Void
 */
class Request {
  constructor(input) {
    let init = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};

    let parsedURL;

    // normalize input
    if (!isRequest(input)) {
      if (input && input.href) {
        // in order to support Node.js' Url objects; though WHATWG's URL objects
        // will fall into this branch also (since their `toString()` will return
        // `href` property anyway)
        parsedURL = parseURL(input.href);
      } else {
        // coerce input to a string before attempting to parse
        parsedURL = parseURL(`${input}`);
      }
      // from here on, `input` only serves as a source of defaults
      input = {};
    } else {
      parsedURL = parseURL(input.url);
    }

    let method = init.method || input.method || 'GET';
    method = method.toUpperCase();

    if ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) {
      throw new TypeError('Request with GET/HEAD method cannot have body');
    }

    // cloning another Request tees its body stream via clone()
    let inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? clone(input) : null;

    // Body is a mixin (see Body.mixIn below); apply its constructor manually
    Body.call(this, inputBody, {
      timeout: init.timeout || input.timeout || 0,
      size: init.size || input.size || 0
    });

    const headers = new Headers(init.headers || input.headers || {});

    // infer Content-Type from the body when the caller didn't provide one
    if (inputBody != null && !headers.has('Content-Type')) {
      const contentType = extractContentType(inputBody);
      if (contentType) {
        headers.append('Content-Type', contentType);
      }
    }

    // init.signal wins over input.signal, even when explicitly undefined
    let signal = isRequest(input) ? input.signal : null;
    if ('signal' in init) signal = init.signal;

    if (signal != null && !isAbortSignal(signal)) {
      throw new TypeError('Expected signal to be an instanceof AbortSignal');
    }

    this[INTERNALS$2] = {
      method,
      redirect: init.redirect || input.redirect || 'follow',
      headers,
      parsedURL,
      signal
    };

    // node-fetch-only options
    this.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20;
    this.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? input.compress : true;
    this.counter = init.counter || input.counter || 0;
    this.agent = init.agent || input.agent;
  }

  get method() {
    return this[INTERNALS$2].method;
  }

  get url() {
    return format_url(this[INTERNALS$2].parsedURL);
  }

  get headers() {
    return this[INTERNALS$2].headers;
  }

  get redirect() {
    return this[INTERNALS$2].redirect;
  }

  get signal() {
    return this[INTERNALS$2].signal;
  }

  /**
   * Clone this request
   *
   * @return Request
   */
  clone() {
    return new Request(this);
  }
}
+
// Give Request the shared Body API (json(), text(), buffer(), ...).
Body.mixIn(Request.prototype);

// Make Object.prototype.toString.call(req) report '[object Request]'.
Object.defineProperty(Request.prototype, Symbol.toStringTag, {
  value: 'Request',
  writable: false,
  enumerable: false,
  configurable: true
});

// In browsers, these members are enumerable.
Object.defineProperties(Request.prototype, {
  method: { enumerable: true },
  url: { enumerable: true },
  headers: { enumerable: true },
  redirect: { enumerable: true },
  clone: { enumerable: true },
  signal: { enumerable: true }
});
+
/**
 * Convert a Request to Node.js http request options.
 *
 * @param Request A Request instance
 * @return Object The options object to be passed to http.request
 * @throws TypeError for non-absolute or non-HTTP(S) URLs
 * @throws Error when an AbortSignal is paired with a streamed body on node < 8
 */
function getNodeRequestOptions(request) {
  const parsedURL = request[INTERNALS$2].parsedURL;
  // work on a copy so the Request's own headers are not mutated
  const headers = new Headers(request[INTERNALS$2].headers);

  // fetch step 1.3
  if (!headers.has('Accept')) {
    headers.set('Accept', '*/*');
  }

  // Basic fetch
  if (!parsedURL.protocol || !parsedURL.hostname) {
    throw new TypeError('Only absolute URLs are supported');
  }

  if (!/^https?:$/.test(parsedURL.protocol)) {
    throw new TypeError('Only HTTP(S) protocols are supported');
  }

  if (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) {
    throw new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8');
  }

  // HTTP-network-or-cache fetch steps 2.4-2.7
  let contentLengthValue = null;
  // POST/PUT with no body still sends Content-Length: 0
  if (request.body == null && /^(POST|PUT)$/i.test(request.method)) {
    contentLengthValue = '0';
  }
  if (request.body != null) {
    const totalBytes = getTotalBytes(request);
    // streams of unknown length yield null and get no Content-Length
    if (typeof totalBytes === 'number') {
      contentLengthValue = String(totalBytes);
    }
  }
  if (contentLengthValue) {
    headers.set('Content-Length', contentLengthValue);
  }

  // HTTP-network-or-cache fetch step 2.11
  if (!headers.has('User-Agent')) {
    headers.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)');
  }

  // HTTP-network-or-cache fetch step 2.15
  if (request.compress && !headers.has('Accept-Encoding')) {
    headers.set('Accept-Encoding', 'gzip,deflate');
  }

  // an agent supplied as a function is resolved per-URL
  let agent = request.agent;
  if (typeof agent === 'function') {
    agent = agent(parsedURL);
  }

  if (!headers.has('Connection') && !agent) {
    headers.set('Connection', 'close');
  }

  // HTTP-network fetch step 4.2
  // chunked encoding is handled by Node.js

  return Object.assign({}, parsedURL, {
    method: request.method,
    headers: exportNodeCompatibleHeaders(headers),
    agent
  });
}
+
+/**
+ * abort-error.js
+ *
+ * AbortError interface for cancelled requests
+ */
+
+/**
+ * Create AbortError instance
+ *
+ * @param String message Error message for human
+ * @return AbortError
+ */
+function AbortError(message) {
+ Error.call(this, message);
+
+ this.type = 'aborted';
+ this.message = message;
+
+ // hide custom error implementation details from end-users
+ Error.captureStackTrace(this, this.constructor);
+}
+
+AbortError.prototype = Object.create(Error.prototype);
+AbortError.prototype.constructor = AbortError;
+AbortError.prototype.name = 'AbortError';
+
+const URL$1 = Url.URL || whatwgUrl.URL;
+
+// fix an issue where "PassThrough", "resolve" aren't a named export for node <10
+const PassThrough$1 = Stream.PassThrough;
+
+const isDomainOrSubdomain = function isDomainOrSubdomain(destination, original) {
+ const orig = new URL$1(original).hostname;
+ const dest = new URL$1(destination).hostname;
+
+ return orig === dest || orig[orig.length - dest.length - 1] === '.' && orig.endsWith(dest);
+};
+
+/**
+ * isSameProtocol reports whether the two provided URLs use the same protocol.
+ *
+ * Both domains must already be in canonical form.
+ * @param {string|URL} original
+ * @param {string|URL} destination
+ */
+const isSameProtocol = function isSameProtocol(destination, original) {
+ const orig = new URL$1(original).protocol;
+ const dest = new URL$1(destination).protocol;
+
+ return orig === dest;
+};
+
+/**
+ * Fetch function
+ *
+ * @param Mixed url Absolute url or Request instance
+ * @param Object opts Fetch options
+ * @return Promise
+ */
+function fetch(url, opts) {
+
+ // allow custom promise
+ if (!fetch.Promise) {
+ throw new Error('native promise missing, set fetch.Promise to your favorite alternative');
+ }
+
+ Body.Promise = fetch.Promise;
+
+ // wrap http.request into fetch
+ return new fetch.Promise(function (resolve, reject) {
+ // build request object
+ const request = new Request(url, opts);
+ const options = getNodeRequestOptions(request);
+
+ const send = (options.protocol === 'https:' ? https : http).request;
+ const signal = request.signal;
+
+ let response = null;
+
+ const abort = function abort() {
+ let error = new AbortError('The user aborted a request.');
+ reject(error);
+ if (request.body && request.body instanceof Stream.Readable) {
+ destroyStream(request.body, error);
+ }
+ if (!response || !response.body) return;
+ response.body.emit('error', error);
+ };
+
+ if (signal && signal.aborted) {
+ abort();
+ return;
+ }
+
+ const abortAndFinalize = function abortAndFinalize() {
+ abort();
+ finalize();
+ };
+
+ // send request
+ const req = send(options);
+ let reqTimeout;
+
+ if (signal) {
+ signal.addEventListener('abort', abortAndFinalize);
+ }
+
+ function finalize() {
+ req.abort();
+ if (signal) signal.removeEventListener('abort', abortAndFinalize);
+ clearTimeout(reqTimeout);
+ }
+
+ if (request.timeout) {
+ req.once('socket', function (socket) {
+ reqTimeout = setTimeout(function () {
+ reject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout'));
+ finalize();
+ }, request.timeout);
+ });
+ }
+
+ req.on('error', function (err) {
+ reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err));
+
+ if (response && response.body) {
+ destroyStream(response.body, err);
+ }
+
+ finalize();
+ });
+
+ fixResponseChunkedTransferBadEnding(req, function (err) {
+ if (signal && signal.aborted) {
+ return;
+ }
+
+ if (response && response.body) {
+ destroyStream(response.body, err);
+ }
+ });
+
+ /* c8 ignore next 18 */
+ if (parseInt(process.version.substring(1)) < 14) {
+ // Before Node.js 14, pipeline() does not fully support async iterators and does not always
+ // properly handle when the socket close/end events are out of order.
+ req.on('socket', function (s) {
+ s.addListener('close', function (hadError) {
+ // if a data listener is still present we didn't end cleanly
+ const hasDataListener = s.listenerCount('data') > 0;
+
+ // if end happened before close but the socket didn't emit an error, do it now
+ if (response && hasDataListener && !hadError && !(signal && signal.aborted)) {
+ const err = new Error('Premature close');
+ err.code = 'ERR_STREAM_PREMATURE_CLOSE';
+ response.body.emit('error', err);
+ }
+ });
+ });
+ }
+
+ req.on('response', function (res) {
+ clearTimeout(reqTimeout);
+
+ const headers = createHeadersLenient(res.headers);
+
+ // HTTP fetch step 5
+ if (fetch.isRedirect(res.statusCode)) {
+ // HTTP fetch step 5.2
+ const location = headers.get('Location');
+
+ // HTTP fetch step 5.3
+ let locationURL = null;
+ try {
+ locationURL = location === null ? null : new URL$1(location, request.url).toString();
+ } catch (err) {
+ // error here can only be invalid URL in Location: header
+ // do not throw when options.redirect == manual
+ // let the user extract the errorneous redirect URL
+ if (request.redirect !== 'manual') {
+ reject(new FetchError(`uri requested responds with an invalid redirect URL: ${location}`, 'invalid-redirect'));
+ finalize();
+ return;
+ }
+ }
+
+ // HTTP fetch step 5.5
+ switch (request.redirect) {
+ case 'error':
+ reject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request.url}`, 'no-redirect'));
+ finalize();
+ return;
+ case 'manual':
+ // node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL.
+ if (locationURL !== null) {
+ // handle corrupted header
+ try {
+ headers.set('Location', locationURL);
+ } catch (err) {
+ // istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request
+ reject(err);
+ }
+ }
+ break;
+ case 'follow':
+ // HTTP-redirect fetch step 2
+ if (locationURL === null) {
+ break;
+ }
+
+ // HTTP-redirect fetch step 5
+ if (request.counter >= request.follow) {
+ reject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect'));
+ finalize();
+ return;
+ }
+
+ // HTTP-redirect fetch step 6 (counter increment)
+ // Create a new Request object.
+ const requestOpts = {
+ headers: new Headers(request.headers),
+ follow: request.follow,
+ counter: request.counter + 1,
+ agent: request.agent,
+ compress: request.compress,
+ method: request.method,
+ body: request.body,
+ signal: request.signal,
+ timeout: request.timeout,
+ size: request.size
+ };
+
+ if (!isDomainOrSubdomain(request.url, locationURL) || !isSameProtocol(request.url, locationURL)) {
+ for (const name of ['authorization', 'www-authenticate', 'cookie', 'cookie2']) {
+ requestOpts.headers.delete(name);
+ }
+ }
+
+ // HTTP-redirect fetch step 9
+ if (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) {
+ reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect'));
+ finalize();
+ return;
+ }
+
+ // HTTP-redirect fetch step 11
+ if (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') {
+ requestOpts.method = 'GET';
+ requestOpts.body = undefined;
+ requestOpts.headers.delete('content-length');
+ }
+
+ // HTTP-redirect fetch step 15
+ resolve(fetch(new Request(locationURL, requestOpts)));
+ finalize();
+ return;
+ }
+ }
+
+ // prepare response
+ res.once('end', function () {
+ if (signal) signal.removeEventListener('abort', abortAndFinalize);
+ });
+ let body = res.pipe(new PassThrough$1());
+
+ const response_options = {
+ url: request.url,
+ status: res.statusCode,
+ statusText: res.statusMessage,
+ headers: headers,
+ size: request.size,
+ timeout: request.timeout,
+ counter: request.counter
+ };
+
+ // HTTP-network fetch step 12.1.1.3
+ const codings = headers.get('Content-Encoding');
+
+ // HTTP-network fetch step 12.1.1.4: handle content codings
+
+ // in following scenarios we ignore compression support
+ // 1. compression support is disabled
+ // 2. HEAD request
+ // 3. no Content-Encoding header
+ // 4. no content response (204)
+ // 5. content not modified response (304)
+ if (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) {
+ response = new Response(body, response_options);
+ resolve(response);
+ return;
+ }
+
+ // For Node v6+
+ // Be less strict when decoding compressed responses, since sometimes
+ // servers send slightly invalid responses that are still accepted
+ // by common browsers.
+ // Always using Z_SYNC_FLUSH is what cURL does.
+ const zlibOptions = {
+ flush: zlib.Z_SYNC_FLUSH,
+ finishFlush: zlib.Z_SYNC_FLUSH
+ };
+
+ // for gzip
+ if (codings == 'gzip' || codings == 'x-gzip') {
+ body = body.pipe(zlib.createGunzip(zlibOptions));
+ response = new Response(body, response_options);
+ resolve(response);
+ return;
+ }
+
+ // for deflate
+ if (codings == 'deflate' || codings == 'x-deflate') {
+ // handle the infamous raw deflate response from old servers
+ // a hack for old IIS and Apache servers
+ const raw = res.pipe(new PassThrough$1());
+ raw.once('data', function (chunk) {
+ // see http://stackoverflow.com/questions/37519828
+ if ((chunk[0] & 0x0F) === 0x08) {
+ body = body.pipe(zlib.createInflate());
+ } else {
+ body = body.pipe(zlib.createInflateRaw());
+ }
+ response = new Response(body, response_options);
+ resolve(response);
+ });
+ raw.on('end', function () {
+ // some old IIS servers return zero-length OK deflate responses, so 'data' is never emitted.
+ if (!response) {
+ response = new Response(body, response_options);
+ resolve(response);
+ }
+ });
+ return;
+ }
+
+ // for br
+ if (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') {
+ body = body.pipe(zlib.createBrotliDecompress());
+ response = new Response(body, response_options);
+ resolve(response);
+ return;
+ }
+
+ // otherwise, use response as-is
+ response = new Response(body, response_options);
+ resolve(response);
+ });
+
+ writeToStream(req, request);
+ });
+}
+function fixResponseChunkedTransferBadEnding(request, errorCallback) {
+ let socket;
+
+ request.on('socket', function (s) {
+ socket = s;
+ });
+
+ request.on('response', function (response) {
+ const headers = response.headers;
+
+ if (headers['transfer-encoding'] === 'chunked' && !headers['content-length']) {
+ response.once('close', function (hadError) {
+ // if a data listener is still present we didn't end cleanly
+ const hasDataListener = socket.listenerCount('data') > 0;
+
+ if (hasDataListener && !hadError) {
+ const err = new Error('Premature close');
+ err.code = 'ERR_STREAM_PREMATURE_CLOSE';
+ errorCallback(err);
+ }
+ });
+ }
+ });
+}
+
+function destroyStream(stream, err) {
+ if (stream.destroy) {
+ stream.destroy(err);
+ } else {
+ // node < 8
+ stream.emit('error', err);
+ stream.end();
+ }
+}
+
+/**
+ * Redirect code matching
+ *
+ * @param Number code Status code
+ * @return Boolean
+ */
+fetch.isRedirect = function (code) {
+ return code === 301 || code === 302 || code === 303 || code === 307 || code === 308;
+};
+
+// expose Promise
+fetch.Promise = global.Promise;
+
+module.exports = exports = fetch;
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.default = exports;
+exports.Headers = Headers;
+exports.Request = Request;
+exports.Response = Response;
+exports.FetchError = FetchError;
diff --git a/node_modules/node-fetch/lib/index.mjs b/node_modules/node-fetch/lib/index.mjs
new file mode 100644
index 0000000000000000000000000000000000000000..ace669fd0fee2f3a0e6ca0d34e46c60377da7c78
--- /dev/null
+++ b/node_modules/node-fetch/lib/index.mjs
@@ -0,0 +1,1776 @@
+import Stream from 'stream';
+import http from 'http';
+import Url from 'url';
+import whatwgUrl from 'whatwg-url';
+import https from 'https';
+import zlib from 'zlib';
+
+// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js
+
+// fix for "Readable" isn't a named export issue
+const Readable = Stream.Readable;
+
+const BUFFER = Symbol('buffer');
+const TYPE = Symbol('type');
+
+class Blob {
+ constructor() {
+ this[TYPE] = '';
+
+ const blobParts = arguments[0];
+ const options = arguments[1];
+
+ const buffers = [];
+ let size = 0;
+
+ if (blobParts) {
+ const a = blobParts;
+ const length = Number(a.length);
+ for (let i = 0; i < length; i++) {
+ const element = a[i];
+ let buffer;
+ if (element instanceof Buffer) {
+ buffer = element;
+ } else if (ArrayBuffer.isView(element)) {
+ buffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength);
+ } else if (element instanceof ArrayBuffer) {
+ buffer = Buffer.from(element);
+ } else if (element instanceof Blob) {
+ buffer = element[BUFFER];
+ } else {
+ buffer = Buffer.from(typeof element === 'string' ? element : String(element));
+ }
+ size += buffer.length;
+ buffers.push(buffer);
+ }
+ }
+
+ this[BUFFER] = Buffer.concat(buffers);
+
+ let type = options && options.type !== undefined && String(options.type).toLowerCase();
+ if (type && !/[^\u0020-\u007E]/.test(type)) {
+ this[TYPE] = type;
+ }
+ }
+ get size() {
+ return this[BUFFER].length;
+ }
+ get type() {
+ return this[TYPE];
+ }
+ text() {
+ return Promise.resolve(this[BUFFER].toString());
+ }
+ arrayBuffer() {
+ const buf = this[BUFFER];
+ const ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);
+ return Promise.resolve(ab);
+ }
+ stream() {
+ const readable = new Readable();
+ readable._read = function () {};
+ readable.push(this[BUFFER]);
+ readable.push(null);
+ return readable;
+ }
+ toString() {
+ return '[object Blob]';
+ }
+ slice() {
+ const size = this.size;
+
+ const start = arguments[0];
+ const end = arguments[1];
+ let relativeStart, relativeEnd;
+ if (start === undefined) {
+ relativeStart = 0;
+ } else if (start < 0) {
+ relativeStart = Math.max(size + start, 0);
+ } else {
+ relativeStart = Math.min(start, size);
+ }
+ if (end === undefined) {
+ relativeEnd = size;
+ } else if (end < 0) {
+ relativeEnd = Math.max(size + end, 0);
+ } else {
+ relativeEnd = Math.min(end, size);
+ }
+ const span = Math.max(relativeEnd - relativeStart, 0);
+
+ const buffer = this[BUFFER];
+ const slicedBuffer = buffer.slice(relativeStart, relativeStart + span);
+ const blob = new Blob([], { type: arguments[2] });
+ blob[BUFFER] = slicedBuffer;
+ return blob;
+ }
+}
+
+Object.defineProperties(Blob.prototype, {
+ size: { enumerable: true },
+ type: { enumerable: true },
+ slice: { enumerable: true }
+});
+
+Object.defineProperty(Blob.prototype, Symbol.toStringTag, {
+ value: 'Blob',
+ writable: false,
+ enumerable: false,
+ configurable: true
+});
+
+/**
+ * fetch-error.js
+ *
+ * FetchError interface for operational errors
+ */
+
+/**
+ * Create FetchError instance
+ *
+ * @param String message Error message for human
+ * @param String type Error type for machine
+ * @param String systemError For Node.js system error
+ * @return FetchError
+ */
+function FetchError(message, type, systemError) {
+ Error.call(this, message);
+
+ this.message = message;
+ this.type = type;
+
+ // when err.type is `system`, err.code contains system error code
+ if (systemError) {
+ this.code = this.errno = systemError.code;
+ }
+
+ // hide custom error implementation details from end-users
+ Error.captureStackTrace(this, this.constructor);
+}
+
+FetchError.prototype = Object.create(Error.prototype);
+FetchError.prototype.constructor = FetchError;
+FetchError.prototype.name = 'FetchError';
+
+let convert;
+try {
+ convert = require('encoding').convert;
+} catch (e) {}
+
+const INTERNALS = Symbol('Body internals');
+
+// fix an issue where "PassThrough" isn't a named export for node <10
+const PassThrough = Stream.PassThrough;
+
+/**
+ * Body mixin
+ *
+ * Ref: https://fetch.spec.whatwg.org/#body
+ *
+ * @param Stream body Readable stream
+ * @param Object opts Response options
+ * @return Void
+ */
+function Body(body) {
+ var _this = this;
+
+ var _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {},
+ _ref$size = _ref.size;
+
+ let size = _ref$size === undefined ? 0 : _ref$size;
+ var _ref$timeout = _ref.timeout;
+ let timeout = _ref$timeout === undefined ? 0 : _ref$timeout;
+
+ if (body == null) {
+ // body is undefined or null
+ body = null;
+ } else if (isURLSearchParams(body)) {
+ // body is a URLSearchParams
+ body = Buffer.from(body.toString());
+ } else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') {
+ // body is ArrayBuffer
+ body = Buffer.from(body);
+ } else if (ArrayBuffer.isView(body)) {
+ // body is ArrayBufferView
+ body = Buffer.from(body.buffer, body.byteOffset, body.byteLength);
+ } else if (body instanceof Stream) ; else {
+ // none of the above
+ // coerce to string then buffer
+ body = Buffer.from(String(body));
+ }
+ this[INTERNALS] = {
+ body,
+ disturbed: false,
+ error: null
+ };
+ this.size = size;
+ this.timeout = timeout;
+
+ if (body instanceof Stream) {
+ body.on('error', function (err) {
+ const error = err.name === 'AbortError' ? err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err);
+ _this[INTERNALS].error = error;
+ });
+ }
+}
+
+Body.prototype = {
+ get body() {
+ return this[INTERNALS].body;
+ },
+
+ get bodyUsed() {
+ return this[INTERNALS].disturbed;
+ },
+
+ /**
+ * Decode response as ArrayBuffer
+ *
+ * @return Promise
+ */
+ arrayBuffer() {
+ return consumeBody.call(this).then(function (buf) {
+ return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);
+ });
+ },
+
+ /**
+ * Return raw response as Blob
+ *
+ * @return Promise
+ */
+ blob() {
+ let ct = this.headers && this.headers.get('content-type') || '';
+ return consumeBody.call(this).then(function (buf) {
+ return Object.assign(
+ // Prevent copying
+ new Blob([], {
+ type: ct.toLowerCase()
+ }), {
+ [BUFFER]: buf
+ });
+ });
+ },
+
+ /**
+ * Decode response as json
+ *
+ * @return Promise
+ */
+ json() {
+ var _this2 = this;
+
+ return consumeBody.call(this).then(function (buffer) {
+ try {
+ return JSON.parse(buffer.toString());
+ } catch (err) {
+ return Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json'));
+ }
+ });
+ },
+
+ /**
+ * Decode response as text
+ *
+ * @return Promise
+ */
+ text() {
+ return consumeBody.call(this).then(function (buffer) {
+ return buffer.toString();
+ });
+ },
+
+ /**
+ * Decode response as buffer (non-spec api)
+ *
+ * @return Promise
+ */
+ buffer() {
+ return consumeBody.call(this);
+ },
+
+ /**
+ * Decode response as text, while automatically detecting the encoding and
+ * trying to decode to UTF-8 (non-spec api)
+ *
+ * @return Promise
+ */
+ textConverted() {
+ var _this3 = this;
+
+ return consumeBody.call(this).then(function (buffer) {
+ return convertBody(buffer, _this3.headers);
+ });
+ }
+};
+
+// In browsers, all properties are enumerable.
+Object.defineProperties(Body.prototype, {
+ body: { enumerable: true },
+ bodyUsed: { enumerable: true },
+ arrayBuffer: { enumerable: true },
+ blob: { enumerable: true },
+ json: { enumerable: true },
+ text: { enumerable: true }
+});
+
+Body.mixIn = function (proto) {
+ for (const name of Object.getOwnPropertyNames(Body.prototype)) {
+ // istanbul ignore else: future proof
+ if (!(name in proto)) {
+ const desc = Object.getOwnPropertyDescriptor(Body.prototype, name);
+ Object.defineProperty(proto, name, desc);
+ }
+ }
+};
+
+/**
+ * Consume and convert an entire Body to a Buffer.
+ *
+ * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body
+ *
+ * @return Promise
+ */
+function consumeBody() {
+ var _this4 = this;
+
+ if (this[INTERNALS].disturbed) {
+ return Body.Promise.reject(new TypeError(`body used already for: ${this.url}`));
+ }
+
+ this[INTERNALS].disturbed = true;
+
+ if (this[INTERNALS].error) {
+ return Body.Promise.reject(this[INTERNALS].error);
+ }
+
+ let body = this.body;
+
+ // body is null
+ if (body === null) {
+ return Body.Promise.resolve(Buffer.alloc(0));
+ }
+
+ // body is blob
+ if (isBlob(body)) {
+ body = body.stream();
+ }
+
+ // body is buffer
+ if (Buffer.isBuffer(body)) {
+ return Body.Promise.resolve(body);
+ }
+
+ // istanbul ignore if: should never happen
+ if (!(body instanceof Stream)) {
+ return Body.Promise.resolve(Buffer.alloc(0));
+ }
+
+ // body is stream
+ // get ready to actually consume the body
+ let accum = [];
+ let accumBytes = 0;
+ let abort = false;
+
+ return new Body.Promise(function (resolve, reject) {
+ let resTimeout;
+
+ // allow timeout on slow response body
+ if (_this4.timeout) {
+ resTimeout = setTimeout(function () {
+ abort = true;
+ reject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout'));
+ }, _this4.timeout);
+ }
+
+ // handle stream errors
+ body.on('error', function (err) {
+ if (err.name === 'AbortError') {
+ // if the request was aborted, reject with this Error
+ abort = true;
+ reject(err);
+ } else {
+ // other errors, such as incorrect content-encoding
+ reject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err));
+ }
+ });
+
+ body.on('data', function (chunk) {
+ if (abort || chunk === null) {
+ return;
+ }
+
+ if (_this4.size && accumBytes + chunk.length > _this4.size) {
+ abort = true;
+ reject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size'));
+ return;
+ }
+
+ accumBytes += chunk.length;
+ accum.push(chunk);
+ });
+
+ body.on('end', function () {
+ if (abort) {
+ return;
+ }
+
+ clearTimeout(resTimeout);
+
+ try {
+ resolve(Buffer.concat(accum, accumBytes));
+ } catch (err) {
+ // handle streams that have accumulated too much data (issue #414)
+ reject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err));
+ }
+ });
+ });
+}
+
+/**
+ * Detect buffer encoding and convert to target encoding
+ * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding
+ *
+ * @param Buffer buffer Incoming buffer
+ * @param String encoding Target encoding
+ * @return String
+ */
+function convertBody(buffer, headers) {
+ if (typeof convert !== 'function') {
+ throw new Error('The package `encoding` must be installed to use the textConverted() function');
+ }
+
+ const ct = headers.get('content-type');
+ let charset = 'utf-8';
+ let res, str;
+
+ // header
+ if (ct) {
+ res = /charset=([^;]*)/i.exec(ct);
+ }
+
+ // no charset in content type, peek at response body for at most 1024 bytes
+ str = buffer.slice(0, 1024).toString();
+
+ // html5
+ if (!res && str) {
+ res = / 0 && arguments[0] !== undefined ? arguments[0] : undefined;
+
+ this[MAP] = Object.create(null);
+
+ if (init instanceof Headers) {
+ const rawHeaders = init.raw();
+ const headerNames = Object.keys(rawHeaders);
+
+ for (const headerName of headerNames) {
+ for (const value of rawHeaders[headerName]) {
+ this.append(headerName, value);
+ }
+ }
+
+ return;
+ }
+
+ // We don't worry about converting prop to ByteString here as append()
+ // will handle it.
+ if (init == null) ; else if (typeof init === 'object') {
+ const method = init[Symbol.iterator];
+ if (method != null) {
+ if (typeof method !== 'function') {
+ throw new TypeError('Header pairs must be iterable');
+ }
+
+ // sequence>
+ // Note: per spec we have to first exhaust the lists then process them
+ const pairs = [];
+ for (const pair of init) {
+ if (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') {
+ throw new TypeError('Each header pair must be iterable');
+ }
+ pairs.push(Array.from(pair));
+ }
+
+ for (const pair of pairs) {
+ if (pair.length !== 2) {
+ throw new TypeError('Each header pair must be a name/value tuple');
+ }
+ this.append(pair[0], pair[1]);
+ }
+ } else {
+ // record
+ for (const key of Object.keys(init)) {
+ const value = init[key];
+ this.append(key, value);
+ }
+ }
+ } else {
+ throw new TypeError('Provided initializer must be an object');
+ }
+ }
+
+ /**
+ * Return combined header value given name
+ *
+ * @param String name Header name
+ * @return Mixed
+ */
+ get(name) {
+ name = `${name}`;
+ validateName(name);
+ const key = find(this[MAP], name);
+ if (key === undefined) {
+ return null;
+ }
+
+ return this[MAP][key].join(', ');
+ }
+
+ /**
+ * Iterate over all headers
+ *
+ * @param Function callback Executed for each item with parameters (value, name, thisArg)
+ * @param Boolean thisArg `this` context for callback function
+ * @return Void
+ */
+ forEach(callback) {
+ let thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined;
+
+ let pairs = getHeaders(this);
+ let i = 0;
+ while (i < pairs.length) {
+ var _pairs$i = pairs[i];
+ const name = _pairs$i[0],
+ value = _pairs$i[1];
+
+ callback.call(thisArg, value, name, this);
+ pairs = getHeaders(this);
+ i++;
+ }
+ }
+
+ /**
+ * Overwrite header values given name
+ *
+ * @param String name Header name
+ * @param String value Header value
+ * @return Void
+ */
+ set(name, value) {
+ name = `${name}`;
+ value = `${value}`;
+ validateName(name);
+ validateValue(value);
+ const key = find(this[MAP], name);
+ this[MAP][key !== undefined ? key : name] = [value];
+ }
+
+ /**
+ * Append a value onto existing header
+ *
+ * @param String name Header name
+ * @param String value Header value
+ * @return Void
+ */
+ append(name, value) {
+ name = `${name}`;
+ value = `${value}`;
+ validateName(name);
+ validateValue(value);
+ const key = find(this[MAP], name);
+ if (key !== undefined) {
+ this[MAP][key].push(value);
+ } else {
+ this[MAP][name] = [value];
+ }
+ }
+
+ /**
+ * Check for header name existence
+ *
+ * @param String name Header name
+ * @return Boolean
+ */
+ has(name) {
+ name = `${name}`;
+ validateName(name);
+ return find(this[MAP], name) !== undefined;
+ }
+
+ /**
+ * Delete all header values given name
+ *
+ * @param String name Header name
+ * @return Void
+ */
+ delete(name) {
+ name = `${name}`;
+ validateName(name);
+ const key = find(this[MAP], name);
+ if (key !== undefined) {
+ delete this[MAP][key];
+ }
+ }
+
+ /**
+ * Return raw headers (non-spec api)
+ *
+ * @return Object
+ */
+ raw() {
+ return this[MAP];
+ }
+
+ /**
+ * Get an iterator on keys.
+ *
+ * @return Iterator
+ */
+ keys() {
+ return createHeadersIterator(this, 'key');
+ }
+
+ /**
+ * Get an iterator on values.
+ *
+ * @return Iterator
+ */
+ values() {
+ return createHeadersIterator(this, 'value');
+ }
+
+ /**
+ * Get an iterator on entries.
+ *
+ * This is the default iterator of the Headers object.
+ *
+ * @return Iterator
+ */
+ [Symbol.iterator]() {
+ return createHeadersIterator(this, 'key+value');
+ }
+}
+Headers.prototype.entries = Headers.prototype[Symbol.iterator];
+
+Object.defineProperty(Headers.prototype, Symbol.toStringTag, {
+ value: 'Headers',
+ writable: false,
+ enumerable: false,
+ configurable: true
+});
+
+Object.defineProperties(Headers.prototype, {
+ get: { enumerable: true },
+ forEach: { enumerable: true },
+ set: { enumerable: true },
+ append: { enumerable: true },
+ has: { enumerable: true },
+ delete: { enumerable: true },
+ keys: { enumerable: true },
+ values: { enumerable: true },
+ entries: { enumerable: true }
+});
+
+function getHeaders(headers) {
+ let kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value';
+
+ const keys = Object.keys(headers[MAP]).sort();
+ return keys.map(kind === 'key' ? function (k) {
+ return k.toLowerCase();
+ } : kind === 'value' ? function (k) {
+ return headers[MAP][k].join(', ');
+ } : function (k) {
+ return [k.toLowerCase(), headers[MAP][k].join(', ')];
+ });
+}
+
+const INTERNAL = Symbol('internal');
+
+function createHeadersIterator(target, kind) {
+ const iterator = Object.create(HeadersIteratorPrototype);
+ iterator[INTERNAL] = {
+ target,
+ kind,
+ index: 0
+ };
+ return iterator;
+}
+
+const HeadersIteratorPrototype = Object.setPrototypeOf({
+ next() {
+ // istanbul ignore if
+ if (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) {
+ throw new TypeError('Value of `this` is not a HeadersIterator');
+ }
+
+ var _INTERNAL = this[INTERNAL];
+ const target = _INTERNAL.target,
+ kind = _INTERNAL.kind,
+ index = _INTERNAL.index;
+
+ const values = getHeaders(target, kind);
+ const len = values.length;
+ if (index >= len) {
+ return {
+ value: undefined,
+ done: true
+ };
+ }
+
+ this[INTERNAL].index = index + 1;
+
+ return {
+ value: values[index],
+ done: false
+ };
+ }
+}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]())));
+
+Object.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, {
+ value: 'HeadersIterator',
+ writable: false,
+ enumerable: false,
+ configurable: true
+});
+
+/**
+ * Export the Headers object in a form that Node.js can consume.
+ *
+ * @param Headers headers
+ * @return Object
+ */
+function exportNodeCompatibleHeaders(headers) {
+ const obj = Object.assign({ __proto__: null }, headers[MAP]);
+
+ // http.request() only supports string as Host header. This hack makes
+ // specifying custom Host header possible.
+ const hostHeaderKey = find(headers[MAP], 'Host');
+ if (hostHeaderKey !== undefined) {
+ obj[hostHeaderKey] = obj[hostHeaderKey][0];
+ }
+
+ return obj;
+}
+
+/**
+ * Create a Headers object from an object of headers, ignoring those that do
+ * not conform to HTTP grammar productions.
+ *
+ * @param Object obj Object of headers
+ * @return Headers
+ */
+function createHeadersLenient(obj) {
+ const headers = new Headers();
+ for (const name of Object.keys(obj)) {
+ if (invalidTokenRegex.test(name)) {
+ continue;
+ }
+ if (Array.isArray(obj[name])) {
+ for (const val of obj[name]) {
+ if (invalidHeaderCharRegex.test(val)) {
+ continue;
+ }
+ if (headers[MAP][name] === undefined) {
+ headers[MAP][name] = [val];
+ } else {
+ headers[MAP][name].push(val);
+ }
+ }
+ } else if (!invalidHeaderCharRegex.test(obj[name])) {
+ headers[MAP][name] = [obj[name]];
+ }
+ }
+ return headers;
+}
+
+const INTERNALS$1 = Symbol('Response internals');
+
+// fix an issue where "STATUS_CODES" aren't a named export for node <10
+const STATUS_CODES = http.STATUS_CODES;
+
+/**
+ * Response class
+ *
+ * @param Stream body Readable stream
+ * @param Object opts Response options
+ * @return Void
+ */
+class Response {
+ constructor() {
+ let body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null;
+ let opts = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
+
+ Body.call(this, body, opts);
+
+ const status = opts.status || 200;
+ const headers = new Headers(opts.headers);
+
+ if (body != null && !headers.has('Content-Type')) {
+ const contentType = extractContentType(body);
+ if (contentType) {
+ headers.append('Content-Type', contentType);
+ }
+ }
+
+ this[INTERNALS$1] = {
+ url: opts.url,
+ status,
+ statusText: opts.statusText || STATUS_CODES[status],
+ headers,
+ counter: opts.counter
+ };
+ }
+
+ get url() {
+ return this[INTERNALS$1].url || '';
+ }
+
+ get status() {
+ return this[INTERNALS$1].status;
+ }
+
+ /**
+ * Convenience property representing if the request ended normally
+ */
+ get ok() {
+ return this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300;
+ }
+
+ get redirected() {
+ return this[INTERNALS$1].counter > 0;
+ }
+
+ get statusText() {
+ return this[INTERNALS$1].statusText;
+ }
+
+ get headers() {
+ return this[INTERNALS$1].headers;
+ }
+
+ /**
+ * Clone this response
+ *
+ * @return Response
+ */
+ clone() {
+ return new Response(clone(this), {
+ url: this.url,
+ status: this.status,
+ statusText: this.statusText,
+ headers: this.headers,
+ ok: this.ok,
+ redirected: this.redirected
+ });
+ }
+}
+
+Body.mixIn(Response.prototype);
+
+Object.defineProperties(Response.prototype, {
+ url: { enumerable: true },
+ status: { enumerable: true },
+ ok: { enumerable: true },
+ redirected: { enumerable: true },
+ statusText: { enumerable: true },
+ headers: { enumerable: true },
+ clone: { enumerable: true }
+});
+
+Object.defineProperty(Response.prototype, Symbol.toStringTag, {
+ value: 'Response',
+ writable: false,
+ enumerable: false,
+ configurable: true
+});
+
+const INTERNALS$2 = Symbol('Request internals');
+const URL = Url.URL || whatwgUrl.URL;
+
+// fix an issue where "format", "parse" aren't a named export for node <10
+const parse_url = Url.parse;
+const format_url = Url.format;
+
+/**
+ * Wrapper around `new URL` to handle arbitrary URLs
+ *
+ * @param {string} urlStr
+ * @return {void}
+ */
+function parseURL(urlStr) {
+ /*
+ Check whether the URL is absolute or not
+ Scheme: https://tools.ietf.org/html/rfc3986#section-3.1
+ Absolute URL: https://tools.ietf.org/html/rfc3986#section-4.3
+ */
+ if (/^[a-zA-Z][a-zA-Z\d+\-.]*:/.exec(urlStr)) {
+ urlStr = new URL(urlStr).toString();
+ }
+
+ // Fallback to old implementation for arbitrary URLs
+ return parse_url(urlStr);
+}
+
+const streamDestructionSupported = 'destroy' in Stream.Readable.prototype;
+
+/**
+ * Check if a value is an instance of Request.
+ *
+ * @param Mixed input
+ * @return Boolean
+ */
+function isRequest(input) {
+ return typeof input === 'object' && typeof input[INTERNALS$2] === 'object';
+}
+
+function isAbortSignal(signal) {
+ const proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal);
+ return !!(proto && proto.constructor.name === 'AbortSignal');
+}
+
+/**
+ * Request class
+ *
+ * @param Mixed input Url or Request instance
+ * @param Object init Custom options
+ * @return Void
+ */
+class Request {
+ constructor(input) {
+ let init = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
+
+ let parsedURL;
+
+ // normalize input
+ if (!isRequest(input)) {
+ if (input && input.href) {
+ // in order to support Node.js' Url objects; though WHATWG's URL objects
+ // will fall into this branch also (since their `toString()` will return
+ // `href` property anyway)
+ parsedURL = parseURL(input.href);
+ } else {
+ // coerce input to a string before attempting to parse
+ parsedURL = parseURL(`${input}`);
+ }
+ input = {};
+ } else {
+ parsedURL = parseURL(input.url);
+ }
+
+ let method = init.method || input.method || 'GET';
+ method = method.toUpperCase();
+
+ if ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) {
+ throw new TypeError('Request with GET/HEAD method cannot have body');
+ }
+
+ let inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? clone(input) : null;
+
+ Body.call(this, inputBody, {
+ timeout: init.timeout || input.timeout || 0,
+ size: init.size || input.size || 0
+ });
+
+ const headers = new Headers(init.headers || input.headers || {});
+
+ if (inputBody != null && !headers.has('Content-Type')) {
+ const contentType = extractContentType(inputBody);
+ if (contentType) {
+ headers.append('Content-Type', contentType);
+ }
+ }
+
+ let signal = isRequest(input) ? input.signal : null;
+ if ('signal' in init) signal = init.signal;
+
+ if (signal != null && !isAbortSignal(signal)) {
+ throw new TypeError('Expected signal to be an instanceof AbortSignal');
+ }
+
+ this[INTERNALS$2] = {
+ method,
+ redirect: init.redirect || input.redirect || 'follow',
+ headers,
+ parsedURL,
+ signal
+ };
+
+ // node-fetch-only options
+ this.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20;
+ this.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? input.compress : true;
+ this.counter = init.counter || input.counter || 0;
+ this.agent = init.agent || input.agent;
+ }
+
+ get method() {
+ return this[INTERNALS$2].method;
+ }
+
+ get url() {
+ return format_url(this[INTERNALS$2].parsedURL);
+ }
+
+ get headers() {
+ return this[INTERNALS$2].headers;
+ }
+
+ get redirect() {
+ return this[INTERNALS$2].redirect;
+ }
+
+ get signal() {
+ return this[INTERNALS$2].signal;
+ }
+
+ /**
+ * Clone this request
+ *
+ * @return Request
+ */
+ clone() {
+ return new Request(this);
+ }
+}
+
+Body.mixIn(Request.prototype);
+
+Object.defineProperty(Request.prototype, Symbol.toStringTag, {
+ value: 'Request',
+ writable: false,
+ enumerable: false,
+ configurable: true
+});
+
+Object.defineProperties(Request.prototype, {
+ method: { enumerable: true },
+ url: { enumerable: true },
+ headers: { enumerable: true },
+ redirect: { enumerable: true },
+ clone: { enumerable: true },
+ signal: { enumerable: true }
+});
+
+/**
+ * Convert a Request to Node.js http request options.
+ *
+ * @param Request A Request instance
+ * @return Object The options object to be passed to http.request
+ */
+function getNodeRequestOptions(request) {
+ const parsedURL = request[INTERNALS$2].parsedURL;
+ const headers = new Headers(request[INTERNALS$2].headers);
+
+ // fetch step 1.3
+ if (!headers.has('Accept')) {
+ headers.set('Accept', '*/*');
+ }
+
+ // Basic fetch
+ if (!parsedURL.protocol || !parsedURL.hostname) {
+ throw new TypeError('Only absolute URLs are supported');
+ }
+
+ if (!/^https?:$/.test(parsedURL.protocol)) {
+ throw new TypeError('Only HTTP(S) protocols are supported');
+ }
+
+ if (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) {
+ throw new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8');
+ }
+
+ // HTTP-network-or-cache fetch steps 2.4-2.7
+ let contentLengthValue = null;
+ if (request.body == null && /^(POST|PUT)$/i.test(request.method)) {
+ contentLengthValue = '0';
+ }
+ if (request.body != null) {
+ const totalBytes = getTotalBytes(request);
+ if (typeof totalBytes === 'number') {
+ contentLengthValue = String(totalBytes);
+ }
+ }
+ if (contentLengthValue) {
+ headers.set('Content-Length', contentLengthValue);
+ }
+
+ // HTTP-network-or-cache fetch step 2.11
+ if (!headers.has('User-Agent')) {
+ headers.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)');
+ }
+
+ // HTTP-network-or-cache fetch step 2.15
+ if (request.compress && !headers.has('Accept-Encoding')) {
+ headers.set('Accept-Encoding', 'gzip,deflate');
+ }
+
+ let agent = request.agent;
+ if (typeof agent === 'function') {
+ agent = agent(parsedURL);
+ }
+
+ if (!headers.has('Connection') && !agent) {
+ headers.set('Connection', 'close');
+ }
+
+ // HTTP-network fetch step 4.2
+ // chunked encoding is handled by Node.js
+
+ return Object.assign({}, parsedURL, {
+ method: request.method,
+ headers: exportNodeCompatibleHeaders(headers),
+ agent
+ });
+}
+
+/**
+ * abort-error.js
+ *
+ * AbortError interface for cancelled requests
+ */
+
+/**
+ * Create AbortError instance
+ *
+ * @param String message Error message for human
+ * @return AbortError
+ */
+function AbortError(message) {
+ Error.call(this, message);
+
+ this.type = 'aborted';
+ this.message = message;
+
+ // hide custom error implementation details from end-users
+ Error.captureStackTrace(this, this.constructor);
+}
+
+AbortError.prototype = Object.create(Error.prototype);
+AbortError.prototype.constructor = AbortError;
+AbortError.prototype.name = 'AbortError';
+
+const URL$1 = Url.URL || whatwgUrl.URL;
+
+// fix an issue where "PassThrough", "resolve" aren't a named export for node <10
+const PassThrough$1 = Stream.PassThrough;
+
+const isDomainOrSubdomain = function isDomainOrSubdomain(destination, original) {
+ const orig = new URL$1(original).hostname;
+ const dest = new URL$1(destination).hostname;
+
+ return orig === dest || orig[orig.length - dest.length - 1] === '.' && orig.endsWith(dest);
+};
+
+/**
+ * isSameProtocol reports whether the two provided URLs use the same protocol.
+ *
+ * Both domains must already be in canonical form.
+ * @param {string|URL} original
+ * @param {string|URL} destination
+ */
+const isSameProtocol = function isSameProtocol(destination, original) {
+ const orig = new URL$1(original).protocol;
+ const dest = new URL$1(destination).protocol;
+
+ return orig === dest;
+};
+
+/**
+ * Fetch function
+ *
+ * @param Mixed url Absolute url or Request instance
+ * @param Object opts Fetch options
+ * @return Promise
+ */
+function fetch(url, opts) {
+
+ // allow custom promise
+ if (!fetch.Promise) {
+ throw new Error('native promise missing, set fetch.Promise to your favorite alternative');
+ }
+
+ Body.Promise = fetch.Promise;
+
+ // wrap http.request into fetch
+ return new fetch.Promise(function (resolve, reject) {
+ // build request object
+ const request = new Request(url, opts);
+ const options = getNodeRequestOptions(request);
+
+ const send = (options.protocol === 'https:' ? https : http).request;
+ const signal = request.signal;
+
+ let response = null;
+
+ const abort = function abort() {
+ let error = new AbortError('The user aborted a request.');
+ reject(error);
+ if (request.body && request.body instanceof Stream.Readable) {
+ destroyStream(request.body, error);
+ }
+ if (!response || !response.body) return;
+ response.body.emit('error', error);
+ };
+
+ if (signal && signal.aborted) {
+ abort();
+ return;
+ }
+
+ const abortAndFinalize = function abortAndFinalize() {
+ abort();
+ finalize();
+ };
+
+ // send request
+ const req = send(options);
+ let reqTimeout;
+
+ if (signal) {
+ signal.addEventListener('abort', abortAndFinalize);
+ }
+
+ function finalize() {
+ req.abort();
+ if (signal) signal.removeEventListener('abort', abortAndFinalize);
+ clearTimeout(reqTimeout);
+ }
+
+ if (request.timeout) {
+ req.once('socket', function (socket) {
+ reqTimeout = setTimeout(function () {
+ reject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout'));
+ finalize();
+ }, request.timeout);
+ });
+ }
+
+ req.on('error', function (err) {
+ reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err));
+
+ if (response && response.body) {
+ destroyStream(response.body, err);
+ }
+
+ finalize();
+ });
+
+ fixResponseChunkedTransferBadEnding(req, function (err) {
+ if (signal && signal.aborted) {
+ return;
+ }
+
+ if (response && response.body) {
+ destroyStream(response.body, err);
+ }
+ });
+
+ /* c8 ignore next 18 */
+ if (parseInt(process.version.substring(1)) < 14) {
+ // Before Node.js 14, pipeline() does not fully support async iterators and does not always
+ // properly handle when the socket close/end events are out of order.
+ req.on('socket', function (s) {
+ s.addListener('close', function (hadError) {
+ // if a data listener is still present we didn't end cleanly
+ const hasDataListener = s.listenerCount('data') > 0;
+
+ // if end happened before close but the socket didn't emit an error, do it now
+ if (response && hasDataListener && !hadError && !(signal && signal.aborted)) {
+ const err = new Error('Premature close');
+ err.code = 'ERR_STREAM_PREMATURE_CLOSE';
+ response.body.emit('error', err);
+ }
+ });
+ });
+ }
+
+ req.on('response', function (res) {
+ clearTimeout(reqTimeout);
+
+ const headers = createHeadersLenient(res.headers);
+
+ // HTTP fetch step 5
+ if (fetch.isRedirect(res.statusCode)) {
+ // HTTP fetch step 5.2
+ const location = headers.get('Location');
+
+ // HTTP fetch step 5.3
+ let locationURL = null;
+ try {
+ locationURL = location === null ? null : new URL$1(location, request.url).toString();
+ } catch (err) {
+ // error here can only be invalid URL in Location: header
+ // do not throw when options.redirect == manual
+ // let the user extract the errorneous redirect URL
+ if (request.redirect !== 'manual') {
+ reject(new FetchError(`uri requested responds with an invalid redirect URL: ${location}`, 'invalid-redirect'));
+ finalize();
+ return;
+ }
+ }
+
+ // HTTP fetch step 5.5
+ switch (request.redirect) {
+ case 'error':
+ reject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request.url}`, 'no-redirect'));
+ finalize();
+ return;
+ case 'manual':
+ // node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL.
+ if (locationURL !== null) {
+ // handle corrupted header
+ try {
+ headers.set('Location', locationURL);
+ } catch (err) {
+ // istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request
+ reject(err);
+ }
+ }
+ break;
+ case 'follow':
+ // HTTP-redirect fetch step 2
+ if (locationURL === null) {
+ break;
+ }
+
+ // HTTP-redirect fetch step 5
+ if (request.counter >= request.follow) {
+ reject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect'));
+ finalize();
+ return;
+ }
+
+ // HTTP-redirect fetch step 6 (counter increment)
+ // Create a new Request object.
+ const requestOpts = {
+ headers: new Headers(request.headers),
+ follow: request.follow,
+ counter: request.counter + 1,
+ agent: request.agent,
+ compress: request.compress,
+ method: request.method,
+ body: request.body,
+ signal: request.signal,
+ timeout: request.timeout,
+ size: request.size
+ };
+
+ if (!isDomainOrSubdomain(request.url, locationURL) || !isSameProtocol(request.url, locationURL)) {
+ for (const name of ['authorization', 'www-authenticate', 'cookie', 'cookie2']) {
+ requestOpts.headers.delete(name);
+ }
+ }
+
+ // HTTP-redirect fetch step 9
+ if (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) {
+ reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect'));
+ finalize();
+ return;
+ }
+
+ // HTTP-redirect fetch step 11
+ if (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') {
+ requestOpts.method = 'GET';
+ requestOpts.body = undefined;
+ requestOpts.headers.delete('content-length');
+ }
+
+ // HTTP-redirect fetch step 15
+ resolve(fetch(new Request(locationURL, requestOpts)));
+ finalize();
+ return;
+ }
+ }
+
+ // prepare response
+ res.once('end', function () {
+ if (signal) signal.removeEventListener('abort', abortAndFinalize);
+ });
+ let body = res.pipe(new PassThrough$1());
+
+ const response_options = {
+ url: request.url,
+ status: res.statusCode,
+ statusText: res.statusMessage,
+ headers: headers,
+ size: request.size,
+ timeout: request.timeout,
+ counter: request.counter
+ };
+
+ // HTTP-network fetch step 12.1.1.3
+ const codings = headers.get('Content-Encoding');
+
+ // HTTP-network fetch step 12.1.1.4: handle content codings
+
+ // in following scenarios we ignore compression support
+ // 1. compression support is disabled
+ // 2. HEAD request
+ // 3. no Content-Encoding header
+ // 4. no content response (204)
+ // 5. content not modified response (304)
+ if (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) {
+ response = new Response(body, response_options);
+ resolve(response);
+ return;
+ }
+
+ // For Node v6+
+ // Be less strict when decoding compressed responses, since sometimes
+ // servers send slightly invalid responses that are still accepted
+ // by common browsers.
+ // Always using Z_SYNC_FLUSH is what cURL does.
+ const zlibOptions = {
+ flush: zlib.Z_SYNC_FLUSH,
+ finishFlush: zlib.Z_SYNC_FLUSH
+ };
+
+ // for gzip
+ if (codings == 'gzip' || codings == 'x-gzip') {
+ body = body.pipe(zlib.createGunzip(zlibOptions));
+ response = new Response(body, response_options);
+ resolve(response);
+ return;
+ }
+
+ // for deflate
+ if (codings == 'deflate' || codings == 'x-deflate') {
+ // handle the infamous raw deflate response from old servers
+ // a hack for old IIS and Apache servers
+ const raw = res.pipe(new PassThrough$1());
+ raw.once('data', function (chunk) {
+ // see http://stackoverflow.com/questions/37519828
+ if ((chunk[0] & 0x0F) === 0x08) {
+ body = body.pipe(zlib.createInflate());
+ } else {
+ body = body.pipe(zlib.createInflateRaw());
+ }
+ response = new Response(body, response_options);
+ resolve(response);
+ });
+ raw.on('end', function () {
+ // some old IIS servers return zero-length OK deflate responses, so 'data' is never emitted.
+ if (!response) {
+ response = new Response(body, response_options);
+ resolve(response);
+ }
+ });
+ return;
+ }
+
+ // for br
+ if (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') {
+ body = body.pipe(zlib.createBrotliDecompress());
+ response = new Response(body, response_options);
+ resolve(response);
+ return;
+ }
+
+ // otherwise, use response as-is
+ response = new Response(body, response_options);
+ resolve(response);
+ });
+
+ writeToStream(req, request);
+ });
+}
+function fixResponseChunkedTransferBadEnding(request, errorCallback) {
+ let socket;
+
+ request.on('socket', function (s) {
+ socket = s;
+ });
+
+ request.on('response', function (response) {
+ const headers = response.headers;
+
+ if (headers['transfer-encoding'] === 'chunked' && !headers['content-length']) {
+ response.once('close', function (hadError) {
+ // if a data listener is still present we didn't end cleanly
+ const hasDataListener = socket.listenerCount('data') > 0;
+
+ if (hasDataListener && !hadError) {
+ const err = new Error('Premature close');
+ err.code = 'ERR_STREAM_PREMATURE_CLOSE';
+ errorCallback(err);
+ }
+ });
+ }
+ });
+}
+
+function destroyStream(stream, err) {
+ if (stream.destroy) {
+ stream.destroy(err);
+ } else {
+ // node < 8
+ stream.emit('error', err);
+ stream.end();
+ }
+}
+
+/**
+ * Redirect code matching
+ *
+ * @param Number code Status code
+ * @return Boolean
+ */
+fetch.isRedirect = function (code) {
+ return code === 301 || code === 302 || code === 303 || code === 307 || code === 308;
+};
+
+// expose Promise
+fetch.Promise = global.Promise;
+
+export default fetch;
+export { Headers, Request, Response, FetchError };
diff --git a/node_modules/node-fetch/package.json b/node_modules/node-fetch/package.json
new file mode 100644
index 0000000000000000000000000000000000000000..fb18d9959632da9db1c8f71c960e1ecb5332146a
--- /dev/null
+++ b/node_modules/node-fetch/package.json
@@ -0,0 +1,116 @@
+{
+ "_from": "node-fetch@^2.6.1",
+ "_id": "node-fetch@2.6.11",
+ "_inBundle": false,
+ "_integrity": "sha512-4I6pdBY1EthSqDmJkiNk3JIT8cswwR9nfeW/cPdUagJYEQG7R95WRH74wpz7ma8Gh/9dI9FP+OU+0E4FvtA55w==",
+ "_location": "/node-fetch",
+ "_phantomChildren": {},
+ "_requested": {
+ "type": "range",
+ "registry": true,
+ "raw": "node-fetch@^2.6.1",
+ "name": "node-fetch",
+ "escapedName": "node-fetch",
+ "rawSpec": "^2.6.1",
+ "saveSpec": null,
+ "fetchSpec": "^2.6.1"
+ },
+ "_requiredBy": [
+ "/dropbox"
+ ],
+ "_resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.11.tgz",
+ "_shasum": "cde7fc71deef3131ef80a738919f999e6edfff25",
+ "_spec": "node-fetch@^2.6.1",
+ "_where": "C:\\Users\\lenovo\\TSA\\node_modules\\dropbox",
+ "author": {
+ "name": "David Frank"
+ },
+ "browser": "./browser.js",
+ "bugs": {
+ "url": "https://github.com/bitinn/node-fetch/issues"
+ },
+ "bundleDependencies": false,
+ "dependencies": {
+ "whatwg-url": "^5.0.0"
+ },
+ "deprecated": false,
+ "description": "A light-weight module that brings window.fetch to node.js",
+ "devDependencies": {
+ "@ungap/url-search-params": "^0.1.2",
+ "abort-controller": "^1.1.0",
+ "abortcontroller-polyfill": "^1.3.0",
+ "babel-core": "^6.26.3",
+ "babel-plugin-istanbul": "^4.1.6",
+ "babel-plugin-transform-async-generator-functions": "^6.24.1",
+ "babel-polyfill": "^6.26.0",
+ "babel-preset-env": "1.4.0",
+ "babel-register": "^6.16.3",
+ "chai": "^3.5.0",
+ "chai-as-promised": "^7.1.1",
+ "chai-iterator": "^1.1.1",
+ "chai-string": "~1.3.0",
+ "codecov": "3.3.0",
+ "cross-env": "^5.2.0",
+ "form-data": "^2.3.3",
+ "is-builtin-module": "^1.0.0",
+ "mocha": "^5.0.0",
+ "nyc": "11.9.0",
+ "parted": "^0.1.1",
+ "promise": "^8.0.3",
+ "resumer": "0.0.0",
+ "rollup": "^0.63.4",
+ "rollup-plugin-babel": "^3.0.7",
+ "string-to-arraybuffer": "^1.0.2",
+ "teeny-request": "3.7.0"
+ },
+ "engines": {
+ "node": "4.x || >=6.0.0"
+ },
+ "files": [
+ "lib/index.js",
+ "lib/index.mjs",
+ "lib/index.es.js",
+ "browser.js"
+ ],
+ "homepage": "https://github.com/bitinn/node-fetch",
+ "keywords": [
+ "fetch",
+ "http",
+ "promise"
+ ],
+ "license": "MIT",
+ "main": "lib/index.js",
+ "module": "lib/index.mjs",
+ "name": "node-fetch",
+ "peerDependencies": {
+ "encoding": "^0.1.0"
+ },
+ "peerDependenciesMeta": {
+ "encoding": {
+ "optional": true
+ }
+ },
+ "release": {
+ "branches": [
+ "+([0-9]).x",
+ "main",
+ "next",
+ {
+ "name": "beta",
+ "prerelease": true
+ }
+ ]
+ },
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/bitinn/node-fetch.git"
+ },
+ "scripts": {
+ "build": "cross-env BABEL_ENV=rollup rollup -c",
+ "coverage": "cross-env BABEL_ENV=coverage nyc --reporter json --reporter text mocha -R spec test/test.js && codecov -f coverage/coverage-final.json",
+ "prepare": "npm run build",
+ "report": "cross-env BABEL_ENV=coverage nyc --reporter lcov --reporter text mocha -R spec test/test.js",
+ "test": "cross-env BABEL_ENV=test mocha --require babel-register --throw-deprecation test/test.js"
+ },
+ "version": "2.6.11"
+}
diff --git a/node_modules/tr46/.npmignore b/node_modules/tr46/.npmignore
new file mode 100644
index 0000000000000000000000000000000000000000..96e9161fde31e9906718f689d5cc135e507a51e1
--- /dev/null
+++ b/node_modules/tr46/.npmignore
@@ -0,0 +1,4 @@
+scripts/
+test/
+
+!lib/mapping_table.json
diff --git a/node_modules/tr46/dropbox/CODE_OF_CONDUCT.md b/node_modules/tr46/dropbox/CODE_OF_CONDUCT.md
new file mode 100644
index 0000000000000000000000000000000000000000..0492b588e450a5681b5f70cc938245e5cc0bbb76
--- /dev/null
+++ b/node_modules/tr46/dropbox/CODE_OF_CONDUCT.md
@@ -0,0 +1,5 @@
+# Dropbox Code Of Conduct
+
+*Dropbox believes that an inclusive development environment fosters greater technical achievement. To encourage a diverse group of contributors we've adopted this code of conduct.*
+
+Please read the Official Dropbox [Code of Conduct](https://opensource.dropbox.com/coc/) before contributing.
\ No newline at end of file
diff --git a/node_modules/tr46/dropbox/CONTRIBUTING.md b/node_modules/tr46/dropbox/CONTRIBUTING.md
new file mode 100644
index 0000000000000000000000000000000000000000..ec595d58ade3040ceca5ee8533fba6c5bf9ad688
--- /dev/null
+++ b/node_modules/tr46/dropbox/CONTRIBUTING.md
@@ -0,0 +1,62 @@
+# Contributing to the Dropbox SDK for Javascript
+We value and rely on the feedback from our community. This comes in the form of bug reports, feature requests, and general guidance. We welcome your issues and pull requests and try our hardest to be timely in both response and resolution. Please read through this document before submitting issues or pull requests to ensure we have the necessary information to help you resolve your issue.
+
+## Filing Bug Reports
+You can file a bug report on the [GitHub Issues][issues] page.
+
+1. Search through existing issues to ensure that your issue has not been reported. If it is a common issue, there is likely already an issue.
+
+2. Please ensure you are using the latest version of the SDK. While this may be a valid issue, we will only fix bugs affecting the latest version, and your bug may have been fixed in a newer version.
+
+3. Provide as much information as you can regarding the language version, SDK version, and any other relevant information about your environment so we can help resolve the issue as quickly as possible.
+
+## Submitting Pull Requests
+
+We are more than happy to receive pull requests helping us improve the state of our SDK. You can open a new pull request on the [GitHub Pull Requests][pr] page.
+
+1. Please ensure that you have read the [License][license], [Code of Conduct][coc] and have signed the [Contributing License Agreement (CLA)][cla].
+
+2. Please add tests confirming the new functionality works. Pull requests will not be merged without passing continuous integration tests unless the pull requests aims to fix existing issues with these tests.
+
+3. If the pull request is modifying typescript definitions, please remember to change the template found under `generator/typescript` and run the generation instead of manually changing types. If there is an issue with the generation, please file an issue.
+
+## Updating Generated Code
+
+Generated code can be updated by running the following commands:
+
+```
+$ git submodule init
+$ git submodule update --remote --recursive
+$ cd generator/stone
+$ python setup.py install
+$ cd ..
+$ python generate_routes.py
+```
+
+This will generate typescript definitions and route code.
+
+## Testing the Code
+
+Tests live under the `test/` folder and are then broken down into the type of test it is. To run both the unit tests and the typescript tests, you can use:
+
+```
+$ npm test
+```
+
+If you would like to run the integration tests locally, you can run:
+
+```
+export DROPBOX_TOKEN={fill in user token}
+export DROPBOX_TEAM_TOKEN={fill in team token}
+export DROPBOX_USER_ID={fill in assume user id}
+export DROPBOX_SHARED_LINK={fill in shared link}
+$ npm run test:integration
+```
+
+Note: If you do not have all of these tokens available, we run integration tests as part of pull request validation, and you can rely on those if you are unable to obtain the tokens yourself.
+
+[issues]: https://github.com/dropbox/dropbox-sdk-js/issues
+[pr]: https://github.com/dropbox/dropbox-sdk-js/pulls
+[coc]: https://github.com/dropbox/dropbox-sdk-js/blob/main/CODE_OF_CONDUCT.md
+[license]: https://github.com/dropbox/dropbox-sdk-js/blob/main/LICENSE
+[cla]: https://opensource.dropbox.com/cla/
\ No newline at end of file
diff --git a/node_modules/tr46/dropbox/LICENSE b/node_modules/tr46/dropbox/LICENSE
new file mode 100644
index 0000000000000000000000000000000000000000..9f5dd9376dc46facaabeb20998fef69be5fe3d16
--- /dev/null
+++ b/node_modules/tr46/dropbox/LICENSE
@@ -0,0 +1,20 @@
+Copyright (c) 2020 Dropbox Inc., http://www.dropbox.com/
+
+ Permission is hereby granted, free of charge, to any person obtaining
+ a copy of this software and associated documentation files (the
+ "Software"), to deal in the Software without restriction, including
+ without limitation the rights to use, copy, modify, merge, publish,
+ distribute, sublicense, and/or sell copies of the Software, and to
+ permit persons to whom the Software is furnished to do so, subject to
+ the following conditions:
+
+ The above copyright notice and this permission notice shall be
+ included in all copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+ NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+ LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+ WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
\ No newline at end of file
diff --git a/node_modules/tr46/dropbox/README.md b/node_modules/tr46/dropbox/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..f06cccdaaa58d9ff69024ce109d26c7213e0e9df
--- /dev/null
+++ b/node_modules/tr46/dropbox/README.md
@@ -0,0 +1,70 @@
+[![Logo][logo]][repo]
+
+[](https://www.npmjs.com/package/dropbox)
+[](https://www.npmjs.com/package/dropbox)
+[](https://codecov.io/gh/dropbox/dropbox-sdk-js)
+
+The official Dropbox SDK for JavaScript.
+
+Documentation can be found on [GitHub Pages][documentation]
+
+## Installation
+
+Create an app via the [Developer Console][devconsole]
+
+Install via [npm](https://www.npmjs.com/)
+
+```
+$ npm install --save dropbox
+```
+
+Install from source:
+
+```
+$ git clone https://github.com/dropbox/dropbox-sdk-js.git
+$ cd dropbox-sdk-js
+$ npm install
+```
+
+If you are using the repository from the browser, you can use any CDNs that hosts the Dropbox package by including a script tag with the link to the package. However, we highly recommend you do not directly import the latest version and instead choose a specific version. When we update and release a breaking change, this could break production code which we hope to avoid. Note, we follow [semver](https://semver.org/) naming conventions which means that any major version update could contain a breaking change.
+
+After installation, follow one of our [Examples][examples] or read the [Documentation][documentation].
+
+You can also view our [OAuth guide][oauthguide].
+
+## Examples
+
+We provide [Examples][examples] to help get you started with a lot of the basic functionality in the SDK. We provide most examples in both Javascript and Typescript with some having a Node equivalent.
+
+- **OAuth**
+ - Auth - [ [JS](https://github.com/dropbox/dropbox-sdk-js/tree/main/examples/javascript/auth) ] - A simple auth example to get an access token and list the files in the root of your Dropbox account.
+ - Simple Backend [ [JS](https://github.com/dropbox/dropbox-sdk-js/tree/main/examples/javascript/simple-backend) ] - A simple example of a node backend doing a multi-step auth flow for Short Lived Tokens.
+ - PKCE Backend [ [JS](https://github.com/dropbox/dropbox-sdk-js/tree/main/examples/javascript/PKCE-backend) ] - A simple example of a node backend doing a multi-step auth flow using PKCE and Short Lived Tokens.
+ - PKCE Browser [ [JS](https://github.com/dropbox/dropbox-sdk-js/tree/main/examples/javascript/pkce-browser) ] - A simple example of a frontend doing a multi-step auth flow using PKCE and Short Lived Tokens.
+
+- **Other Examples**
+ - Basic - [ [TS](https://github.com/dropbox/dropbox-sdk-js/tree/main/examples/typescript/node), [JS](https://github.com/dropbox/dropbox-sdk-js/tree/main/examples/javascript/basic) ] - A simple example that takes in a token and fetches files from your Dropbox account.
+ - Download - [ [TS](https://github.com/dropbox/dropbox-sdk-js/tree/main/examples/typescript/node), [JS](https://github.com/dropbox/dropbox-sdk-js/tree/main/examples/javascript/download) ] - An example showing how to download a shared file.
+ - Team As User - [ [TS](https://github.com/dropbox/dropbox-sdk-js/tree/main/examples/typescript/node), [JS](https://github.com/dropbox/dropbox-sdk-js/tree/main/examples/javascript/team-as-user) ] - An example showing how to act as a user.
+ - Team - [ [TS](https://github.com/dropbox/dropbox-sdk-js/tree/main/examples/typescript/node), [JS](https://github.com/dropbox/dropbox-sdk-js/tree/main/examples/javascript/team) ] - An example showing how to use the team functionality and list team devices.
+ - Upload [ [TS](https://github.com/dropbox/dropbox-sdk-js/tree/main/examples/typescript/node), [JS](https://github.com/dropbox/dropbox-sdk-js/tree/main/examples/javascript/upload) ] - An example showing how to upload a file to Dropbox.
+
+## Getting Help
+
+If you find a bug, please see [CONTRIBUTING.md][contributing] for information on how to report it.
+
+If you need help that is not specific to this SDK, please reach out to [Dropbox Support][support].
+
+## License
+
+This SDK is distributed under the MIT license, please see [LICENSE][license] for more information.
+
+[logo]: https://cfl.dropboxstatic.com/static/images/sdk/javascript_banner.png
+[repo]: https://github.com/dropbox/dropbox-sdk-js
+[documentation]: https://dropbox.github.io/dropbox-sdk-js/
+[examples]: https://github.com/dropbox/dropbox-sdk-js/tree/main/examples
+[license]: https://github.com/dropbox/dropbox-sdk-js/blob/main/LICENSE
+[contributing]: https://github.com/dropbox/dropbox-sdk-js/blob/main/CONTRIBUTING.md
+[devconsole]: https://dropbox.com/developers/apps
+[oauthguide]: https://www.dropbox.com/lp/developers/reference/oauth-guide
+[support]: https://www.dropbox.com/developers/contact
diff --git a/node_modules/tr46/dropbox/UPGRADING.md b/node_modules/tr46/dropbox/UPGRADING.md
new file mode 100644
index 0000000000000000000000000000000000000000..64c20a1fb5d4b3fac762e499997633a17a5a07d9
--- /dev/null
+++ b/node_modules/tr46/dropbox/UPGRADING.md
@@ -0,0 +1,163 @@
+# Upgrading the Dropbox SDK
+
+This document is designed to show you how to upgrade to the latest version of the SDK accommodating any breaking changes introduced by major version updates. If you find any issues with either this guide on upgrading or the changes introduced in the new version, please see [CONTRIBUTING.md][contributing].
+
+# Upgrading from v9.X.X to v10.0.0
+
+## 1. Deprecating the `authenticateWithCordova` function
+
+The `authenticateWithCordova` function used an in-app browser within the Cordova framework to authenticate users via OAuth. As a part of hardening security, we are following [Google’s recommendation](https://developers.googleblog.com/2016/08/modernizing-oauth-interactions-in-native-apps.html) to remove support for authentication via a “web-view” or in-app browsers. Since the `authenticateWithCordova` function relies on running in an in-app browser, we have made the choice to deprecate this function.
+
+Instead, apps will need to implement logic to handle this use case. The high level logic would be as follows:
+
+1. getAuthenticationUrl with your app’s parameters. For Native Apps, we highly encourage using PKCE to increase your app’s security.
+2. Open the authentication URL in the default system browser
+3. Redirect back into your app upon completion of the OAuth flow.
+
+We recommend using a custom URI for redirect to ensure you are redirecting directly back into your app. You can read up on this process more in detail on the [OAuth site](https://www.oauth.com/oauth2-servers/redirect-uris/redirect-uris-native-apps/).
+
+# Upgrading from v8.X.X to v9.0.0
+
+## 1. Unblocking browser PKCE flow
+
+Previously, there was an issue in which Node and the Browser use different processes to generate the `codeVerifier` and `codeChallenge`. In order to remedy this, both `generatePKCECodes` and `getAuthenticationUrl` now return promises due to how the browser digests hashes.
+
+Previous Implementation(synchronous):
+```
+var authUrl = dbxAuth.getAuthenticationUrl(redirectUri, null, 'code', 'offline', null, 'none', false)
+// logic for navigating to authUrl
+```
+New Implementation(async):
+```
+dbxAuth.getAuthenticationUrl(redirectUri, null, 'code', 'offline', null, 'none', false)
+ .then((authUrl) => {
+ // logic for navigating to authUrl
+ });
+```
+# Upgrading from v7.X.X to v8.0.0
+
+## 1. Throwing Errors as `DropboxResponseError` rather than a literal object
+
+We have created a new Error class called `DropboxResponseError` which contains the same members as the literal that was thrown, but in a cleaner format. It also allows you to leverage the fact this class now extends the builtin `Error` class.
+
+# Upgrading from v6.X.X to v7.0.0
+
+## 1. Fixing the Typescript argument parameter bug ([#41](https://github.com/dropbox/dropbox-sdk-js/issues/41))
+
+We noticed a long lasting bug where the Typescript definitions of routes with no arg would require a `void` argument. This required users to make calls like this:
+
+```
+var result = dbx.usersGetCurrentAccount(null);
+```
+
+We have since fixed this to no longer require the null parameter.
+
+# Upgrading from v5.X.X to v6.0.0
+
+## 1. Unifying Dropbox and DropboxTeam
+
+We made the decision to unify the Dropbox and DropboxTeam objects to further simplify the logic in the SDK. Migrating is very straightforward, a reference like this:
+
+```
+var dbx = new DropboxTeam({
+ accessToken: 'my_token'
+});
+```
+
+Can be rewritten as:
+
+```
+var dbx = new Dropbox({
+ accessToken: 'my_token'
+});
+```
+
+Additionally, when using features like assume user, select admin, or path root, they are now set as a part of the constructor rather than creating a new client. Logic like this:
+
+```
+var dbx = new DropboxTeam({
+ accessToken: 'my_token'
+});
+var dbx_user = dbx.actAsUser(user_id);
+dbx_user.usersGetCurrentAccount();
+```
+
+Can be rewritten as:
+
+```
+var dbx = new Dropbox({
+ accessToken: 'my_token',
+ selectUser: 'my_user_id'
+});
+dbx.usersGetCurrentAccount();
+```
+
+## 2. Moving authentication to DropboxAuth
+
+Another change that was made was to move all auth related functionality into the DropboxAuth object. The main Dropbox object can be constructed the same way but this will internally create a DropboxAuth object. In order to access any auth functions from the main client you must change your code as such:
+
+```
+dbx.get_authentication_url(...);
+```
+
+Would become something like this:
+
+```
+dbx.auth.get_authentication_url(...);
+```
+
+However, we recommend creating a DropboxAuth object before creating a client and then constructing as such:
+
+```
+var dbxAuth = new DropboxAuth();
+... // Do auth logic
+var dbx = new Dropbox(dbxAuth);
+```
+
+That way if you need to create another instance of the client, you can easily plug in the same auth object.
+
+## 3. Changing Typescript export format
+
+We have updated the Typescript definitions to be a part of `Dropbox` namespace rather than the `DropboxTypes` namespace. This would look like:
+
+```
+const result: DropboxTypes.users.FullAccount = dbx.usersGetCurrentAccount();
+```
+
+Would become:
+
+```
+const result: Dropbox.users.FullAccount = dbx.usersGetCurrentAccount();
+```
+
+## 4. Updating the Response object
+
+We have wrapped the raw responses into the `DropboxResponse` object in order to expose more information out to users. This change looks like:
+
+```
+var response = dbx.usersGetCurrentAccount();
+console.log(response.fileBlob); //or fileBinary if using workers
+```
+
+Would become:
+
+```
+var response = dbx.usersGetCurrentAccount();
+console.log(response.result.fileBlob); //or fileBinary if using workers
+```
+
+This also exposes the other components of the response like the status and headers which was not previously available.
+
+```
+var response = dbx.usersGetCurrentAccount();
+console.log(response.status);
+console.log(response.headers);
+```
+
+## 5. Default behavior for `fetch`.
+
+Previously we have provided guidance to SDK users that they should not rely on the Dropbox SDK's global fetch and that it would be deprecated in future versions. In 6.0.0 onwards, we now include the `node-fetch` dependency as part of the NPM package. For browser environments, we fallback to `window.fetch` by default.
+
+As a result, you should not pass in your own `fetch` to the Dropbox constructor unless you have a specific reason to do so (mocking, etc). Note that if you opt to pass in fetch to support your use case, you may need to bind your fetch to the appropriate context e.g. `fetch.bind(your_context)`.
+
+[contributing]: https://github.com/dropbox/dropbox-sdk-js/blob/main/CONTRIBUTING.md
diff --git a/node_modules/tr46/dropbox/cjs/index.js b/node_modules/tr46/dropbox/cjs/index.js
new file mode 100644
index 0000000000000000000000000000000000000000..c09420a33152dbe6de57cfb27938bf06ed446306
--- /dev/null
+++ b/node_modules/tr46/dropbox/cjs/index.js
@@ -0,0 +1,41 @@
+"use strict";
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+
+var _dropbox = require("./src/dropbox.js");
+
+Object.defineProperty(exports, "Dropbox", {
+ enumerable: true,
+ get: function get() {
+ return _dropbox["default"];
+ }
+});
+
+var _auth = require("./src/auth.js");
+
+Object.defineProperty(exports, "DropboxAuth", {
+ enumerable: true,
+ get: function get() {
+ return _auth["default"];
+ }
+});
+
+var _response = require("./src/response.js");
+
+Object.defineProperty(exports, "DropboxResponse", {
+ enumerable: true,
+ get: function get() {
+ return _response.DropboxResponse;
+ }
+});
+
+var _error = require("./src/error.js");
+
+Object.defineProperty(exports, "DropboxResponseError", {
+ enumerable: true,
+ get: function get() {
+ return _error.DropboxResponseError;
+ }
+});
\ No newline at end of file
diff --git a/node_modules/tr46/dropbox/cjs/lib/routes.js b/node_modules/tr46/dropbox/cjs/lib/routes.js
new file mode 100644
index 0000000000000000000000000000000000000000..9aa3d526ea1522098ea9e61de339b38ba5adc9a1
--- /dev/null
+++ b/node_modules/tr46/dropbox/cjs/lib/routes.js
@@ -0,0 +1,4048 @@
+"use strict";
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+// Auto-generated by Stone, do not modify.
+var routes = {};
+/**
+ * Sets a user's profile photo.
+ * Route attributes:
+ * scope: account_info.write
+ * @function Dropbox#accountSetProfilePhoto
+ * @arg {AccountSetProfilePhotoArg} arg - The request parameters.
+ * @returns {Promise., DropboxResponseError.>}
+ */
+
+routes.accountSetProfilePhoto = function (arg) {
+ return this.request('account/set_profile_photo', arg, 'user', 'api', 'rpc', 'account_info.write');
+};
+/**
+ * Creates an OAuth 2.0 access token from the supplied OAuth 1.0 access token.
+ * @function Dropbox#authTokenFromOauth1
+ * @arg {AuthTokenFromOAuth1Arg} arg - The request parameters.
+ * @returns {Promise., DropboxResponseError.>}
+ */
+
+
+routes.authTokenFromOauth1 = function (arg) {
+ return this.request('auth/token/from_oauth1', arg, 'app', 'api', 'rpc', null);
+};
+/**
+ * Disables the access token used to authenticate the call. If there is a
+ * corresponding refresh token for the access token, this disables that refresh
+ * token, as well as any other access tokens for that refresh token.
+ * @function Dropbox#authTokenRevoke
+ * @returns {Promise., DropboxResponseError.>}
+ */
+
+
+routes.authTokenRevoke = function () {
+ return this.request('auth/token/revoke', null, 'user', 'api', 'rpc', null);
+};
+/**
+ * This endpoint performs App Authentication, validating the supplied app key
+ * and secret, and returns the supplied string, to allow you to test your code
+ * and connection to the Dropbox API. It has no other effect. If you receive an
+ * HTTP 200 response with the supplied query, it indicates at least part of the
+ * Dropbox API infrastructure is working and that the app key and secret are valid.
+ * @function Dropbox#checkApp
+ * @arg {CheckEchoArg} arg - The request parameters.
+ * @returns {Promise., DropboxResponseError.>}
+ */
+
+
+routes.checkApp = function (arg) {
+ return this.request('check/app', arg, 'app', 'api', 'rpc', null);
+};
+/**
+ * This endpoint performs User Authentication, validating the supplied access
+ * token, and returns the supplied string, to allow you to test your code and
+ * connection to the Dropbox API. It has no other effect. If you receive an HTTP
+ * 200 response with the supplied query, it indicates at least part of the
+ * Dropbox API infrastructure is working and that the access token is valid.
+ * Route attributes:
+ * scope: account_info.read
+ * @function Dropbox#checkUser
+ * @arg {CheckEchoArg} arg - The request parameters.
+ * @returns {Promise., DropboxResponseError.>}
+ */
+
+
+routes.checkUser = function (arg) {
+ return this.request('check/user', arg, 'user', 'api', 'rpc', 'account_info.read');
+};
+/**
+ * Removes all manually added contacts. You'll still keep contacts who are on
+ * your team or who you imported. New contacts will be added when you share.
+ * Route attributes:
+ * scope: contacts.write
+ * @function Dropbox#contactsDeleteManualContacts
+ * @returns {Promise., DropboxResponseError.>}
+ */
+
+
+routes.contactsDeleteManualContacts = function () {
+ return this.request('contacts/delete_manual_contacts', null, 'user', 'api', 'rpc', 'contacts.write');
+};
+/**
+ * Removes manually added contacts from the given list.
+ * Route attributes:
+ * scope: contacts.write
+ * @function Dropbox#contactsDeleteManualContactsBatch
+ * @arg {ContactsDeleteManualContactsArg} arg - The request parameters.
+ * @returns {Promise., DropboxResponseError.>}
+ */
+
+
+routes.contactsDeleteManualContactsBatch = function (arg) {
+ return this.request('contacts/delete_manual_contacts_batch', arg, 'user', 'api', 'rpc', 'contacts.write');
+};
+/**
+ * Add property groups to a Dropbox file. See templates/add_for_user or
+ * templates/add_for_team to create new templates.
+ * Route attributes:
+ * scope: files.metadata.write
+ * @function Dropbox#filePropertiesPropertiesAdd
+ * @arg {FilePropertiesAddPropertiesArg} arg - The request parameters.
+ * @returns {Promise., DropboxResponseError.>}
+ */
+
+
+routes.filePropertiesPropertiesAdd = function (arg) {
+ return this.request('file_properties/properties/add', arg, 'user', 'api', 'rpc', 'files.metadata.write');
+};
+/**
+ * Overwrite property groups associated with a file. This endpoint should be
+ * used instead of properties/update when property groups are being updated via
+ * a "snapshot" instead of via a "delta". In other words, this endpoint will
+ * delete all omitted fields from a property group, whereas properties/update
+ * will only delete fields that are explicitly marked for deletion.
+ * Route attributes:
+ * scope: files.metadata.write
+ * @function Dropbox#filePropertiesPropertiesOverwrite
+ * @arg {FilePropertiesOverwritePropertyGroupArg} arg - The request parameters.
+ * @returns {Promise., DropboxResponseError.>}
+ */
+
+
+routes.filePropertiesPropertiesOverwrite = function (arg) {
+ return this.request('file_properties/properties/overwrite', arg, 'user', 'api', 'rpc', 'files.metadata.write');
+};
+/**
+ * Permanently removes the specified property group from the file. To remove
+ * specific property field key value pairs, see properties/update. To update a
+ * template, see templates/update_for_user or templates/update_for_team. To
+ * remove a template, see templates/remove_for_user or
+ * templates/remove_for_team.
+ * Route attributes:
+ * scope: files.metadata.write
+ * @function Dropbox#filePropertiesPropertiesRemove
+ * @arg {FilePropertiesRemovePropertiesArg} arg - The request parameters.
+ * @returns {Promise., DropboxResponseError.>}
+ */
+
+
+routes.filePropertiesPropertiesRemove = function (arg) {
+ return this.request('file_properties/properties/remove', arg, 'user', 'api', 'rpc', 'files.metadata.write');
+};
+/**
+ * Search across property templates for particular property field values.
+ * Route attributes:
+ * scope: files.metadata.read
+ * @function Dropbox#filePropertiesPropertiesSearch
+ * @arg {FilePropertiesPropertiesSearchArg} arg - The request parameters.
+ * @returns {Promise., DropboxResponseError.>}
+ */
+
+
+routes.filePropertiesPropertiesSearch = function (arg) {
+ return this.request('file_properties/properties/search', arg, 'user', 'api', 'rpc', 'files.metadata.read');
+};
+/**
+ * Once a cursor has been retrieved from properties/search, use this to paginate
+ * through all search results.
+ * Route attributes:
+ * scope: files.metadata.read
+ * @function Dropbox#filePropertiesPropertiesSearchContinue
+ * @arg {FilePropertiesPropertiesSearchContinueArg} arg - The request parameters.
+ * @returns {Promise., DropboxResponseError.>}
+ */
+
+
+routes.filePropertiesPropertiesSearchContinue = function (arg) {
+ return this.request('file_properties/properties/search/continue', arg, 'user', 'api', 'rpc', 'files.metadata.read');
+};
+/**
+ * Add, update or remove properties associated with the supplied file and
+ * templates. This endpoint should be used instead of properties/overwrite when
+ * property groups are being updated via a "delta" instead of via a "snapshot" .
+ * In other words, this endpoint will not delete any omitted fields from a
+ * property group, whereas properties/overwrite will delete any fields that are
+ * omitted from a property group.
+ * Route attributes:
+ * scope: files.metadata.write
+ * @function Dropbox#filePropertiesPropertiesUpdate
+ * @arg {FilePropertiesUpdatePropertiesArg} arg - The request parameters.
+ * @returns {Promise., DropboxResponseError.>}
+ */
+
+
+routes.filePropertiesPropertiesUpdate = function (arg) {
+ return this.request('file_properties/properties/update', arg, 'user', 'api', 'rpc', 'files.metadata.write');
+};
+/**
+ * Add a template associated with a team. See properties/add to add properties
+ * to a file or folder. Note: this endpoint will create team-owned templates.
+ * Route attributes:
+ * scope: files.team_metadata.write
+ * @function Dropbox#filePropertiesTemplatesAddForTeam
+ * @arg {FilePropertiesAddTemplateArg} arg - The request parameters.
+ * @returns {Promise., DropboxResponseError.>}
+ */
+
+
+routes.filePropertiesTemplatesAddForTeam = function (arg) {
+ return this.request('file_properties/templates/add_for_team', arg, 'team', 'api', 'rpc', 'files.team_metadata.write');
+};
+/**
+ * Add a template associated with a user. See properties/add to add properties
+ * to a file. This endpoint can't be called on a team member or admin's behalf.
+ * Route attributes:
+ * scope: files.metadata.write
+ * @function Dropbox#filePropertiesTemplatesAddForUser
+ * @arg {FilePropertiesAddTemplateArg} arg - The request parameters.
+ * @returns {Promise., DropboxResponseError.>}
+ */
+
+
+routes.filePropertiesTemplatesAddForUser = function (arg) {
+ return this.request('file_properties/templates/add_for_user', arg, 'user', 'api', 'rpc', 'files.metadata.write');
+};
+/**
+ * Get the schema for a specified template.
+ * Route attributes:
+ * scope: files.team_metadata.write
+ * @function Dropbox#filePropertiesTemplatesGetForTeam
+ * @arg {FilePropertiesGetTemplateArg} arg - The request parameters.
+ * @returns {Promise., DropboxResponseError.>}
+ */
+
+
+routes.filePropertiesTemplatesGetForTeam = function (arg) {
+ return this.request('file_properties/templates/get_for_team', arg, 'team', 'api', 'rpc', 'files.team_metadata.write');
+};
+/**
+ * Get the schema for a specified template. This endpoint can't be called on a
+ * team member or admin's behalf.
+ * Route attributes:
+ * scope: files.metadata.read
+ * @function Dropbox#filePropertiesTemplatesGetForUser
+ * @arg {FilePropertiesGetTemplateArg} arg - The request parameters.
+ * @returns {Promise., DropboxResponseError.>}
+ */
+
+
+routes.filePropertiesTemplatesGetForUser = function (arg) {
+ return this.request('file_properties/templates/get_for_user', arg, 'user', 'api', 'rpc', 'files.metadata.read');
+};
+/**
+ * Get the template identifiers for a team. To get the schema of each template
+ * use templates/get_for_team.
+ * Route attributes:
+ * scope: files.team_metadata.write
+ * @function Dropbox#filePropertiesTemplatesListForTeam
+ * @returns {Promise., DropboxResponseError.>}
+ */
+
+
+routes.filePropertiesTemplatesListForTeam = function () {
+ return this.request('file_properties/templates/list_for_team', null, 'team', 'api', 'rpc', 'files.team_metadata.write');
+};
+/**
+ * Get the template identifiers for a team. To get the schema of each template
+ * use templates/get_for_user. This endpoint can't be called on a team member or
+ * admin's behalf.
+ * Route attributes:
+ * scope: files.metadata.read
+ * @function Dropbox#filePropertiesTemplatesListForUser
+ * @returns {Promise., DropboxResponseError.>}
+ */
+
+
+routes.filePropertiesTemplatesListForUser = function () {
+ return this.request('file_properties/templates/list_for_user', null, 'user', 'api', 'rpc', 'files.metadata.read');
+};
+/**
+ * Permanently removes the specified template created from
+ * templates/add_for_user. All properties associated with the template will also
+ * be removed. This action cannot be undone.
+ * Route attributes:
+ * scope: files.team_metadata.write
+ * @function Dropbox#filePropertiesTemplatesRemoveForTeam
+ * @arg {FilePropertiesRemoveTemplateArg} arg - The request parameters.
+ * @returns {Promise., DropboxResponseError.>}
+ */
+
+
+routes.filePropertiesTemplatesRemoveForTeam = function (arg) {
+ return this.request('file_properties/templates/remove_for_team', arg, 'team', 'api', 'rpc', 'files.team_metadata.write');
+};
+/**
+ * Permanently removes the specified template created from
+ * templates/add_for_user. All properties associated with the template will also
+ * be removed. This action cannot be undone.
+ * Route attributes:
+ * scope: files.metadata.write
+ * @function Dropbox#filePropertiesTemplatesRemoveForUser
+ * @arg {FilePropertiesRemoveTemplateArg} arg - The request parameters.
+ * @returns {Promise., DropboxResponseError.>}
+ */
+
+
+routes.filePropertiesTemplatesRemoveForUser = function (arg) {
+ return this.request('file_properties/templates/remove_for_user', arg, 'user', 'api', 'rpc', 'files.metadata.write');
+};
+/**
+ * Update a template associated with a team. This route can update the template
+ * name, the template description and add optional properties to templates.
+ * Route attributes:
+ * scope: files.team_metadata.write
+ * @function Dropbox#filePropertiesTemplatesUpdateForTeam
+ * @arg {FilePropertiesUpdateTemplateArg} arg - The request parameters.
+ * @returns {Promise., DropboxResponseError.>}
+ */
+
+
+routes.filePropertiesTemplatesUpdateForTeam = function (arg) {
+ return this.request('file_properties/templates/update_for_team', arg, 'team', 'api', 'rpc', 'files.team_metadata.write');
+};
+/**
+ * Update a template associated with a user. This route can update the template
+ * name, the template description and add optional properties to templates. This
+ * endpoint can't be called on a team member or admin's behalf.
+ * Route attributes:
+ * scope: files.metadata.write
+ * @function Dropbox#filePropertiesTemplatesUpdateForUser
+ * @arg {FilePropertiesUpdateTemplateArg} arg - The request parameters.
+ * @returns {Promise., DropboxResponseError.>}
+ */
+
+
+routes.filePropertiesTemplatesUpdateForUser = function (arg) {
+ return this.request('file_properties/templates/update_for_user', arg, 'user', 'api', 'rpc', 'files.metadata.write');
+};
+/**
+ * Returns the total number of file requests owned by this user. Includes both
+ * open and closed file requests.
+ * Route attributes:
+ * scope: file_requests.read
+ * @function Dropbox#fileRequestsCount
+ * @returns {Promise., DropboxResponseError.>}
+ */
+
+
+routes.fileRequestsCount = function () {
+ return this.request('file_requests/count', null, 'user', 'api', 'rpc', 'file_requests.read');
+};
+/**
+ * Creates a file request for this user.
+ * Route attributes:
+ * scope: file_requests.write
+ * @function Dropbox#fileRequestsCreate
+ * @arg {FileRequestsCreateFileRequestArgs} arg - The request parameters.
+ * @returns {Promise., DropboxResponseError.>}
+ */
+
+
+routes.fileRequestsCreate = function (arg) {
+ return this.request('file_requests/create', arg, 'user', 'api', 'rpc', 'file_requests.write');
+};
+/**
+ * Delete a batch of closed file requests.
+ * Route attributes:
+ * scope: file_requests.write
+ * @function Dropbox#fileRequestsDelete
+ * @arg {FileRequestsDeleteFileRequestArgs} arg - The request parameters.
+ * @returns {Promise., DropboxResponseError.>}
+ */
+
+
+routes.fileRequestsDelete = function (arg) {
+ return this.request('file_requests/delete', arg, 'user', 'api', 'rpc', 'file_requests.write');
+};
+/**
+ * Delete all closed file requests owned by this user.
+ * Route attributes:
+ * scope: file_requests.write
+ * @function Dropbox#fileRequestsDeleteAllClosed
+ * @returns {Promise., DropboxResponseError.>}
+ */
+
+
+routes.fileRequestsDeleteAllClosed = function () {
+ return this.request('file_requests/delete_all_closed', null, 'user', 'api', 'rpc', 'file_requests.write');
+};
+/**
+ * Returns the specified file request.
+ * Route attributes:
+ * scope: file_requests.read
+ * @function Dropbox#fileRequestsGet
+ * @arg {FileRequestsGetFileRequestArgs} arg - The request parameters.
+ * @returns {Promise., DropboxResponseError.>}
+ */
+
+
+routes.fileRequestsGet = function (arg) {
+ return this.request('file_requests/get', arg, 'user', 'api', 'rpc', 'file_requests.read');
+};
+/**
+ * Returns a list of file requests owned by this user. For apps with the app
+ * folder permission, this will only return file requests with destinations in
+ * the app folder.
+ * Route attributes:
+ * scope: file_requests.read
+ * @function Dropbox#fileRequestsListV2
+ * @arg {FileRequestsListFileRequestsArg} arg - The request parameters.
+ * @returns {Promise., DropboxResponseError.>}
+ */
+
+
+routes.fileRequestsListV2 = function (arg) {
+ return this.request('file_requests/list_v2', arg, 'user', 'api', 'rpc', 'file_requests.read');
+};
+/**
+ * Returns a list of file requests owned by this user. For apps with the app
+ * folder permission, this will only return file requests with destinations in
+ * the app folder.
+ * Route attributes:
+ * scope: file_requests.read
+ * @function Dropbox#fileRequestsList
+ * @returns {Promise., DropboxResponseError.>}
+ */
+
+
+routes.fileRequestsList = function () {
+ return this.request('file_requests/list', null, 'user', 'api', 'rpc', 'file_requests.read');
+};
+/**
+ * Once a cursor has been retrieved from list_v2, use this to paginate through
+ * all file requests. The cursor must come from a previous call to list_v2 or
+ * list/continue.
+ * Route attributes:
+ * scope: file_requests.read
+ * @function Dropbox#fileRequestsListContinue
+ * @arg {FileRequestsListFileRequestsContinueArg} arg - The request parameters.
+ * @returns {Promise., DropboxResponseError.>}
+ */
+
+
+routes.fileRequestsListContinue = function (arg) {
+ return this.request('file_requests/list/continue', arg, 'user', 'api', 'rpc', 'file_requests.read');
+};
+/**
+ * Update a file request.
+ * Route attributes:
+ * scope: file_requests.write
+ * @function Dropbox#fileRequestsUpdate
+ * @arg {FileRequestsUpdateFileRequestArgs} arg - The request parameters.
+ * @returns {Promise., DropboxResponseError.>}
+ */
+
+
+routes.fileRequestsUpdate = function (arg) {
+ return this.request('file_requests/update', arg, 'user', 'api', 'rpc', 'file_requests.write');
+};
+/**
+ * Returns the metadata for a file or folder. This is an alpha endpoint
+ * compatible with the properties API. Note: Metadata for the root folder is
+ * unsupported.
+ * Route attributes:
+ * scope: files.metadata.read
+ * @function Dropbox#filesAlphaGetMetadata
+ * @deprecated
+ * @arg {FilesAlphaGetMetadataArg} arg - The request parameters.
+ * @returns {Promise., DropboxResponseError.>}
+ */
+
+
+routes.filesAlphaGetMetadata = function (arg) {
+ return this.request('files/alpha/get_metadata', arg, 'user', 'api', 'rpc', 'files.metadata.read');
+};
+/**
+ * Create a new file with the contents provided in the request. Note that the
+ * behavior of this alpha endpoint is unstable and subject to change. Do not use
+ * this to upload a file larger than 150 MB. Instead, create an upload session
+ * with upload_session/start.
+ * Route attributes:
+ * scope: files.content.write
+ * @function Dropbox#filesAlphaUpload
+ * @deprecated
+ * @arg {FilesUploadArg} arg - The request parameters.
+ * @returns {Promise., DropboxResponseError.>}
+ */
+
+
+routes.filesAlphaUpload = function (arg) {
+ return this.request('files/alpha/upload', arg, 'user', 'content', 'upload', 'files.content.write');
+};
+/**
+ * Copy a file or folder to a different location in the user's Dropbox. If the
+ * source path is a folder all its contents will be copied.
+ * Route attributes:
+ * scope: files.content.write
+ * @function Dropbox#filesCopyV2
+ * @arg {FilesRelocationArg} arg - The request parameters.
+ * @returns {Promise., DropboxResponseError.>}
+ */
+
+
+routes.filesCopyV2 = function (arg) {
+ return this.request('files/copy_v2', arg, 'user', 'api', 'rpc', 'files.content.write');
+};
+/**
+ * Copy a file or folder to a different location in the user's Dropbox. If the
+ * source path is a folder all its contents will be copied.
+ * Route attributes:
+ * scope: files.content.write
+ * @function Dropbox#filesCopy
+ * @deprecated
+ * @arg {FilesRelocationArg} arg - The request parameters.
+ * @returns {Promise., DropboxResponseError.>}
+ */
+
+
+routes.filesCopy = function (arg) {
+ return this.request('files/copy', arg, 'user', 'api', 'rpc', 'files.content.write');
+};
+/**
+ * Copy multiple files or folders to different locations at once in the user's
+ * Dropbox. This route will replace copy_batch. The main difference is this
+ * route will return status for each entry, while copy_batch raises failure if
+ * any entry fails. This route will either finish synchronously, or return a job
+ * ID and do the async copy job in background. Please use copy_batch/check_v2 to
+ * check the job status.
+ * Route attributes:
+ * scope: files.content.write
+ * @function Dropbox#filesCopyBatchV2
+ * @arg {Object} arg - The request parameters.
+ * @returns {Promise., DropboxResponseError.>}
+ */
+
+
+routes.filesCopyBatchV2 = function (arg) {
+ return this.request('files/copy_batch_v2', arg, 'user', 'api', 'rpc', 'files.content.write');
+};
+/**
+ * Copy multiple files or folders to different locations at once in the user's
+ * Dropbox. This route will return job ID immediately and do the async copy job
+ * in background. Please use copy_batch/check to check the job status.
+ * Route attributes:
+ * scope: files.content.write
+ * @function Dropbox#filesCopyBatch
+ * @deprecated
+ * @arg {FilesRelocationBatchArg} arg - The request parameters.
+ * @returns {Promise., DropboxResponseError.>}
+ */
+
+
+routes.filesCopyBatch = function (arg) {
+ return this.request('files/copy_batch', arg, 'user', 'api', 'rpc', 'files.content.write');
+};
+/**
+ * Returns the status of an asynchronous job for copy_batch_v2. It returns list
+ * of results for each entry.
+ * Route attributes:
+ * scope: files.content.write
+ * @function Dropbox#filesCopyBatchCheckV2
+ * @arg {AsyncPollArg} arg - The request parameters.
+ * @returns {Promise., DropboxResponseError.>}
+ */
+
+
+routes.filesCopyBatchCheckV2 = function (arg) {
+ return this.request('files/copy_batch/check_v2', arg, 'user', 'api', 'rpc', 'files.content.write');
+};
+/**
+ * Returns the status of an asynchronous job for copy_batch. If success, it
+ * returns list of results for each entry.
+ * Route attributes:
+ * scope: files.content.write
+ * @function Dropbox#filesCopyBatchCheck
+ * @deprecated
+ * @arg {AsyncPollArg} arg - The request parameters.
+ * @returns {Promise., DropboxResponseError.>}
+ */
+
+
+routes.filesCopyBatchCheck = function (arg) {
+ return this.request('files/copy_batch/check', arg, 'user', 'api', 'rpc', 'files.content.write');
+};
+/**
+ * Get a copy reference to a file or folder. This reference string can be used
+ * to save that file or folder to another user's Dropbox by passing it to
+ * copy_reference/save.
+ * Route attributes:
+ * scope: files.content.write
+ * @function Dropbox#filesCopyReferenceGet
+ * @arg {FilesGetCopyReferenceArg} arg - The request parameters.
+ * @returns {Promise.<DropboxResponse<FilesGetCopyReferenceResult>, DropboxResponseError.<FilesGetCopyReferenceError>>}
+ */
+
+
+routes.filesCopyReferenceGet = function (arg) {
+ return this.request('files/copy_reference/get', arg, 'user', 'api', 'rpc', 'files.content.write');
+};
+/**
+ * Save a copy reference returned by copy_reference/get to the user's Dropbox.
+ * Route attributes:
+ * scope: files.content.write
+ * @function Dropbox#filesCopyReferenceSave
+ * @arg {FilesSaveCopyReferenceArg} arg - The request parameters.
+ * @returns {Promise.<DropboxResponse<FilesSaveCopyReferenceResult>, DropboxResponseError.<FilesSaveCopyReferenceError>>}
+ */
+
+
+routes.filesCopyReferenceSave = function (arg) {
+ return this.request('files/copy_reference/save', arg, 'user', 'api', 'rpc', 'files.content.write');
+};
+/**
+ * Create a folder at a given path.
+ * Route attributes:
+ * scope: files.content.write
+ * @function Dropbox#filesCreateFolderV2
+ * @arg {FilesCreateFolderArg} arg - The request parameters.
+ * @returns {Promise.<DropboxResponse<FilesCreateFolderResult>, DropboxResponseError.<FilesCreateFolderError>>}
+ */
+
+
+routes.filesCreateFolderV2 = function (arg) {
+ return this.request('files/create_folder_v2', arg, 'user', 'api', 'rpc', 'files.content.write');
+};
+/**
+ * Create a folder at a given path.
+ * Route attributes:
+ * scope: files.content.write
+ * @function Dropbox#filesCreateFolder
+ * @deprecated
+ * @arg {FilesCreateFolderArg} arg - The request parameters.
+ * @returns {Promise.<DropboxResponse<FilesFolderMetadata>, DropboxResponseError.<FilesCreateFolderError>>}
+ */
+
+
+routes.filesCreateFolder = function (arg) {
+ return this.request('files/create_folder', arg, 'user', 'api', 'rpc', 'files.content.write');
+};
+/**
+ * Create multiple folders at once. This route is asynchronous for large
+ * batches, which returns a job ID immediately and runs the create folder batch
+ * asynchronously. Otherwise, creates the folders and returns the result
+ * synchronously for smaller inputs. You can force asynchronous behaviour by
+ * using the CreateFolderBatchArg.force_async flag. Use
+ * create_folder_batch/check to check the job status.
+ * Route attributes:
+ * scope: files.content.write
+ * @function Dropbox#filesCreateFolderBatch
+ * @arg {FilesCreateFolderBatchArg} arg - The request parameters.
+ * @returns {Promise.<DropboxResponse<FilesCreateFolderBatchLaunch>, DropboxResponseError.<Error>>}
+ */
+
+
+routes.filesCreateFolderBatch = function (arg) {
+ return this.request('files/create_folder_batch', arg, 'user', 'api', 'rpc', 'files.content.write');
+};
+/**
+ * Returns the status of an asynchronous job for create_folder_batch. If
+ * success, it returns list of result for each entry.
+ * Route attributes:
+ * scope: files.content.write
+ * @function Dropbox#filesCreateFolderBatchCheck
+ * @arg {AsyncPollArg} arg - The request parameters.
+ * @returns {Promise.<DropboxResponse<FilesCreateFolderBatchJobStatus>, DropboxResponseError.<AsyncPollError>>}
+ */
+
+
+routes.filesCreateFolderBatchCheck = function (arg) {
+ return this.request('files/create_folder_batch/check', arg, 'user', 'api', 'rpc', 'files.content.write');
+};
+/**
+ * Delete the file or folder at a given path. If the path is a folder, all its
+ * contents will be deleted too. A successful response indicates that the file
+ * or folder was deleted. The returned metadata will be the corresponding
+ * FileMetadata or FolderMetadata for the item at time of deletion, and not a
+ * DeletedMetadata object.
+ * Route attributes:
+ * scope: files.content.write
+ * @function Dropbox#filesDeleteV2
+ * @arg {FilesDeleteArg} arg - The request parameters.
+ * @returns {Promise.<DropboxResponse<FilesDeleteResult>, DropboxResponseError.<FilesDeleteError>>}
+ */
+
+
+routes.filesDeleteV2 = function (arg) {
+ return this.request('files/delete_v2', arg, 'user', 'api', 'rpc', 'files.content.write');
+};
+/**
+ * Delete the file or folder at a given path. If the path is a folder, all its
+ * contents will be deleted too. A successful response indicates that the file
+ * or folder was deleted. The returned metadata will be the corresponding
+ * FileMetadata or FolderMetadata for the item at time of deletion, and not a
+ * DeletedMetadata object.
+ * Route attributes:
+ * scope: files.content.write
+ * @function Dropbox#filesDelete
+ * @deprecated
+ * @arg {FilesDeleteArg} arg - The request parameters.
+ * @returns {Promise.<DropboxResponse<FilesMetadata>, DropboxResponseError.<FilesDeleteError>>}
+ */
+
+
+routes.filesDelete = function (arg) {
+ return this.request('files/delete', arg, 'user', 'api', 'rpc', 'files.content.write');
+};
+/**
+ * Delete multiple files/folders at once. This route is asynchronous, which
+ * returns a job ID immediately and runs the delete batch asynchronously. Use
+ * delete_batch/check to check the job status.
+ * Route attributes:
+ * scope: files.content.write
+ * @function Dropbox#filesDeleteBatch
+ * @arg {FilesDeleteBatchArg} arg - The request parameters.
+ * @returns {Promise.<DropboxResponse<FilesDeleteBatchLaunch>, DropboxResponseError.<Error>>}
+ */
+
+
+routes.filesDeleteBatch = function (arg) {
+ return this.request('files/delete_batch', arg, 'user', 'api', 'rpc', 'files.content.write');
+};
+/**
+ * Returns the status of an asynchronous job for delete_batch. If success, it
+ * returns list of result for each entry.
+ * Route attributes:
+ * scope: files.content.write
+ * @function Dropbox#filesDeleteBatchCheck
+ * @arg {AsyncPollArg} arg - The request parameters.
+ * @returns {Promise.<DropboxResponse<FilesDeleteBatchJobStatus>, DropboxResponseError.<AsyncPollError>>}
+ */
+
+
+routes.filesDeleteBatchCheck = function (arg) {
+ return this.request('files/delete_batch/check', arg, 'user', 'api', 'rpc', 'files.content.write');
+};
+/**
+ * Download a file from a user's Dropbox.
+ * Route attributes:
+ * scope: files.content.read
+ * @function Dropbox#filesDownload
+ * @arg {FilesDownloadArg} arg - The request parameters.
+ * @returns {Promise.<DropboxResponse<FilesFileMetadata>, DropboxResponseError.<FilesDownloadError>>}
+ */
+
+
+routes.filesDownload = function (arg) {
+ return this.request('files/download', arg, 'user', 'content', 'download', 'files.content.read');
+};
+/**
+ * Download a folder from the user's Dropbox, as a zip file. The folder must be
+ * less than 20 GB in size and any single file within must be less than 4 GB in
+ * size. The resulting zip must have fewer than 10,000 total file and folder
+ * entries, including the top level folder. The input cannot be a single file.
+ * Note: this endpoint does not support HTTP range requests.
+ * Route attributes:
+ * scope: files.content.read
+ * @function Dropbox#filesDownloadZip
+ * @arg {FilesDownloadZipArg} arg - The request parameters.
+ * @returns {Promise.<DropboxResponse<FilesDownloadZipResult>, DropboxResponseError.<FilesDownloadZipError>>}
+ */
+
+
+routes.filesDownloadZip = function (arg) {
+ return this.request('files/download_zip', arg, 'user', 'content', 'download', 'files.content.read');
+};
+/**
+ * Export a file from a user's Dropbox. This route only supports exporting files
+ * that cannot be downloaded directly and whose ExportResult.file_metadata has
+ * ExportInfo.export_as populated.
+ * Route attributes:
+ * scope: files.content.read
+ * @function Dropbox#filesExport
+ * @arg {FilesExportArg} arg - The request parameters.
+ * @returns {Promise.<DropboxResponse<FilesExportResult>, DropboxResponseError.<FilesExportError>>}
+ */
+
+
+routes.filesExport = function (arg) {
+ return this.request('files/export', arg, 'user', 'content', 'download', 'files.content.read');
+};
+/**
+ * Return the lock metadata for the given list of paths.
+ * Route attributes:
+ * scope: files.content.read
+ * @function Dropbox#filesGetFileLockBatch
+ * @arg {FilesLockFileBatchArg} arg - The request parameters.
+ * @returns {Promise.<DropboxResponse<FilesLockFileBatchResult>, DropboxResponseError.<FilesLockFileError>>}
+ */
+
+
+routes.filesGetFileLockBatch = function (arg) {
+ return this.request('files/get_file_lock_batch', arg, 'user', 'api', 'rpc', 'files.content.read');
+};
+/**
+ * Returns the metadata for a file or folder. Note: Metadata for the root folder
+ * is unsupported.
+ * Route attributes:
+ * scope: files.metadata.read
+ * @function Dropbox#filesGetMetadata
+ * @arg {FilesGetMetadataArg} arg - The request parameters.
+ * @returns {Promise.<DropboxResponse<FilesMetadata>, DropboxResponseError.<FilesGetMetadataError>>}
+ */
+
+
+routes.filesGetMetadata = function (arg) {
+ return this.request('files/get_metadata', arg, 'user', 'api', 'rpc', 'files.metadata.read');
+};
+/**
+ * Get a preview for a file. Currently, PDF previews are generated for files
+ * with the following extensions: .ai, .doc, .docm, .docx, .eps, .gdoc,
+ * .gslides, .odp, .odt, .pps, .ppsm, .ppsx, .ppt, .pptm, .pptx, .rtf. HTML
+ * previews are generated for files with the following extensions: .csv, .ods,
+ * .xls, .xlsm, .gsheet, .xlsx. Other formats will return an unsupported
+ * extension error.
+ * Route attributes:
+ * scope: files.content.read
+ * @function Dropbox#filesGetPreview
+ * @arg {FilesPreviewArg} arg - The request parameters.
+ * @returns {Promise.<DropboxResponse<FilesFileMetadata>, DropboxResponseError.<FilesPreviewError>>}
+ */
+
+
+routes.filesGetPreview = function (arg) {
+ return this.request('files/get_preview', arg, 'user', 'content', 'download', 'files.content.read');
+};
+/**
+ * Get a temporary link to stream content of a file. This link will expire in
+ * four hours and afterwards you will get 410 Gone. This URL should not be used
+ * to display content directly in the browser. The Content-Type of the link is
+ * determined automatically by the file's mime type.
+ * Route attributes:
+ * scope: files.content.read
+ * @function Dropbox#filesGetTemporaryLink
+ * @arg {FilesGetTemporaryLinkArg} arg - The request parameters.
+ * @returns {Promise.<DropboxResponse<FilesGetTemporaryLinkResult>, DropboxResponseError.<FilesGetTemporaryLinkError>>}
+ */
+
+
+routes.filesGetTemporaryLink = function (arg) {
+ return this.request('files/get_temporary_link', arg, 'user', 'api', 'rpc', 'files.content.read');
+};
+/**
+ * Get a one-time use temporary upload link to upload a file to a Dropbox
+ * location. This endpoint acts as a delayed upload. The returned temporary
+ * upload link may be used to make a POST request with the data to be uploaded.
+ * The upload will then be perfomed with the CommitInfo previously provided to
+ * get_temporary_upload_link but evaluated only upon consumption. Hence, errors
+ * stemming from invalid CommitInfo with respect to the state of the user's
+ * Dropbox will only be communicated at consumption time. Additionally, these
+ * errors are surfaced as generic HTTP 409 Conflict responses, potentially
+ * hiding issue details. The maximum temporary upload link duration is 4 hours.
+ * Upon consumption or expiration, a new link will have to be generated.
+ * Multiple links may exist for a specific upload path at any given time. The
+ * POST request on the temporary upload link must have its Content-Type set to
+ * "application/octet-stream". Example temporary upload link consumption
+ * request: curl -X POST
+ * https://content.dropboxapi.com/apitul/1/bNi2uIYF51cVBND --header
+ * "Content-Type: application/octet-stream" --data-binary @local_file.txt A
+ * successful temporary upload link consumption request returns the content hash
+ * of the uploaded data in JSON format. Example successful temporary upload
+ * link consumption response: {"content-hash":
+ * "599d71033d700ac892a0e48fa61b125d2f5994"} An unsuccessful temporary upload
+ * link consumption request returns any of the following status codes: HTTP 400
+ * Bad Request: Content-Type is not one of application/octet-stream and
+ * text/plain or request is invalid. HTTP 409 Conflict: The temporary upload
+ * link does not exist or is currently unavailable, the upload failed, or
+ * another error happened. HTTP 410 Gone: The temporary upload link is expired
+ * or consumed. Example unsuccessful temporary upload link consumption
+ * response: Temporary upload link has been recently consumed.
+ * Route attributes:
+ * scope: files.content.write
+ * @function Dropbox#filesGetTemporaryUploadLink
+ * @arg {FilesGetTemporaryUploadLinkArg} arg - The request parameters.
+ * @returns {Promise.<DropboxResponse<FilesGetTemporaryUploadLinkResult>, DropboxResponseError.<Error>>}
+ */
+
+
+routes.filesGetTemporaryUploadLink = function (arg) {
+ return this.request('files/get_temporary_upload_link', arg, 'user', 'api', 'rpc', 'files.content.write');
+};
+/**
+ * Get a thumbnail for an image. This method currently supports files with the
+ * following file extensions: jpg, jpeg, png, tiff, tif, gif, webp, ppm and bmp.
+ * Photos that are larger than 20MB in size won't be converted to a thumbnail.
+ * Route attributes:
+ * scope: files.content.read
+ * @function Dropbox#filesGetThumbnail
+ * @arg {FilesThumbnailArg} arg - The request parameters.
+ * @returns {Promise.<DropboxResponse<FilesFileMetadata>, DropboxResponseError.<FilesThumbnailError>>}
+ */
+
+
+routes.filesGetThumbnail = function (arg) {
+ return this.request('files/get_thumbnail', arg, 'user', 'content', 'download', 'files.content.read');
+};
+/**
+ * Get a thumbnail for an image. This method currently supports files with the
+ * following file extensions: jpg, jpeg, png, tiff, tif, gif, webp, ppm and bmp.
+ * Photos that are larger than 20MB in size won't be converted to a thumbnail.
+ * Route attributes:
+ * scope: files.content.read
+ * @function Dropbox#filesGetThumbnailV2
+ * @arg {FilesThumbnailV2Arg} arg - The request parameters.
+ * @returns {Promise.<DropboxResponse<FilesPreviewResult>, DropboxResponseError.<FilesThumbnailV2Error>>}
+ */
+
+
+routes.filesGetThumbnailV2 = function (arg) {
+ return this.request('files/get_thumbnail_v2', arg, 'app, user', 'content', 'download', 'files.content.read');
+};
+/**
+ * Get thumbnails for a list of images. We allow up to 25 thumbnails in a single
+ * batch. This method currently supports files with the following file
+ * extensions: jpg, jpeg, png, tiff, tif, gif, webp, ppm and bmp. Photos that
+ * are larger than 20MB in size won't be converted to a thumbnail.
+ * Route attributes:
+ * scope: files.content.read
+ * @function Dropbox#filesGetThumbnailBatch
+ * @arg {FilesGetThumbnailBatchArg} arg - The request parameters.
+ * @returns {Promise.<DropboxResponse<FilesGetThumbnailBatchResult>, DropboxResponseError.<FilesGetThumbnailBatchError>>}
+ */
+
+
+routes.filesGetThumbnailBatch = function (arg) {
+ return this.request('files/get_thumbnail_batch', arg, 'user', 'content', 'rpc', 'files.content.read');
+};
+/**
+ * Starts returning the contents of a folder. If the result's
+ * ListFolderResult.has_more field is true, call list_folder/continue with the
+ * returned ListFolderResult.cursor to retrieve more entries. If you're using
+ * ListFolderArg.recursive set to true to keep a local cache of the contents of
+ * a Dropbox account, iterate through each entry in order and process them as
+ * follows to keep your local state in sync: For each FileMetadata, store the
+ * new entry at the given path in your local state. If the required parent
+ * folders don't exist yet, create them. If there's already something else at
+ * the given path, replace it and remove all its children. For each
+ * FolderMetadata, store the new entry at the given path in your local state. If
+ * the required parent folders don't exist yet, create them. If there's already
+ * something else at the given path, replace it but leave the children as they
+ * are. Check the new entry's FolderSharingInfo.read_only and set all its
+ * children's read-only statuses to match. For each DeletedMetadata, if your
+ * local state has something at the given path, remove it and all its children.
+ * If there's nothing at the given path, ignore this entry. Note:
+ * auth.RateLimitError may be returned if multiple list_folder or
+ * list_folder/continue calls with same parameters are made simultaneously by
+ * same API app for same user. If your app implements retry logic, please hold
+ * off the retry until the previous request finishes.
+ * Route attributes:
+ * scope: files.metadata.read
+ * @function Dropbox#filesListFolder
+ * @arg {FilesListFolderArg} arg - The request parameters.
+ * @returns {Promise.<DropboxResponse<FilesListFolderResult>, DropboxResponseError.<FilesListFolderError>>}
+ */
+
+
+routes.filesListFolder = function (arg) {
+ return this.request('files/list_folder', arg, 'app, user', 'api', 'rpc', 'files.metadata.read');
+};
+/**
+ * Once a cursor has been retrieved from list_folder, use this to paginate
+ * through all files and retrieve updates to the folder, following the same
+ * rules as documented for list_folder.
+ * Route attributes:
+ * scope: files.metadata.read
+ * @function Dropbox#filesListFolderContinue
+ * @arg {FilesListFolderContinueArg} arg - The request parameters.
+ * @returns {Promise.<DropboxResponse<FilesListFolderResult>, DropboxResponseError.<FilesListFolderContinueError>>}
+ */
+
+
+routes.filesListFolderContinue = function (arg) {
+ return this.request('files/list_folder/continue', arg, 'app, user', 'api', 'rpc', 'files.metadata.read');
+};
+/**
+ * A way to quickly get a cursor for the folder's state. Unlike list_folder,
+ * list_folder/get_latest_cursor doesn't return any entries. This endpoint is
+ * for app which only needs to know about new files and modifications and
+ * doesn't need to know about files that already exist in Dropbox.
+ * Route attributes:
+ * scope: files.metadata.read
+ * @function Dropbox#filesListFolderGetLatestCursor
+ * @arg {FilesListFolderArg} arg - The request parameters.
+ * @returns {Promise.<DropboxResponse<FilesListFolderGetLatestCursorResult>, DropboxResponseError.<FilesListFolderError>>}
+ */
+
+
+routes.filesListFolderGetLatestCursor = function (arg) {
+ return this.request('files/list_folder/get_latest_cursor', arg, 'user', 'api', 'rpc', 'files.metadata.read');
+};
+/**
+ * A longpoll endpoint to wait for changes on an account. In conjunction with
+ * list_folder/continue, this call gives you a low-latency way to monitor an
+ * account for file changes. The connection will block until there are changes
+ * available or a timeout occurs. This endpoint is useful mostly for client-side
+ * apps. If you're looking for server-side notifications, check out our webhooks
+ * documentation https://www.dropbox.com/developers/reference/webhooks.
+ * Route attributes:
+ * scope: files.metadata.read
+ * @function Dropbox#filesListFolderLongpoll
+ * @arg {FilesListFolderLongpollArg} arg - The request parameters.
+ * @returns {Promise.<DropboxResponse<FilesListFolderLongpollResult>, DropboxResponseError.<FilesListFolderLongpollError>>}
+ */
+
+
+routes.filesListFolderLongpoll = function (arg) {
+ return this.request('files/list_folder/longpoll', arg, 'noauth', 'notify', 'rpc', 'files.metadata.read');
+};
+/**
+ * Returns revisions for files based on a file path or a file id. The file path
+ * or file id is identified from the latest file entry at the given file path or
+ * id. This end point allows your app to query either by file path or file id by
+ * setting the mode parameter appropriately. In the ListRevisionsMode.path
+ * (default) mode, all revisions at the same file path as the latest file entry
+ * are returned. If revisions with the same file id are desired, then mode must
+ * be set to ListRevisionsMode.id. The ListRevisionsMode.id mode is useful to
+ * retrieve revisions for a given file across moves or renames.
+ * Route attributes:
+ * scope: files.metadata.read
+ * @function Dropbox#filesListRevisions
+ * @arg {FilesListRevisionsArg} arg - The request parameters.
+ * @returns {Promise.<DropboxResponse<FilesListRevisionsResult>, DropboxResponseError.<FilesListRevisionsError>>}
+ */
+
+
+routes.filesListRevisions = function (arg) {
+ return this.request('files/list_revisions', arg, 'user', 'api', 'rpc', 'files.metadata.read');
+};
+/**
+ * Lock the files at the given paths. A locked file will be writable only by the
+ * lock holder. A successful response indicates that the file has been locked.
+ * Returns a list of the locked file paths and their metadata after this
+ * operation.
+ * Route attributes:
+ * scope: files.content.write
+ * @function Dropbox#filesLockFileBatch
+ * @arg {FilesLockFileBatchArg} arg - The request parameters.
+ * @returns {Promise.<DropboxResponse<FilesLockFileBatchResult>, DropboxResponseError.<FilesLockFileError>>}
+ */
+
+
+routes.filesLockFileBatch = function (arg) {
+ return this.request('files/lock_file_batch', arg, 'user', 'api', 'rpc', 'files.content.write');
+};
+/**
+ * Move a file or folder to a different location in the user's Dropbox. If the
+ * source path is a folder all its contents will be moved. Note that we do not
+ * currently support case-only renaming.
+ * Route attributes:
+ * scope: files.content.write
+ * @function Dropbox#filesMoveV2
+ * @arg {FilesRelocationArg} arg - The request parameters.
+ * @returns {Promise.<DropboxResponse<FilesRelocationResult>, DropboxResponseError.<FilesRelocationError>>}
+ */
+
+
+routes.filesMoveV2 = function (arg) {
+ return this.request('files/move_v2', arg, 'user', 'api', 'rpc', 'files.content.write');
+};
+/**
+ * Move a file or folder to a different location in the user's Dropbox. If the
+ * source path is a folder all its contents will be moved.
+ * Route attributes:
+ * scope: files.content.write
+ * @function Dropbox#filesMove
+ * @deprecated
+ * @arg {FilesRelocationArg} arg - The request parameters.
+ * @returns {Promise.<DropboxResponse<FilesMetadata>, DropboxResponseError.<FilesRelocationError>>}
+ */
+
+
+routes.filesMove = function (arg) {
+ return this.request('files/move', arg, 'user', 'api', 'rpc', 'files.content.write');
+};
+/**
+ * Move multiple files or folders to different locations at once in the user's
+ * Dropbox. Note that we do not currently support case-only renaming. This route
+ * will replace move_batch. The main difference is this route will return status
+ * for each entry, while move_batch raises failure if any entry fails. This
+ * route will either finish synchronously, or return a job ID and do the async
+ * move job in background. Please use move_batch/check_v2 to check the job
+ * status.
+ * Route attributes:
+ * scope: files.content.write
+ * @function Dropbox#filesMoveBatchV2
+ * @arg {FilesMoveBatchArg} arg - The request parameters.
+ * @returns {Promise.<DropboxResponse<FilesRelocationBatchV2Launch>, DropboxResponseError.<Error>>}
+ */
+
+
+routes.filesMoveBatchV2 = function (arg) {
+ return this.request('files/move_batch_v2', arg, 'user', 'api', 'rpc', 'files.content.write');
+};
+/**
+ * Move multiple files or folders to different locations at once in the user's
+ * Dropbox. This route will return job ID immediately and do the async moving
+ * job in background. Please use move_batch/check to check the job status.
+ * Route attributes:
+ * scope: files.content.write
+ * @function Dropbox#filesMoveBatch
+ * @deprecated
+ * @arg {FilesRelocationBatchArg} arg - The request parameters.
+ * @returns {Promise.<DropboxResponse<FilesRelocationBatchLaunch>, DropboxResponseError.<Error>>}
+ */
+
+
+routes.filesMoveBatch = function (arg) {
+ return this.request('files/move_batch', arg, 'user', 'api', 'rpc', 'files.content.write');
+};
+/**
+ * Returns the status of an asynchronous job for move_batch_v2. It returns list
+ * of results for each entry.
+ * Route attributes:
+ * scope: files.content.write
+ * @function Dropbox#filesMoveBatchCheckV2
+ * @arg {AsyncPollArg} arg - The request parameters.
+ * @returns {Promise.<DropboxResponse<FilesRelocationBatchV2JobStatus>, DropboxResponseError.<AsyncPollError>>}
+ */
+
+
+routes.filesMoveBatchCheckV2 = function (arg) {
+ return this.request('files/move_batch/check_v2', arg, 'user', 'api', 'rpc', 'files.content.write');
+};
+/**
+ * Returns the status of an asynchronous job for move_batch. If success, it
+ * returns list of results for each entry.
+ * Route attributes:
+ * scope: files.content.write
+ * @function Dropbox#filesMoveBatchCheck
+ * @deprecated
+ * @arg {AsyncPollArg} arg - The request parameters.
+ * @returns {Promise.<DropboxResponse<FilesRelocationBatchJobStatus>, DropboxResponseError.<AsyncPollError>>}
+ */
+
+
+routes.filesMoveBatchCheck = function (arg) {
+ return this.request('files/move_batch/check', arg, 'user', 'api', 'rpc', 'files.content.write');
+};
+/**
+ * Creates a new Paper doc with the provided content.
+ * Route attributes:
+ * scope: files.content.write
+ * @function Dropbox#filesPaperCreate
+ * @arg {FilesPaperCreateArg} arg - The request parameters.
+ * @returns {Promise.<DropboxResponse<FilesPaperCreateResult>, DropboxResponseError.<FilesPaperCreateError>>}
+ */
+
+
+routes.filesPaperCreate = function (arg) {
+ return this.request('files/paper/create', arg, 'user', 'api', 'upload', 'files.content.write');
+};
+/**
+ * Updates an existing Paper doc with the provided content.
+ * Route attributes:
+ * scope: files.content.write
+ * @function Dropbox#filesPaperUpdate
+ * @arg {FilesPaperUpdateArg} arg - The request parameters.
+ * @returns {Promise.<DropboxResponse<FilesPaperUpdateResult>, DropboxResponseError.<FilesPaperUpdateError>>}
+ */
+
+
+routes.filesPaperUpdate = function (arg) {
+ return this.request('files/paper/update', arg, 'user', 'api', 'upload', 'files.content.write');
+};
+/**
+ * Permanently delete the file or folder at a given path (see
+ * https://www.dropbox.com/en/help/40). If the given file or folder is not yet
+ * deleted, this route will first delete it. It is possible for this route to
+ * successfully delete, then fail to permanently delete. Note: This endpoint is
+ * only available for Dropbox Business apps.
+ * Route attributes:
+ * scope: files.permanent_delete
+ * @function Dropbox#filesPermanentlyDelete
+ * @arg {FilesDeleteArg} arg - The request parameters.
+ * @returns {Promise.<DropboxResponse<void>, DropboxResponseError.<FilesDeleteError>>}
+ */
+
+
+routes.filesPermanentlyDelete = function (arg) {
+ return this.request('files/permanently_delete', arg, 'user', 'api', 'rpc', 'files.permanent_delete');
+};
+/**
+ * Route attributes:
+ * scope: files.metadata.write
+ * @function Dropbox#filesPropertiesAdd
+ * @deprecated
+ * @arg {FilePropertiesAddPropertiesArg} arg - The request parameters.
+ * @returns {Promise.<DropboxResponse<void>, DropboxResponseError.<FilePropertiesAddPropertiesError>>}
+ */
+
+
+routes.filesPropertiesAdd = function (arg) {
+ return this.request('files/properties/add', arg, 'user', 'api', 'rpc', 'files.metadata.write');
+};
+/**
+ * Route attributes:
+ * scope: files.metadata.write
+ * @function Dropbox#filesPropertiesOverwrite
+ * @deprecated
+ * @arg {FilePropertiesOverwritePropertyGroupArg} arg - The request parameters.
+ * @returns {Promise.<DropboxResponse<void>, DropboxResponseError.<FilePropertiesInvalidPropertyGroupError>>}
+ */
+
+
+routes.filesPropertiesOverwrite = function (arg) {
+ return this.request('files/properties/overwrite', arg, 'user', 'api', 'rpc', 'files.metadata.write');
+};
+/**
+ * Route attributes:
+ * scope: files.metadata.write
+ * @function Dropbox#filesPropertiesRemove
+ * @deprecated
+ * @arg {FilePropertiesRemovePropertiesArg} arg - The request parameters.
+ * @returns {Promise.<DropboxResponse<void>, DropboxResponseError.<FilePropertiesRemovePropertiesError>>}
+ */
+
+
+routes.filesPropertiesRemove = function (arg) {
+ return this.request('files/properties/remove', arg, 'user', 'api', 'rpc', 'files.metadata.write');
+};
+/**
+ * Route attributes:
+ * scope: files.metadata.read
+ * @function Dropbox#filesPropertiesTemplateGet
+ * @deprecated
+ * @arg {FilePropertiesGetTemplateArg} arg - The request parameters.
+ * @returns {Promise.<DropboxResponse<FilePropertiesGetTemplateResult>, DropboxResponseError.<FilePropertiesTemplateError>>}
+ */
+
+
+routes.filesPropertiesTemplateGet = function (arg) {
+ return this.request('files/properties/template/get', arg, 'user', 'api', 'rpc', 'files.metadata.read');
+};
+/**
+ * Route attributes:
+ * scope: files.metadata.read
+ * @function Dropbox#filesPropertiesTemplateList
+ * @deprecated
+ * @returns {Promise.<DropboxResponse<FilePropertiesListTemplateResult>, DropboxResponseError.<FilePropertiesTemplateError>>}
+ */
+
+
+routes.filesPropertiesTemplateList = function () {
+ return this.request('files/properties/template/list', null, 'user', 'api', 'rpc', 'files.metadata.read');
+};
+/**
+ * Route attributes:
+ * scope: files.metadata.write
+ * @function Dropbox#filesPropertiesUpdate
+ * @deprecated
+ * @arg {FilePropertiesUpdatePropertiesArg} arg - The request parameters.
+ * @returns {Promise.<DropboxResponse<void>, DropboxResponseError.<FilePropertiesUpdatePropertiesError>>}
+ */
+
+
+routes.filesPropertiesUpdate = function (arg) {
+ return this.request('files/properties/update', arg, 'user', 'api', 'rpc', 'files.metadata.write');
+};
+/**
+ * Restore a specific revision of a file to the given path.
+ * Route attributes:
+ * scope: files.content.write
+ * @function Dropbox#filesRestore
+ * @arg {FilesRestoreArg} arg - The request parameters.
+ * @returns {Promise.<DropboxResponse<FilesFileMetadata>, DropboxResponseError.<FilesRestoreError>>}
+ */
+
+
+routes.filesRestore = function (arg) {
+ return this.request('files/restore', arg, 'user', 'api', 'rpc', 'files.content.write');
+};
+/**
+ * Save the data from a specified URL into a file in user's Dropbox. Note that
+ * the transfer from the URL must complete within 5 minutes, or the operation
+ * will time out and the job will fail. If the given path already exists, the
+ * file will be renamed to avoid the conflict (e.g. myfile (1).txt).
+ * Route attributes:
+ * scope: files.content.write
+ * @function Dropbox#filesSaveUrl
+ * @arg {FilesSaveUrlArg} arg - The request parameters.
+ * @returns {Promise.<DropboxResponse<FilesSaveUrlResult>, DropboxResponseError.<FilesSaveUrlError>>}
+ */
+
+
+routes.filesSaveUrl = function (arg) {
+ return this.request('files/save_url', arg, 'user', 'api', 'rpc', 'files.content.write');
+};
+/**
+ * Check the status of a save_url job.
+ * Route attributes:
+ * scope: files.content.write
+ * @function Dropbox#filesSaveUrlCheckJobStatus
+ * @arg {AsyncPollArg} arg - The request parameters.
+ * @returns {Promise.<DropboxResponse<FilesSaveUrlJobStatus>, DropboxResponseError.<AsyncPollError>>}
+ */
+
+
+routes.filesSaveUrlCheckJobStatus = function (arg) {
+ return this.request('files/save_url/check_job_status', arg, 'user', 'api', 'rpc', 'files.content.write');
+};
+/**
+ * Searches for files and folders. Note: Recent changes will be reflected in
+ * search results within a few seconds and older revisions of existing files may
+ * still match your query for up to a few days.
+ * Route attributes:
+ * scope: files.metadata.read
+ * @function Dropbox#filesSearch
+ * @deprecated
+ * @arg {FilesSearchArg} arg - The request parameters.
+ * @returns {Promise.<DropboxResponse<FilesSearchResult>, DropboxResponseError.<FilesSearchError>>}
+ */
+
+
+routes.filesSearch = function (arg) {
+ return this.request('files/search', arg, 'user', 'api', 'rpc', 'files.metadata.read');
+};
+/**
+ * Searches for files and folders. Note: search_v2 along with search/continue_v2
+ * can only be used to retrieve a maximum of 10,000 matches. Recent changes may
+ * not immediately be reflected in search results due to a short delay in
+ * indexing. Duplicate results may be returned across pages. Some results may
+ * not be returned.
+ * Route attributes:
+ * scope: files.metadata.read
+ * @function Dropbox#filesSearchV2
+ * @arg {FilesSearchV2Arg} arg - The request parameters.
+ * @returns {Promise.<DropboxResponse<FilesSearchV2Result>, DropboxResponseError.<FilesSearchError>>}
+ */
+
+
+routes.filesSearchV2 = function (arg) {
+ return this.request('files/search_v2', arg, 'user', 'api', 'rpc', 'files.metadata.read');
+};
+/**
+ * Fetches the next page of search results returned from search_v2. Note:
+ * search_v2 along with search/continue_v2 can only be used to retrieve a
+ * maximum of 10,000 matches. Recent changes may not immediately be reflected in
+ * search results due to a short delay in indexing. Duplicate results may be
+ * returned across pages. Some results may not be returned.
+ * Route attributes:
+ * scope: files.metadata.read
+ * @function Dropbox#filesSearchContinueV2
+ * @arg {FilesSearchV2ContinueArg} arg - The request parameters.
+ * @returns {Promise.<DropboxResponse<FilesSearchV2Result>, DropboxResponseError.<FilesSearchError>>}
+ */
+
+
+routes.filesSearchContinueV2 = function (arg) {
+ return this.request('files/search/continue_v2', arg, 'user', 'api', 'rpc', 'files.metadata.read');
+};
+/**
+ * Add a tag to an item. A tag is a string. The strings are automatically
+ * converted to lowercase letters. No more than 20 tags can be added to a given
+ * item.
+ * Route attributes:
+ * scope: files.metadata.write
+ * @function Dropbox#filesTagsAdd
+ * @arg {FilesAddTagArg} arg - The request parameters.
+ * @returns {Promise.<DropboxResponse<void>, DropboxResponseError.<FilesAddTagError>>}
+ */
+
+
+routes.filesTagsAdd = function (arg) {
+ return this.request('files/tags/add', arg, 'user', 'api', 'rpc', 'files.metadata.write');
+};
+/**
+ * Get list of tags assigned to items.
+ * Route attributes:
+ * scope: files.metadata.read
+ * @function Dropbox#filesTagsGet
+ * @arg {FilesGetTagsArg} arg - The request parameters.
+ * @returns {Promise.<DropboxResponse<FilesGetTagsResult>, DropboxResponseError.<FilesBaseTagError>>}
+ */
+
+
+routes.filesTagsGet = function (arg) {
+ return this.request('files/tags/get', arg, 'user', 'api', 'rpc', 'files.metadata.read');
+};
+/**
+ * Remove a tag from an item.
+ * Route attributes:
+ * scope: files.metadata.write
+ * @function Dropbox#filesTagsRemove
+ * @arg {FilesRemoveTagArg} arg - The request parameters.
+ * @returns {Promise.<DropboxResponse<void>, DropboxResponseError.<FilesRemoveTagError>>}
+ */
+
+
+routes.filesTagsRemove = function (arg) {
+ return this.request('files/tags/remove', arg, 'user', 'api', 'rpc', 'files.metadata.write');
+};
+/**
+ * Unlock the files at the given paths. A locked file can only be unlocked by
+ * the lock holder or, if a business account, a team admin. A successful
+ * response indicates that the file has been unlocked. Returns a list of the
+ * unlocked file paths and their metadata after this operation.
+ * Route attributes:
+ * scope: files.content.write
+ * @function Dropbox#filesUnlockFileBatch
+ * @arg {FilesUnlockFileBatchArg} arg - The request parameters.
+ * @returns {Promise.<DropboxResponse<FilesLockFileBatchResult>, DropboxResponseError.<FilesLockFileError>>}
+ */
+
+
+routes.filesUnlockFileBatch = function (arg) {
+ return this.request('files/unlock_file_batch', arg, 'user', 'api', 'rpc', 'files.content.write');
+};
+/**
+ * Create a new file with the contents provided in the request. Do not use this
+ * to upload a file larger than 150 MB. Instead, create an upload session with
+ * upload_session/start. Calls to this endpoint will count as data transport
+ * calls for any Dropbox Business teams with a limit on the number of data
+ * transport calls allowed per month. For more information, see the Data
+ * transport limit page
+ * https://www.dropbox.com/developers/reference/data-transport-limit.
+ * Route attributes:
+ * scope: files.content.write
+ * @function Dropbox#filesUpload
+ * @arg {FilesUploadArg} arg - The request parameters.
+ * @returns {Promise.<DropboxResponse<FilesFileMetadata>, DropboxResponseError.<FilesUploadError>>}
+ */
+
+
+routes.filesUpload = function (arg) {
+ return this.request('files/upload', arg, 'user', 'content', 'upload', 'files.content.write');
+};
+/**
+ * Append more data to an upload session. When the parameter close is set, this
+ * call will close the session. A single request should not upload more than 150
+ * MB. The maximum size of a file one can upload to an upload session is 350 GB.
+ * Calls to this endpoint will count as data transport calls for any Dropbox
+ * Business teams with a limit on the number of data transport calls allowed per
+ * month. For more information, see the Data transport limit page
+ * https://www.dropbox.com/developers/reference/data-transport-limit.
+ * Route attributes:
+ * scope: files.content.write
+ * @function Dropbox#filesUploadSessionAppendV2
+ * @arg {FilesUploadSessionAppendArg} arg - The request parameters.
+ * @returns {Promise.<DropboxResponse<void>, DropboxResponseError.<FilesUploadSessionAppendError>>}
+ */
+
+
+routes.filesUploadSessionAppendV2 = function (arg) {
+ return this.request('files/upload_session/append_v2', arg, 'user', 'content', 'upload', 'files.content.write');
+};
+/**
+ * Append more data to an upload session. A single request should not upload
+ * more than 150 MB. The maximum size of a file one can upload to an upload
+ * session is 350 GB. Calls to this endpoint will count as data transport calls
+ * for any Dropbox Business teams with a limit on the number of data transport
+ * calls allowed per month. For more information, see the Data transport limit
+ * page https://www.dropbox.com/developers/reference/data-transport-limit.
+ * Route attributes:
+ * scope: files.content.write
+ * @function Dropbox#filesUploadSessionAppend
+ * @deprecated
+ * @arg {FilesUploadSessionCursor} arg - The request parameters.
+ * @returns {Promise.<DropboxResponse<void>, DropboxResponseError.<FilesUploadSessionLookupError>>}
+ */
+
+
+routes.filesUploadSessionAppend = function (arg) {
+ return this.request('files/upload_session/append', arg, 'user', 'content', 'upload', 'files.content.write');
+};
+/**
+ * Finish an upload session and save the uploaded data to the given file path. A
+ * single request should not upload more than 150 MB. The maximum size of a file
+ * one can upload to an upload session is 350 GB. Calls to this endpoint will
+ * count as data transport calls for any Dropbox Business teams with a limit on
+ * the number of data transport calls allowed per month. For more information,
+ * see the Data transport limit page
+ * https://www.dropbox.com/developers/reference/data-transport-limit.
+ * Route attributes:
+ * scope: files.content.write
+ * @function Dropbox#filesUploadSessionFinish
+ * @arg {FilesUploadSessionFinishArg} arg - The request parameters.
+ * @returns {Promise., DropboxResponseError.>}
+ */
+
+
+routes.filesUploadSessionFinish = function (arg) {
+ return this.request('files/upload_session/finish', arg, 'user', 'content', 'upload', 'files.content.write');
+};
+/**
+ * This route helps you commit many files at once into a user's Dropbox. Use
+ * upload_session/start and upload_session/append_v2 to upload file contents. We
+ * recommend uploading many files in parallel to increase throughput. Once the
+ * file contents have been uploaded, rather than calling upload_session/finish,
+ * use this route to finish all your upload sessions in a single request.
+ * UploadSessionStartArg.close or UploadSessionAppendArg.close needs to be true
+ * for the last upload_session/start or upload_session/append_v2 call. The
+ * maximum size of a file one can upload to an upload session is 350 GB. This
+ * route will return a job_id immediately and do the async commit job in
+ * background. Use upload_session/finish_batch/check to check the job status.
+ * For the same account, this route should be executed serially. That means you
+ * should not start the next job before current job finishes. We allow up to
+ * 1000 entries in a single request. Calls to this endpoint will count as data
+ * transport calls for any Dropbox Business teams with a limit on the number of
+ * data transport calls allowed per month. For more information, see the Data
+ * transport limit page
+ * https://www.dropbox.com/developers/reference/data-transport-limit.
+ * Route attributes:
+ * scope: files.content.write
+ * @function Dropbox#filesUploadSessionFinishBatch
+ * @deprecated
+ * @arg {FilesUploadSessionFinishBatchArg} arg - The request parameters.
+ * @returns {Promise., DropboxResponseError.>}
+ */
+
+
+routes.filesUploadSessionFinishBatch = function (arg) {
+ return this.request('files/upload_session/finish_batch', arg, 'user', 'api', 'rpc', 'files.content.write');
+};
+/**
+ * This route helps you commit many files at once into a user's Dropbox. Use
+ * upload_session/start and upload_session/append_v2 to upload file contents. We
+ * recommend uploading many files in parallel to increase throughput. Once the
+ * file contents have been uploaded, rather than calling upload_session/finish,
+ * use this route to finish all your upload sessions in a single request.
+ * UploadSessionStartArg.close or UploadSessionAppendArg.close needs to be true
+ * for the last upload_session/start or upload_session/append_v2 call of each
+ * upload session. The maximum size of a file one can upload to an upload
+ * session is 350 GB. We allow up to 1000 entries in a single request. Calls to
+ * this endpoint will count as data transport calls for any Dropbox Business
+ * teams with a limit on the number of data transport calls allowed per month.
+ * For more information, see the Data transport limit page
+ * https://www.dropbox.com/developers/reference/data-transport-limit.
+ * Route attributes:
+ * scope: files.content.write
+ * @function Dropbox#filesUploadSessionFinishBatchV2
+ * @arg {FilesUploadSessionFinishBatchArg} arg - The request parameters.
+ * @returns {Promise., DropboxResponseError.>}
+ */
+
+
+routes.filesUploadSessionFinishBatchV2 = function (arg) {
+ return this.request('files/upload_session/finish_batch_v2', arg, 'user', 'api', 'rpc', 'files.content.write');
+};
+/**
+ * Returns the status of an asynchronous job for upload_session/finish_batch. If
+ * success, it returns list of result for each entry.
+ * Route attributes:
+ * scope: files.content.write
+ * @function Dropbox#filesUploadSessionFinishBatchCheck
+ * @arg {AsyncPollArg} arg - The request parameters.
+ * @returns {Promise., DropboxResponseError.>}
+ */
+
+
+routes.filesUploadSessionFinishBatchCheck = function (arg) {
+ return this.request('files/upload_session/finish_batch/check', arg, 'user', 'api', 'rpc', 'files.content.write');
+};
+/**
+ * Upload sessions allow you to upload a single file in one or more requests,
+ * for example where the size of the file is greater than 150 MB. This call
+ * starts a new upload session with the given data. You can then use
+ * upload_session/append_v2 to add more data and upload_session/finish to save
+ * all the data to a file in Dropbox. A single request should not upload more
+ * than 150 MB. The maximum size of a file one can upload to an upload session
+ * is 350 GB. An upload session can be used for a maximum of 7 days. Attempting
+ * to use an UploadSessionStartResult.session_id with upload_session/append_v2
+ * or upload_session/finish more than 7 days after its creation will return a
+ * UploadSessionLookupError.not_found. Calls to this endpoint will count as data
+ * transport calls for any Dropbox Business teams with a limit on the number of
+ * data transport calls allowed per month. For more information, see the Data
+ * transport limit page
+ * https://www.dropbox.com/developers/reference/data-transport-limit. By
+ * default, upload sessions require you to send content of the file in
+ * sequential order via consecutive upload_session/start,
+ * upload_session/append_v2, upload_session/finish calls. For better
+ * performance, you can instead optionally use a UploadSessionType.concurrent
+ * upload session. To start a new concurrent session, set
+ * UploadSessionStartArg.session_type to UploadSessionType.concurrent. After
+ * that, you can send file data in concurrent upload_session/append_v2 requests.
+ * Finally finish the session with upload_session/finish. There are couple of
+ * constraints with concurrent sessions to make them work. You can not send data
+ * with upload_session/start or upload_session/finish call, only with
+ * upload_session/append_v2 call. Also data uploaded in upload_session/append_v2
+ * call must be multiple of 4194304 bytes (except for last
+ * upload_session/append_v2 with UploadSessionStartArg.close to true, that may
+ * contain any remaining data).
+ * Route attributes:
+ * scope: files.content.write
+ * @function Dropbox#filesUploadSessionStart
+ * @arg {FilesUploadSessionStartArg} arg - The request parameters.
+ * @returns {Promise., DropboxResponseError.>}
+ */
+
+
+routes.filesUploadSessionStart = function (arg) {
+ return this.request('files/upload_session/start', arg, 'user', 'content', 'upload', 'files.content.write');
+};
+/**
+ * This route starts batch of upload_sessions. Please refer to
+ * `upload_session/start` usage. Calls to this endpoint will count as data
+ * transport calls for any Dropbox Business teams with a limit on the number of
+ * data transport calls allowed per month. For more information, see the Data
+ * transport limit page
+ * https://www.dropbox.com/developers/reference/data-transport-limit.
+ * Route attributes:
+ * scope: files.content.write
+ * @function Dropbox#filesUploadSessionStartBatch
+ * @arg {FilesUploadSessionStartBatchArg} arg - The request parameters.
+ * @returns {Promise., DropboxResponseError.>}
+ */
+
+
+routes.filesUploadSessionStartBatch = function (arg) {
+ return this.request('files/upload_session/start_batch', arg, 'user', 'api', 'rpc', 'files.content.write');
+};
+/**
+ * This route is used for refreshing the info that is found in the id_token
+ * during the OIDC flow. This route doesn't require any arguments and will use
+ * the scopes approved for the given access token.
+ * Route attributes:
+ * scope: openid
+ * @function Dropbox#openidUserinfo
+ * @arg {OpenidUserInfoArgs} arg - The request parameters.
+ * @returns {Promise., DropboxResponseError.>}
+ */
+
+
+routes.openidUserinfo = function (arg) {
+ return this.request('openid/userinfo', arg, 'user', 'api', 'rpc', 'openid');
+};
+/**
+ * Marks the given Paper doc as archived. This action can be performed or undone
+ * by anyone with edit permissions to the doc. Note that this endpoint will
+ * continue to work for content created by users on the older version of Paper.
+ * To check which version of Paper a user is on, use /users/features/get_values.
+ * If the paper_as_files feature is enabled, then the user is running the new
+ * version of Paper. This endpoint will be retired in September 2020. Refer to
+ * the Paper Migration Guide
+ * https://www.dropbox.com/lp/developers/reference/paper-migration-guide for
+ * more information.
+ * Route attributes:
+ * scope: files.content.write
+ * @function Dropbox#paperDocsArchive
+ * @deprecated
+ * @arg {PaperRefPaperDoc} arg - The request parameters.
+ * @returns {Promise., DropboxResponseError.>}
+ */
+
+
+routes.paperDocsArchive = function (arg) {
+ return this.request('paper/docs/archive', arg, 'user', 'api', 'rpc', 'files.content.write');
+};
+/**
+ * Creates a new Paper doc with the provided content. Note that this endpoint
+ * will continue to work for content created by users on the older version of
+ * Paper. To check which version of Paper a user is on, use
+ * /users/features/get_values. If the paper_as_files feature is enabled, then
+ * the user is running the new version of Paper. This endpoint will be retired
+ * in September 2020. Refer to the Paper Migration Guide
+ * https://www.dropbox.com/lp/developers/reference/paper-migration-guide for
+ * more information.
+ * Route attributes:
+ * scope: files.content.write
+ * @function Dropbox#paperDocsCreate
+ * @deprecated
+ * @arg {PaperPaperDocCreateArgs} arg - The request parameters.
+ * @returns {Promise., DropboxResponseError.>}
+ */
+
+
+routes.paperDocsCreate = function (arg) {
+ return this.request('paper/docs/create', arg, 'user', 'api', 'upload', 'files.content.write');
+};
+/**
+ * Exports and downloads Paper doc either as HTML or markdown. Note that this
+ * endpoint will continue to work for content created by users on the older
+ * version of Paper. To check which version of Paper a user is on, use
+ * /users/features/get_values. If the paper_as_files feature is enabled, then
+ * the user is running the new version of Paper. Refer to the Paper Migration
+ * Guide https://www.dropbox.com/lp/developers/reference/paper-migration-guide
+ * for migration information.
+ * Route attributes:
+ * scope: files.content.read
+ * @function Dropbox#paperDocsDownload
+ * @deprecated
+ * @arg {PaperPaperDocExport} arg - The request parameters.
+ * @returns {Promise., DropboxResponseError.>}
+ */
+
+
+routes.paperDocsDownload = function (arg) {
+ return this.request('paper/docs/download', arg, 'user', 'api', 'download', 'files.content.read');
+};
+/**
+ * Lists the users who are explicitly invited to the Paper folder in which the
+ * Paper doc is contained. For private folders all users (including owner)
+ * shared on the folder are listed and for team folders all non-team users
+ * shared on the folder are returned. Note that this endpoint will continue to
+ * work for content created by users on the older version of Paper. To check
+ * which version of Paper a user is on, use /users/features/get_values. If the
+ * paper_as_files feature is enabled, then the user is running the new version
+ * of Paper. Refer to the Paper Migration Guide
+ * https://www.dropbox.com/lp/developers/reference/paper-migration-guide for
+ * migration information.
+ * Route attributes:
+ * scope: sharing.read
+ * @function Dropbox#paperDocsFolderUsersList
+ * @deprecated
+ * @arg {PaperListUsersOnFolderArgs} arg - The request parameters.
+ * @returns {Promise., DropboxResponseError.>}
+ */
+
+
+routes.paperDocsFolderUsersList = function (arg) {
+ return this.request('paper/docs/folder_users/list', arg, 'user', 'api', 'rpc', 'sharing.read');
+};
+/**
+ * Once a cursor has been retrieved from docs/folder_users/list, use this to
+ * paginate through all users on the Paper folder. Note that this endpoint will
+ * continue to work for content created by users on the older version of Paper.
+ * To check which version of Paper a user is on, use /users/features/get_values.
+ * If the paper_as_files feature is enabled, then the user is running the new
+ * version of Paper. Refer to the Paper Migration Guide
+ * https://www.dropbox.com/lp/developers/reference/paper-migration-guide for
+ * migration information.
+ * Route attributes:
+ * scope: sharing.read
+ * @function Dropbox#paperDocsFolderUsersListContinue
+ * @deprecated
+ * @arg {PaperListUsersOnFolderContinueArgs} arg - The request parameters.
+ * @returns {Promise., DropboxResponseError.>}
+ */
+
+
+routes.paperDocsFolderUsersListContinue = function (arg) {
+ return this.request('paper/docs/folder_users/list/continue', arg, 'user', 'api', 'rpc', 'sharing.read');
+};
+/**
+ * Retrieves folder information for the given Paper doc. This includes: -
+ * folder sharing policy; permissions for subfolders are set by the top-level
+ * folder. - full 'filepath', i.e. the list of folders (both folderId and
+ * folderName) from the root folder to the folder directly containing the
+ * Paper doc. If the Paper doc is not in any folder (aka unfiled) the response
+ * will be empty. Note that this endpoint will continue to work for content
+ * created by users on the older version of Paper. To check which version of
+ * Paper a user is on, use /users/features/get_values. If the paper_as_files
+ * feature is enabled, then the user is running the new version of Paper. Refer
+ * to the Paper Migration Guide
+ * https://www.dropbox.com/lp/developers/reference/paper-migration-guide for
+ * migration information.
+ * Route attributes:
+ * scope: sharing.read
+ * @function Dropbox#paperDocsGetFolderInfo
+ * @deprecated
+ * @arg {PaperRefPaperDoc} arg - The request parameters.
+ * @returns {Promise., DropboxResponseError.>}
+ */
+
+
+routes.paperDocsGetFolderInfo = function (arg) {
+ return this.request('paper/docs/get_folder_info', arg, 'user', 'api', 'rpc', 'sharing.read');
+};
+/**
+ * Return the list of all Paper docs according to the argument specifications.
+ * To iterate over through the full pagination, pass the cursor to
+ * docs/list/continue. Note that this endpoint will continue to work for content
+ * created by users on the older version of Paper. To check which version of
+ * Paper a user is on, use /users/features/get_values. If the paper_as_files
+ * feature is enabled, then the user is running the new version of Paper. Refer
+ * to the Paper Migration Guide
+ * https://www.dropbox.com/lp/developers/reference/paper-migration-guide for
+ * migration information.
+ * Route attributes:
+ * scope: files.metadata.read
+ * @function Dropbox#paperDocsList
+ * @deprecated
+ * @arg {PaperListPaperDocsArgs} arg - The request parameters.
+ * @returns {Promise., DropboxResponseError.>}
+ */
+
+
+routes.paperDocsList = function (arg) {
+ return this.request('paper/docs/list', arg, 'user', 'api', 'rpc', 'files.metadata.read');
+};
+/**
+ * Once a cursor has been retrieved from docs/list, use this to paginate through
+ * all Paper doc. Note that this endpoint will continue to work for content
+ * created by users on the older version of Paper. To check which version of
+ * Paper a user is on, use /users/features/get_values. If the paper_as_files
+ * feature is enabled, then the user is running the new version of Paper. Refer
+ * to the Paper Migration Guide
+ * https://www.dropbox.com/lp/developers/reference/paper-migration-guide for
+ * migration information.
+ * Route attributes:
+ * scope: files.metadata.read
+ * @function Dropbox#paperDocsListContinue
+ * @deprecated
+ * @arg {PaperListPaperDocsContinueArgs} arg - The request parameters.
+ * @returns {Promise., DropboxResponseError.>}
+ */
+
+
+routes.paperDocsListContinue = function (arg) {
+ return this.request('paper/docs/list/continue', arg, 'user', 'api', 'rpc', 'files.metadata.read');
+};
+/**
+ * Permanently deletes the given Paper doc. This operation is final as the doc
+ * cannot be recovered. This action can be performed only by the doc owner. Note
+ * that this endpoint will continue to work for content created by users on the
+ * older version of Paper. To check which version of Paper a user is on, use
+ * /users/features/get_values. If the paper_as_files feature is enabled, then
+ * the user is running the new version of Paper. Refer to the Paper Migration
+ * Guide https://www.dropbox.com/lp/developers/reference/paper-migration-guide
+ * for migration information.
+ * Route attributes:
+ * scope: files.permanent_delete
+ * @function Dropbox#paperDocsPermanentlyDelete
+ * @deprecated
+ * @arg {PaperRefPaperDoc} arg - The request parameters.
+ * @returns {Promise., DropboxResponseError.>}
+ */
+
+
+routes.paperDocsPermanentlyDelete = function (arg) {
+ return this.request('paper/docs/permanently_delete', arg, 'user', 'api', 'rpc', 'files.permanent_delete');
+};
+/**
+ * Gets the default sharing policy for the given Paper doc. Note that this
+ * endpoint will continue to work for content created by users on the older
+ * version of Paper. To check which version of Paper a user is on, use
+ * /users/features/get_values. If the paper_as_files feature is enabled, then
+ * the user is running the new version of Paper. Refer to the Paper Migration
+ * Guide https://www.dropbox.com/lp/developers/reference/paper-migration-guide
+ * for migration information.
+ * Route attributes:
+ * scope: sharing.read
+ * @function Dropbox#paperDocsSharingPolicyGet
+ * @deprecated
+ * @arg {PaperRefPaperDoc} arg - The request parameters.
+ * @returns {Promise., DropboxResponseError.>}
+ */
+
+
+routes.paperDocsSharingPolicyGet = function (arg) {
+ return this.request('paper/docs/sharing_policy/get', arg, 'user', 'api', 'rpc', 'sharing.read');
+};
+/**
+ * Sets the default sharing policy for the given Paper doc. The default
+ * 'team_sharing_policy' can be changed only by teams, omit this field for
+ * personal accounts. The 'public_sharing_policy' policy can't be set to the
+ * value 'disabled' because this setting can be changed only via the team admin
+ * console. Note that this endpoint will continue to work for content created by
+ * users on the older version of Paper. To check which version of Paper a user
+ * is on, use /users/features/get_values. If the paper_as_files feature is
+ * enabled, then the user is running the new version of Paper. Refer to the
+ * Paper Migration Guide
+ * https://www.dropbox.com/lp/developers/reference/paper-migration-guide for
+ * migration information.
+ * Route attributes:
+ * scope: sharing.write
+ * @function Dropbox#paperDocsSharingPolicySet
+ * @deprecated
+ * @arg {PaperPaperDocSharingPolicy} arg - The request parameters.
+ * @returns {Promise., DropboxResponseError.>}
+ */
+
+
+routes.paperDocsSharingPolicySet = function (arg) {
+ return this.request('paper/docs/sharing_policy/set', arg, 'user', 'api', 'rpc', 'sharing.write');
+};
+/**
+ * Updates an existing Paper doc with the provided content. Note that this
+ * endpoint will continue to work for content created by users on the older
+ * version of Paper. To check which version of Paper a user is on, use
+ * /users/features/get_values. If the paper_as_files feature is enabled, then
+ * the user is running the new version of Paper. This endpoint will be retired
+ * in September 2020. Refer to the Paper Migration Guide
+ * https://www.dropbox.com/lp/developers/reference/paper-migration-guide for
+ * more information.
+ * Route attributes:
+ * scope: files.content.write
+ * @function Dropbox#paperDocsUpdate
+ * @deprecated
+ * @arg {PaperPaperDocUpdateArgs} arg - The request parameters.
+ * @returns {Promise., DropboxResponseError.>}
+ */
+
+
+routes.paperDocsUpdate = function (arg) {
+ return this.request('paper/docs/update', arg, 'user', 'api', 'upload', 'files.content.write');
+};
+/**
+ * Allows an owner or editor to add users to a Paper doc or change their
+ * permissions using their email address or Dropbox account ID. The doc owner's
+ * permissions cannot be changed. Note that this endpoint will continue to work
+ * for content created by users on the older version of Paper. To check which
+ * version of Paper a user is on, use /users/features/get_values. If the
+ * paper_as_files feature is enabled, then the user is running the new version
+ * of Paper. Refer to the Paper Migration Guide
+ * https://www.dropbox.com/lp/developers/reference/paper-migration-guide for
+ * migration information.
+ * Route attributes:
+ * scope: sharing.write
+ * @function Dropbox#paperDocsUsersAdd
+ * @deprecated
+ * @arg {PaperAddPaperDocUser} arg - The request parameters.
+ * @returns {Promise.>, DropboxResponseError.>}
+ */
+
+
+routes.paperDocsUsersAdd = function (arg) {
+ return this.request('paper/docs/users/add', arg, 'user', 'api', 'rpc', 'sharing.write');
+};
+/**
+ * Lists all users who visited the Paper doc or users with explicit access. This
+ * call excludes users who have been removed. The list is sorted by the date of
+ * the visit or the share date. The list will include both users, the explicitly
+ * shared ones as well as those who came in using the Paper url link. Note that
+ * this endpoint will continue to work for content created by users on the older
+ * version of Paper. To check which version of Paper a user is on, use
+ * /users/features/get_values. If the paper_as_files feature is enabled, then
+ * the user is running the new version of Paper. Refer to the Paper Migration
+ * Guide https://www.dropbox.com/lp/developers/reference/paper-migration-guide
+ * for migration information.
+ * Route attributes:
+ * scope: sharing.read
+ * @function Dropbox#paperDocsUsersList
+ * @deprecated
+ * @arg {PaperListUsersOnPaperDocArgs} arg - The request parameters.
+ * @returns {Promise., DropboxResponseError.>}
+ */
+
+
+routes.paperDocsUsersList = function (arg) {
+ return this.request('paper/docs/users/list', arg, 'user', 'api', 'rpc', 'sharing.read');
+};
+/**
+ * Once a cursor has been retrieved from docs/users/list, use this to paginate
+ * through all users on the Paper doc. Note that this endpoint will continue to
+ * work for content created by users on the older version of Paper. To check
+ * which version of Paper a user is on, use /users/features/get_values. If the
+ * paper_as_files feature is enabled, then the user is running the new version
+ * of Paper. Refer to the Paper Migration Guide
+ * https://www.dropbox.com/lp/developers/reference/paper-migration-guide for
+ * migration information.
+ * Route attributes:
+ * scope: sharing.read
+ * @function Dropbox#paperDocsUsersListContinue
+ * @deprecated
+ * @arg {PaperListUsersOnPaperDocContinueArgs} arg - The request parameters.
+ * @returns {Promise., DropboxResponseError.>}
+ */
+
+
+routes.paperDocsUsersListContinue = function (arg) {
+ return this.request('paper/docs/users/list/continue', arg, 'user', 'api', 'rpc', 'sharing.read');
+};
+/**
+ * Allows an owner or editor to remove users from a Paper doc using their email
+ * address or Dropbox account ID. The doc owner cannot be removed. Note that
+ * this endpoint will continue to work for content created by users on the older
+ * version of Paper. To check which version of Paper a user is on, use
+ * /users/features/get_values. If the paper_as_files feature is enabled, then
+ * the user is running the new version of Paper. Refer to the Paper Migration
+ * Guide https://www.dropbox.com/lp/developers/reference/paper-migration-guide
+ * for migration information.
+ * Route attributes:
+ * scope: sharing.write
+ * @function Dropbox#paperDocsUsersRemove
+ * @deprecated
+ * @arg {PaperRemovePaperDocUser} arg - The request parameters.
+ * @returns {Promise., DropboxResponseError.>}
+ */
+
+
+routes.paperDocsUsersRemove = function (arg) {
+ return this.request('paper/docs/users/remove', arg, 'user', 'api', 'rpc', 'sharing.write');
+};
+/**
+ * Create a new Paper folder with the provided info. Note that this endpoint
+ * will continue to work for content created by users on the older version of
+ * Paper. To check which version of Paper a user is on, use
+ * /users/features/get_values. If the paper_as_files feature is enabled, then
+ * the user is running the new version of Paper. Refer to the Paper Migration
+ * Guide https://www.dropbox.com/lp/developers/reference/paper-migration-guide
+ * for migration information.
+ * Route attributes:
+ * scope: files.content.write
+ * @function Dropbox#paperFoldersCreate
+ * @deprecated
+ * @arg {PaperPaperFolderCreateArg} arg - The request parameters.
+ * @returns {Promise., DropboxResponseError.>}
+ */
+
+
+routes.paperFoldersCreate = function (arg) {
+ return this.request('paper/folders/create', arg, 'user', 'api', 'rpc', 'files.content.write');
+};
+/**
+ * Adds specified members to a file.
+ * Route attributes:
+ * scope: sharing.write
+ * @function Dropbox#sharingAddFileMember
+ * @arg {SharingAddFileMemberArgs} arg - The request parameters.
+ * @returns {Promise.>, DropboxResponseError.>}
+ */
+
+
+routes.sharingAddFileMember = function (arg) {
+ return this.request('sharing/add_file_member', arg, 'user', 'api', 'rpc', 'sharing.write');
+};
+/**
+ * Allows an owner or editor (if the ACL update policy allows) of a shared
+ * folder to add another member. For the new member to get access to all the
+ * functionality for this folder, you will need to call mount_folder on their
+ * behalf.
+ * Route attributes:
+ * scope: sharing.write
+ * @function Dropbox#sharingAddFolderMember
+ * @arg {SharingAddFolderMemberArg} arg - The request parameters.
+ * @returns {Promise., DropboxResponseError.>}
+ */
+
+
+routes.sharingAddFolderMember = function (arg) {
+ return this.request('sharing/add_folder_member', arg, 'user', 'api', 'rpc', 'sharing.write');
+};
+/**
+ * Returns the status of an asynchronous job.
+ * Route attributes:
+ * scope: sharing.write
+ * @function Dropbox#sharingCheckJobStatus
+ * @arg {AsyncPollArg} arg - The request parameters.
+ * @returns {Promise., DropboxResponseError.>}
+ */
+
+
+routes.sharingCheckJobStatus = function (arg) {
+ return this.request('sharing/check_job_status', arg, 'user', 'api', 'rpc', 'sharing.write');
+};
+/**
+ * Returns the status of an asynchronous job for sharing a folder.
+ * Route attributes:
+ * scope: sharing.write
+ * @function Dropbox#sharingCheckRemoveMemberJobStatus
+ * @arg {AsyncPollArg} arg - The request parameters.
+ * @returns {Promise., DropboxResponseError.