file_path stringlengths 3 280 | file_language stringclasses 66 values | content stringlengths 1 1.04M | repo_name stringlengths 5 92 | repo_stars int64 0 154k | repo_description stringlengths 0 402 | repo_primary_language stringclasses 108 values | developer_username stringlengths 1 25 | developer_name stringlengths 0 30 | developer_company stringlengths 0 82 |
|---|---|---|---|---|---|---|---|---|---|
crates/swc_node_bundler/tests/pass/deno-001/full/input/async/deferred.ts | TypeScript | // Copyright 2018-2020 the Deno authors. All rights reserved. MIT license.
// TODO(ry) It'd be better to make Deferred a class that inherits from
// Promise, rather than an interface. This is possible in ES2016, however
// typescript produces broken code when targeting ES5 code.
// See https://github.com/Microsoft/TypeScript/issues/15202
// At the time of writing, the github issue is closed but the problem remains.
/** A Promise augmented with its own `resolve`/`reject` controls.
 * Kept as an interface (not a Promise subclass): subclassing Promise emits
 * broken code when targeting ES5 — see TypeScript issue #15202. */
export interface Deferred<T> extends Promise<T> {
  resolve: (value?: T | PromiseLike<T>) => void;
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  reject: (reason?: any) => void;
}
/** Builds a Promise whose `resolve` and `reject` functions are exposed as
 * methods on the promise object itself:
 *
 *     const p = deferred<number>();
 *     // ...
 *     p.resolve(42);
 */
export function deferred<T>(): Deferred<T> {
  let settle;
  const p = new Promise<T>((resolve, reject): void => {
    settle = { resolve, reject };
  });
  return Object.assign(p, settle) as Deferred<T>;
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/deno-001/full/input/async/delay.ts | TypeScript | // Copyright 2018-2020 the Deno authors. All rights reserved. MIT license.
/* Resolves after the given number of milliseconds. */
export function delay(ms: number): Promise<void> {
  return new Promise<void>((resolve): void => {
    // setTimeout invokes `resolve` with no arguments after `ms` milliseconds.
    setTimeout(resolve, ms);
  });
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/deno-001/full/input/async/mod.ts | TypeScript | // Copyright 2018-2020 the Deno authors. All rights reserved. MIT license.
export * from "./deferred";
export * from "./delay";
export * from "./mux_async_iterator";
export * from "./pool";
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/deno-001/full/input/async/mux_async_iterator.ts | TypeScript | // Copyright 2018-2020 the Deno authors. All rights reserved. MIT license.
import { Deferred, deferred } from "./deferred.ts";
interface TaggedYieldedValue<T> {
iterator: AsyncIterableIterator<T>;
value: T;
}
/** The MuxAsyncIterator class multiplexes multiple async iterators into a
 * single stream. It currently makes an assumption:
 * - The final result (the value returned and not yielded from the iterator)
 *   does not matter; if there is any, it is discarded.
 */
export class MuxAsyncIterator<T> implements AsyncIterable<T> {
  // Number of wrapped iterators that have not yet completed.
  private iteratorCount = 0;
  // Values produced by wrapped iterators since the last drain, tagged with
  // their source iterator so it can be pumped again after delivery.
  private yields: Array<TaggedYieldedValue<T>> = [];
  // Errors thrown by wrapped iterators, re-thrown to the consumer in iterate().
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  private throws: any[] = [];
  // Resolved whenever any wrapped iterator yields, throws, or completes;
  // replaced with a fresh deferred after each drain pass.
  private signal: Deferred<void> = deferred();
  /** Adds an iterator to the multiplexer and starts pumping it. */
  add(iterator: AsyncIterableIterator<T>): void {
    ++this.iteratorCount;
    this.callIteratorNext(iterator);
  }
  // Requests the next value from one wrapped iterator, records the outcome,
  // and wakes iterate(). Deliberately fire-and-forget (not awaited).
  private async callIteratorNext(
    iterator: AsyncIterableIterator<T>
  ): Promise<void> {
    try {
      const { value, done } = await iterator.next();
      if (done) {
        --this.iteratorCount;
      } else {
        this.yields.push({ iterator, value });
      }
    } catch (e) {
      this.throws.push(e);
    }
    this.signal.resolve();
  }
  /** Yields values from all added iterators as they arrive, until every
   * wrapped iterator has completed; re-throws any error a wrapped iterator
   * threw. */
  async *iterate(): AsyncIterableIterator<T> {
    while (this.iteratorCount > 0) {
      // Sleep until any of the wrapped iterators yields.
      await this.signal;
      // Note that while we're looping over `yields`, new items may be added.
      for (let i = 0; i < this.yields.length; i++) {
        const { iterator, value } = this.yields[i];
        yield value;
        this.callIteratorNext(iterator);
      }
      if (this.throws.length) {
        for (const e of this.throws) {
          throw e;
        }
        // NOTE(review): unreachable — the throw above exits the generator on
        // the first error, so this reset never executes; confirm intent.
        this.throws.length = 0;
      }
      // Clear the `yields` list and reset the `signal` promise.
      this.yields.length = 0;
      this.signal = deferred();
    }
  }
  [Symbol.asyncIterator](): AsyncIterableIterator<T> {
    return this.iterate();
  }
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/deno-001/full/input/async/pool.ts | TypeScript | // Copyright 2018-2020 the Deno authors. All rights reserved. MIT license.
/**
 * pooledMap transforms values from an (async) iterable into another async
 * iterable. The transforms are done concurrently, with a max concurrency
 * defined by the poolLimit.
 *
 * @param poolLimit The maximum count of items being processed concurrently.
 * @param array The input array for mapping.
 * @param iteratorFn The function to call for every item of the array.
 */
export function pooledMap<T, R>(
  poolLimit: number,
  array: Iterable<T> | AsyncIterable<T>,
  iteratorFn: (data: T) => Promise<R>
): AsyncIterableIterator<R> {
  // Create the async iterable that is returned from this function.
  // Pending result promises are written in input order; the transform awaits
  // each one so the readable side yields settled values in that same order.
  const res = new TransformStream<Promise<R>, R>({
    async transform(
      p: Promise<R>,
      controller: TransformStreamDefaultController<R>
    ): Promise<void> {
      controller.enqueue(await p);
    },
  });
  // Start processing items from the iterator
  (async (): Promise<void> => {
    const writer = res.writable.getWriter();
    // Promises for in-flight transforms; each removes itself once settled,
    // so the array only ever holds currently-running work.
    const executing: Array<Promise<unknown>> = [];
    for await (const item of array) {
      const p = Promise.resolve().then(() => iteratorFn(item));
      writer.write(p);
      const e: Promise<unknown> = p.then(() =>
        executing.splice(executing.indexOf(e), 1)
      );
      executing.push(e);
      // Throttle: once the pool is full, wait for any transform to finish.
      if (executing.length >= poolLimit) {
        await Promise.race(executing);
      }
    }
    // Wait until all ongoing events have processed, then close the writer.
    await Promise.all(executing);
    writer.close();
  })();
  // NOTE(review): getIterator() is the legacy ReadableStream API (removed in
  // modern runtimes in favor of Symbol.asyncIterator) — confirm the target
  // runtime still provides it.
  return res.readable.getIterator();
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/deno-001/full/input/bytes/mod.ts | TypeScript | // Copyright 2018-2020 the Deno authors. All rights reserved. MIT license.
/** Find first index of binary pattern from a. If not found, then return -1
 * @param source source array
 * @param pat pattern to find in source array
 */
export function findIndex(source: Uint8Array, pat: Uint8Array): number {
  const first = pat[0];
  for (let start = 0; start < source.length; start++) {
    // Cheap prefilter: only positions matching the first pattern byte are
    // candidates for a full comparison.
    if (source[start] !== first) continue;
    let count = 1;
    let pos = start;
    while (count < pat.length) {
      pos++;
      // Out-of-range reads yield undefined and terminate the comparison.
      if (source[pos] !== pat[pos - start]) {
        break;
      }
      count++;
    }
    if (count === pat.length) {
      return start;
    }
  }
  return -1;
}
/** Find last index of binary pattern from a. If not found, then return -1.
 * @param source source array
 * @param pat pattern to find in source array
 */
export function findLastIndex(source: Uint8Array, pat: Uint8Array): number {
  const last = pat[pat.length - 1];
  for (let end = source.length - 1; end >= 0; end--) {
    // Candidate end position; walk backwards through the pattern.
    if (source[end] !== last) continue;
    let count = 1;
    let pos = end;
    while (count < pat.length) {
      pos--;
      if (source[pos] !== pat[pat.length - 1 - (end - pos)]) {
        break;
      }
      count++;
    }
    if (count === pat.length) {
      // `end` is the index of the pattern's last byte; report its start.
      return end - pat.length + 1;
    }
  }
  return -1;
}
/** Check whether binary arrays are equal to each other.
 * @param source first array to check equality
 * @param match second array to check equality
 */
export function equal(source: Uint8Array, match: Uint8Array): boolean {
  if (source.length !== match.length) return false;
  // Same length: equal iff every byte matches positionally.
  return match.every((byte, idx) => source[idx] === byte);
}
/** Check whether binary array starts with prefix.
 * @param source srouce array
 * @param prefix prefix array to check in source
 */
export function hasPrefix(source: Uint8Array, prefix: Uint8Array): boolean {
  // Out-of-range reads yield undefined, which never equals a byte, so a
  // prefix longer than the source correctly reports false.
  let idx = 0;
  while (idx < prefix.length) {
    if (source[idx] !== prefix[idx]) return false;
    idx++;
  }
  return true;
}
/** Check whether binary array ends with suffix.
 * @param source source array
 * @param suffix suffix array to check in source
 */
export function hasSuffix(source: Uint8Array, suffix: Uint8Array): boolean {
  // Walk both arrays backwards from their last bytes; a suffix longer than
  // the source falls off the front (undefined reads) and reports false.
  let srcPos = source.length - 1;
  for (let sfxPos = suffix.length - 1; sfxPos >= 0; sfxPos--, srcPos--) {
    if (source[srcPos] !== suffix[sfxPos]) return false;
  }
  return true;
}
/** Repeat bytes. returns a new byte slice consisting of `count` copies of `b`.
 * @param origin The origin bytes
 * @param count The count you want to repeat.
 */
export function repeat(origin: Uint8Array, count: number): Uint8Array {
  if (count === 0) {
    return new Uint8Array();
  }
  if (count < 0) {
    throw new Error("bytes: negative repeat count");
  }
  if ((origin.length * count) / count !== origin.length) {
    throw new Error("bytes: repeat count causes overflow");
  }
  if (!Number.isInteger(count)) {
    throw new Error("bytes: repeat count must be an integer");
  }
  // Fill by doubling: seed with one copy, then repeatedly copy the
  // already-filled region onto the region that follows it.
  const out = new Uint8Array(origin.length * count);
  let filled = copyBytes(origin, out);
  while (filled < out.length) {
    copyBytes(out.slice(0, filled), out, filled);
    filled *= 2;
  }
  return out;
}
/** Concatenate binary arrays and return a new one.
 *
 * Generalized (backward-compatibly) to accept any number of additional
 * arrays after the original two; all are appended in argument order.
 *
 * @param origin origin array to concatenate
 * @param b array to concatenate with origin
 * @param rest further arrays to append after `b`
 */
export function concat(
  origin: Uint8Array,
  b: Uint8Array,
  ...rest: Uint8Array[]
): Uint8Array {
  const parts = [origin, b, ...rest];
  // Size the output once, then copy each part at its running offset.
  let total = 0;
  for (const part of parts) total += part.length;
  const output = new Uint8Array(total);
  let offset = 0;
  for (const part of parts) {
    output.set(part, offset);
    offset += part.length;
  }
  return output;
}
/** Check source array contains pattern array.
 * @param source source array
 * @param pat pattern array
 */
export function contains(source: Uint8Array, pat: Uint8Array): boolean {
  const idx = findIndex(source, pat);
  return idx !== -1;
}
/**
 * Copy bytes from one Uint8Array to another. Bytes from `src` which don't fit
 * into `dst` will not be copied.
 *
 * @param src Source byte array
 * @param dst Destination byte array
 * @param off Offset into `dst` at which to begin writing values from `src`.
 * @return number of bytes copied
 */
export function copyBytes(src: Uint8Array, dst: Uint8Array, off = 0): number {
  // Clamp the offset into [0, dst.byteLength].
  const start = Math.min(Math.max(off, 0), dst.byteLength);
  const room = dst.byteLength - start;
  // Truncate the source view if it would overrun the destination.
  const chunk = src.byteLength > room ? src.subarray(0, room) : src;
  dst.set(chunk, start);
  return chunk.byteLength;
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/deno-001/full/input/encoding/utf8.ts | TypeScript | /** A default TextEncoder instance */
// Module-level encoder shared by encode() so callers avoid per-call allocation.
export const encoder = new TextEncoder();
/** Shorthand for new TextEncoder().encode() */
export function encode(input?: string): Uint8Array {
  const bytes = encoder.encode(input);
  return bytes;
}
/** A default TextDecoder instance, shared by decode(). */
export const decoder = new TextDecoder();
/** Shorthand for new TextDecoder().decode() */
export function decode(input?: Uint8Array): string {
  const text = decoder.decode(input);
  return text;
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/deno-001/full/input/entry.js | JavaScript | import { listenAndServe } from "./http/server";
listenAndServe({ port: 8080 }, async (req) => {});
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/deno-001/full/input/http/_io.ts | TypeScript | import { BufReader, BufWriter } from "../io/bufio";
import { TextProtoReader } from "../textproto/mod";
import { assert } from "../_util/assert";
import { encoder } from "../encoding/utf8";
import { ServerRequest, Response } from "./server";
import { STATUS_TEXT } from "./http_status";
/** Returns a reader that always reports end-of-stream (null) without reading. */
export function emptyReader(): Deno.Reader {
  const read = (_: Uint8Array): Promise<number | null> =>
    Promise.resolve(null);
  return { read };
}
/** Returns a reader that delivers exactly `contentLength` bytes from `r`,
 * then reports EOF (null). Used for fixed-length HTTP request bodies.
 * @param contentLength total number of body bytes to deliver
 * @param r buffered reader positioned at the start of the body
 */
export function bodyReader(contentLength: number, r: BufReader): Deno.Reader {
  // Bytes delivered so far; the reader finishes once this reaches contentLength.
  let totalRead = 0;
  let finished = false;
  async function read(buf: Uint8Array): Promise<number | null> {
    if (finished) return null;
    let result: number | null;
    const remaining = contentLength - totalRead;
    if (remaining >= buf.byteLength) {
      result = await r.read(buf);
    } else {
      // Near the end: shrink the view so we never read past the body.
      const readBuf = buf.subarray(0, remaining);
      result = await r.read(readBuf);
    }
    if (result !== null) {
      totalRead += result;
    }
    finished = totalRead === contentLength;
    return result;
  }
  return { read };
}
/** Returns a reader for an HTTP body sent with "transfer-encoding: chunked".
 * Chunks are parsed on demand; declared trailers are read into `h` after the
 * terminating zero-length chunk. Reports EOF (null) once finished. */
export function chunkedBodyReader(h: Headers, r: BufReader): Deno.Reader {
  // Based on https://tools.ietf.org/html/rfc2616#section-19.4.6
  const tp = new TextProtoReader(r);
  let finished = false;
  // Chunk data received but not yet fully consumed by the caller.
  const chunks: Array<{
    offset: number;
    data: Uint8Array;
  }> = [];
  async function read(buf: Uint8Array): Promise<number | null> {
    if (finished) return null;
    // Drain previously buffered chunk data before parsing anything new.
    const [chunk] = chunks;
    if (chunk) {
      const chunkRemaining = chunk.data.byteLength - chunk.offset;
      const readLength = Math.min(chunkRemaining, buf.byteLength);
      for (let i = 0; i < readLength; i++) {
        buf[i] = chunk.data[chunk.offset + i];
      }
      chunk.offset += readLength;
      if (chunk.offset === chunk.data.byteLength) {
        chunks.shift();
        // Consume \r\n;
        if ((await tp.readLine()) === null) {
          throw new Deno.errors.UnexpectedEof();
        }
      }
      return readLength;
    }
    // No buffered data: parse the next chunk-size line.
    const line = await tp.readLine();
    if (line === null) throw new Deno.errors.UnexpectedEof();
    // TODO: handle chunk extension
    const [chunkSizeString] = line.split(";");
    const chunkSize = parseInt(chunkSizeString, 16);
    if (Number.isNaN(chunkSize) || chunkSize < 0) {
      throw new Error("Invalid chunk size");
    }
    if (chunkSize > 0) {
      if (chunkSize > buf.byteLength) {
        // Chunk is larger than the caller's buffer: fill `buf` entirely and
        // stash the remainder of the chunk for subsequent read() calls.
        let eof = await r.readFull(buf);
        if (eof === null) {
          throw new Deno.errors.UnexpectedEof();
        }
        const restChunk = new Uint8Array(chunkSize - buf.byteLength);
        eof = await r.readFull(restChunk);
        if (eof === null) {
          throw new Deno.errors.UnexpectedEof();
        } else {
          chunks.push({
            offset: 0,
            data: restChunk,
          });
        }
        return buf.byteLength;
      } else {
        // Whole chunk fits into the caller's buffer.
        const bufToFill = buf.subarray(0, chunkSize);
        const eof = await r.readFull(bufToFill);
        if (eof === null) {
          throw new Deno.errors.UnexpectedEof();
        }
        // Consume \r\n
        if ((await tp.readLine()) === null) {
          throw new Deno.errors.UnexpectedEof();
        }
        return chunkSize;
      }
    } else {
      assert(chunkSize === 0);
      // Consume \r\n
      // NOTE(review): this uses r.readLine() while the rest of the parser
      // uses tp.readLine(); both consume one line — confirm this is intended.
      if ((await r.readLine()) === null) {
        throw new Deno.errors.UnexpectedEof();
      }
      await readTrailers(h, r);
      finished = true;
      return null;
    }
  }
  return { read };
}
// Header names that must never appear as trailers.
// Hoisted to module level so the Set is not rebuilt on every call.
const PROHIBITED_TRAILER_HEADERS = new Set([
  "transfer-encoding",
  "content-length",
  "trailer",
]);
/** Returns true when `key` (case-insensitive) may not be sent as a trailer.
 * NOTE: keeps the existing (misspelled) name — callers in this module use it. */
function isProhibidedForTrailer(key: string): boolean {
  return PROHIBITED_TRAILER_HEADERS.has(key.toLowerCase());
}
/** Read trailer headers from reader and append values to headers. "trailer"
 * field will be deleted.
 * @throws Deno.errors.InvalidData when trailers are missing, undeclared, or
 * incomplete relative to the declared "trailer" header. */
export async function readTrailers(
  headers: Headers,
  r: BufReader
): Promise<void> {
  const trailers = parseTrailer(headers.get("trailer"));
  // No "trailer" header declared: nothing to read.
  if (trailers == null) return;
  const trailerNames = [...trailers.keys()];
  const tp = new TextProtoReader(r);
  const result = await tp.readMIMEHeader();
  if (result == null) {
    throw new Deno.errors.InvalidData("Missing trailer header.");
  }
  // Every received trailer must have been declared up front.
  const undeclared = [...result.keys()].filter(
    (k) => !trailerNames.includes(k)
  );
  if (undeclared.length > 0) {
    throw new Deno.errors.InvalidData(
      `Undeclared trailers: ${Deno.inspect(undeclared)}.`
    );
  }
  for (const [k, v] of result) {
    headers.append(k, v);
  }
  // Conversely, every declared trailer must actually be present.
  const missingTrailers = trailerNames.filter((k) => !result.has(k));
  if (missingTrailers.length > 0) {
    throw new Deno.errors.InvalidData(
      `Missing trailers: ${Deno.inspect(missingTrailers)}.`
    );
  }
  headers.delete("trailer");
}
// Parses the value of a "trailer" header into a Headers template whose keys
// are the declared (lower-cased) trailer names. Returns undefined when the
// header is absent; rejects empty or prohibited declarations.
function parseTrailer(field: string | null): Headers | undefined {
  if (field == null) {
    return undefined;
  }
  const names = field.split(",").map((v) => v.trim().toLowerCase());
  if (names.length === 0) {
    throw new Deno.errors.InvalidData("Empty trailer header.");
  }
  const banned = names.filter((name) => isProhibidedForTrailer(name));
  if (banned.length > 0) {
    throw new Deno.errors.InvalidData(
      `Prohibited trailer names: ${Deno.inspect(banned)}.`
    );
  }
  return new Headers(names.map((key) => [key, ""]));
}
/** Writes `r` to `w` using chunked transfer encoding: each non-empty chunk is
 * prefixed with its hex length and terminated with \r\n, ending with a
 * zero-length chunk. */
export async function writeChunkedBody(
  w: Deno.Writer,
  r: Deno.Reader
): Promise<void> {
  const writer = BufWriter.create(w);
  for await (const chunk of Deno.iter(r)) {
    if (chunk.byteLength <= 0) continue;
    const start = encoder.encode(`${chunk.byteLength.toString(16)}\r\n`);
    const end = encoder.encode("\r\n");
    await writer.write(start);
    await writer.write(chunk);
    await writer.write(end);
  }
  // Terminating zero-length chunk marks the end of the body.
  const endChunk = encoder.encode("0\r\n\r\n");
  await writer.write(endChunk);
}
/** Write trailer headers to writer. Should mostly be called after
 * `writeResponse()`.
 * @throws TypeError when the "trailer" header is missing, the response is not
 * chunked, or trailer names are prohibited/undeclared. */
export async function writeTrailers(
  w: Deno.Writer,
  headers: Headers,
  trailers: Headers
): Promise<void> {
  const trailer = headers.get("trailer");
  if (trailer === null) {
    throw new TypeError("Missing trailer header.");
  }
  // Trailers are only valid together with chunked transfer encoding.
  const transferEncoding = headers.get("transfer-encoding");
  if (transferEncoding === null || !transferEncoding.match(/^chunked/)) {
    throw new TypeError(
      `Trailers are only allowed for "transfer-encoding: chunked", got "transfer-encoding: ${transferEncoding}".`
    );
  }
  const writer = BufWriter.create(w);
  const trailerNames = trailer.split(",").map((s) => s.trim().toLowerCase());
  const prohibitedTrailers = trailerNames.filter((k) =>
    isProhibidedForTrailer(k)
  );
  if (prohibitedTrailers.length > 0) {
    throw new TypeError(
      `Prohibited trailer names: ${Deno.inspect(prohibitedTrailers)}.`
    );
  }
  // Every trailer actually sent must have been declared in "trailer".
  const undeclared = [...trailers.keys()].filter(
    (k) => !trailerNames.includes(k)
  );
  if (undeclared.length > 0) {
    throw new TypeError(
      `Undeclared trailers: ${Deno.inspect(undeclared)}.`
    );
  }
  for (const [key, value] of trailers) {
    await writer.write(encoder.encode(`${key}: ${value}\r\n`));
  }
  await writer.write(encoder.encode("\r\n"));
  await writer.flush();
}
/** Serializes `r` to `w` as an HTTP/1.1 response: status line, headers, body
 * (fixed-length or chunked), and optional trailers.
 * @throws Deno.errors.InvalidData for a status code not in STATUS_TEXT. */
export async function writeResponse(
  w: Deno.Writer,
  r: Response
): Promise<void> {
  const protoMajor = 1;
  const protoMinor = 1;
  const statusCode = r.status || 200;
  const statusText = STATUS_TEXT.get(statusCode);
  const writer = BufWriter.create(w);
  if (!statusText) {
    throw new Deno.errors.InvalidData("Bad status code");
  }
  if (!r.body) {
    r.body = new Uint8Array();
  }
  // String bodies are UTF-8 encoded before writing.
  if (typeof r.body === "string") {
    r.body = encoder.encode(r.body);
  }
  let out = `HTTP/${protoMajor}.${protoMinor} ${statusCode} ${statusText}\r\n`;
  const headers = r.headers ?? new Headers();
  if (r.body && !headers.get("content-length")) {
    if (r.body instanceof Uint8Array) {
      out += `content-length: ${r.body.byteLength}\r\n`;
    } else if (!headers.get("transfer-encoding")) {
      // Reader body with unknown length: fall back to chunked encoding.
      out += "transfer-encoding: chunked\r\n";
    }
  }
  for (const [key, value] of headers) {
    out += `${key}: ${value}\r\n`;
  }
  out += `\r\n`;
  const header = encoder.encode(out);
  const n = await writer.write(header);
  assert(n === header.byteLength);
  if (r.body instanceof Uint8Array) {
    const n = await writer.write(r.body);
    assert(n === r.body.byteLength);
  } else if (headers.has("content-length")) {
    // Reader body with declared length: copy and check exactly that many bytes.
    const contentLength = headers.get("content-length");
    assert(contentLength != null);
    const bodyLength = parseInt(contentLength);
    const n = await Deno.copy(r.body, writer);
    assert(n === bodyLength);
  } else {
    await writeChunkedBody(writer, r.body);
  }
  if (r.trailers) {
    const t = await r.trailers();
    await writeTrailers(writer, headers, t);
  }
  await writer.flush();
}
/**
 * ParseHTTPVersion parses a HTTP version string.
 * "HTTP/1.0" returns (1, 0).
 * Ported from https://github.com/golang/go/blob/f5c43b9/src/net/http/request.go#L766-L792
 * @throws Error for any string not of the form "HTTP/<major>.<minor>".
 */
export function parseHTTPVersion(vers: string): [number, number] {
  // Fast paths for the two common versions.
  if (vers === "HTTP/1.1") return [1, 1];
  if (vers === "HTTP/1.0") return [1, 0];
  const Big = 1000000; // arbitrary upper bound
  const malformed = (): Error => new Error(`malformed HTTP version ${vers}`);
  if (!vers.startsWith("HTTP/")) throw malformed();
  const dot = vers.indexOf(".");
  if (dot < 0) throw malformed();
  const major = Number(vers.substring(vers.indexOf("/") + 1, dot));
  if (!Number.isInteger(major) || major < 0 || major > Big) throw malformed();
  const minor = Number(vers.substring(dot + 1));
  if (!Number.isInteger(minor) || minor < 0 || minor > Big) throw malformed();
  return [major, minor];
}
/** Reads one HTTP request (request line + headers) from `bufr`; the body is
 * read lazily via ServerRequest. Returns null on clean EOF before any data.
 * @throws Deno.errors.UnexpectedEof if the header block is truncated. */
export async function readRequest(
  conn: Deno.Conn,
  bufr: BufReader
): Promise<ServerRequest | null> {
  const tp = new TextProtoReader(bufr);
  const firstLine = await tp.readLine(); // e.g. GET /index.html HTTP/1.0
  if (firstLine === null) return null;
  const headers = await tp.readMIMEHeader();
  if (headers === null) throw new Deno.errors.UnexpectedEof();
  const req = new ServerRequest();
  req.conn = conn;
  req.r = bufr;
  [req.method, req.url, req.proto] = firstLine.split(" ", 3);
  // BUG FIX: parseHTTPVersion returns [major, minor]; the destructuring
  // previously assigned them swapped (protoMinor received the major number).
  [req.protoMajor, req.protoMinor] = parseHTTPVersion(req.proto);
  req.headers = headers;
  fixLength(req);
  return req;
}
// Validates and normalizes the Content-Length header of a freshly-parsed
// request: collapses duplicate identical values, rejects conflicting
// duplicates, rejects HEAD requests with a non-zero length, and rejects
// coexistence with Transfer-Encoding.
function fixLength(req: ServerRequest): void {
  const contentLength = req.headers.get("Content-Length");
  if (contentLength) {
    const arrClen = contentLength.split(",");
    if (arrClen.length > 1) {
      // Multiple values are tolerated only when they are all identical.
      const distinct = [...new Set(arrClen.map((e): string => e.trim()))];
      if (distinct.length > 1) {
        throw Error("cannot contain multiple Content-Length headers");
      } else {
        req.headers.set("Content-Length", distinct[0]);
      }
    }
    const c = req.headers.get("Content-Length");
    if (req.method === "HEAD" && c && c !== "0") {
      throw Error("http: method cannot contain a Content-Length");
    }
    if (c && req.headers.has("transfer-encoding")) {
      // A sender MUST NOT send a Content-Length header field in any message
      // that contains a Transfer-Encoding header field.
      // rfc: https://tools.ietf.org/html/rfc7230#section-3.3.2
      throw new Error(
        "http: Transfer-Encoding and Content-Length cannot be send together"
      );
    }
  }
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/deno-001/full/input/http/http_status.ts | TypeScript | // Copyright 2018-2020 the Deno authors. All rights reserved. MIT license.
/** HTTP status codes */
export enum Status {}
export const STATUS_TEXT = new Map<Status, string>([]);
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/deno-001/full/input/http/server.ts | TypeScript | // Copyright 2018-2020 the Deno authors. All rights reserved. MIT license.
import { encode } from "../encoding/utf8";
import { BufReader, BufWriter } from "../io/bufio";
import { assert } from "../_util/assert";
import { deferred, Deferred, MuxAsyncIterator } from "../async/mod";
import {
bodyReader,
chunkedBodyReader,
emptyReader,
writeResponse,
readRequest,
} from "./_io";
/** A single parsed HTTP request received by `Server`. Populated by
 * `readRequest()`; the body and content length are computed lazily. */
export class ServerRequest {
  url!: string;
  method!: string;
  proto!: string;
  protoMinor!: number;
  protoMajor!: number;
  headers!: Headers;
  conn!: Deno.Conn;
  r!: BufReader;
  w!: BufWriter;
  // Resolved (with the write error, if any) once this request has been
  // responded to; gates the next pipelined request on the same connection.
  done: Deferred<Error | undefined> = deferred();
  // Cached Content-Length; undefined = not yet computed, null = absent/invalid.
  private _contentLength: number | undefined | null = undefined;
  /**
   * Value of Content-Length header.
   * If null, then content length is invalid or not given (e.g. chunked encoding).
   */
  get contentLength(): number | null {
    // undefined means not cached.
    // null means invalid or not provided.
    if (this._contentLength === undefined) {
      const cl = this.headers.get("content-length");
      if (cl) {
        this._contentLength = parseInt(cl);
        // Convert NaN to null (as NaN harder to test)
        if (Number.isNaN(this._contentLength)) {
          this._contentLength = null;
        }
      } else {
        this._contentLength = null;
      }
    }
    return this._contentLength;
  }
  // Lazily-created body reader (fixed-length, chunked, or empty).
  private _body: Deno.Reader | null = null;
  /**
   * Body of the request. The easiest way to consume the body is:
   *
   *     const buf: Uint8Array = await Deno.readAll(req.body);
   */
  get body(): Deno.Reader {
    if (!this._body) {
      if (this.contentLength != null) {
        this._body = bodyReader(this.contentLength, this.r);
      } else {
        const transferEncoding = this.headers.get("transfer-encoding");
        if (transferEncoding != null) {
          const parts = transferEncoding
            .split(",")
            .map((e): string => e.trim().toLowerCase());
          assert(
            parts.includes("chunked"),
            'transfer-encoding must include "chunked" if content-length is not set'
          );
          this._body = chunkedBodyReader(this.headers, this.r);
        } else {
          // Neither content-length nor transfer-encoding: chunked
          this._body = emptyReader();
        }
      }
    }
    return this._body;
  }
  /** Writes `r` as the response to this request and resolves `done`.
   * On a write error the connection is closed eagerly and the error rethrown. */
  async respond(r: Response): Promise<void> {
    let err: Error | undefined;
    try {
      // Write our response!
      await writeResponse(this.w, r);
    } catch (e) {
      try {
        // Eagerly close on error.
        this.conn.close();
      } catch {
        // Pass
      }
      err = e;
    }
    // Signal that this request has been processed and the next pipelined
    // request on the same connection can be accepted.
    this.done.resolve(err);
    if (err) {
      // Error during responding, rethrow.
      throw err;
    }
  }
  private finalized = false;
  /** Drains any unread body bytes so the connection is positioned at the next
   * pipelined request. Idempotent. */
  async finalize(): Promise<void> {
    if (this.finalized) return;
    // Consume unread body
    const body = this.body;
    const buf = new Uint8Array(1024);
    while ((await body.read(buf)) !== null) {
      // Pass
    }
    this.finalized = true;
  }
}
/** An HTTP server that yields incoming requests as an async iterable.
 * Connections are accepted concurrently via MuxAsyncIterator; requests on a
 * single connection are processed sequentially (HTTP pipelining). */
export class Server implements AsyncIterable<ServerRequest> {
  private closing = false;
  // Open connections, tracked so close() can terminate them all.
  private connections: Deno.Conn[] = [];
  constructor(public listener: Deno.Listener) {}
  /** Stops listening and closes every tracked connection. */
  close(): void {
    this.closing = true;
    this.listener.close();
    for (const conn of this.connections) {
      try {
        conn.close();
      } catch (e) {
        // Connection might have been already closed
        if (!(e instanceof Deno.errors.BadResource)) {
          throw e;
        }
      }
    }
  }
  // Yields all HTTP requests on a single TCP connection.
  private async *iterateHttpRequests(
    conn: Deno.Conn
  ): AsyncIterableIterator<ServerRequest> {
    const reader = new BufReader(conn);
    const writer = new BufWriter(conn);
    while (!this.closing) {
      let request: ServerRequest | null;
      try {
        request = await readRequest(conn, reader);
      } catch (error) {
        if (
          error instanceof Deno.errors.InvalidData ||
          error instanceof Deno.errors.UnexpectedEof
        ) {
          // An error was thrown while parsing request headers.
          await writeResponse(writer, {
            status: 400,
            body: encode(`${error.message}\r\n\r\n`),
          });
        }
        break;
      }
      if (request === null) {
        break;
      }
      request.w = writer;
      yield request;
      // Wait for the request to be processed before we accept a new request on
      // this connection.
      const responseError = await request.done;
      if (responseError) {
        // Something bad happened during response.
        // (likely other side closed during pipelined req)
        // req.done implies this connection already closed, so we can just return.
        this.untrackConnection(request.conn);
        return;
      }
      // Consume unread body and trailers if receiver didn't consume those data
      await request.finalize();
    }
    this.untrackConnection(conn);
    try {
      conn.close();
    } catch (e) {
      // might have been already closed
    }
  }
  // Remembers a connection so close() can terminate it.
  private trackConnection(conn: Deno.Conn): void {
    this.connections.push(conn);
  }
  // Forgets a connection once it is finished or closed.
  private untrackConnection(conn: Deno.Conn): void {
    const index = this.connections.indexOf(conn);
    if (index !== -1) {
      this.connections.splice(index, 1);
    }
  }
  // Accepts a new TCP connection and yields all HTTP requests that arrive on
  // it. When a connection is accepted, it also creates a new iterator of the
  // same kind and adds it to the request multiplexer so that another TCP
  // connection can be accepted.
  private async *acceptConnAndIterateHttpRequests(
    mux: MuxAsyncIterator<ServerRequest>
  ): AsyncIterableIterator<ServerRequest> {
    if (this.closing) return;
    // Wait for a new connection.
    let conn: Deno.Conn;
    try {
      conn = await this.listener.accept();
    } catch (error) {
      if (
        error instanceof Deno.errors.BadResource ||
        error instanceof Deno.errors.InvalidData ||
        error instanceof Deno.errors.UnexpectedEof
      ) {
        // Keep accepting despite a failed accept.
        return mux.add(this.acceptConnAndIterateHttpRequests(mux));
      }
      throw error;
    }
    this.trackConnection(conn);
    // Try to accept another connection and add it to the multiplexer.
    mux.add(this.acceptConnAndIterateHttpRequests(mux));
    // Yield the requests that arrive on the just-accepted connection.
    yield* this.iterateHttpRequests(conn);
  }
  [Symbol.asyncIterator](): AsyncIterableIterator<ServerRequest> {
    const mux: MuxAsyncIterator<ServerRequest> = new MuxAsyncIterator();
    mux.add(this.acceptConnAndIterateHttpRequests(mux));
    return mux.iterate();
  }
}
/** Options for creating an HTTP server. */
export type HTTPOptions = Omit<Deno.ListenOptions, "transport">;
/**
 * Parse addr from string
 *
 *     const addr = "::1:8000";
 *     parseAddrFromString(addr);
 *
 * @param addr Address string
 * @throws TypeError when the string is not a bare host[:port] address.
 */
export function _parseAddrFromStr(addr: string): HTTPOptions {
  // A leading ":" means "all interfaces on this port".
  const host = addr.startsWith(":") ? `0.0.0.0${addr}` : addr;
  let url: URL;
  try {
    url = new URL(`http://${host}`);
  } catch {
    throw new TypeError("Invalid address.");
  }
  // Reject anything beyond host:port (credentials, path, query, fragment).
  const hasExtras =
    Boolean(url.username) ||
    Boolean(url.password) ||
    url.pathname != "/" ||
    Boolean(url.search) ||
    Boolean(url.hash);
  if (hasExtras) {
    throw new TypeError("Invalid address.");
  }
  return {
    hostname: url.hostname,
    port: url.port === "" ? 80 : Number(url.port),
  };
}
/**
 * Create a HTTP server
 *
 *     import { serve } from "https://deno.land/std/http/server.ts";
 *     const body = "Hello World\n";
 *     const server = serve({ port: 8000 });
 *     for await (const req of server) {
 *       req.respond({ body });
 *     }
 */
export function serve(addr: string | HTTPOptions): Server {
  // String addresses are parsed into listen options first.
  const options = typeof addr === "string" ? _parseAddrFromStr(addr) : addr;
  return new Server(Deno.listen(options));
}
/**
* Start an HTTP server with given options and request handler
*
* const body = "Hello World\n";
* const options = { port: 8000 };
* listenAndServe(options, (req) => {
* req.respond({ body });
* });
*
* @param options Server configuration
* @param handler Request handler
*/
export async function listenAndServe(
addr: string | HTTPOptions,
handler: (req: ServerRequest) => void
): Promise<void> {
const server = serve(addr);
for await (const request of server) {
handler(request);
}
}
/** Options for creating an HTTPS server. */
export type HTTPSOptions = Omit<Deno.ListenTlsOptions, "transport">;
/**
 * Create an HTTPS server with given options
 *
 *     const body = "Hello HTTPS";
 *     const options = {
 *       hostname: "localhost",
 *       port: 443,
 *       certFile: "./path/to/localhost.crt",
 *       keyFile: "./path/to/localhost.key",
 *     };
 *     for await (const req of serveTLS(options)) {
 *       req.respond({ body });
 *     }
 *
 * @param options Server configuration
 * @return Async iterable server instance for incoming requests
 */
export function serveTLS(options: HTTPSOptions): Server {
  // Force TCP transport; all other settings come from the caller.
  const listener = Deno.listenTls({ ...options, transport: "tcp" });
  return new Server(listener);
}
/**
 * Start an HTTPS server with given options and request handler
 *
 *     const body = "Hello HTTPS";
 *     const options = {
 *       hostname: "localhost",
 *       port: 443,
 *       certFile: "./path/to/localhost.crt",
 *       keyFile: "./path/to/localhost.key",
 *     };
 *     listenAndServeTLS(options, (req) => {
 *       req.respond({ body });
 *     });
 *
 * @param options Server configuration
 * @param handler Request handler
 */
export async function listenAndServeTLS(
  options: HTTPSOptions,
  handler: (req: ServerRequest) => void
): Promise<void> {
  // Invoke the handler for every request the TLS server yields.
  for await (const request of serveTLS(options)) {
    handler(request);
  }
}
/**
* Interface of HTTP server response.
* If body is a Reader, response would be chunked.
* If body is a string, it would be UTF-8 encoded by default.
*/
export interface Response {
status?: number;
headers?: Headers;
body?: Uint8Array | Deno.Reader | string;
trailers?: () => Promise<Headers> | Headers;
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/deno-001/full/input/io/bufio.ts | TypeScript | // Based on https://github.com/golang/go/blob/891682/src/bufio/bufio.go
// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
type Reader = Deno.Reader;
type Writer = Deno.Writer;
type WriterSync = Deno.WriterSync;
import { copyBytes } from "../bytes/mod";
import { assert } from "../_util/assert";
const DEFAULT_BUF_SIZE = 4096;
const MIN_BUF_SIZE = 16;
const MAX_CONSECUTIVE_EMPTY_READS = 100;
const CR = "\r".charCodeAt(0);
const LF = "\n".charCodeAt(0);
/** Thrown when a buffer fills before a read completes; `partial` holds the
 * bytes read so far. */
export class BufferFullError extends Error {
  name = "BufferFullError";
  partial: Uint8Array;
  constructor(partial: Uint8Array) {
    super("Buffer full");
    this.partial = partial;
  }
}
/** Thrown when a read hits an unexpected EOF after some data was already
 * consumed; `partial`, when set by the thrower, holds that data. */
export class PartialReadError extends Deno.errors.UnexpectedEof {
  name = "PartialReadError";
  // Bytes successfully read before the unexpected EOF, if any.
  partial?: Uint8Array;
  constructor() {
    super("Encountered UnexpectedEof, data only partially read");
  }
}
/** Result type returned by BufReader.readLine(). */
export interface ReadLineResult {
  /** Line content, excluding the trailing CR/LF (unless the line was truncated). */
  line: Uint8Array;
  /** True when the line was longer than the buffer and only a prefix is returned. */
  more: boolean;
}
/** BufReader implements buffering for a Reader object.
 *
 * Invariant: `0 <= r <= w <= buf.byteLength`; the bytes `buf[r..w)` have been
 * read from the underlying reader but not yet consumed by the caller.
 */
export class BufReader implements Reader {
  private buf!: Uint8Array;
  private rd!: Reader; // Reader provided by caller.
  private r = 0; // buf read position.
  private w = 0; // buf write position.
  private eof = false;
  // private lastByte: number;
  // private lastCharSize: number;
  /** return new BufReader unless r is BufReader */
  static create(r: Reader, size: number = DEFAULT_BUF_SIZE): BufReader {
    return r instanceof BufReader ? r : new BufReader(r, size);
  }
  constructor(rd: Reader, size: number = DEFAULT_BUF_SIZE) {
    // Undersized requests are silently bumped up to MIN_BUF_SIZE.
    if (size < MIN_BUF_SIZE) {
      size = MIN_BUF_SIZE;
    }
    this._reset(new Uint8Array(size), rd);
  }
  /** Returns the size of the underlying buffer in bytes. */
  size(): number {
    return this.buf.byteLength;
  }
  /** Number of bytes currently buffered and unread. */
  buffered(): number {
    return this.w - this.r;
  }
  // Reads a new chunk into the buffer.
  // Throws if the buffer is already full, or when the source returns zero
  // bytes MAX_CONSECUTIVE_EMPTY_READS times in a row.
  private async _fill(): Promise<void> {
    // Slide existing data to beginning.
    if (this.r > 0) {
      this.buf.copyWithin(0, this.r, this.w);
      this.w -= this.r;
      this.r = 0;
    }
    if (this.w >= this.buf.byteLength) {
      throw Error("bufio: tried to fill full buffer");
    }
    // Read new data: try a limited number of times.
    for (let i = MAX_CONSECUTIVE_EMPTY_READS; i > 0; i--) {
      const rr = await this.rd.read(this.buf.subarray(this.w));
      if (rr === null) {
        this.eof = true;
        return;
      }
      assert(rr >= 0, "negative read");
      this.w += rr;
      if (rr > 0) {
        return;
      }
    }
    throw new Error(
      `No progress after ${MAX_CONSECUTIVE_EMPTY_READS} read() calls`
    );
  }
  /** Discards any buffered data, resets all state, and switches
   * the buffered reader to read from r.
   */
  reset(r: Reader): void {
    this._reset(this.buf, r);
  }
  // Shared (re)initialization used by the constructor and reset().
  // NOTE(review): `r`/`w` are NOT zeroed here, so data buffered before a
  // reset() call remains readable — verify against the reset() doc above,
  // which claims buffered data is discarded.
  private _reset(buf: Uint8Array, rd: Reader): void {
    this.buf = buf;
    this.rd = rd;
    this.eof = false;
    // this.lastByte = -1;
    // this.lastCharSize = -1;
  }
  /** reads data into p.
   * It returns the number of bytes read into p.
   * The bytes are taken from at most one Read on the underlying Reader,
   * hence n may be less than len(p).
   * To read exactly len(p) bytes, use io.ReadFull(b, p).
   */
  async read(p: Uint8Array): Promise<number | null> {
    let rr: number | null = p.byteLength;
    if (p.byteLength === 0) return rr;
    if (this.r === this.w) {
      if (p.byteLength >= this.buf.byteLength) {
        // Large read, empty buffer.
        // Read directly into p to avoid copy.
        const rr = await this.rd.read(p);
        const nread = rr ?? 0;
        assert(nread >= 0, "negative read");
        // if (rr.nread > 0) {
        //   this.lastByte = p[rr.nread - 1];
        //   this.lastCharSize = -1;
        // }
        return rr;
      }
      // One read.
      // Do not use this.fill, which will loop.
      this.r = 0;
      this.w = 0;
      rr = await this.rd.read(this.buf);
      if (rr === 0 || rr === null) return rr;
      assert(rr >= 0, "negative read");
      this.w += rr;
    }
    // copy as much as we can
    const copied = copyBytes(this.buf.subarray(this.r, this.w), p, 0);
    this.r += copied;
    // this.lastByte = this.buf[this.r - 1];
    // this.lastCharSize = -1;
    return copied;
  }
  /** reads exactly `p.length` bytes into `p`.
   *
   * If successful, `p` is returned.
   *
   * If the end of the underlying stream has been reached, and there are no more
   * bytes available in the buffer, `readFull()` returns `null` instead.
   *
   * An error is thrown if some bytes could be read, but not enough to fill `p`
   * entirely before the underlying stream reported an error or EOF. Any error
   * thrown will have a `partial` property that indicates the slice of the
   * buffer that has been successfully filled with data.
   *
   * Ported from https://golang.org/pkg/io/#ReadFull
   */
  async readFull(p: Uint8Array): Promise<Uint8Array | null> {
    let bytesRead = 0;
    while (bytesRead < p.length) {
      try {
        const rr = await this.read(p.subarray(bytesRead));
        if (rr === null) {
          if (bytesRead === 0) {
            return null;
          } else {
            throw new PartialReadError();
          }
        }
        bytesRead += rr;
      } catch (err) {
        // Attach what was read so far so callers can recover partial data.
        err.partial = p.subarray(0, bytesRead);
        throw err;
      }
    }
    return p;
  }
  /** Returns the next byte [0, 255] or `null`. */
  async readByte(): Promise<number | null> {
    while (this.r === this.w) {
      if (this.eof) return null;
      await this._fill(); // buffer is empty.
    }
    const c = this.buf[this.r];
    this.r++;
    // this.lastByte = c;
    return c;
  }
  /** readString() reads until the first occurrence of delim in the input,
   * returning a string containing the data up to and including the delimiter.
   * If ReadString encounters an error before finding a delimiter,
   * it returns the data read before the error and the error itself
   * (often `null`).
   * ReadString returns err != nil if and only if the returned data does not end
   * in delim.
   * For simple uses, a Scanner may be more convenient.
   */
  async readString(delim: string): Promise<string | null> {
    if (delim.length !== 1) {
      throw new Error("Delimiter should be a single character");
    }
    const buffer = await this.readSlice(delim.charCodeAt(0));
    if (buffer === null) return null;
    return new TextDecoder().decode(buffer);
  }
  /** `readLine()` is a low-level line-reading primitive. Most callers should
   * use `readString('\n')` instead or use a Scanner.
   *
   * `readLine()` tries to return a single line, not including the end-of-line
   * bytes. If the line was too long for the buffer then `more` is set and the
   * beginning of the line is returned. The rest of the line will be returned
   * from future calls. `more` will be false when returning the last fragment
   * of the line. The returned buffer is only valid until the next call to
   * `readLine()`.
   *
   * The text returned from ReadLine does not include the line end ("\r\n" or
   * "\n").
   *
   * When the end of the underlying stream is reached, the final bytes in the
   * stream are returned. No indication or error is given if the input ends
   * without a final line end. When there are no more trailing bytes to read,
   * `readLine()` returns `null`.
   *
   * Calling `unreadByte()` after `readLine()` will always unread the last byte
   * read (possibly a character belonging to the line end) even if that byte is
   * not part of the line returned by `readLine()`.
   */
  async readLine(): Promise<ReadLineResult | null> {
    let line: Uint8Array | null;
    try {
      line = await this.readSlice(LF);
    } catch (err) {
      let { partial } = err;
      assert(
        partial instanceof Uint8Array,
        "bufio: caught error from `readSlice()` without `partial` property"
      );
      // Don't throw if `readSlice()` failed with `BufferFullError`, instead we
      // just return whatever is available and set the `more` flag.
      if (!(err instanceof BufferFullError)) {
        throw err;
      }
      // Handle the case where "\r\n" straddles the buffer.
      if (
        !this.eof &&
        partial.byteLength > 0 &&
        partial[partial.byteLength - 1] === CR
      ) {
        // Put the '\r' back on buf and drop it from line.
        // Let the next call to ReadLine check for "\r\n".
        assert(
          this.r > 0,
          "bufio: tried to rewind past start of buffer"
        );
        this.r--;
        partial = partial.subarray(0, partial.byteLength - 1);
      }
      return { line: partial, more: !this.eof };
    }
    if (line === null) {
      return null;
    }
    if (line.byteLength === 0) {
      return { line, more: false };
    }
    // Strip the trailing "\n" (and a preceding "\r", if present).
    if (line[line.byteLength - 1] == LF) {
      let drop = 1;
      if (line.byteLength > 1 && line[line.byteLength - 2] === CR) {
        drop = 2;
      }
      line = line.subarray(0, line.byteLength - drop);
    }
    return { line, more: false };
  }
  /** `readSlice()` reads until the first occurrence of `delim` in the input,
   * returning a slice pointing at the bytes in the buffer. The bytes stop
   * being valid at the next read.
   *
   * If `readSlice()` encounters an error before finding a delimiter, or the
   * buffer fills without finding a delimiter, it throws an error with a
   * `partial` property that contains the entire buffer.
   *
   * If `readSlice()` encounters the end of the underlying stream and there are
   * any bytes left in the buffer, the rest of the buffer is returned. In other
   * words, EOF is always treated as a delimiter. Once the buffer is empty,
   * it returns `null`.
   *
   * Because the data returned from `readSlice()` will be overwritten by the
   * next I/O operation, most clients should use `readString()` instead.
   */
  async readSlice(delim: number): Promise<Uint8Array | null> {
    let s = 0; // search start index
    let slice: Uint8Array | undefined;
    while (true) {
      // Search buffer.
      let i = this.buf.subarray(this.r + s, this.w).indexOf(delim);
      if (i >= 0) {
        i += s;
        slice = this.buf.subarray(this.r, this.r + i + 1);
        this.r += i + 1;
        break;
      }
      // EOF?
      if (this.eof) {
        if (this.r === this.w) {
          return null;
        }
        slice = this.buf.subarray(this.r, this.w);
        this.r = this.w;
        break;
      }
      // Buffer full?
      if (this.buffered() >= this.buf.byteLength) {
        this.r = this.w;
        // #4521 The internal buffer should not be reused across reads because it causes corruption of data.
        const oldbuf = this.buf;
        const newbuf = this.buf.slice(0);
        this.buf = newbuf;
        throw new BufferFullError(oldbuf);
      }
      s = this.w - this.r; // do not rescan area we scanned before
      // Buffer is not full.
      try {
        await this._fill();
      } catch (err) {
        err.partial = slice;
        throw err;
      }
    }
    // Handle last byte, if any.
    // const i = slice.byteLength - 1;
    // if (i >= 0) {
    //   this.lastByte = slice[i];
    //   this.lastCharSize = -1
    // }
    return slice;
  }
  /** `peek()` returns the next `n` bytes without advancing the reader. The
   * bytes stop being valid at the next read call.
   *
   * When the end of the underlying stream is reached, but there are unread
   * bytes left in the buffer, those bytes are returned. If there are no bytes
   * left in the buffer, it returns `null`.
   *
   * If an error is encountered before `n` bytes are available, `peek()` throws
   * an error with the `partial` property set to a slice of the buffer that
   * contains the bytes that were available before the error occurred.
   */
  async peek(n: number): Promise<Uint8Array | null> {
    if (n < 0) {
      throw Error("negative count");
    }
    let avail = this.w - this.r;
    while (avail < n && avail < this.buf.byteLength && !this.eof) {
      try {
        await this._fill();
      } catch (err) {
        err.partial = this.buf.subarray(this.r, this.w);
        throw err;
      }
      avail = this.w - this.r;
    }
    if (avail === 0 && this.eof) {
      return null;
    } else if (avail < n && this.eof) {
      return this.buf.subarray(this.r, this.r + avail);
    } else if (avail < n) {
      throw new BufferFullError(this.buf.subarray(this.r, this.w));
    }
    return this.buf.subarray(this.r, this.r + n);
  }
}
/** Shared buffer bookkeeping for the buffered writer implementations. */
abstract class AbstractBufBase {
  buf!: Uint8Array;
  usedBufferBytes = 0;
  err: Error | null = null;
  /** Total capacity of the internal buffer in bytes. */
  size(): number {
    const { byteLength } = this.buf;
    return byteLength;
  }
  /** Bytes of free space remaining in the buffer. */
  available(): number {
    const capacity = this.buf.byteLength;
    return capacity - this.usedBufferBytes;
  }
  /** Bytes already staged in the current buffer. */
  buffered(): number {
    return this.usedBufferBytes;
  }
}
/** BufWriter implements buffering for an deno.Writer object.
 * If an error occurs writing to a Writer, no more data will be
 * accepted and all subsequent writes, and flush(), will return the error.
 * After all data has been written, the client should call the
 * flush() method to guarantee all data has been forwarded to
 * the underlying deno.Writer.
 */
export class BufWriter extends AbstractBufBase implements Writer {
  /** return new BufWriter unless writer is BufWriter */
  static create(writer: Writer, size: number = DEFAULT_BUF_SIZE): BufWriter {
    return writer instanceof BufWriter
      ? writer
      : new BufWriter(writer, size);
  }
  constructor(private writer: Writer, size: number = DEFAULT_BUF_SIZE) {
    super();
    // Non-positive sizes fall back to the default.
    if (size <= 0) {
      size = DEFAULT_BUF_SIZE;
    }
    this.buf = new Uint8Array(size);
  }
  /** Discards any unflushed buffered data, clears any error, and
   * resets buffer to write its output to w.
   */
  reset(w: Writer): void {
    this.err = null;
    this.usedBufferBytes = 0;
    this.writer = w;
  }
  /** Flush writes any buffered data to the underlying io.Writer. */
  async flush(): Promise<void> {
    if (this.err !== null) throw this.err;
    if (this.usedBufferBytes === 0) return;
    try {
      await Deno.writeAll(
        this.writer,
        this.buf.subarray(0, this.usedBufferBytes)
      );
    } catch (e) {
      // Record the error so every subsequent write()/flush() rethrows it.
      this.err = e;
      throw e;
    }
    // Allocate a fresh buffer rather than reusing the flushed one — presumably
    // so slices handed to the writer cannot be clobbered later; TODO confirm.
    this.buf = new Uint8Array(this.buf.length);
    this.usedBufferBytes = 0;
  }
  /** Writes the contents of `data` into the buffer. If the contents won't fully
   * fit into the buffer, those bytes that can are copied into the buffer, the
   * buffer is the flushed to the writer and the remaining bytes are copied into
   * the now empty buffer.
   *
   * @return the number of bytes written to the buffer.
   */
  async write(data: Uint8Array): Promise<number> {
    if (this.err !== null) throw this.err;
    if (data.length === 0) return 0;
    let totalBytesWritten = 0;
    let numBytesWritten = 0;
    while (data.byteLength > this.available()) {
      if (this.buffered() === 0) {
        // Large write, empty buffer.
        // Write directly from data to avoid copy.
        try {
          numBytesWritten = await this.writer.write(data);
        } catch (e) {
          this.err = e;
          throw e;
        }
      } else {
        numBytesWritten = copyBytes(
          data,
          this.buf,
          this.usedBufferBytes
        );
        this.usedBufferBytes += numBytesWritten;
        await this.flush();
      }
      totalBytesWritten += numBytesWritten;
      data = data.subarray(numBytesWritten);
    }
    numBytesWritten = copyBytes(data, this.buf, this.usedBufferBytes);
    this.usedBufferBytes += numBytesWritten;
    totalBytesWritten += numBytesWritten;
    return totalBytesWritten;
  }
}
/** BufWriterSync implements buffering for a deno.WriterSync object.
 * If an error occurs writing to a WriterSync, no more data will be
 * accepted and all subsequent writes, and flush(), will return the error.
 * After all data has been written, the client should call the
 * flush() method to guarantee all data has been forwarded to
 * the underlying deno.WriterSync.
 */
export class BufWriterSync extends AbstractBufBase implements WriterSync {
  /** return new BufWriterSync unless writer is BufWriterSync */
  static create(
    writer: WriterSync,
    size: number = DEFAULT_BUF_SIZE
  ): BufWriterSync {
    return writer instanceof BufWriterSync
      ? writer
      : new BufWriterSync(writer, size);
  }
  constructor(private writer: WriterSync, size: number = DEFAULT_BUF_SIZE) {
    super();
    // Non-positive sizes fall back to the default.
    if (size <= 0) {
      size = DEFAULT_BUF_SIZE;
    }
    this.buf = new Uint8Array(size);
  }
  /** Discards any unflushed buffered data, clears any error, and
   * resets buffer to write its output to w.
   */
  reset(w: WriterSync): void {
    this.err = null;
    this.usedBufferBytes = 0;
    this.writer = w;
  }
  /** Flush writes any buffered data to the underlying io.WriterSync. */
  flush(): void {
    if (this.err !== null) throw this.err;
    if (this.usedBufferBytes === 0) return;
    try {
      Deno.writeAllSync(
        this.writer,
        this.buf.subarray(0, this.usedBufferBytes)
      );
    } catch (e) {
      // Record the error so every subsequent writeSync()/flush() rethrows it.
      this.err = e;
      throw e;
    }
    // Allocate a fresh buffer rather than reusing the flushed one.
    this.buf = new Uint8Array(this.buf.length);
    this.usedBufferBytes = 0;
  }
  /** Writes the contents of `data` into the buffer. If the contents won't fully
   * fit into the buffer, those bytes that can are copied into the buffer, the
   * buffer is the flushed to the writer and the remaining bytes are copied into
   * the now empty buffer.
   *
   * @return the number of bytes written to the buffer.
   */
  writeSync(data: Uint8Array): number {
    if (this.err !== null) throw this.err;
    if (data.length === 0) return 0;
    let totalBytesWritten = 0;
    let numBytesWritten = 0;
    while (data.byteLength > this.available()) {
      if (this.buffered() === 0) {
        // Large write, empty buffer.
        // Write directly from data to avoid copy.
        try {
          numBytesWritten = this.writer.writeSync(data);
        } catch (e) {
          this.err = e;
          throw e;
        }
      } else {
        numBytesWritten = copyBytes(
          data,
          this.buf,
          this.usedBufferBytes
        );
        this.usedBufferBytes += numBytesWritten;
        this.flush();
      }
      totalBytesWritten += numBytesWritten;
      data = data.subarray(numBytesWritten);
    }
    numBytesWritten = copyBytes(data, this.buf, this.usedBufferBytes);
    this.usedBufferBytes += numBytesWritten;
    totalBytesWritten += numBytesWritten;
    return totalBytesWritten;
  }
}
/** Generate longest proper prefix which is also suffix array (the KMP
 * "failure function").
 *
 * lps[i] is the length of the longest proper prefix of pat[0..i] that is
 * also a suffix of it; readDelim() uses it to resume matching after a
 * mismatch without rescanning.
 *
 * @param pat the delimiter pattern bytes
 * @returns the LPS table, same length as `pat`
 */
function createLPS(pat: Uint8Array): Uint8Array {
  const lps = new Uint8Array(pat.length);
  lps[0] = 0;
  let prefixEnd = 0;
  let i = 1;
  while (i < lps.length) {
    if (pat[i] == pat[prefixEnd]) {
      prefixEnd++;
      lps[i] = prefixEnd;
      i++;
    } else if (prefixEnd === 0) {
      lps[i] = 0;
      i++;
    } else {
      // BUG FIX: fall back to the previously *computed table value*, not the
      // raw pattern byte. The old `prefixEnd = pat[prefixEnd - 1]` read an
      // arbitrary byte value (or undefined past the end), corrupting the
      // table and looping forever for delimiters with a repeated prefix
      // (e.g. "aab"). It only appeared to work for delimiters like "\r\n"
      // that never take this branch.
      prefixEnd = lps[prefixEnd - 1];
    }
  }
  return lps;
}
/** Read delimited bytes from a Reader.
 *
 * Yields each chunk of bytes preceding an occurrence of `delim` (the
 * delimiter itself is not included), then the final chunk at EOF.
 */
export async function* readDelim(
  reader: Reader,
  delim: Uint8Array
): AsyncIterableIterator<Uint8Array> {
  // Avoid unicode problems
  const delimLen = delim.length;
  const delimLPS = createLPS(delim);
  let inputBuffer = new Deno.Buffer();
  const inspectArr = new Uint8Array(Math.max(1024, delimLen + 1));
  // Modified KMP
  let inspectIndex = 0;
  let matchIndex = 0;
  while (true) {
    const result = await reader.read(inspectArr);
    if (result === null) {
      // Yield last chunk.
      yield inputBuffer.bytes();
      return;
    }
    if ((result as number) < 0) {
      // Discard all remaining and silently fail.
      return;
    }
    const sliceRead = inspectArr.subarray(0, result as number);
    await Deno.writeAll(inputBuffer, sliceRead);
    let sliceToProcess = inputBuffer.bytes();
    while (inspectIndex < sliceToProcess.length) {
      if (sliceToProcess[inspectIndex] === delim[matchIndex]) {
        inspectIndex++;
        matchIndex++;
        if (matchIndex === delimLen) {
          // Full match
          const matchEnd = inspectIndex - delimLen;
          const readyBytes = sliceToProcess.subarray(0, matchEnd);
          // Copy
          const pendingBytes = sliceToProcess.slice(inspectIndex);
          yield readyBytes;
          // Reset match, different from KMP.
          sliceToProcess = pendingBytes;
          inspectIndex = 0;
          matchIndex = 0;
        }
      } else {
        if (matchIndex === 0) {
          inspectIndex++;
        } else {
          // Mismatch mid-pattern: fall back per the LPS (failure) table.
          matchIndex = delimLPS[matchIndex - 1];
        }
      }
    }
    // Keep inspectIndex and matchIndex.
    inputBuffer = new Deno.Buffer(sliceToProcess);
  }
}
/** Read delimited strings from a Reader. */
export async function* readStringDelim(
  reader: Reader,
  delim: string
): AsyncIterableIterator<string> {
  // Encode the delimiter once up front; decode each byte chunk on the way out.
  const delimBytes = new TextEncoder().encode(delim);
  const textDecoder = new TextDecoder();
  for await (const chunk of readDelim(reader, delimBytes)) {
    yield textDecoder.decode(chunk);
  }
}
/** Read strings line-by-line from a Reader. */
// eslint-disable-next-line require-await
export async function* readLines(
  reader: Reader
): AsyncIterableIterator<string> {
  // A line is simply a "\n"-delimited string; trailing "\r" is NOT stripped.
  yield* readStringDelim(reader, "\n");
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/deno-001/full/input/textproto/mod.ts | TypeScript | // Based on https://github.com/golang/go/tree/master/src/net/textproto
// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
import type { BufReader } from "../io/bufio";
import { concat } from "../bytes/mod";
import { decode } from "../encoding/utf8";
// FROM https://github.com/denoland/deno/blob/b34628a26ab0187a827aa4ebe256e23178e25d39/cli/js/web/headers.ts#L9
// Matches bytes that are not legal in an HTTP header value (anything outside
// HTAB / printable ASCII / 0x80-0xff); such characters get percent-encoded.
const invalidHeaderCharRegex = /[^\t\x20-\x7e\x80-\xff]/g;
/** Decode a byte buffer to a string, mapping null/undefined to "". */
function str(buf: Uint8Array | null | undefined): string {
  return buf == null ? "" : decode(buf);
}
/** Return the UTF-16 code unit of the first character of `s`. */
function charCode(s: string): number {
  const code = s.charCodeAt(0);
  return code;
}
/** Reads text-protocol (MIME-style) data from a BufReader. */
export class TextProtoReader {
  constructor(readonly r: BufReader) {}
  /** readLine() reads a single line from the TextProtoReader,
   * eliding the final \n or \r\n from the returned string.
   */
  async readLine(): Promise<string | null> {
    const s = await this.readLineSlice();
    if (s === null) return null;
    return str(s);
  }
  /** ReadMIMEHeader reads a MIME-style header from r.
   * The header is a sequence of possibly continued Key: Value lines
   * ending in a blank line.
   * The returned map m maps CanonicalMIMEHeaderKey(key) to a
   * sequence of values in the same order encountered in the input.
   *
   * For example, consider this input:
   *
   *  My-Key: Value 1
   *  Long-Key: Even
   *         Longer Value
   *  My-Key: Value 2
   *
   * Given that input, ReadMIMEHeader returns the map:
   *
   *  map[string][]string{
   *    "My-Key": {"Value 1", "Value 2"},
   *    "Long-Key": {"Even Longer Value"},
   *  }
   */
  async readMIMEHeader(): Promise<Headers | null> {
    const m = new Headers();
    let line: Uint8Array | undefined;
    // The first line cannot start with a leading space.
    let buf = await this.r.peek(1);
    if (buf === null) {
      return null;
    } else if (buf[0] == charCode(" ") || buf[0] == charCode("\t")) {
      line = (await this.readLineSlice()) as Uint8Array;
    }
    buf = await this.r.peek(1);
    if (buf === null) {
      throw new Deno.errors.UnexpectedEof();
    } else if (buf[0] == charCode(" ") || buf[0] == charCode("\t")) {
      throw new Deno.errors.InvalidData(
        `malformed MIME header initial line: ${str(line)}`
      );
    }
    while (true) {
      const kv = await this.readLineSlice(); // readContinuedLineSlice
      if (kv === null) throw new Deno.errors.UnexpectedEof();
      if (kv.byteLength === 0) return m;
      // Key ends at first colon
      let i = kv.indexOf(charCode(":"));
      if (i < 0) {
        throw new Deno.errors.InvalidData(
          `malformed MIME header line: ${str(kv)}`
        );
      }
      //let key = canonicalMIMEHeaderKey(kv.subarray(0, endKey));
      const key = str(kv.subarray(0, i));
      // As per RFC 7230 field-name is a token,
      // tokens consist of one or more chars.
      // We could throw `Deno.errors.InvalidData` here,
      // but better to be liberal in what we
      // accept, so if we get an empty key, skip it.
      if (key == "") {
        continue;
      }
      // Skip initial spaces in value.
      i++; // skip colon
      while (
        i < kv.byteLength &&
        (kv[i] == charCode(" ") || kv[i] == charCode("\t"))
      ) {
        i++;
      }
      const value = str(kv.subarray(i)).replace(
        invalidHeaderCharRegex,
        encodeURI
      );
      // In case of invalid header we swallow the error
      // example: "Audio Mode" => invalid due to space in the key
      try {
        m.append(key, value);
      } catch {
        // Pass
      }
    }
  }
  /** Reads a line, concatenating continuation fragments when the line is
   * longer than the BufReader's buffer; returns null at EOF. */
  async readLineSlice(): Promise<Uint8Array | null> {
    // this.closeDot();
    let line: Uint8Array | undefined;
    while (true) {
      const r = await this.r.readLine();
      if (r === null) return null;
      const { line: l, more } = r;
      // Avoid the copy if the first call produced a full line.
      if (!line && !more) {
        // TODO(ry):
        // This skipSpace() is definitely misplaced, but I don't know where it
        // comes from nor how to fix it.
        if (this.skipSpace(l) === 0) {
          return new Uint8Array(0);
        }
        return l;
      }
      line = line ? concat(line, l) : l;
      if (!more) {
        break;
      }
    }
    return line;
  }
  // NOTE(review): despite its name, this returns the COUNT of non-space/tab
  // bytes in `l` — whitespace is skipped while everything else is counted.
  skipSpace(l: Uint8Array): number {
    let n = 0;
    for (let i = 0; i < l.length; i++) {
      if (l[i] === charCode(" ") || l[i] === charCode("\t")) {
        continue;
      }
      n++;
    }
    return n;
  }
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/deno-001/full/output/entry.js | JavaScript | function concat(origin, b) {
const output = new Uint8Array(origin.length + b.length);
output.set(origin, 0);
output.set(b, origin.length);
return output;
}
/** Copy as many bytes of `src` into `dst` at `off` as fit; return the count copied. */
function copyBytes(src, dst, off = 0) {
    // Clamp the offset into [0, dst.byteLength].
    const start = Math.min(Math.max(off, 0), dst.byteLength);
    const room = dst.byteLength - start;
    const chunk = src.byteLength > room ? src.subarray(0, room) : src;
    dst.set(chunk, start);
    return chunk.byteLength;
}
// Error thrown by assert() below when an internal invariant fails.
class DenoStdInternalError extends Error {
    constructor(message){
        super(message);
        this.name = "DenoStdInternalError";
    }
}
/** Throw DenoStdInternalError(msg) when `expr` is falsy. */
function assert(expr, msg = "") {
    if (expr) {
        return;
    }
    throw new DenoStdInternalError(msg);
}
// Buffer sizing and line-ending constants shared by the reader/writer classes.
const DEFAULT_BUF_SIZE = 4096;
const MIN_BUF_SIZE = 16;
const MAX_CONSECUTIVE_EMPTY_READS = 100;
const CR = "\r".charCodeAt(0);
const LF = "\n".charCodeAt(0);
/** Thrown when the internal buffer fills before a delimiter is found;
 * `partial` holds the bytes buffered at the time of overflow. */
class BufferFullError extends Error {
    partial;
    name;
    constructor(partial) {
        super("Buffer full");
        this.partial = partial;
        this.name = "BufferFullError";
    }
}
// Thrown when EOF interrupts a read that had already consumed some bytes;
// `partial` is attached by the code that throws/propagates it.
class PartialReadError extends Deno.errors.UnexpectedEof {
    name = "PartialReadError";
    partial;
    constructor(){
        super("Encountered UnexpectedEof, data only partially read");
    }
}
// Type-stripped bundle of the BufReader class from std/io/bufio (see the
// TypeScript original earlier in this file); the logic is unchanged.
// Invariant: 0 <= r <= w <= buf.byteLength; buf[r..w) is buffered but unread.
class BufReader {
    buf;
    rd;
    r = 0;
    w = 0;
    eof = false;
    static create(r, size = DEFAULT_BUF_SIZE) {
        return r instanceof BufReader ? r : new BufReader(r, size);
    }
    constructor(rd, size = DEFAULT_BUF_SIZE){
        if (size < MIN_BUF_SIZE) size = MIN_BUF_SIZE;
        this._reset(new Uint8Array(size), rd);
    }
    size() {
        return this.buf.byteLength;
    }
    buffered() {
        return this.w - this.r;
    }
    // Refill the buffer from the source; throws after
    // MAX_CONSECUTIVE_EMPTY_READS zero-byte reads in a row.
    async _fill() {
        if (this.r > 0) {
            this.buf.copyWithin(0, this.r, this.w);
            this.w -= this.r;
            this.r = 0;
        }
        if (this.w >= this.buf.byteLength) throw Error("bufio: tried to fill full buffer");
        for(let i = MAX_CONSECUTIVE_EMPTY_READS; i > 0; i--){
            const rr = await this.rd.read(this.buf.subarray(this.w));
            if (rr === null) {
                this.eof = true;
                return;
            }
            assert(rr >= 0, "negative read");
            this.w += rr;
            if (rr > 0) return;
        }
        throw new Error(`No progress after ${MAX_CONSECUTIVE_EMPTY_READS} read() calls`);
    }
    reset(r) {
        this._reset(this.buf, r);
    }
    // NOTE(review): r/w are not zeroed here (mirrors the TS source above).
    _reset(buf, rd) {
        this.buf = buf;
        this.rd = rd;
        this.eof = false;
    }
    // Read into p from the buffer (or directly from the source for reads
    // larger than the buffer).
    async read(p) {
        let rr = p.byteLength;
        if (p.byteLength === 0) return rr;
        if (this.r === this.w) {
            if (p.byteLength >= this.buf.byteLength) {
                const rr = await this.rd.read(p);
                const nread = rr ?? 0;
                assert(nread >= 0, "negative read");
                return rr;
            }
            this.r = 0;
            this.w = 0;
            rr = await this.rd.read(this.buf);
            if (rr === 0 || rr === null) return rr;
            assert(rr >= 0, "negative read");
            this.w += rr;
        }
        const copied = copyBytes(this.buf.subarray(this.r, this.w), p, 0);
        this.r += copied;
        return copied;
    }
    // Read exactly p.length bytes, or throw PartialReadError with `partial`.
    async readFull(p) {
        let bytesRead = 0;
        while(bytesRead < p.length)try {
            const rr = await this.read(p.subarray(bytesRead));
            if (rr === null) {
                if (bytesRead === 0) return null;
                else throw new PartialReadError();
            }
            bytesRead += rr;
        } catch (err) {
            err.partial = p.subarray(0, bytesRead);
            throw err;
        }
        return p;
    }
    async readByte() {
        while(this.r === this.w){
            if (this.eof) return null;
            await this._fill();
        }
        const c = this.buf[this.r];
        this.r++;
        return c;
    }
    async readString(delim) {
        if (delim.length !== 1) throw new Error("Delimiter should be a single character");
        const buffer = await this.readSlice(delim.charCodeAt(0));
        if (buffer === null) return null;
        return new TextDecoder().decode(buffer);
    }
    // Line read that reports truncation via `more` instead of throwing.
    async readLine() {
        let line;
        try {
            line = await this.readSlice(LF);
        } catch (err) {
            let { partial } = err;
            assert(partial instanceof Uint8Array, "bufio: caught error from `readSlice()` without `partial` property");
            if (!(err instanceof BufferFullError)) throw err;
            if (!this.eof && partial.byteLength > 0 && partial[partial.byteLength - 1] === CR) {
                assert(this.r > 0, "bufio: tried to rewind past start of buffer");
                this.r--;
                partial = partial.subarray(0, partial.byteLength - 1);
            }
            return {
                line: partial,
                more: !this.eof
            };
        }
        if (line === null) return null;
        if (line.byteLength === 0) return {
            line,
            more: false
        };
        if (line[line.byteLength - 1] == LF) {
            let drop = 1;
            if (line.byteLength > 1 && line[line.byteLength - 2] === CR) drop = 2;
            line = line.subarray(0, line.byteLength - drop);
        }
        return {
            line,
            more: false
        };
    }
    // Read until `delim` (inclusive); throws BufferFullError if the buffer
    // fills first, attaching the old buffer as `partial`.
    async readSlice(delim) {
        let s = 0;
        let slice;
        while(true){
            let i = this.buf.subarray(this.r + s, this.w).indexOf(delim);
            if (i >= 0) {
                i += s;
                slice = this.buf.subarray(this.r, this.r + i + 1);
                this.r += i + 1;
                break;
            }
            if (this.eof) {
                if (this.r === this.w) return null;
                slice = this.buf.subarray(this.r, this.w);
                this.r = this.w;
                break;
            }
            if (this.buffered() >= this.buf.byteLength) {
                this.r = this.w;
                // Hand out the old buffer and keep a fresh copy so previously
                // returned slices are not corrupted by later reads (#4521).
                const oldbuf = this.buf;
                const newbuf = this.buf.slice(0);
                this.buf = newbuf;
                throw new BufferFullError(oldbuf);
            }
            s = this.w - this.r;
            try {
                await this._fill();
            } catch (err) {
                err.partial = slice;
                throw err;
            }
        }
        return slice;
    }
    // Return the next n bytes without consuming them.
    async peek(n) {
        if (n < 0) throw Error("negative count");
        let avail = this.w - this.r;
        while(avail < n && avail < this.buf.byteLength && !this.eof){
            try {
                await this._fill();
            } catch (err) {
                err.partial = this.buf.subarray(this.r, this.w);
                throw err;
            }
            avail = this.w - this.r;
        }
        if (avail === 0 && this.eof) return null;
        else if (avail < n && this.eof) return this.buf.subarray(this.r, this.r + avail);
        else if (avail < n) throw new BufferFullError(this.buf.subarray(this.r, this.w));
        return this.buf.subarray(this.r, this.r + n);
    }
}
/** Shared buffer bookkeeping for BufWriter. */
class AbstractBufBase {
    buf;
    usedBufferBytes = 0;
    err = null;
    /** Total capacity of the internal buffer in bytes. */
    size() {
        const { byteLength } = this.buf;
        return byteLength;
    }
    /** Bytes of free space remaining in the buffer. */
    available() {
        return this.size() - this.usedBufferBytes;
    }
    /** Bytes already staged in the buffer. */
    buffered() {
        return this.usedBufferBytes;
    }
}
// Type-stripped bundle of the BufWriter class from std/io/bufio; after a
// write error `err` becomes sticky and all later calls rethrow it.
class BufWriter extends AbstractBufBase {
    writer;
    static create(writer, size = DEFAULT_BUF_SIZE) {
        return writer instanceof BufWriter ? writer : new BufWriter(writer, size);
    }
    constructor(writer, size = DEFAULT_BUF_SIZE){
        super(), this.writer = writer;
        if (size <= 0) size = DEFAULT_BUF_SIZE;
        this.buf = new Uint8Array(size);
    }
    // Drop buffered data, clear the sticky error, and retarget the writer.
    reset(w) {
        this.err = null;
        this.usedBufferBytes = 0;
        this.writer = w;
    }
    // Push all buffered bytes to the underlying writer.
    async flush() {
        if (this.err !== null) throw this.err;
        if (this.usedBufferBytes === 0) return;
        try {
            await Deno.writeAll(this.writer, this.buf.subarray(0, this.usedBufferBytes));
        } catch (e) {
            this.err = e;
            throw e;
        }
        // Allocate a fresh buffer instead of reusing the flushed one.
        this.buf = new Uint8Array(this.buf.length);
        this.usedBufferBytes = 0;
    }
    // Buffer `data`, flushing (or writing straight through when the buffer is
    // empty and data is oversized) as needed; returns total bytes accepted.
    async write(data) {
        if (this.err !== null) throw this.err;
        if (data.length === 0) return 0;
        let totalBytesWritten = 0;
        let numBytesWritten = 0;
        while(data.byteLength > this.available()){
            if (this.buffered() === 0) try {
                numBytesWritten = await this.writer.write(data);
            } catch (e) {
                this.err = e;
                throw e;
            }
            else {
                numBytesWritten = copyBytes(data, this.buf, this.usedBufferBytes);
                this.usedBufferBytes += numBytesWritten;
                await this.flush();
            }
            totalBytesWritten += numBytesWritten;
            data = data.subarray(numBytesWritten);
        }
        numBytesWritten = copyBytes(data, this.buf, this.usedBufferBytes);
        this.usedBufferBytes += numBytesWritten;
        totalBytesWritten += numBytesWritten;
        return totalBytesWritten;
    }
}
// Module-level UTF-8 codec instances shared by encode()/decode().
const encoder = new TextEncoder();
function encode(input) {
    return encoder.encode(input);
}
const decoder = new TextDecoder();
function decode(input) {
    return decoder.decode(input);
}
// Matches bytes not allowed in HTTP header values; replaced via encodeURI below.
const invalidHeaderCharRegex = /[^\t\x20-\x7e\x80-\xff]/g;
// Decode bytes to a string, treating null/undefined as "".
function str(buf) {
    if (buf == null) return "";
    else return decode(buf);
}
// First UTF-16 code unit of a one-character string.
function charCode(s) {
    return s.charCodeAt(0);
}
// Type-stripped bundle of textproto's TextProtoReader (see the TS source
// earlier in this file); reads MIME-style headers from a BufReader.
class TextProtoReader {
    r;
    constructor(r){
        this.r = r;
    }
    // Read one line without the trailing CR/LF; null at EOF.
    async readLine() {
        const s = await this.readLineSlice();
        if (s === null) return null;
        return str(s);
    }
    // Parse "Key: value" lines into a Headers object until a blank line.
    async readMIMEHeader() {
        const m = new Headers();
        let line;
        let buf = await this.r.peek(1);
        if (buf === null) return null;
        else if (buf[0] == charCode(" ") || buf[0] == charCode("\t")) line = await this.readLineSlice();
        buf = await this.r.peek(1);
        if (buf === null) throw new Deno.errors.UnexpectedEof();
        else if (buf[0] == charCode(" ") || buf[0] == charCode("\t")) throw new Deno.errors.InvalidData(`malformed MIME header initial line: ${str(line)}`);
        while(true){
            const kv = await this.readLineSlice();
            if (kv === null) throw new Deno.errors.UnexpectedEof();
            if (kv.byteLength === 0) return m;
            let i = kv.indexOf(charCode(":"));
            if (i < 0) throw new Deno.errors.InvalidData(`malformed MIME header line: ${str(kv)}`);
            const key = str(kv.subarray(0, i));
            if (key == "") continue;
            i++;
            while(i < kv.byteLength && (kv[i] == charCode(" ") || kv[i] == charCode("\t")))i++;
            const value = str(kv.subarray(i)).replace(invalidHeaderCharRegex, encodeURI);
            // Invalid header names (e.g. containing spaces) are skipped silently.
            try {
                m.append(key, value);
            } catch {}
        }
    }
    // Accumulate continuation fragments until a full line is available.
    async readLineSlice() {
        let line;
        while(true){
            const r = await this.r.readLine();
            if (r === null) return null;
            const { line: l, more } = r;
            if (!line && !more) {
                if (this.skipSpace(l) === 0) return new Uint8Array(0);
                return l;
            }
            line = line ? concat(line, l) : l;
            if (!more) break;
        }
        return line;
    }
    // NOTE(review): despite the name, this counts the non-space/tab bytes in l.
    skipSpace(l) {
        let n = 0;
        for(let i = 0; i < l.length; i++){
            if (l[i] === charCode(" ") || l[i] === charCode("\t")) continue;
            n++;
        }
        return n;
    }
}
// Residue of an erased TypeScript enum: the bundler reduced `Status` to a
// no-op IIFE and an empty status-text map (entries apparently tree-shaken).
/*#__PURE__*/ (function(Status) {
    return Status;
})({});
const STATUS_TEXT = new Map([]);
/** Create a promise with its resolve/reject functions exposed as own
 * properties (the Promise executor runs synchronously, so both are set
 * before this function returns). */
function deferred() {
    let resolveFn;
    let rejectFn;
    const promise = new Promise((resolve, reject) => {
        resolveFn = resolve;
        rejectFn = reject;
    });
    return Object.assign(promise, { resolve: resolveFn, reject: rejectFn });
}
// Multiplexes several async iterators into one stream; values are yielded in
// arrival order, and `signal` is re-armed after each drain cycle.
class MuxAsyncIterator {
    iteratorCount = 0;
    yields = [];
    throws = [];
    signal = deferred();
    // Start pulling values from another source iterator.
    add(iterator) {
        ++this.iteratorCount;
        this.callIteratorNext(iterator);
    }
    // Pull one value from `iterator`, record it (or its error), fire `signal`.
    async callIteratorNext(iterator) {
        try {
            const { value, done } = await iterator.next();
            if (done) --this.iteratorCount;
            else this.yields.push({
                iterator,
                value
            });
        } catch (e) {
            this.throws.push(e);
        }
        this.signal.resolve();
    }
    async *iterate() {
        while(this.iteratorCount > 0){
            await this.signal;
            for(let i = 0; i < this.yields.length; i++){
                const { iterator, value } = this.yields[i];
                yield value;
                // Request the next value from the iterator we just drained.
                this.callIteratorNext(iterator);
            }
            if (this.throws.length) {
                for (const e of this.throws)throw e;
                this.throws.length = 0;
            }
            this.yields.length = 0;
            // Re-arm the signal for the next batch of results.
            this.signal = deferred();
        }
    }
    [Symbol.asyncIterator]() {
        return this.iterate();
    }
}
function emptyReader() {
return {
read (_) {
return Promise.resolve(null);
}
};
}
function bodyReader(contentLength, r) {
let totalRead = 0;
let finished = false;
async function read(buf) {
if (finished) return null;
let result;
const remaining = contentLength - totalRead;
if (remaining >= buf.byteLength) result = await r.read(buf);
else {
const readBuf = buf.subarray(0, remaining);
result = await r.read(readBuf);
}
if (result !== null) totalRead += result;
finished = totalRead === contentLength;
return result;
}
return {
read
};
}
function chunkedBodyReader(h, r) {
const tp = new TextProtoReader(r);
let finished = false;
const chunks = [];
async function read(buf) {
if (finished) return null;
const [chunk] = chunks;
if (chunk) {
const chunkRemaining = chunk.data.byteLength - chunk.offset;
const readLength = Math.min(chunkRemaining, buf.byteLength);
for(let i = 0; i < readLength; i++)buf[i] = chunk.data[chunk.offset + i];
chunk.offset += readLength;
if (chunk.offset === chunk.data.byteLength) {
chunks.shift();
if (await tp.readLine() === null) throw new Deno.errors.UnexpectedEof();
}
return readLength;
}
const line = await tp.readLine();
if (line === null) throw new Deno.errors.UnexpectedEof();
const [chunkSizeString] = line.split(";");
const chunkSize = parseInt(chunkSizeString, 16);
if (Number.isNaN(chunkSize) || chunkSize < 0) throw new Error("Invalid chunk size");
if (chunkSize > 0) {
if (chunkSize > buf.byteLength) {
let eof = await r.readFull(buf);
if (eof === null) throw new Deno.errors.UnexpectedEof();
const restChunk = new Uint8Array(chunkSize - buf.byteLength);
eof = await r.readFull(restChunk);
if (eof === null) throw new Deno.errors.UnexpectedEof();
else chunks.push({
offset: 0,
data: restChunk
});
return buf.byteLength;
} else {
const bufToFill = buf.subarray(0, chunkSize);
const eof = await r.readFull(bufToFill);
if (eof === null) throw new Deno.errors.UnexpectedEof();
if (await tp.readLine() === null) throw new Deno.errors.UnexpectedEof();
return chunkSize;
}
} else {
assert(chunkSize === 0);
if (await r.readLine() === null) throw new Deno.errors.UnexpectedEof();
await readTrailers(h, r);
finished = true;
return null;
}
}
return {
read
};
}
function isProhibidedForTrailer(key) {
const s = new Set([
"transfer-encoding",
"content-length",
"trailer"
]);
return s.has(key.toLowerCase());
}
async function readTrailers(headers, r) {
const trailers = parseTrailer(headers.get("trailer"));
if (trailers == null) return;
const trailerNames = [
...trailers.keys()
];
const tp = new TextProtoReader(r);
const result = await tp.readMIMEHeader();
if (result == null) throw new Deno.errors.InvalidData("Missing trailer header.");
const undeclared = [
...result.keys()
].filter((k)=>!trailerNames.includes(k));
if (undeclared.length > 0) throw new Deno.errors.InvalidData(`Undeclared trailers: ${Deno.inspect(undeclared)}.`);
for (const [k, v] of result)headers.append(k, v);
const missingTrailers = trailerNames.filter((k)=>!result.has(k));
if (missingTrailers.length > 0) throw new Deno.errors.InvalidData(`Missing trailers: ${Deno.inspect(missingTrailers)}.`);
headers.delete("trailer");
}
function parseTrailer(field) {
if (field == null) return undefined;
const trailerNames = field.split(",").map((v)=>v.trim().toLowerCase());
if (trailerNames.length === 0) throw new Deno.errors.InvalidData("Empty trailer header.");
const prohibited = trailerNames.filter((k)=>isProhibidedForTrailer(k));
if (prohibited.length > 0) throw new Deno.errors.InvalidData(`Prohibited trailer names: ${Deno.inspect(prohibited)}.`);
return new Headers(trailerNames.map((key)=>[
key,
""
]));
}
async function writeChunkedBody(w, r) {
const writer = BufWriter.create(w);
for await (const chunk of Deno.iter(r)){
if (chunk.byteLength <= 0) continue;
const start = encoder.encode(`${chunk.byteLength.toString(16)}\r\n`);
const end = encoder.encode("\r\n");
await writer.write(start);
await writer.write(chunk);
await writer.write(end);
}
const endChunk = encoder.encode("0\r\n\r\n");
await writer.write(endChunk);
}
async function writeTrailers(w, headers, trailers) {
const trailer = headers.get("trailer");
if (trailer === null) throw new TypeError("Missing trailer header.");
const transferEncoding = headers.get("transfer-encoding");
if (transferEncoding === null || !transferEncoding.match(/^chunked/)) throw new TypeError(`Trailers are only allowed for "transfer-encoding: chunked", got "transfer-encoding: ${transferEncoding}".`);
const writer = BufWriter.create(w);
const trailerNames = trailer.split(",").map((s)=>s.trim().toLowerCase());
const prohibitedTrailers = trailerNames.filter((k)=>isProhibidedForTrailer(k));
if (prohibitedTrailers.length > 0) throw new TypeError(`Prohibited trailer names: ${Deno.inspect(prohibitedTrailers)}.`);
const undeclared = [
...trailers.keys()
].filter((k)=>!trailerNames.includes(k));
if (undeclared.length > 0) throw new TypeError(`Undeclared trailers: ${Deno.inspect(undeclared)}.`);
for (const [key, value] of trailers)await writer.write(encoder.encode(`${key}: ${value}\r\n`));
await writer.write(encoder.encode("\r\n"));
await writer.flush();
}
async function writeResponse(w, r) {
const protoMajor = 1;
const protoMinor = 1;
const statusCode = r.status || 200;
const statusText = STATUS_TEXT.get(statusCode);
const writer = BufWriter.create(w);
if (!statusText) throw new Deno.errors.InvalidData("Bad status code");
if (!r.body) r.body = new Uint8Array();
if (typeof r.body === "string") r.body = encoder.encode(r.body);
let out = `HTTP/${protoMajor}.${protoMinor} ${statusCode} ${statusText}\r\n`;
const headers = r.headers ?? new Headers();
if (r.body && !headers.get("content-length")) {
if (r.body instanceof Uint8Array) out += `content-length: ${r.body.byteLength}\r\n`;
else if (!headers.get("transfer-encoding")) out += "transfer-encoding: chunked\r\n";
}
for (const [key, value] of headers)out += `${key}: ${value}\r\n`;
out += `\r\n`;
const header = encoder.encode(out);
const n = await writer.write(header);
assert(n === header.byteLength);
if (r.body instanceof Uint8Array) {
const n = await writer.write(r.body);
assert(n === r.body.byteLength);
} else if (headers.has("content-length")) {
const contentLength = headers.get("content-length");
assert(contentLength != null);
const bodyLength = parseInt(contentLength);
const n = await Deno.copy(r.body, writer);
assert(n === bodyLength);
} else await writeChunkedBody(writer, r.body);
if (r.trailers) {
const t = await r.trailers();
await writeTrailers(writer, headers, t);
}
await writer.flush();
}
class ServerRequest {
url;
method;
proto;
protoMinor;
protoMajor;
headers;
conn;
r;
w;
done = deferred();
_contentLength = undefined;
get contentLength() {
if (this._contentLength === undefined) {
const cl = this.headers.get("content-length");
if (cl) {
this._contentLength = parseInt(cl);
if (Number.isNaN(this._contentLength)) this._contentLength = null;
} else this._contentLength = null;
}
return this._contentLength;
}
_body = null;
get body() {
if (!this._body) {
if (this.contentLength != null) this._body = bodyReader(this.contentLength, this.r);
else {
const transferEncoding = this.headers.get("transfer-encoding");
if (transferEncoding != null) {
const parts = transferEncoding.split(",").map((e)=>e.trim().toLowerCase());
assert(parts.includes("chunked"), 'transfer-encoding must include "chunked" if content-length is not set');
this._body = chunkedBodyReader(this.headers, this.r);
} else this._body = emptyReader();
}
}
return this._body;
}
async respond(r) {
let err;
try {
await writeResponse(this.w, r);
} catch (e) {
try {
this.conn.close();
} catch {}
err = e;
}
this.done.resolve(err);
if (err) throw err;
}
finalized = false;
async finalize() {
if (this.finalized) return;
const body = this.body;
const buf = new Uint8Array(1024);
while(await body.read(buf) !== null);
this.finalized = true;
}
}
function parseHTTPVersion(vers) {
switch(vers){
case "HTTP/1.1":
return [
1,
1
];
case "HTTP/1.0":
return [
1,
0
];
default:
{
const Big = 1000000;
if (!vers.startsWith("HTTP/")) break;
const dot = vers.indexOf(".");
if (dot < 0) break;
const majorStr = vers.substring(vers.indexOf("/") + 1, dot);
const major = Number(majorStr);
if (!Number.isInteger(major) || major < 0 || major > Big) break;
const minorStr = vers.substring(dot + 1);
const minor = Number(minorStr);
if (!Number.isInteger(minor) || minor < 0 || minor > Big) break;
return [
major,
minor
];
}
}
throw new Error(`malformed HTTP version ${vers}`);
}
async function readRequest(conn, bufr) {
const tp = new TextProtoReader(bufr);
const firstLine = await tp.readLine();
if (firstLine === null) return null;
const headers = await tp.readMIMEHeader();
if (headers === null) throw new Deno.errors.UnexpectedEof();
const req = new ServerRequest();
req.conn = conn;
req.r = bufr;
[req.method, req.url, req.proto] = firstLine.split(" ", 3);
[req.protoMinor, req.protoMajor] = parseHTTPVersion(req.proto);
req.headers = headers;
fixLength(req);
return req;
}
class Server {
listener;
closing;
connections;
constructor(listener){
this.listener = listener;
this.closing = false;
this.connections = [];
}
close() {
this.closing = true;
this.listener.close();
for (const conn of this.connections)try {
conn.close();
} catch (e) {
if (!(e instanceof Deno.errors.BadResource)) throw e;
}
}
async *iterateHttpRequests(conn) {
const reader = new BufReader(conn);
const writer = new BufWriter(conn);
while(!this.closing){
let request;
try {
request = await readRequest(conn, reader);
} catch (error) {
if (error instanceof Deno.errors.InvalidData || error instanceof Deno.errors.UnexpectedEof) await writeResponse(writer, {
status: 400,
body: encode(`${error.message}\r\n\r\n`)
});
break;
}
if (request === null) break;
request.w = writer;
yield request;
const responseError = await request.done;
if (responseError) {
this.untrackConnection(request.conn);
return;
}
await request.finalize();
}
this.untrackConnection(conn);
try {
conn.close();
} catch (e) {}
}
trackConnection(conn) {
this.connections.push(conn);
}
untrackConnection(conn) {
const index = this.connections.indexOf(conn);
if (index !== -1) this.connections.splice(index, 1);
}
async *acceptConnAndIterateHttpRequests(mux) {
if (this.closing) return;
let conn;
try {
conn = await this.listener.accept();
} catch (error) {
if (error instanceof Deno.errors.BadResource || error instanceof Deno.errors.InvalidData || error instanceof Deno.errors.UnexpectedEof) return mux.add(this.acceptConnAndIterateHttpRequests(mux));
throw error;
}
this.trackConnection(conn);
mux.add(this.acceptConnAndIterateHttpRequests(mux));
yield* this.iterateHttpRequests(conn);
}
[Symbol.asyncIterator]() {
const mux = new MuxAsyncIterator();
mux.add(this.acceptConnAndIterateHttpRequests(mux));
return mux.iterate();
}
}
function _parseAddrFromStr(addr) {
let url;
try {
const host = addr.startsWith(":") ? `0.0.0.0${addr}` : addr;
url = new URL(`http://${host}`);
} catch {
throw new TypeError("Invalid address.");
}
if (url.username || url.password || url.pathname != "/" || url.search || url.hash) throw new TypeError("Invalid address.");
return {
hostname: url.hostname,
port: url.port === "" ? 80 : Number(url.port)
};
}
function serve(addr) {
if (typeof addr === "string") addr = _parseAddrFromStr(addr);
const listener = Deno.listen(addr);
return new Server(listener);
}
async function listenAndServe(addr, handler) {
const server = serve(addr);
for await (const request of server)handler(request);
}
function fixLength(req) {
const contentLength = req.headers.get("Content-Length");
if (contentLength) {
const arrClen = contentLength.split(",");
if (arrClen.length > 1) {
const distinct = [
...new Set(arrClen.map((e)=>e.trim()))
];
if (distinct.length > 1) throw Error("cannot contain multiple Content-Length headers");
else req.headers.set("Content-Length", distinct[0]);
}
const c = req.headers.get("Content-Length");
if (req.method === "HEAD" && c && c !== "0") throw Error("http: method cannot contain a Content-Length");
if (c && req.headers.has("transfer-encoding")) throw new Error("http: Transfer-Encoding and Content-Length cannot be send together");
}
}
listenAndServe({
port: 8080
}, async (req)=>{});
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/deno-001/simple-1/input/async/deferred.ts | TypeScript | // Copyright 2018-2020 the Deno authors. All rights reserved. MIT license.
// TODO(ry) It'd be better to make Deferred a class that inherits from
// Promise, rather than an interface. This is possible in ES2016, however
// typescript produces broken code when targeting ES5 code.
// See https://github.com/Microsoft/TypeScript/issues/15202
// At the time of writing, the github issue is closed but the problem remains.
export interface Deferred<T> extends Promise<T> {
resolve: (value?: T | PromiseLike<T>) => void;
// eslint-disable-next-line @typescript-eslint/no-explicit-any
reject: (reason?: any) => void;
}
/** Creates a Promise with the `reject` and `resolve` functions
* placed as methods on the promise object itself. It allows you to do:
*
* const p = deferred<number>();
* // ...
* p.resolve(42);
*/
export function deferred<T>(): Deferred<T> {
let methods;
const promise = new Promise<T>((resolve, reject): void => {
methods = { resolve, reject };
});
return Object.assign(promise, methods) as Deferred<T>;
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/deno-001/simple-1/input/async/delay.ts | TypeScript | // Copyright 2018-2020 the Deno authors. All rights reserved. MIT license.
/* Resolves after the given number of milliseconds. */
export function delay(ms: number): Promise<void> {
return new Promise((res): number =>
setTimeout((): void => {
res();
}, ms)
);
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/deno-001/simple-1/input/async/mod.ts | TypeScript | // Copyright 2018-2020 the Deno authors. All rights reserved. MIT license.
export * from "./deferred";
export * from "./delay";
export * from "./mux_async_iterator";
export * from "./pool";
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/deno-001/simple-1/input/async/mux_async_iterator.ts | TypeScript | // Copyright 2018-2020 the Deno authors. All rights reserved. MIT license.
import { Deferred, deferred } from "./deferred.ts";
interface TaggedYieldedValue<T> {
iterator: AsyncIterableIterator<T>;
value: T;
}
/** The MuxAsyncIterator class multiplexes multiple async iterators into a
* single stream. It currently makes an assumption:
* - The final result (the value returned and not yielded from the iterator)
* does not matter; if there is any, it is discarded.
*/
export class MuxAsyncIterator<T> implements AsyncIterable<T> {
private iteratorCount = 0;
private yields: Array<TaggedYieldedValue<T>> = [];
// eslint-disable-next-line @typescript-eslint/no-explicit-any
private throws: any[] = [];
private signal: Deferred<void> = deferred();
add(iterator: AsyncIterableIterator<T>): void {
++this.iteratorCount;
this.callIteratorNext(iterator);
}
private async callIteratorNext(
iterator: AsyncIterableIterator<T>
): Promise<void> {
try {
const { value, done } = await iterator.next();
if (done) {
--this.iteratorCount;
} else {
this.yields.push({ iterator, value });
}
} catch (e) {
this.throws.push(e);
}
this.signal.resolve();
}
async *iterate(): AsyncIterableIterator<T> {
while (this.iteratorCount > 0) {
// Sleep until any of the wrapped iterators yields.
await this.signal;
// Note that while we're looping over `yields`, new items may be added.
for (let i = 0; i < this.yields.length; i++) {
const { iterator, value } = this.yields[i];
yield value;
this.callIteratorNext(iterator);
}
if (this.throws.length) {
for (const e of this.throws) {
throw e;
}
this.throws.length = 0;
}
// Clear the `yields` list and reset the `signal` promise.
this.yields.length = 0;
this.signal = deferred();
}
}
[Symbol.asyncIterator](): AsyncIterableIterator<T> {
return this.iterate();
}
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/deno-001/simple-1/input/async/pool.ts | TypeScript | // Copyright 2018-2020 the Deno authors. All rights reserved. MIT license.
/**
* pooledMap transforms values from an (async) iterable into another async
* iterable. The transforms are done concurrently, with a max concurrency
* defined by the poolLimit.
*
* @param poolLimit The maximum count of items being processed concurrently.
* @param array The input array for mapping.
* @param iteratorFn The function to call for every item of the array.
*/
export function pooledMap<T, R>(
poolLimit: number,
array: Iterable<T> | AsyncIterable<T>,
iteratorFn: (data: T) => Promise<R>
): AsyncIterableIterator<R> {
// Create the async iterable that is returned from this function.
const res = new TransformStream<Promise<R>, R>({
async transform(
p: Promise<R>,
controller: TransformStreamDefaultController<R>
): Promise<void> {
controller.enqueue(await p);
},
});
// Start processing items from the iterator
(async (): Promise<void> => {
const writer = res.writable.getWriter();
const executing: Array<Promise<unknown>> = [];
for await (const item of array) {
const p = Promise.resolve().then(() => iteratorFn(item));
writer.write(p);
const e: Promise<unknown> = p.then(() =>
executing.splice(executing.indexOf(e), 1)
);
executing.push(e);
if (executing.length >= poolLimit) {
await Promise.race(executing);
}
}
// Wait until all ongoing events have processed, then close the writer.
await Promise.all(executing);
writer.close();
})();
return res.readable.getIterator();
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/deno-001/simple-1/input/entry.js | JavaScript | import { listenAndServe } from "./http/server";
listenAndServe({ port: 8080 }, async (req) => {});
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/deno-001/simple-1/input/http/_io.ts | TypeScript | import { ServerRequest, Response } from "./server";
export function emptyReader(): Deno.Reader {
return {
read(_: Uint8Array): Promise<number | null> {
return Promise.resolve(null);
},
};
}
export function bodyReader(contentLength: number, r: BufReader): Deno.Reader {
let totalRead = 0;
let finished = false;
async function read(buf: Uint8Array): Promise<number | null> {
if (finished) return null;
let result: number | null;
const remaining = contentLength - totalRead;
if (remaining >= buf.byteLength) {
result = await r.read(buf);
} else {
const readBuf = buf.subarray(0, remaining);
result = await r.read(readBuf);
}
if (result !== null) {
totalRead += result;
}
finished = totalRead === contentLength;
return result;
}
return { read };
}
export function chunkedBodyReader(h: Headers, r: BufReader): Deno.Reader {
// Based on https://tools.ietf.org/html/rfc2616#section-19.4.6
const tp = new TextProtoReader(r);
let finished = false;
const chunks: Array<{
offset: number;
data: Uint8Array;
}> = [];
async function read(buf: Uint8Array): Promise<number | null> {
if (finished) return null;
const [chunk] = chunks;
if (chunk) {
const chunkRemaining = chunk.data.byteLength - chunk.offset;
const readLength = Math.min(chunkRemaining, buf.byteLength);
for (let i = 0; i < readLength; i++) {
buf[i] = chunk.data[chunk.offset + i];
}
chunk.offset += readLength;
if (chunk.offset === chunk.data.byteLength) {
chunks.shift();
// Consume \r\n;
if ((await tp.readLine()) === null) {
throw new Deno.errors.UnexpectedEof();
}
}
return readLength;
}
const line = await tp.readLine();
if (line === null) throw new Deno.errors.UnexpectedEof();
// TODO: handle chunk extension
const [chunkSizeString] = line.split(";");
const chunkSize = parseInt(chunkSizeString, 16);
if (Number.isNaN(chunkSize) || chunkSize < 0) {
throw new Error("Invalid chunk size");
}
if (chunkSize > 0) {
if (chunkSize > buf.byteLength) {
let eof = await r.readFull(buf);
if (eof === null) {
throw new Deno.errors.UnexpectedEof();
}
const restChunk = new Uint8Array(chunkSize - buf.byteLength);
eof = await r.readFull(restChunk);
if (eof === null) {
throw new Deno.errors.UnexpectedEof();
} else {
chunks.push({
offset: 0,
data: restChunk,
});
}
return buf.byteLength;
} else {
const bufToFill = buf.subarray(0, chunkSize);
const eof = await r.readFull(bufToFill);
if (eof === null) {
throw new Deno.errors.UnexpectedEof();
}
// Consume \r\n
if ((await tp.readLine()) === null) {
throw new Deno.errors.UnexpectedEof();
}
return chunkSize;
}
} else {
assert(chunkSize === 0);
// Consume \r\n
if ((await r.readLine()) === null) {
throw new Deno.errors.UnexpectedEof();
}
await readTrailers(h, r);
finished = true;
return null;
}
}
return { read };
}
function isProhibidedForTrailer(key: string): boolean {
const s = new Set(["transfer-encoding", "content-length", "trailer"]);
return s.has(key.toLowerCase());
}
/** Read trailer headers from reader and append values to headers. "trailer"
* field will be deleted. */
export async function readTrailers(
headers: Headers,
r: BufReader
): Promise<void> {
const trailers = parseTrailer(headers.get("trailer"));
if (trailers == null) return;
const trailerNames = [...trailers.keys()];
const tp = new TextProtoReader(r);
const result = await tp.readMIMEHeader();
if (result == null) {
throw new Deno.errors.InvalidData("Missing trailer header.");
}
const undeclared = [...result.keys()].filter(
(k) => !trailerNames.includes(k)
);
if (undeclared.length > 0) {
throw new Deno.errors.InvalidData(
`Undeclared trailers: ${Deno.inspect(undeclared)}.`
);
}
for (const [k, v] of result) {
headers.append(k, v);
}
const missingTrailers = trailerNames.filter((k) => !result.has(k));
if (missingTrailers.length > 0) {
throw new Deno.errors.InvalidData(
`Missing trailers: ${Deno.inspect(missingTrailers)}.`
);
}
headers.delete("trailer");
}
function parseTrailer(field: string | null): Headers | undefined {
if (field == null) {
return undefined;
}
const trailerNames = field.split(",").map((v) => v.trim().toLowerCase());
if (trailerNames.length === 0) {
throw new Deno.errors.InvalidData("Empty trailer header.");
}
const prohibited = trailerNames.filter((k) => isProhibidedForTrailer(k));
if (prohibited.length > 0) {
throw new Deno.errors.InvalidData(
`Prohibited trailer names: ${Deno.inspect(prohibited)}.`
);
}
return new Headers(trailerNames.map((key) => [key, ""]));
}
export async function writeChunkedBody(
w: Deno.Writer,
r: Deno.Reader
): Promise<void> {
const writer = BufWriter.create(w);
for await (const chunk of Deno.iter(r)) {
if (chunk.byteLength <= 0) continue;
const start = encoder.encode(`${chunk.byteLength.toString(16)}\r\n`);
const end = encoder.encode("\r\n");
await writer.write(start);
await writer.write(chunk);
await writer.write(end);
}
const endChunk = encoder.encode("0\r\n\r\n");
await writer.write(endChunk);
}
/** Write trailer headers to writer. It should mostly should be called after
* `writeResponse()`. */
export async function writeTrailers(
w: Deno.Writer,
headers: Headers,
trailers: Headers
): Promise<void> {
const trailer = headers.get("trailer");
if (trailer === null) {
throw new TypeError("Missing trailer header.");
}
const transferEncoding = headers.get("transfer-encoding");
if (transferEncoding === null || !transferEncoding.match(/^chunked/)) {
throw new TypeError(
`Trailers are only allowed for "transfer-encoding: chunked", got "transfer-encoding: ${transferEncoding}".`
);
}
const writer = BufWriter.create(w);
const trailerNames = trailer.split(",").map((s) => s.trim().toLowerCase());
const prohibitedTrailers = trailerNames.filter((k) =>
isProhibidedForTrailer(k)
);
if (prohibitedTrailers.length > 0) {
throw new TypeError(
`Prohibited trailer names: ${Deno.inspect(prohibitedTrailers)}.`
);
}
const undeclared = [...trailers.keys()].filter(
(k) => !trailerNames.includes(k)
);
if (undeclared.length > 0) {
throw new TypeError(
`Undeclared trailers: ${Deno.inspect(undeclared)}.`
);
}
for (const [key, value] of trailers) {
await writer.write(encoder.encode(`${key}: ${value}\r\n`));
}
await writer.write(encoder.encode("\r\n"));
await writer.flush();
}
export async function writeResponse(
w: Deno.Writer,
r: Response
): Promise<void> {
const protoMajor = 1;
const protoMinor = 1;
const statusCode = r.status || 200;
const statusText = STATUS_TEXT.get(statusCode);
const writer = BufWriter.create(w);
if (!statusText) {
throw new Deno.errors.InvalidData("Bad status code");
}
if (!r.body) {
r.body = new Uint8Array();
}
if (typeof r.body === "string") {
r.body = encoder.encode(r.body);
}
let out = `HTTP/${protoMajor}.${protoMinor} ${statusCode} ${statusText}\r\n`;
const headers = r.headers ?? new Headers();
if (r.body && !headers.get("content-length")) {
if (r.body instanceof Uint8Array) {
out += `content-length: ${r.body.byteLength}\r\n`;
} else if (!headers.get("transfer-encoding")) {
out += "transfer-encoding: chunked\r\n";
}
}
for (const [key, value] of headers) {
out += `${key}: ${value}\r\n`;
}
out += `\r\n`;
const header = encoder.encode(out);
const n = await writer.write(header);
assert(n === header.byteLength);
if (r.body instanceof Uint8Array) {
const n = await writer.write(r.body);
assert(n === r.body.byteLength);
} else if (headers.has("content-length")) {
const contentLength = headers.get("content-length");
assert(contentLength != null);
const bodyLength = parseInt(contentLength);
const n = await Deno.copy(r.body, writer);
assert(n === bodyLength);
} else {
await writeChunkedBody(writer, r.body);
}
if (r.trailers) {
const t = await r.trailers();
await writeTrailers(writer, headers, t);
}
await writer.flush();
}
/**
* ParseHTTPVersion parses a HTTP version string.
* "HTTP/1.0" returns (1, 0).
* Ported from https://github.com/golang/go/blob/f5c43b9/src/net/http/request.go#L766-L792
*/
export function parseHTTPVersion(vers: string): [number, number] {
switch (vers) {
case "HTTP/1.1":
return [1, 1];
case "HTTP/1.0":
return [1, 0];
default: {
const Big = 1000000; // arbitrary upper bound
if (!vers.startsWith("HTTP/")) {
break;
}
const dot = vers.indexOf(".");
if (dot < 0) {
break;
}
const majorStr = vers.substring(vers.indexOf("/") + 1, dot);
const major = Number(majorStr);
if (!Number.isInteger(major) || major < 0 || major > Big) {
break;
}
const minorStr = vers.substring(dot + 1);
const minor = Number(minorStr);
if (!Number.isInteger(minor) || minor < 0 || minor > Big) {
break;
}
return [major, minor];
}
}
throw new Error(`malformed HTTP version ${vers}`);
}
export async function readRequest(
conn: Deno.Conn,
bufr: BufReader
): Promise<ServerRequest | null> {
const tp = new TextProtoReader(bufr);
const firstLine = await tp.readLine(); // e.g. GET /index.html HTTP/1.0
if (firstLine === null) return null;
const headers = await tp.readMIMEHeader();
if (headers === null) throw new Deno.errors.UnexpectedEof();
const req = new ServerRequest();
req.conn = conn;
req.r = bufr;
[req.method, req.url, req.proto] = firstLine.split(" ", 3);
[req.protoMinor, req.protoMajor] = parseHTTPVersion(req.proto);
req.headers = headers;
fixLength(req);
return req;
}
function fixLength(req: ServerRequest): void {
const contentLength = req.headers.get("Content-Length");
if (contentLength) {
const arrClen = contentLength.split(",");
if (arrClen.length > 1) {
const distinct = [...new Set(arrClen.map((e): string => e.trim()))];
if (distinct.length > 1) {
throw Error("cannot contain multiple Content-Length headers");
} else {
req.headers.set("Content-Length", distinct[0]);
}
}
const c = req.headers.get("Content-Length");
if (req.method === "HEAD" && c && c !== "0") {
throw Error("http: method cannot contain a Content-Length");
}
if (c && req.headers.has("transfer-encoding")) {
// A sender MUST NOT send a Content-Length header field in any message
// that contains a Transfer-Encoding header field.
// rfc: https://tools.ietf.org/html/rfc7230#section-3.3.2
throw new Error(
"http: Transfer-Encoding and Content-Length cannot be send together"
);
}
}
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/deno-001/simple-1/input/http/server.ts | TypeScript | // Copyright 2018-2020 the Deno authors. All rights reserved. MIT license.
import { deferred, Deferred, MuxAsyncIterator } from "../async/mod";
import {
bodyReader,
chunkedBodyReader,
emptyReader,
writeResponse,
readRequest,
} from "./_io";
export class ServerRequest {
url!: string;
method!: string;
proto!: string;
protoMinor!: number;
protoMajor!: number;
headers!: Headers;
conn!: Deno.Conn;
r!: BufReader;
w!: BufWriter;
done: Deferred<Error | undefined> = deferred();
private _contentLength: number | undefined | null = undefined;
/**
* Value of Content-Length header.
* If null, then content length is invalid or not given (e.g. chunked encoding).
*/
get contentLength(): number | null {
// undefined means not cached.
// null means invalid or not provided.
if (this._contentLength === undefined) {
const cl = this.headers.get("content-length");
if (cl) {
this._contentLength = parseInt(cl);
// Convert NaN to null (as NaN harder to test)
if (Number.isNaN(this._contentLength)) {
this._contentLength = null;
}
} else {
this._contentLength = null;
}
}
return this._contentLength;
}
private _body: Deno.Reader | null = null;
/**
* Body of the request. The easiest way to consume the body is:
*
* const buf: Uint8Array = await Deno.readAll(req.body);
*/
get body(): Deno.Reader {
if (!this._body) {
if (this.contentLength != null) {
this._body = bodyReader(this.contentLength, this.r);
} else {
const transferEncoding = this.headers.get("transfer-encoding");
if (transferEncoding != null) {
const parts = transferEncoding
.split(",")
.map((e): string => e.trim().toLowerCase());
assert(
parts.includes("chunked"),
'transfer-encoding must include "chunked" if content-length is not set'
);
this._body = chunkedBodyReader(this.headers, this.r);
} else {
// Neither content-length nor transfer-encoding: chunked
this._body = emptyReader();
}
}
}
return this._body;
}
async respond(r: Response): Promise<void> {
let err: Error | undefined;
try {
// Write our response!
await writeResponse(this.w, r);
} catch (e) {
try {
// Eagerly close on error.
this.conn.close();
} catch {
// Pass
}
err = e;
}
// Signal that this request has been processed and the next pipelined
// request on the same connection can be accepted.
this.done.resolve(err);
if (err) {
// Error during responding, rethrow.
throw err;
}
}
private finalized = false;
async finalize(): Promise<void> {
if (this.finalized) return;
// Consume unread body
const body = this.body;
const buf = new Uint8Array(1024);
while ((await body.read(buf)) !== null) {
// Pass
}
this.finalized = true;
}
}
export class Server implements AsyncIterable<ServerRequest> {
private closing = false;
private connections: Deno.Conn[] = [];
constructor(public listener: Deno.Listener) {}
close(): void {
this.closing = true;
this.listener.close();
for (const conn of this.connections) {
try {
conn.close();
} catch (e) {
// Connection might have been already closed
if (!(e instanceof Deno.errors.BadResource)) {
throw e;
}
}
}
}
// Yields all HTTP requests on a single TCP connection.
private async *iterateHttpRequests(
conn: Deno.Conn
): AsyncIterableIterator<ServerRequest> {
const reader = new BufReader(conn);
const writer = new BufWriter(conn);
while (!this.closing) {
let request: ServerRequest | null;
try {
request = await readRequest(conn, reader);
} catch (error) {
if (
error instanceof Deno.errors.InvalidData ||
error instanceof Deno.errors.UnexpectedEof
) {
// An error was thrown while parsing request headers.
await writeResponse(writer, {
status: 400,
body: encode(`${error.message}\r\n\r\n`),
});
}
break;
}
if (request === null) {
break;
}
request.w = writer;
yield request;
// Wait for the request to be processed before we accept a new request on
// this connection.
const responseError = await request.done;
if (responseError) {
// Something bad happened during response.
// (likely other side closed during pipelined req)
// req.done implies this connection already closed, so we can just return.
this.untrackConnection(request.conn);
return;
}
// Consume unread body and trailers if receiver didn't consume those data
await request.finalize();
}
this.untrackConnection(conn);
try {
conn.close();
} catch (e) {
// might have been already closed
}
}
private trackConnection(conn: Deno.Conn): void {
this.connections.push(conn);
}
private untrackConnection(conn: Deno.Conn): void {
const index = this.connections.indexOf(conn);
if (index !== -1) {
this.connections.splice(index, 1);
}
}
// Accepts a new TCP connection and yields all HTTP requests that arrive on
// it. When a connection is accepted, it also creates a new iterator of the
// same kind and adds it to the request multiplexer so that another TCP
// connection can be accepted.
private async *acceptConnAndIterateHttpRequests(
mux: MuxAsyncIterator<ServerRequest>
): AsyncIterableIterator<ServerRequest> {
if (this.closing) return;
// Wait for a new connection.
let conn: Deno.Conn;
try {
conn = await this.listener.accept();
} catch (error) {
if (
error instanceof Deno.errors.BadResource ||
error instanceof Deno.errors.InvalidData ||
error instanceof Deno.errors.UnexpectedEof
) {
return mux.add(this.acceptConnAndIterateHttpRequests(mux));
}
throw error;
}
this.trackConnection(conn);
// Try to accept another connection and add it to the multiplexer.
mux.add(this.acceptConnAndIterateHttpRequests(mux));
// Yield the requests that arrive on the just-accepted connection.
yield* this.iterateHttpRequests(conn);
}
[Symbol.asyncIterator](): AsyncIterableIterator<ServerRequest> {
const mux: MuxAsyncIterator<ServerRequest> = new MuxAsyncIterator();
mux.add(this.acceptConnAndIterateHttpRequests(mux));
return mux.iterate();
}
}
/** Options for creating an HTTP server. */
export type HTTPOptions = Omit<Deno.ListenOptions, "transport">;
/**
* Parse addr from string
*
* const addr = "::1:8000";
* parseAddrFromString(addr);
*
* @param addr Address string
*/
export function _parseAddrFromStr(addr: string): HTTPOptions {
let url: URL;
try {
const host = addr.startsWith(":") ? `0.0.0.0${addr}` : addr;
url = new URL(`http://${host}`);
} catch {
throw new TypeError("Invalid address.");
}
if (
url.username ||
url.password ||
url.pathname != "/" ||
url.search ||
url.hash
) {
throw new TypeError("Invalid address.");
}
return {
hostname: url.hostname,
port: url.port === "" ? 80 : Number(url.port),
};
}
/**
* Create a HTTP server
*
* import { serve } from "https://deno.land/std/http/server.ts";
* const body = "Hello World\n";
* const server = serve({ port: 8000 });
* for await (const req of server) {
* req.respond({ body });
* }
*/
export function serve(addr: string | HTTPOptions): Server {
if (typeof addr === "string") {
addr = _parseAddrFromStr(addr);
}
const listener = Deno.listen(addr);
return new Server(listener);
}
/**
* Start an HTTP server with given options and request handler
*
* const body = "Hello World\n";
* const options = { port: 8000 };
* listenAndServe(options, (req) => {
* req.respond({ body });
* });
*
* @param options Server configuration
* @param handler Request handler
*/
export async function listenAndServe(
addr: string | HTTPOptions,
handler: (req: ServerRequest) => void
): Promise<void> {
const server = serve(addr);
for await (const request of server) {
handler(request);
}
}
/** Options for creating an HTTPS server. */
export type HTTPSOptions = Omit<Deno.ListenTlsOptions, "transport">;
/**
* Create an HTTPS server with given options
*
* const body = "Hello HTTPS";
* const options = {
* hostname: "localhost",
* port: 443,
* certFile: "./path/to/localhost.crt",
* keyFile: "./path/to/localhost.key",
* };
* for await (const req of serveTLS(options)) {
* req.respond({ body });
* }
*
* @param options Server configuration
* @return Async iterable server instance for incoming requests
*/
export function serveTLS(options: HTTPSOptions): Server {
const tlsOptions: Deno.ListenTlsOptions = {
...options,
transport: "tcp",
};
const listener = Deno.listenTls(tlsOptions);
return new Server(listener);
}
/**
* Start an HTTPS server with given options and request handler
*
* const body = "Hello HTTPS";
* const options = {
* hostname: "localhost",
* port: 443,
* certFile: "./path/to/localhost.crt",
* keyFile: "./path/to/localhost.key",
* };
* listenAndServeTLS(options, (req) => {
* req.respond({ body });
* });
*
* @param options Server configuration
* @param handler Request handler
*/
export async function listenAndServeTLS(
options: HTTPSOptions,
handler: (req: ServerRequest) => void
): Promise<void> {
const server = serveTLS(options);
for await (const request of server) {
handler(request);
}
}
/**
* Interface of HTTP server response.
* If body is a Reader, response would be chunked.
* If body is a string, it would be UTF-8 encoded by default.
*/
export interface Response {
status?: number;
headers?: Headers;
body?: Uint8Array | Deno.Reader | string;
trailers?: () => Promise<Headers> | Headers;
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/deno-001/simple-1/input/textproto/mod.ts | TypeScript | // Based on https://github.com/golang/go/tree/master/src/net/textproto
// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// FROM https://github.com/denoland/deno/blob/b34628a26ab0187a827aa4ebe256e23178e25d39/cli/js/web/headers.ts#L9
const invalidHeaderCharRegex = /[^\t\x20-\x7e\x80-\xff]/g;
function str(buf: Uint8Array | null | undefined): string {
if (buf == null) {
return "";
} else {
return decode(buf);
}
}
function charCode(s: string): number {
return s.charCodeAt(0);
}
export class TextProtoReader {
constructor(readonly r: BufReader) {}
constructor(readonly r: BufReader) {}
/** readLine() reads a single line from the TextProtoReader,
* eliding the final \n or \r\n from the returned string.
*/
async readLine(): Promise<string | null> {
const s = await this.readLineSlice();
if (s === null) return null;
return str(s);
}
/** ReadMIMEHeader reads a MIME-style header from r.
* The header is a sequence of possibly continued Key: Value lines
* ending in a blank line.
* The returned map m maps CanonicalMIMEHeaderKey(key) to a
* sequence of values in the same order encountered in the input.
*
* For example, consider this input:
*
* My-Key: Value 1
* Long-Key: Even
* Longer Value
* My-Key: Value 2
*
* Given that input, ReadMIMEHeader returns the map:
*
* map[string][]string{
* "My-Key": {"Value 1", "Value 2"},
* "Long-Key": {"Even Longer Value"},
* }
*/
async readMIMEHeader(): Promise<Headers | null> {
const m = new Headers();
let line: Uint8Array | undefined;
// The first line cannot start with a leading space.
let buf = await this.r.peek(1);
if (buf === null) {
return null;
} else if (buf[0] == charCode(" ") || buf[0] == charCode("\t")) {
line = (await this.readLineSlice()) as Uint8Array;
}
buf = await this.r.peek(1);
if (buf === null) {
throw new Deno.errors.UnexpectedEof();
} else if (buf[0] == charCode(" ") || buf[0] == charCode("\t")) {
throw new Deno.errors.InvalidData(
`malformed MIME header initial line: ${str(line)}`
);
}
while (true) {
const kv = await this.readLineSlice(); // readContinuedLineSlice
if (kv === null) throw new Deno.errors.UnexpectedEof();
if (kv.byteLength === 0) return m;
// Key ends at first colon
let i = kv.indexOf(charCode(":"));
if (i < 0) {
throw new Deno.errors.InvalidData(
`malformed MIME header line: ${str(kv)}`
);
}
//let key = canonicalMIMEHeaderKey(kv.subarray(0, endKey));
const key = str(kv.subarray(0, i));
// As per RFC 7230 field-name is a token,
// tokens consist of one or more chars.
// We could throw `Deno.errors.InvalidData` here,
// but better to be liberal in what we
// accept, so if we get an empty key, skip it.
if (key == "") {
continue;
}
// Skip initial spaces in value.
i++; // skip colon
while (
i < kv.byteLength &&
(kv[i] == charCode(" ") || kv[i] == charCode("\t"))
) {
i++;
}
const value = str(kv.subarray(i)).replace(
invalidHeaderCharRegex,
encodeURI
);
// In case of invalid header we swallow the error
// example: "Audio Mode" => invalid due to space in the key
try {
m.append(key, value);
} catch {
// Pass
}
}
}
async readLineSlice(): Promise<Uint8Array | null> {
// this.closeDot();
let line: Uint8Array | undefined;
while (true) {
const r = await this.r.readLine();
if (r === null) return null;
const { line: l, more } = r;
// Avoid the copy if the first call produced a full line.
if (!line && !more) {
// TODO(ry):
// This skipSpace() is definitely misplaced, but I don't know where it
// comes from nor how to fix it.
if (this.skipSpace(l) === 0) {
return new Uint8Array(0);
}
return l;
}
line = line ? concat(line, l) : l;
if (!more) {
break;
}
}
return line;
}
skipSpace(l: Uint8Array): number {
let n = 0;
for (let i = 0; i < l.length; i++) {
if (l[i] === charCode(" ") || l[i] === charCode("\t")) {
continue;
}
n++;
}
return n;
}
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/deno-001/simple-1/output/entry.js | JavaScript | function deferred() {
let methods;
const promise = new Promise((resolve, reject)=>{
methods = {
resolve,
reject
};
});
return Object.assign(promise, methods);
}
class MuxAsyncIterator {
iteratorCount = 0;
yields = [];
throws = [];
signal = deferred();
add(iterator) {
++this.iteratorCount;
this.callIteratorNext(iterator);
}
async callIteratorNext(iterator) {
try {
const { value, done } = await iterator.next();
if (done) --this.iteratorCount;
else this.yields.push({
iterator,
value
});
} catch (e) {
this.throws.push(e);
}
this.signal.resolve();
}
async *iterate() {
while(this.iteratorCount > 0){
await this.signal;
for(let i = 0; i < this.yields.length; i++){
const { iterator, value } = this.yields[i];
yield value;
this.callIteratorNext(iterator);
}
if (this.throws.length) {
for (const e of this.throws)throw e;
this.throws.length = 0;
}
this.yields.length = 0;
this.signal = deferred();
}
}
[Symbol.asyncIterator]() {
return this.iterate();
}
}
function emptyReader() {
return {
read (_) {
return Promise.resolve(null);
}
};
}
function bodyReader(contentLength, r) {
let totalRead = 0;
let finished = false;
async function read(buf) {
if (finished) return null;
let result;
const remaining = contentLength - totalRead;
if (remaining >= buf.byteLength) result = await r.read(buf);
else {
const readBuf = buf.subarray(0, remaining);
result = await r.read(readBuf);
}
if (result !== null) totalRead += result;
finished = totalRead === contentLength;
return result;
}
return {
read
};
}
function chunkedBodyReader(h, r) {
const tp = new TextProtoReader(r);
let finished = false;
const chunks = [];
async function read(buf) {
if (finished) return null;
const [chunk] = chunks;
if (chunk) {
const chunkRemaining = chunk.data.byteLength - chunk.offset;
const readLength = Math.min(chunkRemaining, buf.byteLength);
for(let i = 0; i < readLength; i++)buf[i] = chunk.data[chunk.offset + i];
chunk.offset += readLength;
if (chunk.offset === chunk.data.byteLength) {
chunks.shift();
if (await tp.readLine() === null) throw new Deno.errors.UnexpectedEof();
}
return readLength;
}
const line = await tp.readLine();
if (line === null) throw new Deno.errors.UnexpectedEof();
const [chunkSizeString] = line.split(";");
const chunkSize = parseInt(chunkSizeString, 16);
if (Number.isNaN(chunkSize) || chunkSize < 0) throw new Error("Invalid chunk size");
if (chunkSize > 0) {
if (chunkSize > buf.byteLength) {
let eof = await r.readFull(buf);
if (eof === null) throw new Deno.errors.UnexpectedEof();
const restChunk = new Uint8Array(chunkSize - buf.byteLength);
eof = await r.readFull(restChunk);
if (eof === null) throw new Deno.errors.UnexpectedEof();
else chunks.push({
offset: 0,
data: restChunk
});
return buf.byteLength;
} else {
const bufToFill = buf.subarray(0, chunkSize);
const eof = await r.readFull(bufToFill);
if (eof === null) throw new Deno.errors.UnexpectedEof();
if (await tp.readLine() === null) throw new Deno.errors.UnexpectedEof();
return chunkSize;
}
} else {
assert(chunkSize === 0);
if (await r.readLine() === null) throw new Deno.errors.UnexpectedEof();
await readTrailers(h, r);
finished = true;
return null;
}
}
return {
read
};
}
function isProhibidedForTrailer(key) {
const s = new Set([
"transfer-encoding",
"content-length",
"trailer"
]);
return s.has(key.toLowerCase());
}
async function readTrailers(headers, r) {
const trailers = parseTrailer(headers.get("trailer"));
if (trailers == null) return;
const trailerNames = [
...trailers.keys()
];
const tp = new TextProtoReader(r);
const result = await tp.readMIMEHeader();
if (result == null) throw new Deno.errors.InvalidData("Missing trailer header.");
const undeclared = [
...result.keys()
].filter((k)=>!trailerNames.includes(k));
if (undeclared.length > 0) throw new Deno.errors.InvalidData(`Undeclared trailers: ${Deno.inspect(undeclared)}.`);
for (const [k, v] of result)headers.append(k, v);
const missingTrailers = trailerNames.filter((k)=>!result.has(k));
if (missingTrailers.length > 0) throw new Deno.errors.InvalidData(`Missing trailers: ${Deno.inspect(missingTrailers)}.`);
headers.delete("trailer");
}
function parseTrailer(field) {
if (field == null) return undefined;
const trailerNames = field.split(",").map((v)=>v.trim().toLowerCase());
if (trailerNames.length === 0) throw new Deno.errors.InvalidData("Empty trailer header.");
const prohibited = trailerNames.filter((k)=>isProhibidedForTrailer(k));
if (prohibited.length > 0) throw new Deno.errors.InvalidData(`Prohibited trailer names: ${Deno.inspect(prohibited)}.`);
return new Headers(trailerNames.map((key)=>[
key,
""
]));
}
async function writeChunkedBody(w, r) {
const writer = BufWriter.create(w);
for await (const chunk of Deno.iter(r)){
if (chunk.byteLength <= 0) continue;
const start = encoder.encode(`${chunk.byteLength.toString(16)}\r\n`);
const end = encoder.encode("\r\n");
await writer.write(start);
await writer.write(chunk);
await writer.write(end);
}
const endChunk = encoder.encode("0\r\n\r\n");
await writer.write(endChunk);
}
async function writeTrailers(w, headers, trailers) {
const trailer = headers.get("trailer");
if (trailer === null) throw new TypeError("Missing trailer header.");
const transferEncoding = headers.get("transfer-encoding");
if (transferEncoding === null || !transferEncoding.match(/^chunked/)) throw new TypeError(`Trailers are only allowed for "transfer-encoding: chunked", got "transfer-encoding: ${transferEncoding}".`);
const writer = BufWriter.create(w);
const trailerNames = trailer.split(",").map((s)=>s.trim().toLowerCase());
const prohibitedTrailers = trailerNames.filter((k)=>isProhibidedForTrailer(k));
if (prohibitedTrailers.length > 0) throw new TypeError(`Prohibited trailer names: ${Deno.inspect(prohibitedTrailers)}.`);
const undeclared = [
...trailers.keys()
].filter((k)=>!trailerNames.includes(k));
if (undeclared.length > 0) throw new TypeError(`Undeclared trailers: ${Deno.inspect(undeclared)}.`);
for (const [key, value] of trailers)await writer.write(encoder.encode(`${key}: ${value}\r\n`));
await writer.write(encoder.encode("\r\n"));
await writer.flush();
}
async function writeResponse(w, r) {
const protoMajor = 1;
const protoMinor = 1;
const statusCode = r.status || 200;
const statusText = STATUS_TEXT.get(statusCode);
const writer = BufWriter.create(w);
if (!statusText) throw new Deno.errors.InvalidData("Bad status code");
if (!r.body) r.body = new Uint8Array();
if (typeof r.body === "string") r.body = encoder.encode(r.body);
let out = `HTTP/${protoMajor}.${protoMinor} ${statusCode} ${statusText}\r\n`;
const headers = r.headers ?? new Headers();
if (r.body && !headers.get("content-length")) {
if (r.body instanceof Uint8Array) out += `content-length: ${r.body.byteLength}\r\n`;
else if (!headers.get("transfer-encoding")) out += "transfer-encoding: chunked\r\n";
}
for (const [key, value] of headers)out += `${key}: ${value}\r\n`;
out += `\r\n`;
const header = encoder.encode(out);
const n = await writer.write(header);
assert(n === header.byteLength);
if (r.body instanceof Uint8Array) {
const n = await writer.write(r.body);
assert(n === r.body.byteLength);
} else if (headers.has("content-length")) {
const contentLength = headers.get("content-length");
assert(contentLength != null);
const bodyLength = parseInt(contentLength);
const n = await Deno.copy(r.body, writer);
assert(n === bodyLength);
} else await writeChunkedBody(writer, r.body);
if (r.trailers) {
const t = await r.trailers();
await writeTrailers(writer, headers, t);
}
await writer.flush();
}
class ServerRequest {
url;
method;
proto;
protoMinor;
protoMajor;
headers;
conn;
r;
w;
done = deferred();
_contentLength = undefined;
get contentLength() {
if (this._contentLength === undefined) {
const cl = this.headers.get("content-length");
if (cl) {
this._contentLength = parseInt(cl);
if (Number.isNaN(this._contentLength)) this._contentLength = null;
} else this._contentLength = null;
}
return this._contentLength;
}
_body = null;
get body() {
if (!this._body) {
if (this.contentLength != null) this._body = bodyReader(this.contentLength, this.r);
else {
const transferEncoding = this.headers.get("transfer-encoding");
if (transferEncoding != null) {
const parts = transferEncoding.split(",").map((e)=>e.trim().toLowerCase());
assert(parts.includes("chunked"), 'transfer-encoding must include "chunked" if content-length is not set');
this._body = chunkedBodyReader(this.headers, this.r);
} else this._body = emptyReader();
}
}
return this._body;
}
async respond(r) {
let err;
try {
await writeResponse(this.w, r);
} catch (e) {
try {
this.conn.close();
} catch {}
err = e;
}
this.done.resolve(err);
if (err) throw err;
}
finalized = false;
async finalize() {
if (this.finalized) return;
const body = this.body;
const buf = new Uint8Array(1024);
while(await body.read(buf) !== null);
this.finalized = true;
}
}
function parseHTTPVersion(vers) {
switch(vers){
case "HTTP/1.1":
return [
1,
1
];
case "HTTP/1.0":
return [
1,
0
];
default:
{
const Big = 1000000;
if (!vers.startsWith("HTTP/")) break;
const dot = vers.indexOf(".");
if (dot < 0) break;
const majorStr = vers.substring(vers.indexOf("/") + 1, dot);
const major = Number(majorStr);
if (!Number.isInteger(major) || major < 0 || major > Big) break;
const minorStr = vers.substring(dot + 1);
const minor = Number(minorStr);
if (!Number.isInteger(minor) || minor < 0 || minor > Big) break;
return [
major,
minor
];
}
}
throw new Error(`malformed HTTP version ${vers}`);
}
async function readRequest(conn, bufr) {
const tp = new TextProtoReader(bufr);
const firstLine = await tp.readLine();
if (firstLine === null) return null;
const headers = await tp.readMIMEHeader();
if (headers === null) throw new Deno.errors.UnexpectedEof();
const req = new ServerRequest();
req.conn = conn;
req.r = bufr;
[req.method, req.url, req.proto] = firstLine.split(" ", 3);
[req.protoMinor, req.protoMajor] = parseHTTPVersion(req.proto);
req.headers = headers;
fixLength(req);
return req;
}
class Server {
listener;
closing;
connections;
constructor(listener){
this.listener = listener;
this.closing = false;
this.connections = [];
}
close() {
this.closing = true;
this.listener.close();
for (const conn of this.connections)try {
conn.close();
} catch (e) {
if (!(e instanceof Deno.errors.BadResource)) throw e;
}
}
async *iterateHttpRequests(conn) {
const reader = new BufReader(conn);
const writer = new BufWriter(conn);
while(!this.closing){
let request;
try {
request = await readRequest(conn, reader);
} catch (error) {
if (error instanceof Deno.errors.InvalidData || error instanceof Deno.errors.UnexpectedEof) await writeResponse(writer, {
status: 400,
body: encode(`${error.message}\r\n\r\n`)
});
break;
}
if (request === null) break;
request.w = writer;
yield request;
const responseError = await request.done;
if (responseError) {
this.untrackConnection(request.conn);
return;
}
await request.finalize();
}
this.untrackConnection(conn);
try {
conn.close();
} catch (e) {}
}
trackConnection(conn) {
this.connections.push(conn);
}
untrackConnection(conn) {
const index = this.connections.indexOf(conn);
if (index !== -1) this.connections.splice(index, 1);
}
async *acceptConnAndIterateHttpRequests(mux) {
if (this.closing) return;
let conn;
try {
conn = await this.listener.accept();
} catch (error) {
if (error instanceof Deno.errors.BadResource || error instanceof Deno.errors.InvalidData || error instanceof Deno.errors.UnexpectedEof) return mux.add(this.acceptConnAndIterateHttpRequests(mux));
throw error;
}
this.trackConnection(conn);
mux.add(this.acceptConnAndIterateHttpRequests(mux));
yield* this.iterateHttpRequests(conn);
}
[Symbol.asyncIterator]() {
const mux = new MuxAsyncIterator();
mux.add(this.acceptConnAndIterateHttpRequests(mux));
return mux.iterate();
}
}
function _parseAddrFromStr(addr) {
let url;
try {
const host = addr.startsWith(":") ? `0.0.0.0${addr}` : addr;
url = new URL(`http://${host}`);
} catch {
throw new TypeError("Invalid address.");
}
if (url.username || url.password || url.pathname != "/" || url.search || url.hash) throw new TypeError("Invalid address.");
return {
hostname: url.hostname,
port: url.port === "" ? 80 : Number(url.port)
};
}
function serve(addr) {
if (typeof addr === "string") addr = _parseAddrFromStr(addr);
const listener = Deno.listen(addr);
return new Server(listener);
}
async function listenAndServe(addr, handler) {
const server = serve(addr);
for await (const request of server)handler(request);
}
function fixLength(req) {
const contentLength = req.headers.get("Content-Length");
if (contentLength) {
const arrClen = contentLength.split(",");
if (arrClen.length > 1) {
const distinct = [
...new Set(arrClen.map((e)=>e.trim()))
];
if (distinct.length > 1) throw Error("cannot contain multiple Content-Length headers");
else req.headers.set("Content-Length", distinct[0]);
}
const c = req.headers.get("Content-Length");
if (req.method === "HEAD" && c && c !== "0") throw Error("http: method cannot contain a Content-Length");
if (c && req.headers.has("transfer-encoding")) throw new Error("http: Transfer-Encoding and Content-Length cannot be send together");
}
}
listenAndServe({
port: 8080
}, async (req)=>{});
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/deno-001/simple-2/input/async/deferred.ts | TypeScript | export function deferred<T>(): Deferred<T> {}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/deno-001/simple-2/input/async/mod.ts | TypeScript | // Copyright 2018-2020 the Deno authors. All rights reserved. MIT license.
export * from "./deferred";
export * from "./mux_async_iterator";
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/deno-001/simple-2/input/async/mux_async_iterator.ts | TypeScript | import { deferred } from "./deferred";
export class MuxAsyncIterator<T> implements AsyncIterable<T> {
private signal = deferred();
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/deno-001/simple-2/input/entry.js | JavaScript | import { listenAndServe } from "./http/server";
listenAndServe({ port: 8080 }, async (req) => {});
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/deno-001/simple-2/input/http/_io.ts | TypeScript | import { ServerRequest } from "./server";
console.log(ServerRequest);
export function emptyReader(): Deno.Reader {}
export function bodyReader(contentLength: number, r: BufReader): Deno.Reader {}
export function chunkedBodyReader(h: Headers, r: BufReader): Deno.Reader {}
export async function readTrailers(
headers: Headers,
r: BufReader
): Promise<void> {}
export async function writeChunkedBody(
w: Deno.Writer,
r: Deno.Reader
): Promise<void> {}
/** Write trailer headers to writer. It should mostly should be called after
* `writeResponse()`. */
export async function writeTrailers(
w: Deno.Writer,
headers: Headers,
trailers: Headers
): Promise<void> {}
export async function writeResponse(
w: Deno.Writer,
r: Response
): Promise<void> {}
/**
* ParseHTTPVersion parses a HTTP version string.
* "HTTP/1.0" returns (1, 0).
* Ported from https://github.com/golang/go/blob/f5c43b9/src/net/http/request.go#L766-L792
*/
export function parseHTTPVersion(vers: string): [number, number] {}
export async function readRequest(
conn: Deno.Conn,
bufr: BufReader
): Promise<ServerRequest | null> {}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/deno-001/simple-2/input/http/server.ts | TypeScript | // Copyright 2018-2020 the Deno authors. All rights reserved. MIT license.
import { deferred, Deferred, MuxAsyncIterator } from "../async/mod";
import { writeResponse, readRequest } from "./_io";
console.log(deferred, writeResponse, readRequest, MuxAsyncIterator);
export class ServerRequest {
done: Deferred<Error | undefined> = deferred();
}
export class Server implements AsyncIterable<ServerRequest> {}
export type HTTPOptions = Omit<Deno.ListenOptions, "transport">;
export function _parseAddrFromStr(addr: string): HTTPOptions {}
export function serve(addr: string | HTTPOptions): Server {}
export async function listenAndServe(
addr: string | HTTPOptions,
handler: (req: ServerRequest) => void
): Promise<void> {}
export type HTTPSOptions = Omit<Deno.ListenTlsOptions, "transport">;
export function serveTLS(options: HTTPSOptions): Server {}
export async function listenAndServeTLS(
options: HTTPSOptions,
handler: (req: ServerRequest) => void
): Promise<void> {}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/deno-001/simple-2/input/textproto/mod.ts | TypeScript | // Based on https://github.com/golang/go/tree/master/src/net/textproto
// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// FROM https://github.com/denoland/deno/blob/b34628a26ab0187a827aa4ebe256e23178e25d39/cli/js/web/headers.ts#L9
export class TextProtoReader {}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/deno-001/simple-2/output/entry.js | JavaScript | function _define_property(obj, key, value) {
if (key in obj) {
Object.defineProperty(obj, key, {
value: value,
enumerable: true,
configurable: true,
writable: true
});
} else {
obj[key] = value;
}
return obj;
}
function deferred() {}
class MuxAsyncIterator {
constructor(){
_define_property(this, "signal", deferred());
}
}
class ServerRequest {
constructor(){
_define_property(this, "done", deferred());
}
}
console.log(ServerRequest);
async function writeResponse(w, r) {}
async function readRequest(conn, bufr) {}
console.log(deferred, writeResponse, readRequest, MuxAsyncIterator);
async function listenAndServe(addr, handler) {}
listenAndServe({
port: 8080
}, async (req)=>{});
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/deno-001/simple-3/input/async/deferred.ts | TypeScript | export function deferred<T>(): Deferred<T> {}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/deno-001/simple-3/input/async/mod.ts | TypeScript | // Copyright 2018-2020 the Deno authors. All rights reserved. MIT license.
export * from "./deferred";
export * from "./mux_async_iterator";
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/deno-001/simple-3/input/async/mux_async_iterator.ts | TypeScript | import { deferred } from "./deferred";
export class MuxAsyncIterator<T> implements AsyncIterable<T> {
private signal = deferred();
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/deno-001/simple-3/input/entry.js | JavaScript | import { deferred, MuxAsyncIterator } from "./async/mod";
console.log(deferred, MuxAsyncIterator);
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/deno-001/simple-3/output/entry.js | JavaScript | function _define_property(obj, key, value) {
if (key in obj) {
Object.defineProperty(obj, key, {
value: value,
enumerable: true,
configurable: true,
writable: true
});
} else {
obj[key] = value;
}
return obj;
}
function deferred() {}
class MuxAsyncIterator {
constructor(){
_define_property(this, "signal", deferred());
}
}
console.log(deferred, MuxAsyncIterator);
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/deno-001/simple-4/input/async/deferred.ts | TypeScript | export function deferred<T>(): Deferred<T> {}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/deno-001/simple-4/input/async/mod.ts | TypeScript | export { deferred } from "./deferred";
export { MuxAsyncIterator } from "./mux_async_iterator";
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/deno-001/simple-4/input/async/mux_async_iterator.ts | TypeScript | import { deferred } from "./deferred";
export class MuxAsyncIterator<T> implements AsyncIterable<T> {
private signal = deferred();
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/deno-001/simple-4/input/entry.js | JavaScript | import { deferred, MuxAsyncIterator } from "./async/mod";
console.log(deferred, MuxAsyncIterator);
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/deno-001/simple-4/output/entry.js | JavaScript | function _define_property(obj, key, value) {
if (key in obj) {
Object.defineProperty(obj, key, {
value: value,
enumerable: true,
configurable: true,
writable: true
});
} else {
obj[key] = value;
}
return obj;
}
function deferred() {}
class MuxAsyncIterator {
constructor(){
_define_property(this, "signal", deferred());
}
}
console.log(deferred, MuxAsyncIterator);
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/deno-001/simple-5/input/async/deferred.ts | TypeScript | export function deferred<T>(): Deferred<T> {}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/deno-001/simple-5/input/async/mod.ts | TypeScript | export { deferred } from "./deferred";
export { MuxAsyncIterator } from "./mux_async_iterator";
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/deno-001/simple-5/input/async/mux_async_iterator.ts | TypeScript | import { deferred } from "./deferred";
export function MuxAsyncIterator() {}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/deno-001/simple-5/input/entry.js | JavaScript | import { deferred, MuxAsyncIterator } from "./async/mod";
console.log(deferred, MuxAsyncIterator);
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/deno-001/simple-5/output/entry.js | JavaScript | function deferred() {}
function MuxAsyncIterator() {}
console.log(deferred, MuxAsyncIterator);
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/deno-002/.full/input/async/deferred.ts | TypeScript | // Copyright 2018-2020 the Deno authors. All rights reserved. MIT license.
// TODO(ry) It'd be better to make Deferred a class that inherits from
// Promise, rather than an interface. This is possible in ES2016, however
// typescript produces broken code when targeting ES5 code.
// See https://github.com/Microsoft/TypeScript/issues/15202
// At the time of writing, the github issue is closed but the problem remains.
export interface Deferred<T> extends Promise<T> {
resolve: (value?: T | PromiseLike<T>) => void;
// eslint-disable-next-line @typescript-eslint/no-explicit-any
reject: (reason?: any) => void;
}
/** Creates a Promise with the `reject` and `resolve` functions
* placed as methods on the promise object itself. It allows you to do:
*
* const p = deferred<number>();
* // ...
* p.resolve(42);
*/
export function deferred<T>(): Deferred<T> {
let methods;
const promise = new Promise<T>((resolve, reject): void => {
methods = { resolve, reject };
});
return Object.assign(promise, methods) as Deferred<T>;
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/deno-002/.full/input/async/delay.ts | TypeScript | // Copyright 2018-2020 the Deno authors. All rights reserved. MIT license.
/* Resolves after the given number of milliseconds. */
export function delay(ms: number): Promise<void> {
return new Promise((res): number =>
setTimeout((): void => {
res();
}, ms)
);
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/deno-002/.full/input/async/mod.ts | TypeScript | // Copyright 2018-2020 the Deno authors. All rights reserved. MIT license.
export * from "./deferred";
export * from "./delay";
export * from "./mux_async_iterator";
export * from "./pool";
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/deno-002/.full/input/async/mux_async_iterator.ts | TypeScript | // Copyright 2018-2020 the Deno authors. All rights reserved. MIT license.
import { Deferred, deferred } from "./deferred.ts";
interface TaggedYieldedValue<T> {
iterator: AsyncIterableIterator<T>;
value: T;
}
/** The MuxAsyncIterator class multiplexes multiple async iterators into a
* single stream. It currently makes an assumption:
* - The final result (the value returned and not yielded from the iterator)
* does not matter; if there is any, it is discarded.
*/
export class MuxAsyncIterator<T> implements AsyncIterable<T> {
private iteratorCount = 0;
private yields: Array<TaggedYieldedValue<T>> = [];
// eslint-disable-next-line @typescript-eslint/no-explicit-any
private throws: any[] = [];
private signal: Deferred<void> = deferred();
add(iterator: AsyncIterableIterator<T>): void {
++this.iteratorCount;
this.callIteratorNext(iterator);
}
private async callIteratorNext(
iterator: AsyncIterableIterator<T>
): Promise<void> {
try {
const { value, done } = await iterator.next();
if (done) {
--this.iteratorCount;
} else {
this.yields.push({ iterator, value });
}
} catch (e) {
this.throws.push(e);
}
this.signal.resolve();
}
async *iterate(): AsyncIterableIterator<T> {
while (this.iteratorCount > 0) {
// Sleep until any of the wrapped iterators yields.
await this.signal;
// Note that while we're looping over `yields`, new items may be added.
for (let i = 0; i < this.yields.length; i++) {
const { iterator, value } = this.yields[i];
yield value;
this.callIteratorNext(iterator);
}
if (this.throws.length) {
for (const e of this.throws) {
throw e;
}
this.throws.length = 0;
}
// Clear the `yields` list and reset the `signal` promise.
this.yields.length = 0;
this.signal = deferred();
}
}
[Symbol.asyncIterator](): AsyncIterableIterator<T> {
return this.iterate();
}
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/deno-002/.full/input/async/pool.ts | TypeScript | // Copyright 2018-2020 the Deno authors. All rights reserved. MIT license.
/**
* pooledMap transforms values from an (async) iterable into another async
* iterable. The transforms are done concurrently, with a max concurrency
* defined by the poolLimit.
*
* @param poolLimit The maximum count of items being processed concurrently.
* @param array The input array for mapping.
* @param iteratorFn The function to call for every item of the array.
*/
export function pooledMap<T, R>(
poolLimit: number,
array: Iterable<T> | AsyncIterable<T>,
iteratorFn: (data: T) => Promise<R>
): AsyncIterableIterator<R> {
// Create the async iterable that is returned from this function.
const res = new TransformStream<Promise<R>, R>({
async transform(
p: Promise<R>,
controller: TransformStreamDefaultController<R>
): Promise<void> {
controller.enqueue(await p);
},
});
// Start processing items from the iterator
(async (): Promise<void> => {
const writer = res.writable.getWriter();
const executing: Array<Promise<unknown>> = [];
for await (const item of array) {
const p = Promise.resolve().then(() => iteratorFn(item));
writer.write(p);
const e: Promise<unknown> = p.then(() =>
executing.splice(executing.indexOf(e), 1)
);
executing.push(e);
if (executing.length >= poolLimit) {
await Promise.race(executing);
}
}
// Wait until all ongoing events have processed, then close the writer.
await Promise.all(executing);
writer.close();
})();
return res.readable.getIterator();
}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/deno-002/.full/input/entry.js | JavaScript | export * from "./async/mod";
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/deno-002/.full/output/entry.js | JavaScript | // Copyright 2018-2020 the Deno authors. All rights reserved. MIT license.
/* Resolves after the given number of milliseconds. */ export function delay(ms) {
return new Promise((res)=>setTimeout(()=>{
res();
}, ms)
);
}
function deferred1() {
let methods;
const promise = new Promise((resolve, reject)=>{
});
return Object.assign(promise, methods);
}
var tmp = Symbol.asyncIterator;
/** The MuxAsyncIterator class multiplexes multiple async iterators into a
* single stream. It currently makes an assumption:
* - The final result (the value returned and not yielded from the iterator)
* does not matter; if there is any, it is discarded.
*/ export class MuxAsyncIterator {
add(iterator) {
++this.iteratorCount;
this.callIteratorNext(iterator);
}
async callIteratorNext(iterator) {
try {
const { value , done } = await iterator.next();
if (done) --this.iteratorCount;
else this.yields.push({
iterator,
value
});
} catch (e) {
this.throws.push(e);
}
this.signal.resolve();
}
async *iterate() {
while(this.iteratorCount > 0){
// Sleep until any of the wrapped iterators yields.
await this.signal;
// Note that while we're looping over `yields`, new items may be added.
for(let i = 0; i < this.yields.length; i++){
const { iterator , value } = this.yields[i];
yield value;
this.callIteratorNext(iterator);
}
if (this.throws.length) {
for (const e of this.throws)throw e;
this.throws.length = 0;
}
// Clear the `yields` list and reset the `signal` promise.
this.yields.length = 0;
this.signal = deferred1();
}
}
[tmp]() {
return this.iterate();
}
constructor(){
this.iteratorCount = 0;
this.yields = [];
this.throws = [];
this.signal = deferred1();
}
}
// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license.
/**
* pooledMap transforms values from an (async) iterable into another async
* iterable. The transforms are done concurrently, with a max concurrency
* defined by the poolLimit.
*
* @param poolLimit The maximum count of items being processed concurrently.
* @param array The input array for mapping.
* @param iteratorFn The function to call for every item of the array.
*/ export function pooledMap(poolLimit, array, iteratorFn) {
// Create the async iterable that is returned from this function.
const res = new TransformStream({
async transform (p, controller) {
controller.enqueue(await p);
}
});
// Start processing items from the iterator
(async ()=>{
const writer = res.writable.getWriter();
const executing = [];
for await (const item of array){
const p = Promise.resolve().then(()=>iteratorFn(item)
);
writer.write(p);
const e = p.then(()=>executing.splice(executing.indexOf(e), 1)
);
executing.push(e);
if (executing.length >= poolLimit) await Promise.race(executing);
}
// Wait until all ongoing events have processed, then close the writer.
await Promise.all(executing);
writer.close();
})();
return res.readable.getIterator();
}
export { deferred1 as deferred };
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/drop-unused/export/default-mixed/input/a.js | JavaScript | export const foo = 1;
export const bar = 2;
export const baz = 3;
export default foo;
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/drop-unused/export/default-mixed/input/entry.js | JavaScript | import foo from "./a";
export { foo };
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/drop-unused/export/default-mixed/output/entry.js | JavaScript | const foo = 1;
export { foo as foo };
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/drop-unused/export/named/input/entry.js | JavaScript | const foo = "a",
bar = "v";
export { foo };
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/drop-unused/export/named/output/entry.js | JavaScript | const foo = "a";
export { foo as foo };
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/drop-unused/side-effect/import-multi/input/a.js | JavaScript | export function a() {}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/drop-unused/side-effect/import-multi/input/b.js | JavaScript | export function b() {}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/drop-unused/side-effect/import-multi/input/entry.js | JavaScript | import { a } from "./a";
import { b } from "./b";
console.log(a, b);
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/drop-unused/side-effect/import-multi/output/entry.js | JavaScript | function a() {}
function b() {}
console.log(a, b);
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/drop-unused/side-effect/simple/input/entry.js | JavaScript | const a = 1,
b = 2,
c = a;
console.log(c);
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/drop-unused/side-effect/simple/output/entry.js | JavaScript | const a = 1, c = a;
console.log(c);
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/dynamic-import/namespace/dynamic-key/input/entry.js | JavaScript | function foo() {
return Math.random() > 0.5 ? "a" : "b";
}
import(`./lib/${foo()}`);
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/dynamic-import/namespace/dynamic-key/input/lib/a.js | JavaScript | export default "a";
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/dynamic-import/namespace/dynamic-key/input/lib/b.js | JavaScript | export default "b";
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/dynamic-import/namespace/dynamic-key/input/lib/nop.js | JavaScript | export default "unused";
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/dynamic-import/namespace/dynamic-key/output/entry.js | JavaScript | function foo() {
return Math.random() > 0.5 ? "a" : "b";
}
import(`./lib/${foo()}`);
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/dynmaic-imports/issue-1112/simple/input/a.js | JavaScript | export const a = 1;
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/dynmaic-imports/issue-1112/simple/input/entry.js | JavaScript | const a = await import("./a.ts");
console.log(a);
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/dynmaic-imports/issue-1112/simple/output/entry.js | JavaScript | const a = await import("./a.ts");
console.log(a);
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/export-all/input/a.js | JavaScript | export const DEBUG = true;
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/export-all/input/b.js | JavaScript | export class B {}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/export-all/input/entry.js | JavaScript | export * from "./a";
export * from "./b";
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/export-all/output/entry.js | JavaScript | const DEBUG = true;
export { DEBUG as DEBUG };
class B {
}
export { B as B };
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/export-star-namespace/issue-1109/input/a.ts | TypeScript | export const a = 1;
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/export-star-namespace/issue-1109/input/entry.js | JavaScript | export * as a from "./a";
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/export-star-namespace/issue-1109/output/entry.js | JavaScript | var a = 1;
const mod = {
a: a
};
export { mod as a };
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/export/all-1/multiple/input/a.js | JavaScript | export const [a, b, c] = [1, 2, 3];
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/export/all-1/multiple/input/b.js | JavaScript | export const [d, e, f] = [4, 5, 6];
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/export/all-1/multiple/input/entry.js | JavaScript | export * from "./a";
export * from "./b";
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/export/all-1/multiple/output/entry.js | JavaScript | const [a, b, c] = [
1,
2,
3
];
export { a as a, b as b, c as c };
const [d, e, f] = [
4,
5,
6
];
export { d as d, e as e, f as f };
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/export/all-2/input/a.js | JavaScript | import { Root } from "./c";
export class A extends Root {}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/export/all-2/input/b.js | JavaScript | import { Root } from "./c";
export class B extends Root {}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/export/all-2/input/c.js | JavaScript | export class Root {}
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/export/all-2/input/entry.js | JavaScript | export * from "./a";
export * from "./b";
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/export/all-2/output/entry.js | JavaScript | class Root {
}
class A extends Root {
}
export { A as A };
class B extends Root {
}
export { B as B };
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/export/all-nested-1/input/a.js | JavaScript | export * from "./b";
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/export/all-nested-1/input/b.js | JavaScript | export * from "./c";
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/export/all-nested-1/input/c.js | JavaScript | export const a = 1;
export const b = 2;
export const c = 3;
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/export/all-nested-1/input/entry.js | JavaScript | export * from "./a";
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/export/all-nested-1/output/entry.js | JavaScript | const a = 1;
const b = 2;
const c = 3;
export { a as a };
export { b as b };
export { c as c };
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/export/complex-1/input/a.js | JavaScript | import { b } from "./b";
export const a = "1";
console.log(b);
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/export/complex-1/input/b.js | JavaScript | export const b = "1";
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/export/complex-1/input/entry.js | JavaScript | export { a } from "./a";
export { b } from "./b";
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/export/complex-1/output/entry.js | JavaScript | const b = "1";
const a = "1";
console.log(b);
export { a as a };
export { b as b };
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/export/issue-1111/simple/input/a.js | JavaScript | export const a = "a";
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University | |
crates/swc_node_bundler/tests/pass/export/issue-1111/simple/input/d.js | JavaScript | import { a } from "./a";
export const d = { a };
| willcrichton/ilc-swc | 1 | Rust | willcrichton | Will Crichton | Brown University |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.