import { InvariantError } from '../../shared/lib/invariant-error'
import { createAtomicTimerGroup } from './app-render-scheduling'
import {
DANGEROUSLY_runPendingImmediatesAfterCurrentTask,
expectNoPendingImmediates,
} from '../node-environment-extensions/fast-set-immediate.external'
/**
 * Utility for scheduling two sequential tasks that run back to back.
 * Both callbacks are registered on the same queue (setTimeout) up front so
 * that no other events can sneak in between them.
 */
export function prerenderAndAbortInSequentialTasks<R>(
  prerender: () => Promise<R>,
  abort: () => void
): Promise<R> {
  if (process.env.NEXT_RUNTIME === 'edge') {
    throw new InvariantError(
      '`prerenderAndAbortInSequentialTasks` should not be called in edge runtime.'
    )
  }
  return new Promise((resolve, reject) => {
    const scheduleTimeout = createAtomicTimerGroup()
    // Written by the first task, read by the second.
    let resultPromise: Promise<R>

    // Task 1: start the prerender.
    scheduleTimeout(() => {
      try {
        DANGEROUSLY_runPendingImmediatesAfterCurrentTask()
        resultPromise = prerender()
        // Suppress "unhandled rejection" noise; the caller observes any
        // rejection via the promise we resolve with in the second task.
        resultPromise.catch(() => {})
      } catch (err) {
        reject(err)
      }
    })

    // Task 2: abort the render and hand back the (possibly rejected) result.
    scheduleTimeout(() => {
      try {
        expectNoPendingImmediates()
        abort()
        resolve(resultPromise)
      } catch (err) {
        reject(err)
      }
    })
  })
}
/**
 * Like `prerenderAndAbortInSequentialTasks`, but with another task between
 * `prerender` and `abort`, which allows us to move a part of the render into
 * a separate task.
 */
export function prerenderAndAbortInSequentialTasksWithStages<R>(
  prerender: () => Promise<R>,
  advanceStage: () => void,
  abort: () => void
): Promise<R> {
  if (process.env.NEXT_RUNTIME === 'edge') {
    throw new InvariantError(
      '`prerenderAndAbortInSequentialTasksWithStages` should not be called in edge runtime.'
    )
  }
  return new Promise((resolve, reject) => {
    const scheduleTimeout = createAtomicTimerGroup()
    // Written by the first task, read by the final task.
    let resultPromise: Promise<R>

    // Task 1: start the prerender.
    scheduleTimeout(() => {
      try {
        DANGEROUSLY_runPendingImmediatesAfterCurrentTask()
        resultPromise = prerender()
        // Suppress "unhandled rejection" noise; the caller observes any
        // rejection via the promise we resolve with in the final task.
        resultPromise.catch(() => {})
      } catch (err) {
        reject(err)
      }
    })

    // Task 2: advance the render to its next stage.
    scheduleTimeout(() => {
      try {
        DANGEROUSLY_runPendingImmediatesAfterCurrentTask()
        advanceStage()
      } catch (err) {
        reject(err)
      }
    })

    // Task 3: abort the render and hand back the (possibly rejected) result.
    scheduleTimeout(() => {
      try {
        expectNoPendingImmediates()
        abort()
        resolve(resultPromise)
      } catch (err) {
        reject(err)
      }
    })
  })
}
// React's RSC prerender function will emit an incomplete flight stream when using `prerender`. If the connection
// closes then whatever hanging chunks exist will be errored. This is because prerender (an experimental feature)
// has not yet implemented a concept of resume. For now we will simulate a paused connection by wrapping the stream
// in one that doesn't close even when the underlying is complete.
export class ReactServerResult {
  // Underlying flight stream; null once consumed.
  private _stream: null | ReadableStream<Uint8Array>

  constructor(stream: ReadableStream<Uint8Array>) {
    this._stream = stream
  }

  /**
   * Splits the underlying stream, retaining one branch for this result and
   * returning the other. Throws if the result was already consumed.
   */
  tee(): ReadableStream<Uint8Array> {
    const current = this._stream
    if (current === null) {
      throw new Error(
        'Cannot tee a ReactServerResult that has already been consumed'
      )
    }
    const [retained, forked] = current.tee()
    this._stream = retained
    return forked
  }

  /**
   * Hands over the underlying stream and marks this result as consumed.
   * Throws if the result was already consumed.
   */
  consume(): ReadableStream<Uint8Array> {
    const current = this._stream
    if (current === null) {
      throw new Error(
        'Cannot consume a ReactServerResult that has already been consumed'
      )
    }
    this._stream = null
    return current
  }
}
// Shape of the value React's experimental `prerender` resolves to: the
// `prelude` carries the (possibly incomplete) flight stream.
export type ReactServerPrerenderResolveToType = {
prelude: ReadableStream<Uint8Array>
}
export async function createReactServerPrerenderResult(
underlying: Promise<ReactServerPrerenderResolveToType>
): Promise<ReactServerPrerenderResult> {
const chunks: Array<Uint8Array> = []
const { prelude } = await underlying
const reader = prelude.getReader()
while (true) {
const { done, value } = await reader.read()
if (done) {
return new ReactServerPrerenderResult(chunks)
} else {
chunks.push(value)
}
}
}
/**
 * Buffers an already-started render stream fully into memory as a
 * `ReactServerPrerenderResult`.
 *
 * @param underlying - the flight stream to drain
 * @returns the buffered prerender result
 */
export async function createReactServerPrerenderResultFromRender(
  underlying: ReadableStream<Uint8Array>
): Promise<ReactServerPrerenderResult> {
  const reader = underlying.getReader()
  const collected: Array<Uint8Array> = []
  for (;;) {
    const next = await reader.read()
    if (next.done) {
      break
    }
    collected.push(next.value)
  }
  return new ReactServerPrerenderResult(collected)
}
export class ReactServerPrerenderResult {
  // Buffered flight chunks; null once consumed.
  private _chunks: null | Array<Uint8Array>

  constructor(chunks: Array<Uint8Array>) {
    this._chunks = chunks
  }

  // Returns the chunks or throws if this result was already consumed.
  // `expression` names the public method for the error message.
  private assertChunks(expression: string): Array<Uint8Array> {
    const chunks = this._chunks
    if (chunks === null) {
      throw new InvariantError(
        `Cannot \`${expression}\` on a ReactServerPrerenderResult that has already been consumed.`
      )
    }
    return chunks
  }

  // Like `assertChunks`, but also marks this result as consumed.
  private consumeChunks(expression: string): Array<Uint8Array> {
    const chunks = this.assertChunks(expression)
    this.consume()
    return chunks
  }

  // Marks this result as consumed; later accessors will throw.
  consume(): void {
    this._chunks = null
  }

  // Stream the chunks without ever closing the stream (simulates a paused
  // connection); does not consume this result.
  asUnclosingStream(): ReadableStream<Uint8Array> {
    return createUnclosingStream(this.assertChunks('asUnclosingStream()'))
  }

  // Same as `asUnclosingStream`, but consumes this result.
  consumeAsUnclosingStream(): ReadableStream<Uint8Array> {
    return createUnclosingStream(
      this.consumeChunks('consumeAsUnclosingStream()')
    )
  }

  // Stream the chunks and close normally; does not consume this result.
  asStream(): ReadableStream<Uint8Array> {
    return createClosingStream(this.assertChunks('asStream()'))
  }

  // Same as `asStream`, but consumes this result.
  consumeAsStream(): ReadableStream<Uint8Array> {
    return createClosingStream(this.consumeChunks('consumeAsStream()'))
  }
}
// Streams the given chunks but intentionally never closes. The consumer will
// clear out chunks once finished and the remaining memory will be GC'd when
// this object goes out of scope.
function createUnclosingStream(
  chunks: Array<Uint8Array>
): ReadableStream<Uint8Array> {
  let nextIndex = 0
  return new ReadableStream({
    async pull(controller) {
      if (nextIndex < chunks.length) {
        controller.enqueue(chunks[nextIndex])
        nextIndex += 1
      }
      // When exhausted we do nothing: the stream stays open on purpose.
    },
  })
}
// Streams the given chunks and closes normally once they are exhausted.
function createClosingStream(
  chunks: Array<Uint8Array>
): ReadableStream<Uint8Array> {
  let nextIndex = 0
  return new ReadableStream({
    async pull(controller) {
      if (nextIndex >= chunks.length) {
        controller.close()
        return
      }
      controller.enqueue(chunks[nextIndex])
      nextIndex += 1
    },
  })
}
/**
 * Tees the prelude stream and peeks at its first chunk to determine whether
 * the prelude is empty, without consuming the returned `prelude` branch.
 *
 * @param unprocessedPrelude - the raw prelude stream from the prerender
 * @returns the untouched `prelude` branch and an emptiness flag
 */
export async function processPrelude(
  unprocessedPrelude: ReadableStream<Uint8Array>
) {
  const [prelude, peek] = unprocessedPrelude.tee()
  const reader = peek.getReader()
  const firstResult = await reader.read()
  // Intentionally NOT awaited: per the Streams spec, cancelling one branch of
  // a teed stream only settles once the other branch is cancelled too, so
  // awaiting here could stall until `prelude` is consumed. We attach a no-op
  // catch so a cancellation failure never surfaces as an unhandled rejection.
  void reader.cancel().catch(() => {})
  const preludeIsEmpty = firstResult.done === true
  return { prelude, preludeIsEmpty }
}