Lukian 2023-06-20 15:25:19 +02:00
parent 13ec9babde
commit 68f4b60012
1429 changed files with 2481 additions and 272836 deletions

@ -3,10 +3,10 @@
const Readable = require('./readable')
const {
InvalidArgumentError,
RequestAbortedError,
ResponseStatusCodeError
RequestAbortedError
} = require('../core/errors')
const util = require('../core/util')
const { getResolveErrorBodyCallback } = require('./util')
const { AsyncResource } = require('async_hooks')
const { addSignal, removeSignal } = require('./abort-signal')
@ -16,13 +16,17 @@ class RequestHandler extends AsyncResource {
throw new InvalidArgumentError('invalid opts')
}
const { signal, method, opaque, body, onInfo, responseHeaders, throwOnError } = opts
const { signal, method, opaque, body, onInfo, responseHeaders, throwOnError, highWaterMark } = opts
try {
if (typeof callback !== 'function') {
throw new InvalidArgumentError('invalid callback')
}
if (highWaterMark && (typeof highWaterMark !== 'number' || highWaterMark < 0)) {
throw new InvalidArgumentError('invalid highWaterMark')
}
if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') {
throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget')
}
@ -53,6 +57,7 @@ class RequestHandler extends AsyncResource {
this.context = null
this.onInfo = onInfo || null
this.throwOnError = throwOnError
this.highWaterMark = highWaterMark
if (util.isStream(body)) {
body.on('error', (err) => {
@ -73,40 +78,39 @@ class RequestHandler extends AsyncResource {
}
onHeaders (statusCode, rawHeaders, resume, statusMessage) {
const { callback, opaque, abort, context } = this
const { callback, opaque, abort, context, responseHeaders, highWaterMark } = this
const headers = responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders)
if (statusCode < 200) {
if (this.onInfo) {
const headers = this.responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders)
this.onInfo({ statusCode, headers })
}
return
}
const parsedHeaders = util.parseHeaders(rawHeaders)
const parsedHeaders = responseHeaders === 'raw' ? util.parseHeaders(rawHeaders) : headers
const contentType = parsedHeaders['content-type']
const body = new Readable(resume, abort, contentType)
const body = new Readable({ resume, abort, contentType, highWaterMark })
this.callback = null
this.res = body
const headers = this.responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders)
if (callback !== null) {
if (this.throwOnError && statusCode >= 400) {
this.runInAsyncScope(getResolveErrorBodyCallback, null,
{ callback, body, contentType, statusCode, statusMessage, headers }
)
return
} else {
this.runInAsyncScope(callback, null, null, {
statusCode,
headers,
trailers: this.trailers,
opaque,
body,
context
})
}
this.runInAsyncScope(callback, null, null, {
statusCode,
headers,
trailers: this.trailers,
opaque,
body,
context
})
}
}
@ -153,33 +157,6 @@ class RequestHandler extends AsyncResource {
}
}
async function getResolveErrorBodyCallback ({ callback, body, contentType, statusCode, statusMessage, headers }) {
if (statusCode === 204 || !contentType) {
body.dump()
process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? `: ${statusMessage}` : ''}`, statusCode, headers))
return
}
try {
if (contentType.startsWith('application/json')) {
const payload = await body.json()
process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? `: ${statusMessage}` : ''}`, statusCode, headers, payload))
return
}
if (contentType.startsWith('text/')) {
const payload = await body.text()
process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? `: ${statusMessage}` : ''}`, statusCode, headers, payload))
return
}
} catch (err) {
// Process in a fallback if error
}
body.dump()
process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? `: ${statusMessage}` : ''}`, statusCode, headers))
}
function request (opts, callback) {
if (callback === undefined) {
return new Promise((resolve, reject) => {

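Usage sketch (not part of the diff): how the highWaterMark option added above is expected to surface through undici's promise API, assuming the top-level request() forwards its options to RequestHandler as this file does. The URL is a placeholder.

const { request } = require('undici')

async function main () {
  const { statusCode, body } = await request('https://example.com', {
    method: 'GET',
    highWaterMark: 128 * 1024, // must be a non-negative number; the body default stays 64 KiB
    throwOnError: true // statuses >= 400 reject with a ResponseStatusCodeError instead of resolving
  })
  console.log(statusCode)
  for await (const chunk of body) {
    console.log(chunk.length) // chunks buffer according to highWaterMark
  }
}

main().catch(console.error)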
@ -1,12 +1,13 @@
'use strict'
const { finished } = require('stream')
const { finished, PassThrough } = require('stream')
const {
InvalidArgumentError,
InvalidReturnValueError,
RequestAbortedError
} = require('../core/errors')
const util = require('../core/util')
const { getResolveErrorBodyCallback } = require('./util')
const { AsyncResource } = require('async_hooks')
const { addSignal, removeSignal } = require('./abort-signal')
@ -16,7 +17,7 @@ class StreamHandler extends AsyncResource {
throw new InvalidArgumentError('invalid opts')
}
const { signal, method, opaque, body, onInfo, responseHeaders } = opts
const { signal, method, opaque, body, onInfo, responseHeaders, throwOnError } = opts
try {
if (typeof callback !== 'function') {
@ -57,6 +58,7 @@ class StreamHandler extends AsyncResource {
this.trailers = null
this.body = body
this.onInfo = onInfo || null
this.throwOnError = throwOnError || false
if (util.isStream(body)) {
body.on('error', (err) => {
@ -76,52 +78,67 @@ class StreamHandler extends AsyncResource {
this.context = context
}
onHeaders (statusCode, rawHeaders, resume) {
const { factory, opaque, context } = this
onHeaders (statusCode, rawHeaders, resume, statusMessage) {
const { factory, opaque, context, callback, responseHeaders } = this
const headers = responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders)
if (statusCode < 200) {
if (this.onInfo) {
const headers = this.responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders)
this.onInfo({ statusCode, headers })
}
return
}
this.factory = null
const headers = this.responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders)
const res = this.runInAsyncScope(factory, null, {
statusCode,
headers,
opaque,
context
})
if (
!res ||
typeof res.write !== 'function' ||
typeof res.end !== 'function' ||
typeof res.on !== 'function'
) {
throw new InvalidReturnValueError('expected Writable')
let res
if (this.throwOnError && statusCode >= 400) {
const parsedHeaders = responseHeaders === 'raw' ? util.parseHeaders(rawHeaders) : headers
const contentType = parsedHeaders['content-type']
res = new PassThrough()
this.callback = null
this.runInAsyncScope(getResolveErrorBodyCallback, null,
{ callback, body: res, contentType, statusCode, statusMessage, headers }
)
} else {
res = this.runInAsyncScope(factory, null, {
statusCode,
headers,
opaque,
context
})
if (
!res ||
typeof res.write !== 'function' ||
typeof res.end !== 'function' ||
typeof res.on !== 'function'
) {
throw new InvalidReturnValueError('expected Writable')
}
// TODO: Avoid finished. It registers an unnecessary amount of listeners.
finished(res, { readable: false }, (err) => {
const { callback, res, opaque, trailers, abort } = this
this.res = null
if (err || !res.readable) {
util.destroy(res, err)
}
this.callback = null
this.runInAsyncScope(callback, null, err || null, { opaque, trailers })
if (err) {
abort()
}
})
}
res.on('drain', resume)
// TODO: Avoid finished. It registers an unnecessary amount of listeners.
finished(res, { readable: false }, (err) => {
const { callback, res, opaque, trailers, abort } = this
this.res = null
if (err || !res.readable) {
util.destroy(res, err)
}
this.callback = null
this.runInAsyncScope(callback, null, err || null, { opaque, trailers })
if (err) {
abort()
}
})
this.res = res

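Usage sketch (not part of the diff): with the throwOnError support added above, stream() no longer calls the factory for error statuses; it drains the body through a PassThrough and rejects with the resolved error body instead. URL and file path are placeholders.

const { stream } = require('undici')
const { createWriteStream } = require('fs')

async function download () {
  await stream(
    'https://example.com/report.json',
    { method: 'GET', throwOnError: true },
    // only invoked for non-error responses when throwOnError is set
    ({ statusCode, headers }) => createWriteStream('/tmp/report.json')
  )
}

download().catch(console.error)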
@ -4,7 +4,7 @@
const assert = require('assert')
const { Readable } = require('stream')
const { RequestAbortedError, NotSupportedError } = require('../core/errors')
const { RequestAbortedError, NotSupportedError, InvalidArgumentError } = require('../core/errors')
const util = require('../core/util')
const { ReadableStreamFrom, toUSVString } = require('../core/util')
@ -17,11 +17,16 @@ const kAbort = Symbol('abort')
const kContentType = Symbol('kContentType')
module.exports = class BodyReadable extends Readable {
constructor (resume, abort, contentType = '') {
constructor ({
resume,
abort,
contentType = '',
highWaterMark = 64 * 1024 // Same as nodejs fs streams.
}) {
super({
autoDestroy: true,
read: resume,
highWaterMark: 64 * 1024 // Same as nodejs fs streams.
highWaterMark
})
this._readableState.dataEmitted = false
@ -146,15 +151,31 @@ module.exports = class BodyReadable extends Readable {
async dump (opts) {
let limit = opts && Number.isFinite(opts.limit) ? opts.limit : 262144
const signal = opts && opts.signal
const abortFn = () => {
this.destroy()
}
if (signal) {
if (typeof signal !== 'object' || !('aborted' in signal)) {
throw new InvalidArgumentError('signal must be an AbortSignal')
}
util.throwIfAborted(signal)
signal.addEventListener('abort', abortFn, { once: true })
}
try {
for await (const chunk of this) {
util.throwIfAborted(signal)
limit -= Buffer.byteLength(chunk)
if (limit < 0) {
return
}
}
} catch {
// Do nothing...
util.throwIfAborted(signal)
} finally {
if (signal) {
signal.removeEventListener('abort', abortFn)
}
}
}
}
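Usage sketch (not part of the diff): the dump() changes above accept an AbortSignal, so discarding an unwanted body can now be cancelled. Assumes the body comes from undici's request(); the URL is a placeholder.

const { request } = require('undici')

async function discard () {
  const { statusCode, body } = await request('https://example.com/large')
  if (statusCode !== 200) {
    const ac = new AbortController()
    const timer = setTimeout(() => ac.abort(), 1000)
    // drains at most `limit` bytes (default 262144) and throws an AbortError if the signal fires first
    await body.dump({ limit: 64 * 1024, signal: ac.signal })
    clearTimeout(timer)
  }
}

discard().catch(console.error)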

node_modules/undici/lib/api/util.js (generated, vendored, new file, 46 lines)

@ -0,0 +1,46 @@
const assert = require('assert')
const {
ResponseStatusCodeError
} = require('../core/errors')
const { toUSVString } = require('../core/util')
async function getResolveErrorBodyCallback ({ callback, body, contentType, statusCode, statusMessage, headers }) {
assert(body)
let chunks = []
let limit = 0
for await (const chunk of body) {
chunks.push(chunk)
limit += chunk.length
if (limit > 128 * 1024) {
chunks = null
break
}
}
if (statusCode === 204 || !contentType || !chunks) {
process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? `: ${statusMessage}` : ''}`, statusCode, headers))
return
}
try {
if (contentType.startsWith('application/json')) {
const payload = JSON.parse(toUSVString(Buffer.concat(chunks)))
process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? `: ${statusMessage}` : ''}`, statusCode, headers, payload))
return
}
if (contentType.startsWith('text/')) {
const payload = toUSVString(Buffer.concat(chunks))
process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? `: ${statusMessage}` : ''}`, statusCode, headers, payload))
return
}
} catch (err) {
// Process in a fallback if error
}
process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? `: ${statusMessage}` : ''}`, statusCode, headers))
}
module.exports = { getResolveErrorBodyCallback }
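Usage sketch (not part of the diff): how the error body resolved by getResolveErrorBodyCallback typically reaches callers, assuming undici exposes the error class under its errors export. The URL is a placeholder.

const { request, errors } = require('undici')

async function fetchOrFail () {
  try {
    await request('https://example.com/missing', { throwOnError: true })
  } catch (err) {
    if (err instanceof errors.ResponseStatusCodeError) {
      console.log(err.statusCode) // e.g. 404
      console.log(err.headers)
      console.log(err.body) // parsed JSON or text when the content-type allowed it (capped at 128 KiB above)
    }
  }
}

fetchOrFail().catch(console.error)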

node_modules/undici/lib/cache/cache.js (generated, vendored, new file, 842 lines)

@ -0,0 +1,842 @@
'use strict'
const { kConstruct } = require('./symbols')
const { urlEquals, fieldValues: getFieldValues } = require('./util')
const { kEnumerableProperty, isDisturbed } = require('../core/util')
const { kHeadersList } = require('../core/symbols')
const { webidl } = require('../fetch/webidl')
const { Response, cloneResponse } = require('../fetch/response')
const { Request } = require('../fetch/request')
const { kState, kHeaders, kGuard, kRealm } = require('../fetch/symbols')
const { fetching } = require('../fetch/index')
const { urlIsHttpHttpsScheme, createDeferredPromise, readAllBytes } = require('../fetch/util')
const assert = require('assert')
const { getGlobalDispatcher } = require('../global')
/**
* @see https://w3c.github.io/ServiceWorker/#dfn-cache-batch-operation
* @typedef {Object} CacheBatchOperation
* @property {'delete' | 'put'} type
* @property {any} request
* @property {any} response
* @property {import('../../types/cache').CacheQueryOptions} options
*/
/**
* @see https://w3c.github.io/ServiceWorker/#dfn-request-response-list
* @typedef {[any, any][]} requestResponseList
*/
class Cache {
/**
* @see https://w3c.github.io/ServiceWorker/#dfn-relevant-request-response-list
* @type {requestResponseList}
*/
#relevantRequestResponseList
constructor () {
if (arguments[0] !== kConstruct) {
webidl.illegalConstructor()
}
this.#relevantRequestResponseList = arguments[1]
}
async match (request, options = {}) {
webidl.brandCheck(this, Cache)
webidl.argumentLengthCheck(arguments, 1, { header: 'Cache.match' })
request = webidl.converters.RequestInfo(request)
options = webidl.converters.CacheQueryOptions(options)
const p = await this.matchAll(request, options)
if (p.length === 0) {
return
}
return p[0]
}
async matchAll (request = undefined, options = {}) {
webidl.brandCheck(this, Cache)
if (request !== undefined) request = webidl.converters.RequestInfo(request)
options = webidl.converters.CacheQueryOptions(options)
// 1.
let r = null
// 2.
if (request !== undefined) {
if (request instanceof Request) {
// 2.1.1
r = request[kState]
// 2.1.2
if (r.method !== 'GET' && !options.ignoreMethod) {
return []
}
} else if (typeof request === 'string') {
// 2.2.1
r = new Request(request)[kState]
}
}
// 5.
// 5.1
const responses = []
// 5.2
if (request === undefined) {
// 5.2.1
for (const requestResponse of this.#relevantRequestResponseList) {
responses.push(requestResponse[1])
}
} else { // 5.3
// 5.3.1
const requestResponses = this.#queryCache(r, options)
// 5.3.2
for (const requestResponse of requestResponses) {
responses.push(requestResponse[1])
}
}
// 5.4
// We don't implement CORS so we don't need to loop over the responses, yay!
// 5.5.1
const responseList = []
// 5.5.2
for (const response of responses) {
// 5.5.2.1
const responseObject = new Response(response.body?.source ?? null)
const body = responseObject[kState].body
responseObject[kState] = response
responseObject[kState].body = body
responseObject[kHeaders][kHeadersList] = response.headersList
responseObject[kHeaders][kGuard] = 'immutable'
responseList.push(responseObject)
}
// 6.
return Object.freeze(responseList)
}
async add (request) {
webidl.brandCheck(this, Cache)
webidl.argumentLengthCheck(arguments, 1, { header: 'Cache.add' })
request = webidl.converters.RequestInfo(request)
// 1.
const requests = [request]
// 2.
const responseArrayPromise = this.addAll(requests)
// 3.
return await responseArrayPromise
}
async addAll (requests) {
webidl.brandCheck(this, Cache)
webidl.argumentLengthCheck(arguments, 1, { header: 'Cache.addAll' })
requests = webidl.converters['sequence<RequestInfo>'](requests)
// 1.
const responsePromises = []
// 2.
const requestList = []
// 3.
for (const request of requests) {
if (typeof request === 'string') {
continue
}
// 3.1
const r = request[kState]
// 3.2
if (!urlIsHttpHttpsScheme(r.url) || r.method !== 'GET') {
throw webidl.errors.exception({
header: 'Cache.addAll',
message: 'Expected http/s scheme when method is not GET.'
})
}
}
// 4.
/** @type {ReturnType<typeof fetching>[]} */
const fetchControllers = []
// 5.
for (const request of requests) {
// 5.1
const r = new Request(request)[kState]
// 5.2
if (!urlIsHttpHttpsScheme(r.url)) {
throw webidl.errors.exception({
header: 'Cache.addAll',
message: 'Expected http/s scheme.'
})
}
// 5.4
r.initiator = 'fetch'
r.destination = 'subresource'
// 5.5
requestList.push(r)
// 5.6
const responsePromise = createDeferredPromise()
// 5.7
fetchControllers.push(fetching({
request: r,
dispatcher: getGlobalDispatcher(),
processResponse (response) {
// 1.
if (response.type === 'error' || response.status === 206 || response.status < 200 || response.status > 299) {
responsePromise.reject(webidl.errors.exception({
header: 'Cache.addAll',
message: 'Received an invalid status code or the request failed.'
}))
} else if (response.headersList.contains('vary')) { // 2.
// 2.1
const fieldValues = getFieldValues(response.headersList.get('vary'))
// 2.2
for (const fieldValue of fieldValues) {
// 2.2.1
if (fieldValue === '*') {
responsePromise.reject(webidl.errors.exception({
header: 'Cache.addAll',
message: 'invalid vary field value'
}))
for (const controller of fetchControllers) {
controller.abort()
}
return
}
}
}
},
processResponseEndOfBody (response) {
// 1.
if (response.aborted) {
responsePromise.reject(new DOMException('aborted', 'AbortError'))
return
}
// 2.
responsePromise.resolve(response)
}
}))
// 5.8
responsePromises.push(responsePromise.promise)
}
// 6.
const p = Promise.all(responsePromises)
// 7.
const responses = await p
// 7.1
const operations = []
// 7.2
let index = 0
// 7.3
for (const response of responses) {
// 7.3.1
/** @type {CacheBatchOperation} */
const operation = {
type: 'put', // 7.3.2
request: requestList[index], // 7.3.3
response // 7.3.4
}
operations.push(operation) // 7.3.5
index++ // 7.3.6
}
// 7.5
const cacheJobPromise = createDeferredPromise()
// 7.6.1
let errorData = null
// 7.6.2
try {
this.#batchCacheOperations(operations)
} catch (e) {
errorData = e
}
// 7.6.3
queueMicrotask(() => {
// 7.6.3.1
if (errorData === null) {
cacheJobPromise.resolve(undefined)
} else {
// 7.6.3.2
cacheJobPromise.reject(errorData)
}
})
// 7.7
return cacheJobPromise.promise
}
async put (request, response) {
webidl.brandCheck(this, Cache)
webidl.argumentLengthCheck(arguments, 2, { header: 'Cache.put' })
request = webidl.converters.RequestInfo(request)
response = webidl.converters.Response(response)
// 1.
let innerRequest = null
// 2.
if (request instanceof Request) {
innerRequest = request[kState]
} else { // 3.
innerRequest = new Request(request)[kState]
}
// 4.
if (!urlIsHttpHttpsScheme(innerRequest.url) || innerRequest.method !== 'GET') {
throw webidl.errors.exception({
header: 'Cache.put',
message: 'Expected an http/s scheme when method is not GET'
})
}
// 5.
const innerResponse = response[kState]
// 6.
if (innerResponse.status === 206) {
throw webidl.errors.exception({
header: 'Cache.put',
message: 'Got 206 status'
})
}
// 7.
if (innerResponse.headersList.contains('vary')) {
// 7.1.
const fieldValues = getFieldValues(innerResponse.headersList.get('vary'))
// 7.2.
for (const fieldValue of fieldValues) {
// 7.2.1
if (fieldValue === '*') {
throw webidl.errors.exception({
header: 'Cache.put',
message: 'Got * vary field value'
})
}
}
}
// 8.
if (innerResponse.body && (isDisturbed(innerResponse.body.stream) || innerResponse.body.stream.locked)) {
throw webidl.errors.exception({
header: 'Cache.put',
message: 'Response body is locked or disturbed'
})
}
// 9.
const clonedResponse = cloneResponse(innerResponse)
// 10.
const bodyReadPromise = createDeferredPromise()
// 11.
if (innerResponse.body != null) {
// 11.1
const stream = innerResponse.body.stream
// 11.2
const reader = stream.getReader()
// 11.3
readAllBytes(
reader,
(bytes) => bodyReadPromise.resolve(bytes),
(error) => bodyReadPromise.reject(error)
)
} else {
bodyReadPromise.resolve(undefined)
}
// 12.
/** @type {CacheBatchOperation[]} */
const operations = []
// 13.
/** @type {CacheBatchOperation} */
const operation = {
type: 'put', // 14.
request: innerRequest, // 15.
response: clonedResponse // 16.
}
// 17.
operations.push(operation)
// 19.
const bytes = await bodyReadPromise.promise
if (clonedResponse.body != null) {
clonedResponse.body.source = bytes
}
// 19.1
const cacheJobPromise = createDeferredPromise()
// 19.2.1
let errorData = null
// 19.2.2
try {
this.#batchCacheOperations(operations)
} catch (e) {
errorData = e
}
// 19.2.3
queueMicrotask(() => {
// 19.2.3.1
if (errorData === null) {
cacheJobPromise.resolve()
} else { // 19.2.3.2
cacheJobPromise.reject(errorData)
}
})
return cacheJobPromise.promise
}
async delete (request, options = {}) {
webidl.brandCheck(this, Cache)
webidl.argumentLengthCheck(arguments, 1, { header: 'Cache.delete' })
request = webidl.converters.RequestInfo(request)
options = webidl.converters.CacheQueryOptions(options)
/**
* @type {Request}
*/
let r = null
if (request instanceof Request) {
r = request[kState]
if (r.method !== 'GET' && !options.ignoreMethod) {
return false
}
} else {
assert(typeof request === 'string')
r = new Request(request)[kState]
}
/** @type {CacheBatchOperation[]} */
const operations = []
/** @type {CacheBatchOperation} */
const operation = {
type: 'delete',
request: r,
options
}
operations.push(operation)
const cacheJobPromise = createDeferredPromise()
let errorData = null
let requestResponses
try {
requestResponses = this.#batchCacheOperations(operations)
} catch (e) {
errorData = e
}
queueMicrotask(() => {
if (errorData === null) {
cacheJobPromise.resolve(!!requestResponses?.length)
} else {
cacheJobPromise.reject(errorData)
}
})
return cacheJobPromise.promise
}
/**
* @see https://w3c.github.io/ServiceWorker/#dom-cache-keys
* @param {any} request
* @param {import('../../types/cache').CacheQueryOptions} options
* @returns {readonly Request[]}
*/
async keys (request = undefined, options = {}) {
webidl.brandCheck(this, Cache)
if (request !== undefined) request = webidl.converters.RequestInfo(request)
options = webidl.converters.CacheQueryOptions(options)
// 1.
let r = null
// 2.
if (request !== undefined) {
// 2.1
if (request instanceof Request) {
// 2.1.1
r = request[kState]
// 2.1.2
if (r.method !== 'GET' && !options.ignoreMethod) {
return []
}
} else if (typeof request === 'string') { // 2.2
r = new Request(request)[kState]
}
}
// 4.
const promise = createDeferredPromise()
// 5.
// 5.1
const requests = []
// 5.2
if (request === undefined) {
// 5.2.1
for (const requestResponse of this.#relevantRequestResponseList) {
// 5.2.1.1
requests.push(requestResponse[0])
}
} else { // 5.3
// 5.3.1
const requestResponses = this.#queryCache(r, options)
// 5.3.2
for (const requestResponse of requestResponses) {
// 5.3.2.1
requests.push(requestResponse[0])
}
}
// 5.4
queueMicrotask(() => {
// 5.4.1
const requestList = []
// 5.4.2
for (const request of requests) {
const requestObject = new Request('https://a')
requestObject[kState] = request
requestObject[kHeaders][kHeadersList] = request.headersList
requestObject[kHeaders][kGuard] = 'immutable'
requestObject[kRealm] = request.client
// 5.4.2.1
requestList.push(requestObject)
}
// 5.4.3
promise.resolve(Object.freeze(requestList))
})
return promise.promise
}
/**
* @see https://w3c.github.io/ServiceWorker/#batch-cache-operations-algorithm
* @param {CacheBatchOperation[]} operations
* @returns {requestResponseList}
*/
#batchCacheOperations (operations) {
// 1.
const cache = this.#relevantRequestResponseList
// 2.
const backupCache = [...cache]
// 3.
const addedItems = []
// 4.1
const resultList = []
try {
// 4.2
for (const operation of operations) {
// 4.2.1
if (operation.type !== 'delete' && operation.type !== 'put') {
throw webidl.errors.exception({
header: 'Cache.#batchCacheOperations',
message: 'operation type does not match "delete" or "put"'
})
}
// 4.2.2
if (operation.type === 'delete' && operation.response != null) {
throw webidl.errors.exception({
header: 'Cache.#batchCacheOperations',
message: 'delete operation should not have an associated response'
})
}
// 4.2.3
if (this.#queryCache(operation.request, operation.options, addedItems).length) {
throw new DOMException('???', 'InvalidStateError')
}
// 4.2.4
let requestResponses
// 4.2.5
if (operation.type === 'delete') {
// 4.2.5.1
requestResponses = this.#queryCache(operation.request, operation.options)
// TODO: the spec is wrong, this is needed to pass WPTs
if (requestResponses.length === 0) {
return []
}
// 4.2.5.2
for (const requestResponse of requestResponses) {
const idx = cache.indexOf(requestResponse)
assert(idx !== -1)
// 4.2.5.2.1
cache.splice(idx, 1)
}
} else if (operation.type === 'put') { // 4.2.6
// 4.2.6.1
if (operation.response == null) {
throw webidl.errors.exception({
header: 'Cache.#batchCacheOperations',
message: 'put operation should have an associated response'
})
}
// 4.2.6.2
const r = operation.request
// 4.2.6.3
if (!urlIsHttpHttpsScheme(r.url)) {
throw webidl.errors.exception({
header: 'Cache.#batchCacheOperations',
message: 'expected http or https scheme'
})
}
// 4.2.6.4
if (r.method !== 'GET') {
throw webidl.errors.exception({
header: 'Cache.#batchCacheOperations',
message: 'not get method'
})
}
// 4.2.6.5
if (operation.options != null) {
throw webidl.errors.exception({
header: 'Cache.#batchCacheOperations',
message: 'options must not be defined'
})
}
// 4.2.6.6
requestResponses = this.#queryCache(operation.request)
// 4.2.6.7
for (const requestResponse of requestResponses) {
const idx = cache.indexOf(requestResponse)
assert(idx !== -1)
// 4.2.6.7.1
cache.splice(idx, 1)
}
// 4.2.6.8
cache.push([operation.request, operation.response])
// 4.2.6.10
addedItems.push([operation.request, operation.response])
}
// 4.2.7
resultList.push([operation.request, operation.response])
}
// 4.3
return resultList
} catch (e) { // 5.
// 5.1
this.#relevantRequestResponseList.length = 0
// 5.2
this.#relevantRequestResponseList = backupCache
// 5.3
throw e
}
}
/**
* @see https://w3c.github.io/ServiceWorker/#query-cache
* @param {any} requestQuery
* @param {import('../../types/cache').CacheQueryOptions} options
* @param {requestResponseList} targetStorage
* @returns {requestResponseList}
*/
#queryCache (requestQuery, options, targetStorage) {
/** @type {requestResponseList} */
const resultList = []
const storage = targetStorage ?? this.#relevantRequestResponseList
for (const requestResponse of storage) {
const [cachedRequest, cachedResponse] = requestResponse
if (this.#requestMatchesCachedItem(requestQuery, cachedRequest, cachedResponse, options)) {
resultList.push(requestResponse)
}
}
return resultList
}
/**
* @see https://w3c.github.io/ServiceWorker/#request-matches-cached-item-algorithm
* @param {any} requestQuery
* @param {any} request
* @param {any | null} response
* @param {import('../../types/cache').CacheQueryOptions | undefined} options
* @returns {boolean}
*/
#requestMatchesCachedItem (requestQuery, request, response = null, options) {
// if (options?.ignoreMethod === false && request.method === 'GET') {
// return false
// }
const queryURL = new URL(requestQuery.url)
const cachedURL = new URL(request.url)
if (options?.ignoreSearch) {
cachedURL.search = ''
queryURL.search = ''
}
if (!urlEquals(queryURL, cachedURL, true)) {
return false
}
if (
response == null ||
options?.ignoreVary ||
!response.headersList.contains('vary')
) {
return true
}
const fieldValues = getFieldValues(response.headersList.get('vary'))
for (const fieldValue of fieldValues) {
if (fieldValue === '*') {
return false
}
const requestValue = request.headersList.get(fieldValue)
const queryValue = requestQuery.headersList.get(fieldValue)
// If one has the header and the other doesn't, or one has
// a different value than the other, return false
if (requestValue !== queryValue) {
return false
}
}
return true
}
}
Object.defineProperties(Cache.prototype, {
[Symbol.toStringTag]: {
value: 'Cache',
configurable: true
},
match: kEnumerableProperty,
matchAll: kEnumerableProperty,
add: kEnumerableProperty,
addAll: kEnumerableProperty,
put: kEnumerableProperty,
delete: kEnumerableProperty,
keys: kEnumerableProperty
})
const cacheQueryOptionConverters = [
{
key: 'ignoreSearch',
converter: webidl.converters.boolean,
defaultValue: false
},
{
key: 'ignoreMethod',
converter: webidl.converters.boolean,
defaultValue: false
},
{
key: 'ignoreVary',
converter: webidl.converters.boolean,
defaultValue: false
}
]
webidl.converters.CacheQueryOptions = webidl.dictionaryConverter(cacheQueryOptionConverters)
webidl.converters.MultiCacheQueryOptions = webidl.dictionaryConverter([
...cacheQueryOptionConverters,
{
key: 'cacheName',
converter: webidl.converters.DOMString
}
])
webidl.converters.Response = webidl.interfaceConverter(Response)
webidl.converters['sequence<RequestInfo>'] = webidl.sequenceConverter(
webidl.converters.RequestInfo
)
module.exports = {
Cache
}
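Usage sketch (not part of the diff): exercising the Service Worker-style Cache implemented above. It assumes undici exposes a CacheStorage instance as `caches` and the fetch Response class from its package root; the URL is a placeholder.

const { caches, Response } = require('undici')

async function demo () {
  const cache = await caches.open('v1')
  await cache.put('https://example.com/data', new Response('{"ok":true}', {
    headers: { 'content-type': 'application/json' }
  }))
  const hit = await cache.match('https://example.com/data')
  console.log(await hit.json()) // { ok: true }
  console.log(await cache.keys()) // frozen list of Request objects
}

demo().catch(console.error)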

node_modules/undici/lib/cache/cachestorage.js (generated, vendored, new file, 144 lines)

@ -0,0 +1,144 @@
'use strict'
const { kConstruct } = require('./symbols')
const { Cache } = require('./cache')
const { webidl } = require('../fetch/webidl')
const { kEnumerableProperty } = require('../core/util')
class CacheStorage {
/**
* @see https://w3c.github.io/ServiceWorker/#dfn-relevant-name-to-cache-map
* @type {Map<string, import('./cache').requestResponseList>}
*/
#caches = new Map()
constructor () {
if (arguments[0] !== kConstruct) {
webidl.illegalConstructor()
}
}
async match (request, options = {}) {
webidl.brandCheck(this, CacheStorage)
webidl.argumentLengthCheck(arguments, 1, { header: 'CacheStorage.match' })
request = webidl.converters.RequestInfo(request)
options = webidl.converters.MultiCacheQueryOptions(options)
// 1.
if (options.cacheName != null) {
// 1.1.1.1
if (this.#caches.has(options.cacheName)) {
// 1.1.1.1.1
const cacheList = this.#caches.get(options.cacheName)
const cache = new Cache(kConstruct, cacheList)
return await cache.match(request, options)
}
} else { // 2.
// 2.2
for (const cacheList of this.#caches.values()) {
const cache = new Cache(kConstruct, cacheList)
// 2.2.1.2
const response = await cache.match(request, options)
if (response !== undefined) {
return response
}
}
}
}
/**
* @see https://w3c.github.io/ServiceWorker/#cache-storage-has
* @param {string} cacheName
* @returns {Promise<boolean>}
*/
async has (cacheName) {
webidl.brandCheck(this, CacheStorage)
webidl.argumentLengthCheck(arguments, 1, { header: 'CacheStorage.has' })
cacheName = webidl.converters.DOMString(cacheName)
// 2.1.1
// 2.2
return this.#caches.has(cacheName)
}
/**
* @see https://w3c.github.io/ServiceWorker/#dom-cachestorage-open
* @param {string} cacheName
* @returns {Promise<Cache>}
*/
async open (cacheName) {
webidl.brandCheck(this, CacheStorage)
webidl.argumentLengthCheck(arguments, 1, { header: 'CacheStorage.open' })
cacheName = webidl.converters.DOMString(cacheName)
// 2.1
if (this.#caches.has(cacheName)) {
// await caches.open('v1') !== await caches.open('v1')
// 2.1.1
const cache = this.#caches.get(cacheName)
// 2.1.1.1
return new Cache(kConstruct, cache)
}
// 2.2
const cache = []
// 2.3
this.#caches.set(cacheName, cache)
// 2.4
return new Cache(kConstruct, cache)
}
/**
* @see https://w3c.github.io/ServiceWorker/#cache-storage-delete
* @param {string} cacheName
* @returns {Promise<boolean>}
*/
async delete (cacheName) {
webidl.brandCheck(this, CacheStorage)
webidl.argumentLengthCheck(arguments, 1, { header: 'CacheStorage.delete' })
cacheName = webidl.converters.DOMString(cacheName)
return this.#caches.delete(cacheName)
}
/**
* @see https://w3c.github.io/ServiceWorker/#cache-storage-keys
* @returns {string[]}
*/
async keys () {
webidl.brandCheck(this, CacheStorage)
// 2.1
const keys = this.#caches.keys()
// 2.2
return [...keys]
}
}
Object.defineProperties(CacheStorage.prototype, {
[Symbol.toStringTag]: {
value: 'CacheStorage',
configurable: true
},
match: kEnumerableProperty,
has: kEnumerableProperty,
open: kEnumerableProperty,
delete: kEnumerableProperty,
keys: kEnumerableProperty
})
module.exports = {
CacheStorage
}
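Usage sketch (not part of the diff): CacheStorage bookkeeping with the methods above, again assuming a `caches` export from the package root.

const { caches } = require('undici')

async function housekeeping () {
  await caches.open('sessions')
  console.log(await caches.has('sessions')) // true
  console.log(await caches.keys()) // ['sessions']
  console.log(await caches.delete('sessions')) // true
}

housekeeping().catch(console.error)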

node_modules/undici/lib/cache/symbols.js (generated, vendored, new file, 5 lines)

@ -0,0 +1,5 @@
'use strict'
module.exports = {
kConstruct: Symbol('constructable')
}

node_modules/undici/lib/cache/util.js (generated, vendored, new file, 49 lines)

@ -0,0 +1,49 @@
'use strict'
const assert = require('assert')
const { URLSerializer } = require('../fetch/dataURL')
const { isValidHeaderName } = require('../fetch/util')
/**
* @see https://url.spec.whatwg.org/#concept-url-equals
* @param {URL} A
* @param {URL} B
* @param {boolean | undefined} excludeFragment
* @returns {boolean}
*/
function urlEquals (A, B, excludeFragment = false) {
const serializedA = URLSerializer(A, excludeFragment)
const serializedB = URLSerializer(B, excludeFragment)
return serializedA === serializedB
}
/**
* @see https://github.com/chromium/chromium/blob/694d20d134cb553d8d89e5500b9148012b1ba299/content/browser/cache_storage/cache_storage_cache.cc#L260-L262
* @param {string} header
*/
function fieldValues (header) {
assert(header !== null)
const values = []
for (let value of header.split(',')) {
value = value.trim()
if (!value.length) {
continue
} else if (!isValidHeaderName(value)) {
continue
}
values.push(value)
}
return values
}
module.exports = {
urlEquals,
fieldValues
}
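Illustrative sketch (not part of the diff) of the two helpers above; the require path is hypothetical and shown only for illustration.

const { urlEquals, fieldValues } = require('undici/lib/cache/util')

// Vary field values are split on commas, trimmed, and invalid header names dropped
console.log(fieldValues('Accept, , Accept-Encoding')) // ['Accept', 'Accept-Encoding']

// URL equality is serialization based; pass true to ignore the fragment
console.log(urlEquals(new URL('https://a.dev/x#frag'), new URL('https://a.dev/x'), true)) // true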

node_modules/undici/lib/client.js (generated, vendored, 119 changed lines)

@ -1,3 +1,5 @@
// @ts-check
'use strict'
/* global WebAssembly */
@ -5,6 +7,7 @@
const assert = require('assert')
const net = require('net')
const util = require('./core/util')
const timers = require('./timers')
const Request = require('./core/request')
const DispatcherBase = require('./dispatcher-base')
const {
@ -18,7 +21,8 @@ const {
InformationalError,
BodyTimeoutError,
HTTPParserError,
ResponseExceededMaxSizeError
ResponseExceededMaxSizeError,
ClientDestroyedError
} = require('./core/errors')
const buildConnector = require('./core/connect')
const {
@ -65,6 +69,7 @@ const {
kLocalAddress,
kMaxResponseSize
} = require('./core/symbols')
const FastBuffer = Buffer[Symbol.species]
const kClosedResolve = Symbol('kClosedResolve')
@ -83,7 +88,15 @@ try {
channels.connected = { hasSubscribers: false }
}
/**
* @type {import('../types/client').default}
*/
class Client extends DispatcherBase {
/**
*
* @param {string|URL} url
* @param {import('../types/client').Client.Options} options
*/
constructor (url, {
interceptors,
maxHeaderSize,
@ -107,7 +120,9 @@ class Client extends DispatcherBase {
connect,
maxRequestsPerClient,
localAddress,
maxResponseSize
maxResponseSize,
autoSelectFamily,
autoSelectFamilyAttemptTimeout
} = {}) {
super()
@ -183,12 +198,20 @@ class Client extends DispatcherBase {
throw new InvalidArgumentError('maxResponseSize must be a positive number')
}
if (
autoSelectFamilyAttemptTimeout != null &&
(!Number.isInteger(autoSelectFamilyAttemptTimeout) || autoSelectFamilyAttemptTimeout < -1)
) {
throw new InvalidArgumentError('autoSelectFamilyAttemptTimeout must be a positive number')
}
if (typeof connect !== 'function') {
connect = buildConnector({
...tls,
maxCachedSessions,
socketPath,
timeout: connectTimeout,
...(util.nodeHasAutoSelectFamily && autoSelectFamily ? { autoSelectFamily, autoSelectFamilyAttemptTimeout } : undefined),
...connect
})
}
@ -210,8 +233,8 @@ class Client extends DispatcherBase {
this[kResuming] = 0 // 0, idle, 1, scheduled, 2 resuming
this[kNeedDrain] = 0 // 0, idle, 1, scheduled, 2 resuming
this[kHostHeader] = `host: ${this[kUrl].hostname}${this[kUrl].port ? `:${this[kUrl].port}` : ''}\r\n`
this[kBodyTimeout] = bodyTimeout != null ? bodyTimeout : 30e3
this[kHeadersTimeout] = headersTimeout != null ? headersTimeout : 30e3
this[kBodyTimeout] = bodyTimeout != null ? bodyTimeout : 300e3
this[kHeadersTimeout] = headersTimeout != null ? headersTimeout : 300e3
this[kStrictContentLength] = strictContentLength == null ? true : strictContentLength
this[kMaxRedirections] = maxRedirections
this[kMaxRequests] = maxRequestsPerClient
@ -298,7 +321,7 @@ class Client extends DispatcherBase {
async [kClose] () {
return new Promise((resolve) => {
if (!this[kSize]) {
this.destroy(resolve)
resolve(null)
} else {
this[kClosedResolve] = resolve
}
@ -315,6 +338,7 @@ class Client extends DispatcherBase {
const callback = () => {
if (this[kClosedResolve]) {
// TODO (fix): Should we error here with ClientDestroyedError?
this[kClosedResolve]()
this[kClosedResolve] = null
}
@ -337,11 +361,11 @@ const createRedirectInterceptor = require('./interceptor/redirectInterceptor')
const EMPTY_BUF = Buffer.alloc(0)
async function lazyllhttp () {
const llhttpWasmData = process.env.JEST_WORKER_ID ? require('./llhttp/llhttp.wasm.js') : undefined
const llhttpWasmData = process.env.JEST_WORKER_ID ? require('./llhttp/llhttp-wasm.js') : undefined
let mod
try {
mod = await WebAssembly.compile(Buffer.from(require('./llhttp/llhttp_simd.wasm.js'), 'base64'))
mod = await WebAssembly.compile(Buffer.from(require('./llhttp/llhttp_simd-wasm.js'), 'base64'))
} catch (e) {
/* istanbul ignore next */
@ -349,7 +373,7 @@ async function lazyllhttp () {
// being enabled, but the occurring of this other error
// * https://github.com/emscripten-core/emscripten/issues/11495
// got me to remove that check to avoid breaking Node 12.
mod = await WebAssembly.compile(Buffer.from(llhttpWasmData || require('./llhttp/llhttp.wasm.js'), 'base64'))
mod = await WebAssembly.compile(Buffer.from(llhttpWasmData || require('./llhttp/llhttp-wasm.js'), 'base64'))
}
return await WebAssembly.instantiate(mod, {
@ -362,9 +386,8 @@ async function lazyllhttp () {
},
wasm_on_status: (p, at, len) => {
assert.strictEqual(currentParser.ptr, p)
const start = at - currentBufferPtr
const end = start + len
return currentParser.onStatus(currentBufferRef.slice(start, end)) || 0
const start = at - currentBufferPtr + currentBufferRef.byteOffset
return currentParser.onStatus(new FastBuffer(currentBufferRef.buffer, start, len)) || 0
},
wasm_on_message_begin: (p) => {
assert.strictEqual(currentParser.ptr, p)
@ -372,15 +395,13 @@ async function lazyllhttp () {
},
wasm_on_header_field: (p, at, len) => {
assert.strictEqual(currentParser.ptr, p)
const start = at - currentBufferPtr
const end = start + len
return currentParser.onHeaderField(currentBufferRef.slice(start, end)) || 0
const start = at - currentBufferPtr + currentBufferRef.byteOffset
return currentParser.onHeaderField(new FastBuffer(currentBufferRef.buffer, start, len)) || 0
},
wasm_on_header_value: (p, at, len) => {
assert.strictEqual(currentParser.ptr, p)
const start = at - currentBufferPtr
const end = start + len
return currentParser.onHeaderValue(currentBufferRef.slice(start, end)) || 0
const start = at - currentBufferPtr + currentBufferRef.byteOffset
return currentParser.onHeaderValue(new FastBuffer(currentBufferRef.buffer, start, len)) || 0
},
wasm_on_headers_complete: (p, statusCode, upgrade, shouldKeepAlive) => {
assert.strictEqual(currentParser.ptr, p)
@ -388,9 +409,8 @@ async function lazyllhttp () {
},
wasm_on_body: (p, at, len) => {
assert.strictEqual(currentParser.ptr, p)
const start = at - currentBufferPtr
const end = start + len
return currentParser.onBody(currentBufferRef.slice(start, end)) || 0
const start = at - currentBufferPtr + currentBufferRef.byteOffset
return currentParser.onBody(new FastBuffer(currentBufferRef.buffer, start, len)) || 0
},
wasm_on_message_complete: (p) => {
assert.strictEqual(currentParser.ptr, p)
@ -447,9 +467,9 @@ class Parser {
setTimeout (value, type) {
this.timeoutType = type
if (value !== this.timeoutValue) {
clearTimeout(this.timeout)
timers.clearTimeout(this.timeout)
if (value) {
this.timeout = setTimeout(onParserTimeout, value, this)
this.timeout = timers.setTimeout(onParserTimeout, value, this)
// istanbul ignore else: only for jest
if (this.timeout.unref) {
this.timeout.unref()
@ -549,7 +569,10 @@ class Parser {
/* istanbul ignore else: difficult to make a test case for */
if (ptr) {
const len = new Uint8Array(llhttp.memory.buffer, ptr).indexOf(0)
message = Buffer.from(llhttp.memory.buffer, ptr, len).toString()
message =
'Response does not match the HTTP/1.1 protocol (' +
Buffer.from(llhttp.memory.buffer, ptr, len).toString() +
')'
}
throw new HTTPParserError(message, constants.ERROR[ret], data.slice(offset))
}
@ -565,7 +588,7 @@ class Parser {
this.llhttp.llhttp_free(this.ptr)
this.ptr = null
clearTimeout(this.timeout)
timers.clearTimeout(this.timeout)
this.timeout = null
this.timeoutValue = null
this.timeoutType = null
@ -1064,6 +1087,11 @@ async function connect (client) {
})
})
if (client.destroyed) {
util.destroy(socket.on('error', () => {}), new ClientDestroyedError())
return
}
if (!llhttpInstance) {
llhttpInstance = await llhttpPromise
llhttpPromise = null
@ -1106,6 +1134,10 @@ async function connect (client) {
}
client.emit('connect', client[kUrl], [client])
} catch (err) {
if (client.destroyed) {
return
}
client[kConnecting] = false
if (channels.connectError.hasSubscribers) {
@ -1168,8 +1200,9 @@ function _resume (client, sync) {
return
}
if (client.closed && !client[kSize]) {
client.destroy()
if (client[kClosedResolve] && !client[kSize]) {
client[kClosedResolve]()
client[kClosedResolve] = null
return
}
@ -1421,17 +1454,17 @@ function write (client, request) {
/* istanbul ignore else: assertion */
if (!body) {
if (contentLength === 0) {
socket.write(`${header}content-length: 0\r\n\r\n`, 'ascii')
socket.write(`${header}content-length: 0\r\n\r\n`, 'latin1')
} else {
assert(contentLength === null, 'no body must not have content length')
socket.write(`${header}\r\n`, 'ascii')
socket.write(`${header}\r\n`, 'latin1')
}
request.onRequestSent()
} else if (util.isBuffer(body)) {
assert(contentLength === body.byteLength, 'buffer body must have content length')
socket.cork()
socket.write(`${header}content-length: ${contentLength}\r\n\r\n`, 'ascii')
socket.write(`${header}content-length: ${contentLength}\r\n\r\n`, 'latin1')
socket.write(body)
socket.uncork()
request.onBodySent(body)
@ -1464,9 +1497,11 @@ function writeStream ({ body, client, request, socket, contentLength, header, ex
const writer = new AsyncWriter({ socket, request, contentLength, client, expectsPayload, header })
const onData = function (chunk) {
try {
assert(!finished)
if (finished) {
return
}
try {
if (!writer.write(chunk) && this.pause) {
this.pause()
}
@ -1475,7 +1510,9 @@ function writeStream ({ body, client, request, socket, contentLength, header, ex
}
}
const onDrain = function () {
assert(!finished)
if (finished) {
return
}
if (body.resume) {
body.resume()
@ -1546,7 +1583,7 @@ async function writeBlob ({ body, client, request, socket, contentLength, header
const buffer = Buffer.from(await body.arrayBuffer())
socket.cork()
socket.write(`${header}content-length: ${contentLength}\r\n\r\n`, 'ascii')
socket.write(`${header}content-length: ${contentLength}\r\n\r\n`, 'latin1')
socket.write(buffer)
socket.uncork()
@ -1650,26 +1687,30 @@ class AsyncWriter {
process.emitWarning(new RequestContentLengthMismatchError())
}
socket.cork()
if (bytesWritten === 0) {
if (!expectsPayload) {
socket[kReset] = true
}
if (contentLength === null) {
socket.write(`${header}transfer-encoding: chunked\r\n`, 'ascii')
socket.write(`${header}transfer-encoding: chunked\r\n`, 'latin1')
} else {
socket.write(`${header}content-length: ${contentLength}\r\n\r\n`, 'ascii')
socket.write(`${header}content-length: ${contentLength}\r\n\r\n`, 'latin1')
}
}
if (contentLength === null) {
socket.write(`\r\n${len.toString(16)}\r\n`, 'ascii')
socket.write(`\r\n${len.toString(16)}\r\n`, 'latin1')
}
this.bytesWritten += len
const ret = socket.write(chunk)
socket.uncork()
request.onBodySent(chunk)
if (!ret) {
@ -1705,12 +1746,12 @@ class AsyncWriter {
// no Transfer-Encoding is sent and the request method defines a meaning
// for an enclosed payload body.
socket.write(`${header}content-length: 0\r\n\r\n`, 'ascii')
socket.write(`${header}content-length: 0\r\n\r\n`, 'latin1')
} else {
socket.write(`${header}\r\n`, 'ascii')
socket.write(`${header}\r\n`, 'latin1')
}
} else if (contentLength === null) {
socket.write('\r\n0\r\n\r\n', 'ascii')
socket.write('\r\n0\r\n\r\n', 'latin1')
}
if (contentLength !== null && bytesWritten !== contentLength) {

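Usage sketch (not part of the diff): the client changes above raise the default headersTimeout/bodyTimeout to 300 seconds and add opt-in Happy Eyeballs support (Node.js >= 18.13); both remain configurable per client. The URL is a placeholder.

const { Client } = require('undici')

const client = new Client('https://example.com', {
  headersTimeout: 30e3, // override the new 300e3 default
  bodyTimeout: 30e3,
  autoSelectFamily: true, // ignored unless util.nodeHasAutoSelectFamily is true
  autoSelectFamilyAttemptTimeout: 250
})

client.request({ path: '/', method: 'GET' })
  .then(({ statusCode, body }) => body.dump().then(() => console.log(statusCode)))
  .catch(console.error)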
@ -83,7 +83,8 @@ function getSetCookies (headers) {
return []
}
return cookies.map((pair) => parseSetCookie(pair[1]))
// In older versions of undici, cookies is a list of name:value.
return cookies.map((pair) => parseSetCookie(Array.isArray(pair) ? pair[1] : pair))
}
/**

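Usage sketch (not part of the diff): the patched getSetCookies above now tolerates both header-list shapes. It assumes Headers and the cookie helpers are exported from the undici package root; the parsed shape shown is approximate.

const { Headers, getSetCookies } = require('undici')

const headers = new Headers()
headers.append('set-cookie', 'sid=abc123; Path=/; HttpOnly')
console.log(getSetCookies(headers))
// roughly: [{ name: 'sid', value: 'abc123', path: '/', httpOnly: true }]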
@ -2,7 +2,7 @@
const { maxNameValuePairSize, maxAttributeValueSize } = require('./constants')
const { isCTLExcludingHtab } = require('./util')
const { collectASequenceOfCodePoints } = require('../fetch/dataURL')
const { collectASequenceOfCodePointsFast } = require('../fetch/dataURL')
const assert = require('assert')
/**
@ -32,7 +32,7 @@ function parseSetCookie (header) {
// (including the %x3B (";") in question).
const position = { position: 0 }
nameValuePair = collectASequenceOfCodePoints((char) => char !== ';', header, position)
nameValuePair = collectASequenceOfCodePointsFast(';', header, position)
unparsedAttributes = header.slice(position.position)
} else {
// Otherwise:
@ -54,8 +54,8 @@ function parseSetCookie (header) {
// empty) value string consists of the characters after the first
// %x3D ("=") character.
const position = { position: 0 }
name = collectASequenceOfCodePoints(
(char) => char !== '=',
name = collectASequenceOfCodePointsFast(
'=',
nameValuePair,
position
)
@ -106,8 +106,8 @@ function parseUnparsedAttributes (unparsedAttributes, cookieAttributeList = {})
if (unparsedAttributes.includes(';')) {
// 1. Consume the characters of the unparsed-attributes up to, but
// not including, the first %x3B (";") character.
cookieAv = collectASequenceOfCodePoints(
(char) => char !== ';',
cookieAv = collectASequenceOfCodePointsFast(
';',
unparsedAttributes,
{ position: 0 }
)
@ -134,8 +134,8 @@ function parseUnparsedAttributes (unparsedAttributes, cookieAttributeList = {})
// character.
const position = { position: 0 }
attributeName = collectASequenceOfCodePoints(
(char) => char !== '=',
attributeName = collectASequenceOfCodePointsFast(
'=',
cookieAv,
position
)

@ -120,6 +120,12 @@ function buildConnector ({ maxCachedSessions, socketPath, timeout, ...opts }) {
})
}
// Set TCP keep alive options on the socket here instead of in connect() for the case of assigning the socket
if (options.keepAlive == null || options.keepAlive) {
const keepAliveInitialDelay = options.keepAliveInitialDelay === undefined ? 60e3 : options.keepAliveInitialDelay
socket.setKeepAlive(true, keepAliveInitialDelay)
}
const cancelTimeout = setupTimeout(() => onConnectTimeout(socket), timeout)
socket

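Usage sketch (not part of the diff): the block added above enables TCP keep-alive on new sockets by default; it can be tuned or disabled through the connect options, which are forwarded to this connector.

const { Agent } = require('undici')

const agent = new Agent({
  connect: {
    keepAlive: true,
    keepAliveInitialDelay: 30e3 // first probe after 30s idle instead of the 60s default above
  }
})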
@ -34,10 +34,6 @@ const channels = {}
let extractBody
const nodeVersion = process.versions.node.split('.')
const nodeMajor = Number(nodeVersion[0])
const nodeMinor = Number(nodeVersion[1])
try {
const diagnosticsChannel = require('diagnostics_channel')
channels.create = diagnosticsChannel.channel('undici:request:create')
@ -172,7 +168,7 @@ class Request {
}
if (util.isFormDataLike(this.body)) {
if (nodeMajor < 16 || (nodeMajor === 16 && nodeMinor < 8)) {
if (util.nodeMajor < 16 || (util.nodeMajor === 16 && util.nodeMinor < 8)) {
throw new InvalidArgumentError('Form-Data bodies are only supported in node v16.8 and newer.')
}
@ -186,6 +182,7 @@ class Request {
this.headers += `content-type: ${contentType}\r\n`
}
this.body = bodyStream.stream
this.contentLength = bodyStream.length
} else if (util.isBlobLike(body) && this.contentType == null && body.type) {
this.contentType = body.type
this.headers += `content-type: ${body.type}\r\n`
@ -279,11 +276,16 @@ class Request {
}
function processHeaderValue (key, val) {
if (val && (typeof val === 'object' && !Array.isArray(val))) {
throw new InvalidArgumentError(`invalid ${key} header`)
} else if (headerCharRegex.exec(val) !== null) {
if (val && typeof val === 'object') {
throw new InvalidArgumentError(`invalid ${key} header`)
}
val = val != null ? `${val}` : ''
if (headerCharRegex.exec(val) !== null) {
throw new InvalidArgumentError(`invalid ${key} header`)
}
return `${key}: ${val}\r\n`
}
@ -299,6 +301,9 @@ function processHeader (request, key, val) {
key.length === 4 &&
key.toLowerCase() === 'host'
) {
if (headerCharRegex.exec(val) !== null) {
throw new InvalidArgumentError(`invalid ${key} header`)
}
// Consumed by Client
request.host = val
} else if (
@ -313,11 +318,10 @@ function processHeader (request, key, val) {
} else if (
request.contentType === null &&
key.length === 12 &&
key.toLowerCase() === 'content-type' &&
headerCharRegex.exec(val) === null
key.toLowerCase() === 'content-type'
) {
request.contentType = val
request.headers += `${key}: ${val}\r\n`
request.headers += processHeaderValue(key, val)
} else if (
key.length === 17 &&
key.toLowerCase() === 'transfer-encoding'
@ -327,7 +331,7 @@ function processHeader (request, key, val) {
key.length === 10 &&
key.toLowerCase() === 'connection'
) {
const value = val.toLowerCase()
const value = typeof val === 'string' ? val.toLowerCase() : null
if (value !== 'close' && value !== 'keep-alive') {
throw new InvalidArgumentError('invalid connection header')
} else if (value === 'close') {

@ -41,7 +41,7 @@ module.exports = {
kClient: Symbol('client'),
kParser: Symbol('parser'),
kOnDestroyed: Symbol('destroy callbacks'),
kPipelining: Symbol('pipelinig'),
kPipelining: Symbol('pipelining'),
kSocket: Symbol('socket'),
kHostHeader: Symbol('host header'),
kConnector: Symbol('connector'),

node_modules/undici/lib/core/util.js (generated, vendored, 122 changed lines)

@ -10,10 +10,12 @@ const { Blob } = require('buffer')
const nodeUtil = require('util')
const { stringify } = require('querystring')
const [nodeMajor, nodeMinor] = process.versions.node.split('.').map(v => Number(v))
function nop () {}
function isStream (obj) {
return obj && typeof obj.pipe === 'function'
return obj && typeof obj === 'object' && typeof obj.pipe === 'function' && typeof obj.on === 'function'
}
// based on https://github.com/node-fetch/fetch-blob/blob/8ab587d34080de94140b54f07168451e7d0b655e/index.js#L229-L241 (MIT License)
@ -44,34 +46,40 @@ function buildURL (url, queryParams) {
function parseURL (url) {
if (typeof url === 'string') {
url = new URL(url)
if (!/^https?:/.test(url.origin || url.protocol)) {
throw new InvalidArgumentError('Invalid URL protocol: the URL must start with `http:` or `https:`.')
}
return url
}
if (!url || typeof url !== 'object') {
throw new InvalidArgumentError('invalid url')
throw new InvalidArgumentError('Invalid URL: The URL argument must be a non-null object.')
}
if (url.port != null && url.port !== '' && !Number.isFinite(parseInt(url.port))) {
throw new InvalidArgumentError('invalid port')
throw new InvalidArgumentError('Invalid URL: port must be a valid integer or a string representation of an integer.')
}
if (url.path != null && typeof url.path !== 'string') {
throw new InvalidArgumentError('invalid path')
throw new InvalidArgumentError('Invalid URL path: the path must be a string or null/undefined.')
}
if (url.pathname != null && typeof url.pathname !== 'string') {
throw new InvalidArgumentError('invalid pathname')
throw new InvalidArgumentError('Invalid URL pathname: the pathname must be a string or null/undefined.')
}
if (url.hostname != null && typeof url.hostname !== 'string') {
throw new InvalidArgumentError('invalid hostname')
throw new InvalidArgumentError('Invalid URL hostname: the hostname must be a string or null/undefined.')
}
if (url.origin != null && typeof url.origin !== 'string') {
throw new InvalidArgumentError('invalid origin')
throw new InvalidArgumentError('Invalid URL origin: the origin must be a string or null/undefined.')
}
if (!/^https?:/.test(url.origin || url.protocol)) {
throw new InvalidArgumentError('invalid protocol')
throw new InvalidArgumentError('Invalid URL protocol: the URL must start with `http:` or `https:`.')
}
if (!(url instanceof URL)) {
@ -213,25 +221,55 @@ function parseHeaders (headers, obj = {}) {
for (let i = 0; i < headers.length; i += 2) {
const key = headers[i].toString().toLowerCase()
let val = obj[key]
if (!val) {
if (Array.isArray(headers[i + 1])) {
obj[key] = headers[i + 1]
} else {
obj[key] = headers[i + 1].toString()
obj[key] = headers[i + 1].toString('utf8')
}
} else {
if (!Array.isArray(val)) {
val = [val]
obj[key] = val
}
val.push(headers[i + 1].toString())
val.push(headers[i + 1].toString('utf8'))
}
}
// See https://github.com/nodejs/node/pull/46528
if ('content-length' in obj && 'content-disposition' in obj) {
obj['content-disposition'] = Buffer.from(obj['content-disposition']).toString('latin1')
}
return obj
}
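/* Illustrative sketch (not part of the diff): parseHeaders lower-cases keys and
   folds repeated headers into arrays, e.g.
     parseHeaders([
       Buffer.from('Set-Cookie'), Buffer.from('a=1'),
       Buffer.from('Set-Cookie'), Buffer.from('b=2'),
       Buffer.from('Content-Type'), Buffer.from('text/plain')
     ])
   returns { 'set-cookie': ['a=1', 'b=2'], 'content-type': 'text/plain' } */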
function parseRawHeaders (headers) {
return headers.map(header => header.toString())
const ret = []
let hasContentLength = false
let contentDispositionIdx = -1
for (let n = 0; n < headers.length; n += 2) {
const key = headers[n + 0].toString()
const val = headers[n + 1].toString('utf8')
if (key.length === 14 && (key === 'content-length' || key.toLowerCase() === 'content-length')) {
ret.push(key, val)
hasContentLength = true
} else if (key.length === 19 && (key === 'content-disposition' || key.toLowerCase() === 'content-disposition')) {
contentDispositionIdx = ret.push(key, val) - 1
} else {
ret.push(key, val)
}
}
// See https://github.com/nodejs/node/pull/46528
if (hasContentLength && contentDispositionIdx !== -1) {
ret[contentDispositionIdx] = Buffer.from(ret[contentDispositionIdx]).toString('latin1')
}
return ret
}
function isBuffer (buffer) {
@ -356,23 +394,49 @@ function ReadableStreamFrom (iterable) {
// The chunk should be a FormData instance and contains
// all the required methods.
function isFormDataLike (chunk) {
return (chunk &&
chunk.constructor && chunk.constructor.name === 'FormData' &&
typeof chunk === 'object' &&
(typeof chunk.append === 'function' &&
typeof chunk.delete === 'function' &&
typeof chunk.get === 'function' &&
typeof chunk.getAll === 'function' &&
typeof chunk.has === 'function' &&
typeof chunk.set === 'function' &&
typeof chunk.entries === 'function' &&
typeof chunk.keys === 'function' &&
typeof chunk.values === 'function' &&
typeof chunk.forEach === 'function')
function isFormDataLike (object) {
return (
object &&
typeof object === 'object' &&
typeof object.append === 'function' &&
typeof object.delete === 'function' &&
typeof object.get === 'function' &&
typeof object.getAll === 'function' &&
typeof object.has === 'function' &&
typeof object.set === 'function' &&
object[Symbol.toStringTag] === 'FormData'
)
}
function throwIfAborted (signal) {
if (!signal) { return }
if (typeof signal.throwIfAborted === 'function') {
signal.throwIfAborted()
} else {
if (signal.aborted) {
// DOMException not available < v17.0.0
const err = new Error('The operation was aborted')
err.name = 'AbortError'
throw err
}
}
}
const hasToWellFormed = !!String.prototype.toWellFormed
/**
* @param {string} val
*/
function toUSVString (val) {
if (hasToWellFormed) {
return `${val}`.toWellFormed()
} else if (nodeUtil.toUSVString) {
return nodeUtil.toUSVString(val)
}
return `${val}`
}
const kEnumerableProperty = Object.create(null)
kEnumerableProperty.enumerable = true
@ -382,7 +446,7 @@ module.exports = {
isDisturbed,
isErrored,
isReadable,
toUSVString: nodeUtil.toUSVString || ((val) => `${val}`),
toUSVString,
isReadableAborted,
isBlobLike,
parseOrigin,
@ -403,5 +467,9 @@ module.exports = {
validateHandler,
getSocketInfo,
isFormDataLike,
buildURL
buildURL,
throwIfAborted,
nodeMajor,
nodeMinor,
nodeHasAutoSelectFamily: nodeMajor > 18 || (nodeMajor === 18 && nodeMinor >= 13)
}

@ -19,7 +19,7 @@ class DispatcherBase extends Dispatcher {
super()
this[kDestroyed] = false
this[kOnDestroyed] = []
this[kOnDestroyed] = null
this[kClosed] = false
this[kOnClosed] = []
}
@ -127,6 +127,7 @@ class DispatcherBase extends Dispatcher {
}
this[kDestroyed] = true
this[kOnDestroyed] = this[kOnDestroyed] || []
this[kOnDestroyed].push(callback)
const onDestroyed = () => {
@ -167,7 +168,7 @@ class DispatcherBase extends Dispatcher {
throw new InvalidArgumentError('opts must be an object.')
}
if (this[kDestroyed]) {
if (this[kDestroyed] || this[kOnDestroyed]) {
throw new ClientDestroyedError()
}

@ -123,6 +123,7 @@ function extractBody (object, keepalive = false) {
const blobParts = []
const rn = new Uint8Array([13, 10]) // '\r\n'
length = 0
let hasUnknownSizeValue = false
for (const [name, value] of object) {
if (typeof value === 'string') {
@ -138,13 +139,20 @@ function extractBody (object, keepalive = false) {
value.type || 'application/octet-stream'
}\r\n\r\n`)
blobParts.push(chunk, value, rn)
length += chunk.byteLength + value.size + rn.byteLength
if (typeof value.size === 'number') {
length += chunk.byteLength + value.size + rn.byteLength
} else {
hasUnknownSizeValue = true
}
}
}
const chunk = enc.encode(`--${boundary}--`)
blobParts.push(chunk)
length += chunk.byteLength
if (hasUnknownSizeValue) {
length = null
}
// Set source to object.
source = object

@ -48,11 +48,17 @@ const requestCache = [
'only-if-cached'
]
// https://fetch.spec.whatwg.org/#request-body-header-name
const requestBodyHeader = [
'content-encoding',
'content-language',
'content-location',
'content-type'
'content-type',
// See https://github.com/nodejs/undici/issues/2021
// 'Content-Length' is a forbidden header name, which is typically
// removed in the Headers implementation. However, undici doesn't
// filter out headers, so we add it here.
'content-length'
]
// https://fetch.spec.whatwg.org/#enumdef-requestduplex

@ -1,15 +1,18 @@
const assert = require('assert')
const { atob } = require('buffer')
const { format } = require('url')
const { isValidHTTPToken, isomorphicDecode } = require('./util')
const { isomorphicDecode } = require('./util')
const encoder = new TextEncoder()
// Regex
const HTTP_TOKEN_CODEPOINTS = /^[!#$%&'*+-.^_|~A-z0-9]+$/
/**
* @see https://mimesniff.spec.whatwg.org/#http-token-code-point
*/
const HTTP_TOKEN_CODEPOINTS = /^[!#$%&'*+-.^_|~A-Za-z0-9]+$/
const HTTP_WHITESPACE_REGEX = /(\u000A|\u000D|\u0009|\u0020)/ // eslint-disable-line
// https://mimesniff.spec.whatwg.org/#http-quoted-string-token-code-point
const HTTP_QUOTED_STRING_TOKENS = /^(\u0009|\x{0020}-\x{007E}|\x{0080}-\x{00FF})+$/ // eslint-disable-line
/**
* @see https://mimesniff.spec.whatwg.org/#http-quoted-string-token-code-point
*/
const HTTP_QUOTED_STRING_TOKENS = /[\u0009|\u0020-\u007E|\u0080-\u00FF]/ // eslint-disable-line
// https://fetch.spec.whatwg.org/#data-url-processor
/** @param {URL} dataURL */
@ -31,22 +34,20 @@ function dataURLProcessor (dataURL) {
// 5. Let mimeType be the result of collecting a
// sequence of code points that are not equal
// to U+002C (,), given position.
let mimeType = collectASequenceOfCodePoints(
(char) => char !== ',',
let mimeType = collectASequenceOfCodePointsFast(
',',
input,
position
)
// 6. Strip leading and trailing ASCII whitespace
// from mimeType.
// Note: This will only remove U+0020 SPACE code
// points, if any.
// Undici implementation note: we need to store the
// length because if the mimetype has spaces removed,
// the wrong amount will be sliced from the input in
// step #9
const mimeTypeLength = mimeType.length
mimeType = mimeType.replace(/^(\u0020)+|(\u0020)+$/g, '')
mimeType = removeASCIIWhitespace(mimeType, true, true)
// 7. If position is past the end of input, then
// return failure
@ -118,7 +119,17 @@ function dataURLProcessor (dataURL) {
* @param {boolean} excludeFragment
*/
function URLSerializer (url, excludeFragment = false) {
return format(url, { fragment: !excludeFragment })
const href = url.href
if (!excludeFragment) {
return href
}
const hash = href.lastIndexOf('#')
if (hash === -1) {
return href
}
return href.slice(0, hash)
}
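Editorial note: a minimal sketch (not from the commit) of what the new fast path returns, assuming URLSerializer as defined above.
// Illustrative only: the fragment is sliced off instead of calling url.format().
const u = new URL('https://example.com/path?q=1#section')
console.log(URLSerializer(u))       // 'https://example.com/path?q=1#section'
console.log(URLSerializer(u, true)) // 'https://example.com/path?q=1'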
// https://infra.spec.whatwg.org/#collect-a-sequence-of-code-points
@ -145,6 +156,25 @@ function collectASequenceOfCodePoints (condition, input, position) {
return result
}
/**
* A faster collectASequenceOfCodePoints that only works when comparing a single character.
* @param {string} char
* @param {string} input
* @param {{ position: number }} position
*/
function collectASequenceOfCodePointsFast (char, input, position) {
const idx = input.indexOf(char, position.position)
const start = position.position
if (idx === -1) {
position.position = input.length
return input.slice(start)
}
position.position = idx
return input.slice(start, position.position)
}
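Editorial note: a usage sketch (not part of the diff) showing how the fast variant advances the shared position object.
// Illustrative only: collect characters up to the first ',' and move the cursor there.
const position = { position: 0 }
const mimeType = collectASequenceOfCodePointsFast(',', 'text/plain,SGVsbG8=', position)
console.log(mimeType)          // 'text/plain'
console.log(position.position) // 10, the index of the ','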
// https://url.spec.whatwg.org/#string-percent-decode
/** @param {string} input */
function stringPercentDecode (input) {
@ -205,7 +235,7 @@ function percentDecode (input) {
function parseMIMEType (input) {
// 1. Remove any leading and trailing HTTP whitespace
// from input.
input = input.trim()
input = removeHTTPWhitespace(input, true, true)
// 2. Let position be a position variable for input,
// initially pointing at the start of input.
@ -214,8 +244,8 @@ function parseMIMEType (input) {
// 3. Let type be the result of collecting a sequence
// of code points that are not U+002F (/) from
// input, given position.
const type = collectASequenceOfCodePoints(
(char) => char !== '/',
const type = collectASequenceOfCodePointsFast(
'/',
input,
position
)
@ -239,14 +269,14 @@ function parseMIMEType (input) {
// 7. Let subtype be the result of collecting a sequence of
// code points that are not U+003B (;) from input, given
// position.
let subtype = collectASequenceOfCodePoints(
(char) => char !== ';',
let subtype = collectASequenceOfCodePointsFast(
';',
input,
position
)
// 8. Remove any trailing HTTP whitespace from subtype.
subtype = subtype.trimEnd()
subtype = removeHTTPWhitespace(subtype, false, true)
// 9. If subtype is the empty string or does not solely
// contain HTTP token code points, then return failure.
@ -254,17 +284,20 @@ function parseMIMEType (input) {
return 'failure'
}
const typeLowercase = type.toLowerCase()
const subtypeLowercase = subtype.toLowerCase()
// 10. Let mimeType be a new MIME type record whose type
// is type, in ASCII lowercase, and subtype is subtype,
// in ASCII lowercase.
// https://mimesniff.spec.whatwg.org/#mime-type
const mimeType = {
type: type.toLowerCase(),
subtype: subtype.toLowerCase(),
type: typeLowercase,
subtype: subtypeLowercase,
/** @type {Map<string, string>} */
parameters: new Map(),
// https://mimesniff.spec.whatwg.org/#mime-type-essence
essence: `${type}/${subtype}`
essence: `${typeLowercase}/${subtypeLowercase}`
}
// 11. While position is not past the end of input:
@ -324,8 +357,8 @@ function parseMIMEType (input) {
// 2. Collect a sequence of code points that are not
// U+003B (;) from input, given position.
collectASequenceOfCodePoints(
(char) => char !== ';',
collectASequenceOfCodePointsFast(
';',
input,
position
)
@ -335,15 +368,14 @@ function parseMIMEType (input) {
// 1. Set parameterValue to the result of collecting
// a sequence of code points that are not U+003B (;)
// from input, given position.
parameterValue = collectASequenceOfCodePoints(
(char) => char !== ';',
parameterValue = collectASequenceOfCodePointsFast(
';',
input,
position
)
// 2. Remove any trailing HTTP whitespace from parameterValue.
// Note: it says "trailing" whitespace; leading is fine.
parameterValue = parameterValue.trimEnd()
parameterValue = removeHTTPWhitespace(parameterValue, false, true)
// 3. If parameterValue is the empty string, then continue.
if (parameterValue.length === 0) {
@ -360,7 +392,7 @@ function parseMIMEType (input) {
if (
parameterName.length !== 0 &&
HTTP_TOKEN_CODEPOINTS.test(parameterName) &&
!HTTP_QUOTED_STRING_TOKENS.test(parameterValue) &&
(parameterValue.length === 0 || HTTP_QUOTED_STRING_TOKENS.test(parameterValue)) &&
!mimeType.parameters.has(parameterName)
) {
mimeType.parameters.set(parameterName, parameterValue)
@ -494,11 +526,11 @@ function collectAnHTTPQuotedString (input, position, extractValue) {
*/
function serializeAMimeType (mimeType) {
assert(mimeType !== 'failure')
const { type, subtype, parameters } = mimeType
const { parameters, essence } = mimeType
// 1. Let serialization be the concatenation of mimeType's
// type, U+002F (/), and mimeType's subtype.
let serialization = `${type}/${subtype}`
let serialization = essence
// 2. For each name → value of mimeType's parameters:
for (let [name, value] of parameters.entries()) {
@ -513,7 +545,7 @@ function serializeAMimeType (mimeType) {
// 4. If value does not solely contain HTTP token code
// points or value is the empty string, then:
if (!isValidHTTPToken(value)) {
if (!HTTP_TOKEN_CODEPOINTS.test(value)) {
// 1. Precede each occurrence of U+0022 (") or
// U+005C (\) in value with U+005C (\).
value = value.replace(/(\\|")/g, '\\$1')
@ -533,10 +565,64 @@ function serializeAMimeType (mimeType) {
return serialization
}
/**
* @see https://fetch.spec.whatwg.org/#http-whitespace
* @param {string} char
*/
function isHTTPWhiteSpace (char) {
return char === '\r' || char === '\n' || char === '\t' || char === ' '
}
/**
* @see https://fetch.spec.whatwg.org/#http-whitespace
* @param {string} str
*/
function removeHTTPWhitespace (str, leading = true, trailing = true) {
let lead = 0
let trail = str.length - 1
if (leading) {
for (; lead < str.length && isHTTPWhiteSpace(str[lead]); lead++);
}
if (trailing) {
for (; trail > 0 && isHTTPWhiteSpace(str[trail]); trail--);
}
return str.slice(lead, trail + 1)
}
/**
* @see https://infra.spec.whatwg.org/#ascii-whitespace
* @param {string} char
*/
function isASCIIWhitespace (char) {
return char === '\r' || char === '\n' || char === '\t' || char === '\f' || char === ' '
}
/**
* @see https://infra.spec.whatwg.org/#strip-leading-and-trailing-ascii-whitespace
*/
function removeASCIIWhitespace (str, leading = true, trailing = true) {
let lead = 0
let trail = str.length - 1
if (leading) {
for (; lead < str.length && isASCIIWhitespace(str[lead]); lead++);
}
if (trailing) {
for (; trail > 0 && isASCIIWhitespace(str[trail]); trail--);
}
return str.slice(lead, trail + 1)
}
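Editorial note: a small sketch (not part of the diff) of the loop-based trimming these helpers perform, assuming both functions above are in scope.
// Illustrative only: trimming without RegExp-based replace calls.
console.log(removeHTTPWhitespace('\t text/html \r\n'))  // 'text/html'
console.log(removeHTTPWhitespace('  x  ', false, true)) // '  x'
console.log(removeASCIIWhitespace('\f data \n'))        // 'data'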
module.exports = {
dataURLProcessor,
URLSerializer,
collectASequenceOfCodePoints,
collectASequenceOfCodePointsFast,
stringPercentDecode,
parseMIMEType,
collectAnHTTPQuotedString,
View file
@ -61,14 +61,7 @@ class FormData {
// The delete(name) method steps are to remove all entries whose name
// is name from this's entry list.
const next = []
for (const entry of this[kState]) {
if (entry.name !== name) {
next.push(entry)
}
}
this[kState] = next
this[kState] = this[kState].filter(entry => entry.name !== name)
}
get (name) {
View file
@ -3,7 +3,7 @@
'use strict'
const { kHeadersList } = require('../core/symbols')
const { kGuard, kHeadersCaseInsensitive } = require('./symbols')
const { kGuard } = require('./symbols')
const { kEnumerableProperty } = require('../core/util')
const {
makeIterator,
@ -11,6 +11,7 @@ const {
isValidHeaderValue
} = require('./util')
const { webidl } = require('./webidl')
const assert = require('assert')
const kHeadersMap = Symbol('headers map')
const kHeadersSortedMap = Symbol('headers map sorted')
@ -23,10 +24,12 @@ function headerValueNormalize (potentialValue) {
// To normalize a byte sequence potentialValue, remove
// any leading and trailing HTTP whitespace bytes from
// potentialValue.
return potentialValue.replace(
/^[\r\n\t ]+|[\r\n\t ]+$/g,
''
)
// Trimming the end with `.replace()` and a RegExp is typically subject to
// ReDoS. This is safer and faster.
let i = potentialValue.length
while (/[\r\n\t ]/.test(potentialValue.charAt(--i)));
return potentialValue.slice(0, i + 1).replace(/^[\r\n\t ]+/, '')
}
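Editorial note: an illustrative check (not in the commit) of the normalization behaviour of the function above.
// Illustrative only: leading/trailing HTTP whitespace is stripped, inner bytes kept.
console.log(headerValueNormalize(' \t text/html; charset=utf-8 \r\n'))
// 'text/html; charset=utf-8'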
function fill (headers, object) {
@ -72,6 +75,7 @@ class HeadersList {
if (init instanceof HeadersList) {
this[kHeadersMap] = new Map(init[kHeadersMap])
this[kHeadersSortedMap] = init[kHeadersSortedMap]
this.cookies = init.cookies
} else {
this[kHeadersMap] = new Map(init)
this[kHeadersSortedMap] = null
@ -91,6 +95,7 @@ class HeadersList {
clear () {
this[kHeadersMap].clear()
this[kHeadersSortedMap] = null
this.cookies = null
}
// https://fetch.spec.whatwg.org/#concept-header-list-append
@ -104,14 +109,18 @@ class HeadersList {
// 2. Append (name, value) to list.
if (exists) {
this[kHeadersMap].set(lowercaseName, { name: exists.name, value: `${exists.value}, ${value}` })
const delimiter = lowercaseName === 'cookie' ? '; ' : ', '
this[kHeadersMap].set(lowercaseName, {
name: exists.name,
value: `${exists.value}${delimiter}${value}`
})
} else {
this[kHeadersMap].set(lowercaseName, { name, value })
}
if (lowercaseName === 'set-cookie') {
this.cookies ??= []
this.cookies.push([name, value])
this.cookies.push(value)
}
}
@ -121,7 +130,7 @@ class HeadersList {
const lowercaseName = name.toLowerCase()
if (lowercaseName === 'set-cookie') {
this.cookies = [[name, value]]
this.cookies = [value]
}
// 1. If list contains name, then set the value of
@ -164,15 +173,16 @@ class HeadersList {
}
}
get [kHeadersCaseInsensitive] () {
/** @type {string[]} */
const flatList = []
get entries () {
const headers = {}
for (const { name, value } of this[kHeadersMap].values()) {
flatList.push(name, value)
if (this[kHeadersMap].size) {
for (const { name, value } of this[kHeadersMap].values()) {
headers[name] = value
}
}
return flatList
return headers
}
}
@ -379,18 +389,74 @@ class Headers {
return this[kHeadersList].set(name, value)
}
get [kHeadersSortedMap] () {
if (!this[kHeadersList][kHeadersSortedMap]) {
this[kHeadersList][kHeadersSortedMap] = new Map([...this[kHeadersList]].sort((a, b) => a[0] < b[0] ? -1 : 1))
// https://fetch.spec.whatwg.org/#dom-headers-getsetcookie
getSetCookie () {
webidl.brandCheck(this, Headers)
// 1. If this's header list does not contain `Set-Cookie`, then return « ».
// 2. Return the values of all headers in this's header list whose name is
// a byte-case-insensitive match for `Set-Cookie`, in order.
const list = this[kHeadersList].cookies
if (list) {
return [...list]
}
return this[kHeadersList][kHeadersSortedMap]
return []
}
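Editorial note: a usage sketch (editorial; assumes undici's Headers class, e.g. the global Headers on Node 18+) of the behaviour the cookies bookkeeping enables.
// Illustrative only: repeated Set-Cookie values stay separate for getSetCookie(),
// while get() still returns the combined value.
const headers = new Headers()
headers.append('set-cookie', 'a=1; Path=/')
headers.append('set-cookie', 'b=2; Path=/')
console.log(headers.getSetCookie())    // [ 'a=1; Path=/', 'b=2; Path=/' ]
console.log(headers.get('set-cookie')) // 'a=1; Path=/, b=2; Path=/'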
// https://fetch.spec.whatwg.org/#concept-header-list-sort-and-combine
get [kHeadersSortedMap] () {
if (this[kHeadersList][kHeadersSortedMap]) {
return this[kHeadersList][kHeadersSortedMap]
}
// 1. Let headers be an empty list of headers with the key being the name
// and value the value.
const headers = []
// 2. Let names be the result of convert header names to a sorted-lowercase
// set with all the names of the headers in list.
const names = [...this[kHeadersList]].sort((a, b) => a[0] < b[0] ? -1 : 1)
const cookies = this[kHeadersList].cookies
// 3. For each name of names:
for (const [name, value] of names) {
// 1. If name is `set-cookie`, then:
if (name === 'set-cookie') {
// 1. Let values be a list of all values of headers in list whose name
// is a byte-case-insensitive match for name, in order.
// 2. For each value of values:
// 1. Append (name, value) to headers.
for (const value of cookies) {
headers.push([name, value])
}
} else {
// 2. Otherwise:
// 1. Let value be the result of getting name from list.
// 2. Assert: value is non-null.
assert(value !== null)
// 3. Append (name, value) to headers.
headers.push([name, value])
}
}
this[kHeadersList][kHeadersSortedMap] = headers
// 4. Return headers.
return headers
}
keys () {
webidl.brandCheck(this, Headers)
return makeIterator(
() => [...this[kHeadersSortedMap].entries()],
() => [...this[kHeadersSortedMap].values()],
'Headers',
'key'
)
@ -400,7 +466,7 @@ class Headers {
webidl.brandCheck(this, Headers)
return makeIterator(
() => [...this[kHeadersSortedMap].entries()],
() => [...this[kHeadersSortedMap].values()],
'Headers',
'value'
)
@ -410,7 +476,7 @@ class Headers {
webidl.brandCheck(this, Headers)
return makeIterator(
() => [...this[kHeadersSortedMap].entries()],
() => [...this[kHeadersSortedMap].values()],
'Headers',
'key+value'
)
@ -451,6 +517,7 @@ Object.defineProperties(Headers.prototype, {
get: kEnumerableProperty,
has: kEnumerableProperty,
set: kEnumerableProperty,
getSetCookie: kEnumerableProperty,
keys: kEnumerableProperty,
values: kEnumerableProperty,
entries: kEnumerableProperty,
View file
@ -37,9 +37,12 @@ const {
isErrorLike,
fullyReadBody,
readableStreamClose,
isomorphicEncode
isomorphicEncode,
urlIsLocal,
urlIsHttpHttpsScheme,
urlHasHttpsScheme
} = require('./util')
const { kState, kHeaders, kGuard, kRealm, kHeadersCaseInsensitive } = require('./symbols')
const { kState, kHeaders, kGuard, kRealm } = require('./symbols')
const assert = require('assert')
const { safelyExtractBody } = require('./body')
const {
@ -53,7 +56,7 @@ const {
const { kHeadersList } = require('../core/symbols')
const EE = require('events')
const { Readable, pipeline } = require('stream')
const { isErrored, isReadable } = require('../core/util')
const { isErrored, isReadable, nodeMajor, nodeMinor } = require('../core/util')
const { dataURLProcessor, serializeAMimeType } = require('./dataURL')
const { TransformStream } = require('stream/web')
const { getGlobalDispatcher } = require('../global')
@ -64,10 +67,6 @@ const { STATUS_CODES } = require('http')
let resolveObjectURL
let ReadableStream = globalThis.ReadableStream
const nodeVersion = process.versions.node.split('.')
const nodeMajor = Number(nodeVersion[0])
const nodeMinor = Number(nodeVersion[1])
class Fetch extends EE {
constructor (dispatcher) {
super()
@ -276,7 +275,7 @@ function finalizeAndReportTiming (response, initiatorType = 'other') {
let cacheState = response.cacheState
// 6. If originalURL's scheme is not an HTTP(S) scheme, then return.
if (!/^https?:/.test(originalURL.protocol)) {
if (!urlIsHttpHttpsScheme(originalURL)) {
return
}
@ -301,7 +300,7 @@ function finalizeAndReportTiming (response, initiatorType = 'other') {
// capability.
// TODO: given global's relevant settings object's cross-origin isolated
// capability?
response.timingInfo.endTime = coarsenedSharedCurrentTime()
timingInfo.endTime = coarsenedSharedCurrentTime()
// 10. Set response's timing info to timingInfo.
response.timingInfo = timingInfo
@ -319,7 +318,7 @@ function finalizeAndReportTiming (response, initiatorType = 'other') {
// https://w3c.github.io/resource-timing/#dfn-mark-resource-timing
function markResourceTiming (timingInfo, originalURL, initiatorType, globalThis, cacheState) {
if (nodeMajor >= 18 && nodeMinor >= 2) {
if (nodeMajor > 18 || (nodeMajor === 18 && nodeMinor >= 2)) {
performance.markResourceTiming(timingInfo, originalURL, initiatorType, globalThis, cacheState)
}
}
@ -534,10 +533,7 @@ async function mainFetch (fetchParams, recursive = false) {
// 3. If request's local-URLs-only flag is set and request's current URL is
// not local, then set response to a network error.
if (
request.localURLsOnly &&
!/^(about|blob|data):/.test(requestCurrentURL(request).protocol)
) {
if (request.localURLsOnly && !urlIsLocal(requestCurrentURL(request))) {
response = makeNetworkError('local URLs only')
}
@ -627,7 +623,7 @@ async function mainFetch (fetchParams, recursive = false) {
}
// request's current URL's scheme is not an HTTP(S) scheme
if (!/^https?:/.test(requestCurrentURL(request).protocol)) {
if (!urlIsHttpHttpsScheme(requestCurrentURL(request))) {
// Return a network error.
return makeNetworkError('URL scheme must be a HTTP(S) scheme')
}
@ -1134,7 +1130,7 @@ async function httpRedirectFetch (fetchParams, response) {
// 6. If locationURL's scheme is not an HTTP(S) scheme, then return a network
// error.
if (!/^https?:/.test(locationURL.protocol)) {
if (!urlIsHttpHttpsScheme(locationURL)) {
return makeNetworkError('URL scheme must be a HTTP(S) scheme')
}
@ -1209,7 +1205,7 @@ async function httpRedirectFetch (fetchParams, response) {
// 14. If request's body is non-null, then set request's body to the first return
// value of safely extracting request's body's source.
if (request.body != null) {
assert(request.body.source)
assert(request.body.source != null)
request.body = safelyExtractBody(request.body.source)[0]
}
@ -1403,7 +1399,7 @@ async function httpNetworkOrCacheFetch (
// header if httpRequest's header list contains that header's name.
// TODO: https://github.com/whatwg/fetch/issues/1285#issuecomment-896560129
if (!httpRequest.headersList.contains('accept-encoding')) {
if (/^https:/.test(requestCurrentURL(httpRequest).protocol)) {
if (urlHasHttpsScheme(requestCurrentURL(httpRequest))) {
httpRequest.headersList.append('accept-encoding', 'br, gzip, deflate')
} else {
httpRequest.headersList.append('accept-encoding', 'gzip, deflate')
@ -1849,6 +1845,7 @@ async function httpNetworkFetch (
// 4. Set bytes to the result of handling content codings given
// codings and bytes.
let bytes
let isFailure
try {
const { done, value } = await fetchParams.controller.next()
@ -1863,6 +1860,10 @@ async function httpNetworkFetch (
bytes = undefined
} else {
bytes = err
// err may be propagated from the result of calling readablestream.cancel,
// which might not be an error. https://github.com/nodejs/undici/issues/2009
isFailure = true
}
}
@ -1882,7 +1883,7 @@ async function httpNetworkFetch (
timingInfo.decodedBodySize += bytes?.byteLength ?? 0
// 6. If bytes is failure, then terminate fetchParams's controller.
if (isErrorLike(bytes)) {
if (isFailure) {
fetchParams.controller.terminate(bytes)
return
}
@ -1949,7 +1950,7 @@ async function httpNetworkFetch (
origin: url.origin,
method: request.method,
body: fetchParams.controller.dispatcher.isMockActive ? request.body && request.body.source : body,
headers: request.headersList[kHeadersCaseInsensitive],
headers: request.headersList.entries,
maxRedirections: 0,
upgrade: request.mode === 'websocket' ? 'websocket' : undefined
},
@ -1983,7 +1984,9 @@ async function httpNetworkFetch (
const val = headersList[n + 1].toString('latin1')
if (key.toLowerCase() === 'content-encoding') {
codings = val.split(',').map((x) => x.trim())
// https://www.rfc-editor.org/rfc/rfc7231#section-3.1.2.1
// "All content-coding values are case-insensitive..."
codings = val.toLowerCase().split(',').map((x) => x.trim())
} else if (key.toLowerCase() === 'location') {
location = val
}
@ -2002,9 +2005,10 @@ async function httpNetworkFetch (
// https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Encoding
if (request.method !== 'HEAD' && request.method !== 'CONNECT' && !nullBodyStatus.includes(status) && !willFollow) {
for (const coding of codings) {
if (/(x-)?gzip/.test(coding)) {
// https://www.rfc-editor.org/rfc/rfc9112.html#section-7.2
if (coding === 'x-gzip' || coding === 'gzip') {
decoders.push(zlib.createGunzip())
} else if (/(x-)?deflate/.test(coding)) {
} else if (coding === 'deflate') {
decoders.push(zlib.createInflate())
} else if (coding === 'br') {
decoders.push(zlib.createBrotliDecompress())
View file
@ -9,7 +9,8 @@ const util = require('../core/util')
const {
isValidHTTPToken,
sameOrigin,
normalizeMethod
normalizeMethod,
makePolicyContainer
} = require('./util')
const {
forbiddenMethods,
@ -28,10 +29,12 @@ const { getGlobalOrigin } = require('./global')
const { URLSerializer } = require('./dataURL')
const { kHeadersList } = require('../core/symbols')
const assert = require('assert')
const { getMaxListeners, setMaxListeners, getEventListeners, defaultMaxListeners } = require('events')
let TransformStream = globalThis.TransformStream
const kInit = Symbol('init')
const kAbortController = Symbol('abortController')
const requestFinalizer = new FinalizationRegistry(({ signal, abort }) => {
signal.removeEventListener('abort', abort)
@ -50,10 +53,14 @@ class Request {
input = webidl.converters.RequestInfo(input)
init = webidl.converters.RequestInit(init)
// TODO
// https://html.spec.whatwg.org/multipage/webappapis.html#environment-settings-object
this[kRealm] = {
settingsObject: {
baseUrl: getGlobalOrigin()
baseUrl: getGlobalOrigin(),
get origin () {
return this.baseUrl?.origin
},
policyContainer: makePolicyContainer()
}
}
@ -122,12 +129,12 @@ class Request {
}
// 10. If init["window"] exists and is non-null, then throw a TypeError.
if (init.window !== undefined && init.window != null) {
if (init.window != null) {
throw new TypeError(`'window' option '${window}' must be null`)
}
// 11. If init["window"] exists, then set window to "no-window".
if (init.window !== undefined) {
if ('window' in init) {
window = 'no-window'
}
@ -348,12 +355,34 @@ class Request {
if (signal.aborted) {
ac.abort(signal.reason)
} else {
// Keep a strong ref to ac while request object
// is alive. This is needed to prevent AbortController
// from being prematurely garbage collected.
// See, https://github.com/nodejs/undici/issues/1926.
this[kAbortController] = ac
const acRef = new WeakRef(ac)
const abort = function () {
acRef.deref()?.abort(this.reason)
const ac = acRef.deref()
if (ac !== undefined) {
ac.abort(this.reason)
}
}
// Third-party AbortControllers may not work with these.
// See, https://github.com/nodejs/undici/pull/1910#issuecomment-1464495619.
try {
// If the max amount of listeners is equal to the default, increase it
// This is only available in node >= v19.9.0
if (typeof getMaxListeners === 'function' && getMaxListeners(signal) === defaultMaxListeners) {
setMaxListeners(100, signal)
} else if (getEventListeners(signal, 'abort').length >= defaultMaxListeners) {
setMaxListeners(100, signal)
}
} catch {}
signal.addEventListener('abort', abort, { once: true })
requestFinalizer.register(this, { signal, abort })
requestFinalizer.register(ac, { signal, abort })
}
}
@ -413,7 +442,7 @@ class Request {
// non-null, and request's method is `GET` or `HEAD`, then throw a
// TypeError.
if (
((init.body !== undefined && init.body != null) || inputBody != null) &&
(init.body != null || inputBody != null) &&
(request.method === 'GET' || request.method === 'HEAD')
) {
throw new TypeError('Request with GET/HEAD method cannot have body.')
@ -423,7 +452,7 @@ class Request {
let initBody = null
// 36. If init["body"] exists and is non-null, then:
if (init.body !== undefined && init.body != null) {
if (init.body != null) {
// 1. Let Content-Type be null.
// 2. Set initBody and Content-Type to the result of extracting
// init["body"], with keepalive set to request's keepalive.

View file

@ -348,9 +348,7 @@ function makeNetworkError (reason) {
status: 0,
error: isError
? reason
: new Error(reason ? String(reason) : reason, {
cause: isError ? reason : undefined
}),
: new Error(reason ? String(reason) : reason),
aborted: reason && reason.name === 'AbortError'
})
}
@ -469,7 +467,7 @@ function initializeResponse (response, init, body) {
// 5. If init["headers"] exists, then fill responses headers with init["headers"].
if ('headers' in init && init.headers != null) {
fill(response[kState].headersList, init.headers)
fill(response[kHeaders], init.headers)
}
// 6. If body was given, then:
@ -571,5 +569,6 @@ module.exports = {
makeResponse,
makeAppropriateNetworkError,
filterResponse,
Response
Response,
cloneResponse
}
View file
@ -6,6 +6,5 @@ module.exports = {
kSignal: Symbol('signal'),
kState: Symbol('state'),
kGuard: Symbol('guard'),
kRealm: Symbol('realm'),
kHeadersCaseInsensitive: Symbol('headers case insensitive')
kRealm: Symbol('realm')
}
238
node_modules/undici/lib/fetch/util.js generated vendored
View file
@ -1,6 +1,7 @@
'use strict'
const { redirectStatus, badPorts, referrerPolicy: referrerPolicyTokens } = require('./constants')
const { getGlobalOrigin } = require('./global')
const { performance } = require('perf_hooks')
const { isBlobLike, toUSVString, ReadableStreamFrom } = require('../core/util')
const assert = require('assert')
@ -36,9 +37,11 @@ function responseLocationURL (response, requestFragment) {
// `Location` and response's header list.
let location = response.headersList.get('location')
// 3. If location is a value, then set location to the result of parsing
// location with response's URL.
location = location ? new URL(location, responseURL(response)) : null
// 3. If location is a header value, then set location to the result of
// parsing location with response's URL.
if (location !== null && isValidHeaderValue(location)) {
location = new URL(location, responseURL(response))
}
// 4. If location is a URL whose fragment is null, then set location's
// fragment to requestFragment.
@ -61,7 +64,7 @@ function requestBadPort (request) {
// 2. If url's scheme is an HTTP(S) scheme and url's port is a bad port,
// then return blocked.
if (/^https?:/.test(url.protocol) && badPorts.includes(url.port)) {
if (urlIsHttpHttpsScheme(url) && badPorts.includes(url.port)) {
return 'blocked'
}
@ -267,7 +270,7 @@ function appendRequestOriginHeader (request) {
// 2. If request's response tainting is "cors" or request's mode is "websocket", then append (`Origin`, serializedOrigin) to request's header list.
if (request.responseTainting === 'cors' || request.mode === 'websocket') {
if (serializedOrigin) {
request.headersList.append('Origin', serializedOrigin)
request.headersList.append('origin', serializedOrigin)
}
// 3. Otherwise, if request's method is neither `GET` nor `HEAD`, then:
@ -282,7 +285,7 @@ function appendRequestOriginHeader (request) {
case 'strict-origin':
case 'strict-origin-when-cross-origin':
// If request's origin is a tuple origin, its scheme is "https", and request's current URL's scheme is not "https", then set serializedOrigin to `null`.
if (/^https:/.test(request.origin) && !/^https:/.test(requestCurrentURL(request))) {
if (request.origin && urlHasHttpsScheme(request.origin) && !urlHasHttpsScheme(requestCurrentURL(request))) {
serializedOrigin = null
}
break
@ -298,7 +301,7 @@ function appendRequestOriginHeader (request) {
if (serializedOrigin) {
// 2. Append (`Origin`, serializedOrigin) to request's header list.
request.headersList.append('Origin', serializedOrigin)
request.headersList.append('origin', serializedOrigin)
}
}
}
@ -327,14 +330,17 @@ function createOpaqueTimingInfo (timingInfo) {
// https://html.spec.whatwg.org/multipage/origin.html#policy-container
function makePolicyContainer () {
// TODO
return {}
// Note: the fetch spec doesn't make use of embedder policy or CSP list
return {
referrerPolicy: 'strict-origin-when-cross-origin'
}
}
// https://html.spec.whatwg.org/multipage/origin.html#clone-a-policy-container
function clonePolicyContainer () {
// TODO
return {}
function clonePolicyContainer (policyContainer) {
return {
referrerPolicy: policyContainer.referrerPolicy
}
}
// https://w3c.github.io/webappsec-referrer-policy/#determine-requests-referrer
@ -342,104 +348,76 @@ function determineRequestsReferrer (request) {
// 1. Let policy be request's referrer policy.
const policy = request.referrerPolicy
// Return no-referrer when empty or policy says so
if (policy == null || policy === '' || policy === 'no-referrer') {
return 'no-referrer'
}
// Note: policy cannot (shouldn't) be null or an empty string.
assert(policy)
// 2. Let environment be request's client.
// 2. Let environment be the request's client.
const environment = request.client
let referrerSource = null
/**
* 3, Switch on request's referrer:
"client"
If environment's global object is a Window object, then
Let document be the associated Document of environment's global object.
If document's origin is an opaque origin, return no referrer.
While document is an iframe srcdoc document,
let document be document's browsing context's browsing context container's node document.
Let referrerSource be document's URL.
Otherwise, let referrerSource be environment's creation URL.
a URL
Let referrerSource be request's referrer.
*/
// 3. Switch on request's referrer:
if (request.referrer === 'client') {
// Not defined in Node but part of the spec
if (request.client?.globalObject?.constructor?.name === 'Window' ) { // eslint-disable-line
const origin = environment.globalObject.self?.origin ?? environment.globalObject.location?.origin
// Note: node isn't a browser and doesn't implement document/iframes,
// so we bypass this step and replace it with our own.
// If document's origin is an opaque origin, return no referrer.
if (origin == null || origin === 'null') return 'no-referrer'
const globalOrigin = getGlobalOrigin()
// Let referrerSource be document's URL.
referrerSource = new URL(environment.globalObject.location.href)
} else {
// 3(a)(II) If environment's global object is not Window,
// Let referrerSource be environment's creationURL
if (environment?.globalObject?.location == null) {
return 'no-referrer'
}
referrerSource = new URL(environment.globalObject.location.href)
if (!globalOrigin || globalOrigin.origin === 'null') {
return 'no-referrer'
}
// note: we need to clone it as it's mutated
referrerSource = new URL(globalOrigin)
} else if (request.referrer instanceof URL) {
// 3(b) If request's referrer is a URL instance, then make
// referrerSource be request's referrer.
// Let referrerSource be request's referrer.
referrerSource = request.referrer
} else {
// If referrerSource neither client nor instance of URL
// then return "no-referrer".
return 'no-referrer'
}
const urlProtocol = referrerSource.protocol
// 4. Let request's referrerURL be the result of stripping referrerSource for
// use as a referrer.
let referrerURL = stripURLForReferrer(referrerSource)
// If url's scheme is a local scheme (i.e. one of "about", "data", "javascript", "file")
// then return "no-referrer".
if (
urlProtocol === 'about:' || urlProtocol === 'data:' ||
urlProtocol === 'blob:'
) {
return 'no-referrer'
// 5. Let referrerOrigin be the result of stripping referrerSource for use as
// a referrer, with the origin-only flag set to true.
const referrerOrigin = stripURLForReferrer(referrerSource, true)
// 6. If the result of serializing referrerURL is a string whose length is
// greater than 4096, set referrerURL to referrerOrigin.
if (referrerURL.toString().length > 4096) {
referrerURL = referrerOrigin
}
let temp
let referrerOrigin
// 4. Let request's referrerURL be the result of stripping referrer
// source for use as referrer (using util function, without origin only)
const referrerUrl = (temp = stripURLForReferrer(referrerSource)).length > 4096
// 5. Let referrerOrigin be the result of stripping referrer
// source for use as referrer (using util function, with originOnly true)
? (referrerOrigin = stripURLForReferrer(referrerSource, true))
// 6. If result of serializing referrerUrl is a string whose length is greater than
// 4096, then set referrerURL to referrerOrigin
: temp
const areSameOrigin = sameOrigin(request, referrerUrl)
const isNonPotentiallyTrustWorthy = isURLPotentiallyTrustworthy(referrerUrl) &&
const areSameOrigin = sameOrigin(request, referrerURL)
const isNonPotentiallyTrustWorthy = isURLPotentiallyTrustworthy(referrerURL) &&
!isURLPotentiallyTrustworthy(request.url)
// NOTE: How to treat step 7?
// 8. Execute the switch statements corresponding to the value of policy:
switch (policy) {
case 'origin': return referrerOrigin != null ? referrerOrigin : stripURLForReferrer(referrerSource, true)
case 'unsafe-url': return referrerUrl
case 'unsafe-url': return referrerURL
case 'same-origin':
return areSameOrigin ? referrerOrigin : 'no-referrer'
case 'origin-when-cross-origin':
return areSameOrigin ? referrerUrl : referrerOrigin
case 'strict-origin-when-cross-origin':
/**
* 1. If the origin of referrerURL and the origin of request's current URL are the same,
* then return referrerURL.
* 2. If referrerURL is a potentially trustworthy URL and request's current URL is not a
* potentially trustworthy URL, then return no referrer.
* 3. Return referrerOrigin
*/
if (areSameOrigin) return referrerOrigin
// else return isNonPotentiallyTrustWorthy ? 'no-referrer' : referrerOrigin
return areSameOrigin ? referrerURL : referrerOrigin
case 'strict-origin-when-cross-origin': {
const currentURL = requestCurrentURL(request)
// 1. If the origin of referrerURL and the origin of request's current
// URL are the same, then return referrerURL.
if (sameOrigin(referrerURL, currentURL)) {
return referrerURL
}
// 2. If referrerURL is a potentially trustworthy URL and request's
// current URL is not a potentially trustworthy URL, then return no
// referrer.
if (isURLPotentiallyTrustworthy(referrerURL) && !isURLPotentiallyTrustworthy(currentURL)) {
return 'no-referrer'
}
// 3. Return referrerOrigin.
return referrerOrigin
}
case 'strict-origin': // eslint-disable-line
/**
* 1. If referrerURL is a potentially trustworthy URL and
@ -458,15 +436,42 @@ function determineRequestsReferrer (request) {
default: // eslint-disable-line
return isNonPotentiallyTrustWorthy ? 'no-referrer' : referrerOrigin
}
}
function stripURLForReferrer (url, originOnly = false) {
const urlObject = new URL(url.href)
urlObject.username = ''
urlObject.password = ''
urlObject.hash = ''
/**
* @see https://w3c.github.io/webappsec-referrer-policy/#strip-url
* @param {URL} url
* @param {boolean|undefined} originOnly
*/
function stripURLForReferrer (url, originOnly) {
// 1. Assert: url is a URL.
assert(url instanceof URL)
return originOnly ? urlObject.origin : urlObject.href
// 2. If url's scheme is a local scheme, then return no referrer.
if (url.protocol === 'file:' || url.protocol === 'about:' || url.protocol === 'blank:') {
return 'no-referrer'
}
// 3. Set url's username to the empty string.
url.username = ''
// 4. Set url's password to the empty string.
url.password = ''
// 5. Set url's fragment to null.
url.hash = ''
// 6. If the origin-only flag is true, then:
if (originOnly) {
// 1. Set url's path to « the empty string ».
url.pathname = ''
// 2. Set url's query to null.
url.search = ''
}
// 7. Return url.
return url
}
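Editorial note: a short sketch (not from the commit) of the stripping steps, assuming the function above is in scope.
// Illustrative only: credentials and fragment are removed; with the origin-only
// flag the path and query are dropped as well.
console.log(stripURLForReferrer(new URL('https://user:pw@example.com/a/b?q=1#x')).href)
// 'https://example.com/a/b?q=1'
console.log(stripURLForReferrer(new URL('https://example.com/a/b?q=1'), true).href)
// 'https://example.com/'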
function isURLPotentiallyTrustworthy (url) {
@ -633,7 +638,9 @@ function tryUpgradeRequestToAPotentiallyTrustworthyURL (request) {
*/
function sameOrigin (A, B) {
// 1. If A and B are the same opaque origin, then return true.
// "opaque origin" is an internal value we cannot access, ignore.
if (A.origin === B.origin && A.origin === 'null') {
return true
}
// 2. If A and B are both tuple origins and their schemes,
// hosts, and port are identical, then return true.
@ -939,6 +946,41 @@ async function readAllBytes (reader, successSteps, failureSteps) {
}
}
/**
* @see https://fetch.spec.whatwg.org/#is-local
* @param {URL} url
*/
function urlIsLocal (url) {
assert('protocol' in url) // ensure it's a url object
const protocol = url.protocol
return protocol === 'about:' || protocol === 'blob:' || protocol === 'data:'
}
/**
* @param {string|URL} url
*/
function urlHasHttpsScheme (url) {
if (typeof url === 'string') {
return url.startsWith('https:')
}
return url.protocol === 'https:'
}
/**
* @see https://fetch.spec.whatwg.org/#http-scheme
* @param {URL} url
*/
function urlIsHttpHttpsScheme (url) {
assert('protocol' in url) // ensure it's a url object
const protocol = url.protocol
return protocol === 'http:' || protocol === 'https:'
}
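Editorial note: an illustrative sketch (not part of the diff) of the three helpers that replace the earlier protocol RegExp checks.
// Illustrative only.
console.log(urlIsLocal(new URL('data:,hello')))              // true
console.log(urlIsHttpHttpsScheme(new URL('https://a.test'))) // true
console.log(urlHasHttpsScheme('https://a.test/x'))           // true (string form)
console.log(urlHasHttpsScheme(new URL('http://a.test')))     // false (URL form)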
/**
* Fetch supports node >= 16.8.0, but Object.hasOwn was added in v16.9.0.
*/
@ -983,5 +1025,9 @@ module.exports = {
isReadableStreamLike,
readableStreamClose,
isomorphicEncode,
isomorphicDecode
isomorphicDecode,
urlIsLocal,
urlHasHttpsScheme,
urlIsHttpHttpsScheme,
readAllBytes
}
View file
@ -51,6 +51,13 @@ webidl.argumentLengthCheck = function ({ length }, min, ctx) {
}
}
webidl.illegalConstructor = function () {
throw webidl.errors.exception({
header: 'TypeError',
message: 'Illegal constructor'
})
}
// https://tc39.es/ecma262/#sec-ecmascript-data-types-and-values
webidl.util.Type = function (V) {
switch (typeof V) {
View file
@ -2,9 +2,13 @@
/**
* @see https://encoding.spec.whatwg.org/#concept-encoding-get
* @param {string} label
* @param {string|undefined} label
*/
function getEncoding (label) {
if (!label) {
return 'failure'
}
// 1. Remove any leading and trailing ASCII whitespace from label.
// 2. If label is an ASCII case-insensitive match for any of the
// labels listed in the table below, then return the
View file
@ -188,7 +188,11 @@ function buildKey (opts) {
}
function generateKeyValues (data) {
return Object.entries(data).reduce((keyValuePairs, [key, value]) => [...keyValuePairs, key, value], [])
return Object.entries(data).reduce((keyValuePairs, [key, value]) => [
...keyValuePairs,
Buffer.from(`${key}`),
Array.isArray(value) ? value.map(x => Buffer.from(`${x}`)) : Buffer.from(`${value}`)
], [])
}
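Editorial note: a sketch (not part of the commit, assuming generateKeyValues is in scope) of the flattened Buffer output the mock-agent key builder now produces.
// Illustrative only: keys and values become Buffers; array values keep their
// shape, with each element converted to a Buffer.
console.log(generateKeyValues({ 'content-type': 'text/plain', 'set-cookie': ['a=1', 'b=2'] }))
// [ Buffer('content-type'), Buffer('text/plain'),
//   Buffer('set-cookie'), [ Buffer('a=1'), Buffer('b=2') ] ]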
/**
3
node_modules/undici/lib/pool.js generated vendored
View file
@ -32,6 +32,8 @@ class Pool extends PoolBase {
tls,
maxCachedSessions,
socketPath,
autoSelectFamily,
autoSelectFamilyAttemptTimeout,
...options
} = {}) {
super()
@ -54,6 +56,7 @@ class Pool extends PoolBase {
maxCachedSessions,
socketPath,
timeout: connectTimeout == null ? 10e3 : connectTimeout,
...(util.nodeHasAutoSelectFamily && autoSelectFamily ? { autoSelectFamily, autoSelectFamilyAttemptTimeout } : undefined),
...connect
})
}
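Editorial note: a usage sketch (editorial; option names as added above) showing how a Pool consumer would opt in.
// Illustrative only: forwarded to the connector only when
// util.nodeHasAutoSelectFamily reports support (Node >= 18.13).
const { Pool } = require('undici')
const pool = new Pool('https://example.com', {
  autoSelectFamily: true,
  autoSelectFamilyAttemptTimeout: 250
})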
View file
@ -3,7 +3,7 @@
const { kProxy, kClose, kDestroy, kInterceptors } = require('./core/symbols')
const { URL } = require('url')
const Agent = require('./agent')
const Client = require('./client')
const Pool = require('./pool')
const DispatcherBase = require('./dispatcher-base')
const { InvalidArgumentError, RequestAbortedError } = require('./core/errors')
const buildConnector = require('./core/connect')
@ -34,6 +34,10 @@ function buildProxyOptions (opts) {
}
}
function defaultFactory (origin, opts) {
return new Pool(origin, opts)
}
class ProxyAgent extends DispatcherBase {
constructor (opts) {
super(opts)
@ -51,6 +55,12 @@ class ProxyAgent extends DispatcherBase {
throw new InvalidArgumentError('Proxy opts.uri is mandatory')
}
const { clientFactory = defaultFactory } = opts
if (typeof clientFactory !== 'function') {
throw new InvalidArgumentError('Proxy opts.clientFactory must be a function.')
}
this[kRequestTls] = opts.requestTls
this[kProxyTls] = opts.proxyTls
this[kProxyHeaders] = opts.headers || {}
@ -69,7 +79,7 @@ class ProxyAgent extends DispatcherBase {
const connect = buildConnector({ ...opts.proxyTls })
this[kConnectEndpoint] = buildConnector({ ...opts.requestTls })
this[kClient] = new Client(resolvedUrl, { connect })
this[kClient] = clientFactory(resolvedUrl, { connect })
this[kAgent] = new Agent({
...opts,
connect: async (opts, callback) => {
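Editorial note: a usage sketch (editorial, placed at the end of this hunk) of the new clientFactory option validated above.
// Illustrative only: swapping the proxy connection's Pool for a custom one.
const { ProxyAgent, Pool } = require('undici')
const agent = new ProxyAgent({
  uri: 'http://proxy.internal:8080',
  clientFactory: (origin, opts) => new Pool(origin, { ...opts, connections: 1 })
})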
97
node_modules/undici/lib/timers.js generated vendored Normal file
View file
@ -0,0 +1,97 @@
'use strict'
let fastNow = Date.now()
let fastNowTimeout
const fastTimers = []
function onTimeout () {
fastNow = Date.now()
let len = fastTimers.length
let idx = 0
while (idx < len) {
const timer = fastTimers[idx]
if (timer.state === 0) {
timer.state = fastNow + timer.delay
} else if (timer.state > 0 && fastNow >= timer.state) {
timer.state = -1
timer.callback(timer.opaque)
}
if (timer.state === -1) {
timer.state = -2
if (idx !== len - 1) {
fastTimers[idx] = fastTimers.pop()
} else {
fastTimers.pop()
}
len -= 1
} else {
idx += 1
}
}
if (fastTimers.length > 0) {
refreshTimeout()
}
}
function refreshTimeout () {
if (fastNowTimeout && fastNowTimeout.refresh) {
fastNowTimeout.refresh()
} else {
clearTimeout(fastNowTimeout)
fastNowTimeout = setTimeout(onTimeout, 1e3)
if (fastNowTimeout.unref) {
fastNowTimeout.unref()
}
}
}
class Timeout {
constructor (callback, delay, opaque) {
this.callback = callback
this.delay = delay
this.opaque = opaque
// -2 not in timer list
// -1 in timer list but inactive
// 0 in timer list waiting for time
// > 0 in timer list waiting for time to expire
this.state = -2
this.refresh()
}
refresh () {
if (this.state === -2) {
fastTimers.push(this)
if (!fastNowTimeout || fastTimers.length === 1) {
refreshTimeout()
}
}
this.state = 0
}
clear () {
this.state = -1
}
}
module.exports = {
setTimeout (callback, delay, opaque) {
return delay < 1e3
? setTimeout(callback, delay, opaque)
: new Timeout(callback, delay, opaque)
},
clearTimeout (timeout) {
if (timeout instanceof Timeout) {
timeout.clear()
} else {
clearTimeout(timeout)
}
}
}
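Editorial note: a usage sketch (editorial; the require path is assumed) of the wrapper exported above.
// Illustrative only: delays of 1s or more are handled by the shared fast timer,
// shorter delays fall back to the native setTimeout.
const timers = require('./lib/timers') // path assumed
const handle = timers.setTimeout((opaque) => console.log('fired', opaque), 5000, 'ctx')
timers.clearTimeout(handle) // cancels either kind of handle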
View file
@ -5,19 +5,17 @@ const diagnosticsChannel = require('diagnostics_channel')
const { uid, states } = require('./constants')
const {
kReadyState,
kResponse,
kExtensions,
kProtocol,
kSentClose,
kByteParser,
kReceivedClose
} = require('./symbols')
const { fireEvent, failWebsocketConnection } = require('./util')
const { CloseEvent } = require('./events')
const { ByteParser } = require('./receiver')
const { makeRequest } = require('../fetch/request')
const { fetching } = require('../fetch/index')
const { getGlobalDispatcher } = require('../..')
const { Headers } = require('../fetch/headers')
const { getGlobalDispatcher } = require('../global')
const { kHeadersList } = require('../core/symbols')
const channels = {}
channels.open = diagnosticsChannel.channel('undici:websocket:open')
@ -29,8 +27,10 @@ channels.socketError = diagnosticsChannel.channel('undici:websocket:socket_error
* @param {URL} url
* @param {string|string[]} protocols
* @param {import('./websocket').WebSocket} ws
* @param {(response: any) => void} onEstablish
* @param {Partial<import('../../types/websocket').WebSocketInit>} options
*/
function establishWebSocketConnection (url, protocols, ws) {
function establishWebSocketConnection (url, protocols, ws, onEstablish, options) {
// 1. Let requestURL be a copy of url, with its scheme set to "http", if url's
// scheme is "ws", and to "https" otherwise.
const requestURL = url
@ -51,6 +51,13 @@ function establishWebSocketConnection (url, protocols, ws) {
redirect: 'error'
})
// Note: undici extension, allow setting custom headers.
if (options.headers) {
const headersList = new Headers(options.headers)[kHeadersList]
request.headersList = headersList
}
// 3. Append (`Upgrade`, `websocket`) to request's header list.
// 4. Append (`Connection`, `Upgrade`) to request's header list.
// Note: both of these are handled by undici currently.
@ -91,7 +98,7 @@ function establishWebSocketConnection (url, protocols, ws) {
const controller = fetching({
request,
useParallelQueue: true,
dispatcher: getGlobalDispatcher(),
dispatcher: options.dispatcher ?? getGlobalDispatcher(),
processResponse (response) {
// 1. If response is a network error or its status is not 101,
// fail the WebSocket connection.
@ -173,67 +180,25 @@ function establishWebSocketConnection (url, protocols, ws) {
return
}
// processResponse is called when the "response's header list has been received and initialized."
// once this happens, the connection is open
ws[kResponse] = response
const parser = new ByteParser(ws)
response.socket.ws = ws // TODO: use symbol
ws[kByteParser] = parser
whenConnectionEstablished(ws)
response.socket.on('data', onSocketData)
response.socket.on('close', onSocketClose)
response.socket.on('error', onSocketError)
parser.on('drain', onParserDrain)
if (channels.open.hasSubscribers) {
channels.open.publish({
address: response.socket.address(),
protocol: secProtocol,
extensions: secExtension
})
}
onEstablish(response)
}
})
return controller
}
/**
* @see https://websockets.spec.whatwg.org/#feedback-from-the-protocol
* @param {import('./websocket').WebSocket} ws
*/
function whenConnectionEstablished (ws) {
const { [kResponse]: response } = ws
// 1. Change the ready state to OPEN (1).
ws[kReadyState] = states.OPEN
// 2. Change the extensions attribute's value to the extensions in use, if
// it is not the null value.
// https://datatracker.ietf.org/doc/html/rfc6455#section-9.1
const extensions = response.headersList.get('sec-websocket-extensions')
if (extensions !== null) {
ws[kExtensions] = extensions
}
// 3. Change the protocol attribute's value to the subprotocol in use, if
// it is not the null value.
// https://datatracker.ietf.org/doc/html/rfc6455#section-1.9
const protocol = response.headersList.get('sec-websocket-protocol')
if (protocol !== null) {
ws[kProtocol] = protocol
}
// 4. Fire an event named open at the WebSocket object.
fireEvent('open', ws)
if (channels.open.hasSubscribers) {
channels.open.publish({
address: response.socket.address(),
protocol,
extensions
})
}
}
/**
* @param {Buffer} chunk
*/
@ -243,10 +208,6 @@ function onSocketData (chunk) {
}
}
function onParserDrain () {
this.ws[kResponse].socket.resume()
}
/**
* @see https://websockets.spec.whatwg.org/#feedback-from-the-protocol
* @see https://datatracker.ietf.org/doc/html/rfc6455#section-7.1.4
View file
@ -43,7 +43,7 @@ class WebsocketFrameSend {
buffer[1] = payloadLength
if (payloadLength === 126) {
new DataView(buffer.buffer).setUint16(2, bodyLength)
buffer.writeUInt16BE(bodyLength, 2)
} else if (payloadLength === 127) {
// Clear extended payload length
buffer[2] = buffer[3] = 0
View file
@ -5,10 +5,7 @@ module.exports = {
kReadyState: Symbol('ready state'),
kController: Symbol('controller'),
kResponse: Symbol('response'),
kExtensions: Symbol('extensions'),
kProtocol: Symbol('protocol'),
kBinaryType: Symbol('binary type'),
kClosingFrame: Symbol('closing frame'),
kSentClose: Symbol('sent close'),
kReceivedClose: Symbol('received close'),
kByteParser: Symbol('byte parser')
View file
@ -8,16 +8,17 @@ const {
kWebSocketURL,
kReadyState,
kController,
kExtensions,
kProtocol,
kBinaryType,
kResponse,
kSentClose
kSentClose,
kByteParser
} = require('./symbols')
const { isEstablished, isClosing, isValidSubprotocol, failWebsocketConnection } = require('./util')
const { isEstablished, isClosing, isValidSubprotocol, failWebsocketConnection, fireEvent } = require('./util')
const { establishWebSocketConnection } = require('./connection')
const { WebsocketFrameSend } = require('./frame')
const { ByteParser } = require('./receiver')
const { kEnumerableProperty, isBlobLike } = require('../core/util')
const { getGlobalDispatcher } = require('../global')
const { types } = require('util')
let experimentalWarned = false
@ -32,6 +33,8 @@ class WebSocket extends EventTarget {
}
#bufferedAmount = 0
#protocol = ''
#extensions = ''
/**
* @param {string} url
@ -49,8 +52,10 @@ class WebSocket extends EventTarget {
})
}
const options = webidl.converters['DOMString or sequence<DOMString> or WebSocketInit'](protocols)
url = webidl.converters.USVString(url)
protocols = webidl.converters['DOMString or sequence<DOMString>'](protocols)
protocols = options.protocols
// 1. Let urlRecord be the result of applying the URL parser to url.
let urlRecord
@ -104,7 +109,13 @@ class WebSocket extends EventTarget {
// 1. Establish a WebSocket connection given urlRecord, protocols,
// and client.
this[kController] = establishWebSocketConnection(urlRecord, protocols, this)
this[kController] = establishWebSocketConnection(
urlRecord,
protocols,
this,
(response) => this.#onConnectionEstablished(response),
options
)
// Each WebSocket object has an associated ready state, which is a
// number representing the state of the connection. Initially it must
@ -112,10 +123,8 @@ class WebSocket extends EventTarget {
this[kReadyState] = WebSocket.CONNECTING
// The extensions attribute must initially return the empty string.
this[kExtensions] = ''
// The protocol attribute must initially return the empty string.
this[kProtocol] = ''
// Each WebSocket object has an associated binary type, which is a
// BinaryType. Initially it must be "blob".
@ -368,13 +377,13 @@ class WebSocket extends EventTarget {
get extensions () {
webidl.brandCheck(this, WebSocket)
return this[kExtensions]
return this.#extensions
}
get protocol () {
webidl.brandCheck(this, WebSocket)
return this[kProtocol]
return this.#protocol
}
get onopen () {
@ -476,6 +485,47 @@ class WebSocket extends EventTarget {
this[kBinaryType] = type
}
}
/**
* @see https://websockets.spec.whatwg.org/#feedback-from-the-protocol
*/
#onConnectionEstablished (response) {
// processResponse is called when the "response's header list has been received and initialized."
// once this happens, the connection is open
this[kResponse] = response
const parser = new ByteParser(this)
parser.on('drain', function onParserDrain () {
this.ws[kResponse].socket.resume()
})
response.socket.ws = this
this[kByteParser] = parser
// 1. Change the ready state to OPEN (1).
this[kReadyState] = states.OPEN
// 2. Change the extensions attribute's value to the extensions in use, if
// it is not the null value.
// https://datatracker.ietf.org/doc/html/rfc6455#section-9.1
const extensions = response.headersList.get('sec-websocket-extensions')
if (extensions !== null) {
this.#extensions = extensions
}
// 3. Change the protocol attribute's value to the subprotocol in use, if
// it is not the null value.
// https://datatracker.ietf.org/doc/html/rfc6455#section-1.9
const protocol = response.headersList.get('sec-websocket-protocol')
if (protocol !== null) {
this.#protocol = protocol
}
// 4. Fire an event named open at the WebSocket object.
fireEvent('open', this)
}
}
// https://websockets.spec.whatwg.org/#dom-websocket-connecting
@ -531,6 +581,36 @@ webidl.converters['DOMString or sequence<DOMString>'] = function (V) {
return webidl.converters.DOMString(V)
}
// This implements the proposal made in https://github.com/whatwg/websockets/issues/42
webidl.converters.WebSocketInit = webidl.dictionaryConverter([
{
key: 'protocols',
converter: webidl.converters['DOMString or sequence<DOMString>'],
get defaultValue () {
return []
}
},
{
key: 'dispatcher',
converter: (V) => V,
get defaultValue () {
return getGlobalDispatcher()
}
},
{
key: 'headers',
converter: webidl.nullableConverter(webidl.converters.HeadersInit)
}
])
webidl.converters['DOMString or sequence<DOMString> or WebSocketInit'] = function (V) {
if (webidl.util.Type(V) === 'Object' && !(Symbol.iterator in V)) {
return webidl.converters.WebSocketInit(V)
}
return { protocols: webidl.converters['DOMString or sequence<DOMString>'](V) }
}
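Editorial note: a usage sketch (editorial; token value is a placeholder) of the extended constructor argument accepted by the converter above.
// Illustrative only: protocols, custom headers and a dispatcher in one init object.
const { WebSocket } = require('undici')
const ws = new WebSocket('wss://chat.example', {
  protocols: ['chat'],
  headers: { authorization: 'Bearer placeholder-token' }
})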
webidl.converters.WebSocketSendData = function (V) {
if (webidl.util.Type(V) === 'Object') {
if (isBlobLike(V)) {