2
0
mirror of https://github.com/tenrok/axios.git synced 2026-05-15 11:59:42 +03:00

feat(adapter): add fetch adapter; (#6371)

This commit is contained in:
Dmitriy Mozgovoy
2024-04-28 22:33:49 +03:00
committed by GitHub
parent 751133eb9e
commit a3ff99b59d
21 changed files with 1015 additions and 127 deletions
+2 -1
View File
@@ -15,7 +15,8 @@ jobs:
strategy:
matrix:
node-version: [12.x, 14.x, 16.x, 18.x, 20.x]
node-version: [12.x, 14.x, 16.x, 18.x, 20.x, 21.x]
fail-fast: false
steps:
- uses: actions/checkout@v3
+5 -2
View File
@@ -268,7 +268,8 @@ declare namespace axios {
| 'document'
| 'json'
| 'text'
| 'stream';
| 'stream'
| 'formdata';
type responseEncoding =
| 'ascii' | 'ASCII'
@@ -353,11 +354,12 @@ declare namespace axios {
upload?: boolean;
download?: boolean;
event?: BrowserProgressEvent;
lengthComputable: boolean;
}
type Milliseconds = number;
type AxiosAdapterName = 'xhr' | 'http' | string;
type AxiosAdapterName = 'fetch' | 'xhr' | 'http' | string;
type AxiosAdapterConfig = AxiosAdapter | AxiosAdapterName;
@@ -415,6 +417,7 @@ declare namespace axios {
lookup?: ((hostname: string, options: object, cb: (err: Error | null, address: LookupAddress | LookupAddress[], family?: AddressFamily) => void) => void) |
((hostname: string, options: object) => Promise<[address: LookupAddressEntry | LookupAddressEntry[], family?: AddressFamily] | LookupAddress>);
withXSRFToken?: boolean | ((config: InternalAxiosRequestConfig) => boolean | undefined);
fetchOptions?: Record<string, any>;
}
// Alias
Vendored
+5 -2
View File
@@ -209,7 +209,8 @@ export type ResponseType =
| 'document'
| 'json'
| 'text'
| 'stream';
| 'stream'
| 'formdata';
export type responseEncoding =
| 'ascii' | 'ASCII'
@@ -294,11 +295,12 @@ export interface AxiosProgressEvent {
upload?: boolean;
download?: boolean;
event?: BrowserProgressEvent;
lengthComputable: boolean;
}
type Milliseconds = number;
type AxiosAdapterName = 'xhr' | 'http' | string;
type AxiosAdapterName = 'fetch' | 'xhr' | 'http' | string;
type AxiosAdapterConfig = AxiosAdapter | AxiosAdapterName;
@@ -356,6 +358,7 @@ export interface AxiosRequestConfig<D = any> {
lookup?: ((hostname: string, options: object, cb: (err: Error | null, address: LookupAddress | LookupAddress[], family?: AddressFamily) => void) => void) |
((hostname: string, options: object) => Promise<[address: LookupAddressEntry | LookupAddressEntry[], family?: AddressFamily] | LookupAddress>);
withXSRFToken?: boolean | ((config: InternalAxiosRequestConfig) => boolean | undefined);
fetchOptions?: Record<string, any>;
}
// Alias
+3 -1
View File
@@ -1,11 +1,13 @@
import utils from '../utils.js';
import httpAdapter from './http.js';
import xhrAdapter from './xhr.js';
import fetchAdapter from './fetch.js';
import AxiosError from "../core/AxiosError.js";
const knownAdapters = {
http: httpAdapter,
xhr: xhrAdapter
xhr: xhrAdapter,
fetch: fetchAdapter
}
utils.forEach(knownAdapters, (fn, value) => {
+197
View File
@@ -0,0 +1,197 @@
import platform from "../platform/index.js";
import utils from "../utils.js";
import AxiosError from "../core/AxiosError.js";
import composeSignals from "../helpers/composeSignals.js";
import {trackStream} from "../helpers/trackStream.js";
import AxiosHeaders from "../core/AxiosHeaders.js";
import progressEventReducer from "../helpers/progressEventReducer.js";
import resolveConfig from "../helpers/resolveConfig.js";
import settle from "../core/settle.js";
/**
 * Wraps a progress listener so it can be fed raw byte counts.
 *
 * @param {number|undefined} total - expected total bytes, or null/undefined when unknown
 * @param {Function} fn - listener receiving {lengthComputable, total, loaded}
 * @returns {Function} callback taking the cumulative loaded byte count
 */
const fetchProgressDecorator = (total, fn) => {
  // total is unknown when no Content-Length could be determined
  const lengthComputable = total != null;
  return (loaded) =>
    // defer the notification so progress reporting never blocks the stream pump
    setTimeout(() => {
      fn({
        lengthComputable,
        total,
        loaded
      });
    });
}
const isFetchSupported = typeof fetch !== 'undefined';
const supportsRequestStreams = isFetchSupported && (() => {
let duplexAccessed = false;
const hasContentType = new Request(platform.origin, {
body: new ReadableStream(),
method: 'POST',
get duplex() {
duplexAccessed = true;
return 'half';
},
}).headers.has('Content-Type');
return duplexAccessed && !hasContentType;
})();
const DEFAULT_CHUNK_SIZE = 64 * 1024;
const resolvers = {
stream: (res) => res.body
};
isFetchSupported && ['text', 'arrayBuffer', 'blob', 'formData'].forEach(type => [
resolvers[type] = utils.isFunction(Response.prototype[type]) ? (res) => res[type]() : (_, config) => {
throw new AxiosError(`Response type ${type} is not supported`, AxiosError.ERR_NOT_SUPPORT, config);
}
])
/**
 * Best-effort computation of a request body's byte length.
 *
 * Returns undefined when the length cannot be determined (e.g. streams).
 *
 * @param {*} body - Blob, spec-compliant FormData, ArrayBufferView,
 *                   URLSearchParams or string
 * @returns {Promise<number|undefined>} byte length, if computable
 */
const getBodyLength = async (body) => {
  if (utils.isBlob(body)) {
    return body.size;
  }

  if (utils.isSpecCompliantForm(body)) {
    // Serialize the form the same way fetch would (multipart encoding with
    // boundaries) to learn its real encoded length. Note: Request's first
    // argument is the URL, so the body must go through the init object.
    const _request = new Request(platform.origin, {
      method: 'POST',
      body,
    });
    return (await _request.arrayBuffer()).byteLength;
  }

  if (utils.isArrayBufferView(body)) {
    return body.byteLength;
  }

  if (utils.isURLSearchParams(body)) {
    // Serialize to its x-www-form-urlencoded string form first
    body = body + '';
  }

  if (utils.isString(body)) {
    // encode() is synchronous — measure the UTF-8 byte length
    return new TextEncoder().encode(body).byteLength;
  }
}
/**
 * Resolves the request body length, preferring an explicit Content-Length
 * header over inspecting the body itself.
 *
 * @param {AxiosHeaders} headers
 * @param {*} body
 * @returns {Promise<number|undefined>}
 */
const resolveBodyLength = async (headers, body) => {
  const declared = utils.toFiniteNumber(headers.getContentLength());

  if (declared != null) {
    return declared;
  }

  return getBodyLength(body);
}
/**
 * Fetch adapter: dispatches an axios request through the WHATWG fetch API.
 *
 * Supports upload progress (when request streams are available), download
 * progress, cancellation via signal/cancelToken, timeouts and all resolver
 * response types declared above.
 *
 * @param {Object} config - internal axios request config
 * @returns {Promise<Object>} axios response settled through settle()
 * @throws {AxiosError} network errors, timeouts and cancellations
 */
export default async (config) => {
  let {
    url,
    method,
    data,
    signal,
    cancelToken,
    timeout,
    onDownloadProgress,
    onUploadProgress,
    responseType,
    headers,
    withCredentials = 'same-origin',
    fetchOptions
  } = resolveConfig(config);

  responseType = responseType ? (responseType + '').toLowerCase() : 'text';

  // Merge signal, cancelToken and timeout into a single AbortSignal
  let [composedSignal, stopTimeout] = (signal || cancelToken || timeout) ?
    composeSignals([signal, cancelToken], timeout) : [];

  let finished, request;

  // Detach abort listeners once the response is fully consumed (deferred so
  // an in-flight abort can still propagate first)
  const onFinish = () => {
    !finished && setTimeout(() => {
      composedSignal && composedSignal.unsubscribe();
    });

    finished = true;
  }

  try {
    if (onUploadProgress && supportsRequestStreams && method !== 'get' && method !== 'head') {
      let requestContentLength = await resolveBodyLength(headers, data);

      // Let Request serialize the body so we can observe the bytes it sends
      let _request = new Request(url, {
        method,
        body: data,
        duplex: "half"
      });

      let contentTypeHeader;

      if (utils.isFormData(data) && (contentTypeHeader = _request.headers.get('content-type'))) {
        // Preserve the multipart boundary Request generated for the form
        headers.setContentType(contentTypeHeader)
      }

      data = trackStream(_request.body, DEFAULT_CHUNK_SIZE, fetchProgressDecorator(
        requestContentLength,
        progressEventReducer(onUploadProgress)
      ));
    }

    // Map axios' boolean withCredentials onto fetch's `credentials` option;
    // 'include' sends cookies cross-origin, 'omit' never sends them.
    if (!utils.isString(withCredentials)) {
      withCredentials = withCredentials ? 'include' : 'omit';
    }

    request = new Request(url, {
      ...fetchOptions,
      signal: composedSignal,
      method,
      headers: headers.normalize().toJSON(),
      body: data,
      duplex: "half",
      credentials: withCredentials
    });

    let response = await fetch(request);

    const isStreamResponse = responseType === 'stream' || responseType === 'response';

    if (onDownloadProgress || isStreamResponse) {
      const options = {};

      // Response exposes its fields on the prototype, so copy the ones the
      // Response constructor accepts explicitly (own-property enumeration
      // would yield nothing and drop the status).
      ['status', 'statusText', 'headers'].forEach(prop => {
        options[prop] = response[prop];
      });

      const responseContentLength = utils.toFiniteNumber(response.headers.get('content-length'));

      // Re-wrap the body so reads report progress / signal completion
      response = new Response(
        trackStream(response.body, DEFAULT_CHUNK_SIZE, onDownloadProgress && fetchProgressDecorator(
          responseContentLength,
          progressEventReducer(onDownloadProgress, true)
        ), isStreamResponse && onFinish),
        options
      );
    }

    responseType = responseType || 'text';

    let responseData = await resolvers[utils.findKey(resolvers, responseType) || 'text'](response, config);

    !isStreamResponse && onFinish();

    stopTimeout && stopTimeout();

    return await new Promise((resolve, reject) => {
      settle(resolve, reject, {
        data: responseData,
        headers: AxiosHeaders.from(response.headers),
        status: response.status,
        statusText: response.statusText,
        config,
        request
      })
    })
  } catch (err) {
    onFinish();

    let {code} = err;

    if (err.name === 'NetworkError') {
      code = AxiosError.ERR_NETWORK;
    }

    throw AxiosError.from(err, code, config, request);
  }
}
+31 -101
View File
@@ -1,93 +1,39 @@
'use strict';
import utils from './../utils.js';
import settle from './../core/settle.js';
import cookies from './../helpers/cookies.js';
import buildURL from './../helpers/buildURL.js';
import buildFullPath from '../core/buildFullPath.js';
import isURLSameOrigin from './../helpers/isURLSameOrigin.js';
import transitionalDefaults from '../defaults/transitional.js';
import AxiosError from '../core/AxiosError.js';
import CanceledError from '../cancel/CanceledError.js';
import parseProtocol from '../helpers/parseProtocol.js';
import platform from '../platform/index.js';
import AxiosHeaders from '../core/AxiosHeaders.js';
import speedometer from '../helpers/speedometer.js';
function progressEventReducer(listener, isDownloadStream) {
let bytesNotified = 0;
const _speedometer = speedometer(50, 250);
return e => {
const loaded = e.loaded;
const total = e.lengthComputable ? e.total : undefined;
const progressBytes = loaded - bytesNotified;
const rate = _speedometer(progressBytes);
const inRange = loaded <= total;
bytesNotified = loaded;
const data = {
loaded,
total,
progress: total ? (loaded / total) : undefined,
bytes: progressBytes,
rate: rate ? rate : undefined,
estimated: rate && total && inRange ? (total - loaded) / rate : undefined,
event: e
};
data[isDownloadStream ? 'download' : 'upload'] = true;
listener(data);
};
}
import progressEventReducer from '../helpers/progressEventReducer.js';
import resolveConfig from "../helpers/resolveConfig.js";
const isXHRAdapterSupported = typeof XMLHttpRequest !== 'undefined';
export default isXHRAdapterSupported && function (config) {
return new Promise(function dispatchXhrRequest(resolve, reject) {
let requestData = config.data;
const requestHeaders = AxiosHeaders.from(config.headers).normalize();
let {responseType, withXSRFToken} = config;
const _config = resolveConfig(config);
let requestData = _config.data;
const requestHeaders = AxiosHeaders.from(_config.headers).normalize();
let {responseType} = _config;
let onCanceled;
function done() {
if (config.cancelToken) {
config.cancelToken.unsubscribe(onCanceled);
if (_config.cancelToken) {
_config.cancelToken.unsubscribe(onCanceled);
}
if (config.signal) {
config.signal.removeEventListener('abort', onCanceled);
}
}
let contentType;
if (utils.isFormData(requestData)) {
if (platform.hasStandardBrowserEnv || platform.hasStandardBrowserWebWorkerEnv) {
requestHeaders.setContentType(false); // Let the browser set it
} else if ((contentType = requestHeaders.getContentType()) !== false) {
// fix semicolon duplication issue for ReactNative FormData implementation
const [type, ...tokens] = contentType ? contentType.split(';').map(token => token.trim()).filter(Boolean) : [];
requestHeaders.setContentType([type || 'multipart/form-data', ...tokens].join('; '));
if (_config.signal) {
_config.signal.removeEventListener('abort', onCanceled);
}
}
let request = new XMLHttpRequest();
// HTTP basic authentication
if (config.auth) {
const username = config.auth.username || '';
const password = config.auth.password ? unescape(encodeURIComponent(config.auth.password)) : '';
requestHeaders.set('Authorization', 'Basic ' + btoa(username + ':' + password));
}
const fullPath = buildFullPath(config.baseURL, config.url);
request.open(config.method.toUpperCase(), buildURL(fullPath, config.params, config.paramsSerializer), true);
request.open(_config.method.toUpperCase(), _config.url, true);
// Set the request timeout in MS
request.timeout = config.timeout;
request.timeout = _config.timeout;
function onloadend() {
if (!request) {
@@ -149,7 +95,7 @@ export default isXHRAdapterSupported && function (config) {
return;
}
reject(new AxiosError('Request aborted', AxiosError.ECONNABORTED, config, request));
reject(new AxiosError('Request aborted', AxiosError.ECONNABORTED, _config, request));
// Clean up request
request = null;
@@ -159,7 +105,7 @@ export default isXHRAdapterSupported && function (config) {
request.onerror = function handleError() {
// Real errors are hidden from us by the browser
// onerror should only fire if it's a network error
reject(new AxiosError('Network Error', AxiosError.ERR_NETWORK, config, request));
reject(new AxiosError('Network Error', AxiosError.ERR_NETWORK, _config, request));
// Clean up request
request = null;
@@ -167,37 +113,21 @@ export default isXHRAdapterSupported && function (config) {
// Handle timeout
request.ontimeout = function handleTimeout() {
let timeoutErrorMessage = config.timeout ? 'timeout of ' + config.timeout + 'ms exceeded' : 'timeout exceeded';
const transitional = config.transitional || transitionalDefaults;
if (config.timeoutErrorMessage) {
timeoutErrorMessage = config.timeoutErrorMessage;
let timeoutErrorMessage = _config.timeout ? 'timeout of ' + _config.timeout + 'ms exceeded' : 'timeout exceeded';
const transitional = _config.transitional || transitionalDefaults;
if (_config.timeoutErrorMessage) {
timeoutErrorMessage = _config.timeoutErrorMessage;
}
reject(new AxiosError(
timeoutErrorMessage,
transitional.clarifyTimeoutError ? AxiosError.ETIMEDOUT : AxiosError.ECONNABORTED,
config,
_config,
request));
// Clean up request
request = null;
};
// Add xsrf header
// This is only done if running in a standard browser environment.
// Specifically not if we're in a web worker, or react-native.
if(platform.hasStandardBrowserEnv) {
withXSRFToken && utils.isFunction(withXSRFToken) && (withXSRFToken = withXSRFToken(config));
if (withXSRFToken || (withXSRFToken !== false && isURLSameOrigin(fullPath))) {
// Add xsrf header
const xsrfValue = config.xsrfHeaderName && config.xsrfCookieName && cookies.read(config.xsrfCookieName);
if (xsrfValue) {
requestHeaders.set(config.xsrfHeaderName, xsrfValue);
}
}
}
// Remove Content-Type if data is undefined
requestData === undefined && requestHeaders.setContentType(null);
@@ -209,26 +139,26 @@ export default isXHRAdapterSupported && function (config) {
}
// Add withCredentials to request if needed
if (!utils.isUndefined(config.withCredentials)) {
request.withCredentials = !!config.withCredentials;
if (!utils.isUndefined(_config.withCredentials)) {
request.withCredentials = !!_config.withCredentials;
}
// Add responseType to request if needed
if (responseType && responseType !== 'json') {
request.responseType = config.responseType;
request.responseType = _config.responseType;
}
// Handle progress if needed
if (typeof config.onDownloadProgress === 'function') {
request.addEventListener('progress', progressEventReducer(config.onDownloadProgress, true));
if (typeof _config.onDownloadProgress === 'function') {
request.addEventListener('progress', progressEventReducer(_config.onDownloadProgress, true));
}
// Not all browsers support upload events
if (typeof config.onUploadProgress === 'function' && request.upload) {
request.upload.addEventListener('progress', progressEventReducer(config.onUploadProgress));
if (typeof _config.onUploadProgress === 'function' && request.upload) {
request.upload.addEventListener('progress', progressEventReducer(_config.onUploadProgress));
}
if (config.cancelToken || config.signal) {
if (_config.cancelToken || _config.signal) {
// Handle cancellation
// eslint-disable-next-line func-names
onCanceled = cancel => {
@@ -240,13 +170,13 @@ export default isXHRAdapterSupported && function (config) {
request = null;
};
config.cancelToken && config.cancelToken.subscribe(onCanceled);
if (config.signal) {
config.signal.aborted ? onCanceled() : config.signal.addEventListener('abort', onCanceled);
_config.cancelToken && _config.cancelToken.subscribe(onCanceled);
if (_config.signal) {
_config.signal.aborted ? onCanceled() : _config.signal.addEventListener('abort', onCanceled);
}
}
const protocol = parseProtocol(fullPath);
const protocol = parseProtocol(_config.url);
if (protocol && platform.protocols.indexOf(protocol) === -1) {
reject(new AxiosError('Unsupported protocol ' + protocol + ':', AxiosError.ERR_BAD_REQUEST, config));
+4
View File
@@ -100,6 +100,10 @@ class AxiosHeaders {
setHeaders(header, valueOrRewrite)
} else if(utils.isString(header) && (header = header.trim()) && !isValidHeaderName(header)) {
setHeaders(parseHeaders(header), valueOrRewrite);
} else if (utils.isHeaders(header)) {
for (const [key, value] of header.entries()) {
setHeader(value, key, rewrite);
}
} else {
header != null && setHeader(valueOrRewrite, header, rewrite);
}
+7 -2
View File
@@ -37,7 +37,7 @@ const defaults = {
transitional: transitionalDefaults,
adapter: ['xhr', 'http'],
adapter: ['xhr', 'http', 'fetch'],
transformRequest: [function transformRequest(data, headers) {
const contentType = headers.getContentType() || '';
@@ -58,7 +58,8 @@ const defaults = {
utils.isBuffer(data) ||
utils.isStream(data) ||
utils.isFile(data) ||
utils.isBlob(data)
utils.isBlob(data) ||
utils.isReadableStream(data)
) {
return data;
}
@@ -101,6 +102,10 @@ const defaults = {
const forcedJSONParsing = transitional && transitional.forcedJSONParsing;
const JSONRequested = this.responseType === 'json';
if (utils.isResponse(data) || utils.isReadableStream(data)) {
return data;
}
if (data && utils.isString(data) && ((forcedJSONParsing && !this.responseType) || JSONRequested)) {
const silentJSONParsing = transitional && transitional.silentJSONParsing;
const strictJSONParsing = !silentJSONParsing && JSONRequested;
+9 -8
View File
@@ -65,19 +65,20 @@ class AxiosTransformStream extends stream.Transform{
process.nextTick(() => {
self.emit('progress', {
'loaded': bytesTransferred,
'total': totalBytes,
'progress': totalBytes ? (bytesTransferred / totalBytes) : undefined,
'bytes': progressBytes,
'rate': rate ? rate : undefined,
'estimated': rate && totalBytes && bytesTransferred <= totalBytes ?
(totalBytes - bytesTransferred) / rate : undefined
loaded: bytesTransferred,
total: totalBytes,
progress: totalBytes ? (bytesTransferred / totalBytes) : undefined,
bytes: progressBytes,
rate: rate ? rate : undefined,
estimated: rate && totalBytes && bytesTransferred <= totalBytes ?
(totalBytes - bytesTransferred) / rate : undefined,
lengthComputable: totalBytes != null
});
});
}, internals.ticksRate);
const onFinish = () => {
internals.updateProgress(true);
internals.updateProgress.call(true);
};
this.once('end', onFinish);
+46
View File
@@ -0,0 +1,46 @@
import CanceledError from "../cancel/CanceledError.js";
import AxiosError from "../core/AxiosError.js";
/**
 * Composes several cancellation sources (AbortSignals and/or cancelTokens)
 * plus an optional timeout into a single AbortSignal.
 *
 * @param {Array} signals - entries may be AbortSignal-like (addEventListener)
 *                          or cancelToken-like (subscribe/unsubscribe); falsy
 *                          entries are skipped
 * @param {number} [timeout] - ms before aborting with ETIMEDOUT
 * @returns {[AbortSignal, Function]} composed signal (with an added
 *          `unsubscribe` method) and a function that stops the timeout timer
 */
const composeSignals = (signals, timeout) => {
  let controller = new AbortController();

  let aborted;

  const onabort = function (cancel) {
    if (!aborted) {
      aborted = true;
      unsubscribe();
      // When invoked as an abort listener, `this` is the firing signal
      const err = cancel instanceof Error ? cancel : this.reason;
      controller.abort(err instanceof AxiosError ? err : new CanceledError(err instanceof Error ? err.message : err));
    }
  }

  let timer = timeout && setTimeout(() => {
    // Same message shape as the xhr/http adapters: "timeout of <n>ms exceeded"
    onabort(new AxiosError(`timeout of ${timeout}ms exceeded`, AxiosError.ETIMEDOUT))
  }, timeout)

  const unsubscribe = () => {
    if (signals) {
      timer && clearTimeout(timer);
      timer = null;
      signals.forEach(signal => {
        signal &&
        (signal.removeEventListener ? signal.removeEventListener('abort', onabort) : signal.unsubscribe(onabort));
      });
      signals = null;
    }
  }

  signals.forEach((signal) => signal && signal.addEventListener && signal.addEventListener('abort', onabort));

  const {signal} = controller;

  signal.unsubscribe = unsubscribe;

  return [signal, () => {
    timer && clearTimeout(timer);
    timer = null;
  }];
}

export default composeSignals;
+32
View File
@@ -0,0 +1,32 @@
import speedometer from "./speedometer.js";
import throttle from "./throttle.js";
/**
 * Builds a throttled progress handler that enriches raw progress events with
 * byte deltas, transfer rate and an ETA estimate before forwarding them.
 *
 * @param {Function} listener - user progress callback
 * @param {boolean} [isDownloadStream] - marks the event as download vs upload
 * @param {number} [freq=3] - max notifications per second (via throttle)
 * @returns {Function} throttled progress-event handler
 */
export default (listener, isDownloadStream, freq = 3) => {
  let lastLoaded = 0;
  const _speedometer = speedometer(50, 250);

  return throttle((e) => {
    const loaded = e.loaded;
    const total = e.lengthComputable ? e.total : undefined;
    const deltaBytes = loaded - lastLoaded;
    const rate = _speedometer(deltaBytes);
    const withinRange = loaded <= total;

    lastLoaded = loaded;

    const payload = {
      loaded,
      total,
      progress: total ? loaded / total : undefined,
      bytes: deltaBytes,
      rate: rate ? rate : undefined,
      // ETA only makes sense with a known total and sane byte counts
      estimated: rate && total && withinRange ? (total - loaded) / rate : undefined,
      event: e,
      lengthComputable: total != null
    };

    payload[isDownloadStream ? 'download' : 'upload'] = true;

    listener(payload);
  }, freq);
}
+57
View File
@@ -0,0 +1,57 @@
import platform from "../platform/index.js";
import utils from "../utils.js";
import isURLSameOrigin from "./isURLSameOrigin.js";
import cookies from "./cookies.js";
import buildFullPath from "../core/buildFullPath.js";
import mergeConfig from "../core/mergeConfig.js";
import AxiosHeaders from "../core/AxiosHeaders.js";
import buildURL from "./buildURL.js";
/**
 * Normalizes a request config for browser-style adapters (xhr/fetch):
 * merges defaults, builds the final URL, applies basic auth, fixes
 * FormData content-type handling and attaches the XSRF header.
 *
 * @param {Object} config - axios request config
 * @returns {Object} new config with resolved `url` and `headers`
 */
export default (config) => {
  const newConfig = mergeConfig({}, config);

  let {data, withXSRFToken, xsrfHeaderName, xsrfCookieName, headers, auth} = newConfig;

  newConfig.headers = headers = AxiosHeaders.from(headers);

  // Combine baseURL + url and serialize query params into the final URL
  newConfig.url = buildURL(buildFullPath(newConfig.baseURL, newConfig.url), config.params, config.paramsSerializer);

  // HTTP basic authentication
  if (auth) {
    headers.set('Authorization', 'Basic ' +
      btoa((auth.username || '') + ':' + (auth.password ? unescape(encodeURIComponent(auth.password)) : ''))
    );
  }

  let contentType;

  if (utils.isFormData(data)) {
    if (platform.hasStandardBrowserEnv || platform.hasStandardBrowserWebWorkerEnv) {
      headers.setContentType(undefined); // Let the browser set it
    } else if ((contentType = headers.getContentType()) !== false) {
      // fix semicolon duplication issue for ReactNative FormData implementation
      const [type, ...tokens] = contentType ? contentType.split(';').map(token => token.trim()).filter(Boolean) : [];
      headers.setContentType([type || 'multipart/form-data', ...tokens].join('; '));
    }
  }

  // Add xsrf header
  // This is only done if running in a standard browser environment.
  // Specifically not if we're in a web worker, or react-native.
  if (platform.hasStandardBrowserEnv) {
    // withXSRFToken may be a predicate deciding per-request
    withXSRFToken && utils.isFunction(withXSRFToken) && (withXSRFToken = withXSRFToken(newConfig));

    if (withXSRFToken || (withXSRFToken !== false && isURLSameOrigin(newConfig.url))) {
      // Add xsrf header
      const xsrfValue = xsrfHeaderName && xsrfCookieName && cookies.read(xsrfCookieName);

      if (xsrfValue) {
        headers.set(xsrfHeaderName, xsrfValue);
      }
    }
  }

  return newConfig;
}
+5 -3
View File
@@ -10,7 +10,9 @@ function throttle(fn, freq) {
let timestamp = 0;
const threshold = 1000 / freq;
let timer = null;
return function throttled(force, args) {
return function throttled() {
const force = this === true;
const now = Date.now();
if (force || now - timestamp > threshold) {
if (timer) {
@@ -18,13 +20,13 @@ function throttle(fn, freq) {
timer = null;
}
timestamp = now;
return fn.apply(null, args);
return fn.apply(null, arguments);
}
if (!timer) {
timer = setTimeout(() => {
timer = null;
timestamp = Date.now();
return fn.apply(null, args);
return fn.apply(null, arguments);
}, threshold - (now - timestamp));
}
};
+56
View File
@@ -0,0 +1,56 @@
/**
 * Splits a byte chunk into sub-chunks of at most `chunkSize` bytes.
 * Yields the chunk unchanged when it already fits (or no size is given).
 */
export const streamChunk = function* (chunk, chunkSize) {
  let len = chunk.byteLength;

  if (!chunkSize || len < chunkSize) {
    yield chunk;
    return;
  }

  let pos = 0;
  let end;

  while (pos < len) {
    end = pos + chunkSize;
    yield chunk.slice(pos, end);
    pos = end;
  }
}

const encoder = new TextEncoder();

/**
 * Async generator: reads an (async) iterable and re-yields its data as byte
 * chunks of at most `chunkSize`, encoding non-binary chunks as UTF-8.
 */
export const readBytes = async function* (iterable, chunkSize) {
  for await (const chunk of iterable) {
    yield* streamChunk(ArrayBuffer.isView(chunk) ? chunk : encoder.encode(String(chunk)), chunkSize);
  }
}

/**
 * Wraps a stream in a ReadableStream that reports cumulative bytes read to
 * `onProgress` and invokes `onFinish` on completion or cancellation.
 * Both callbacks are optional (the upload path passes no onFinish).
 *
 * @param {ReadableStream|AsyncIterable} stream - source of bytes
 * @param {number} chunkSize - max bytes per emitted chunk
 * @param {Function} [onProgress] - receives the running byte total
 * @param {Function} [onFinish] - called with no args on drain, or with the
 *                                cancellation reason on cancel
 * @returns {ReadableStream}
 */
export const trackStream = (stream, chunkSize, onProgress, onFinish) => {
  const iterator = readBytes(stream, chunkSize);

  let bytes = 0;

  return new ReadableStream({
    type: 'bytes',

    async pull(controller) {
      const {done, value} = await iterator.next();

      if (done) {
        controller.close();
        // onFinish may be absent (e.g. upload tracking) — guard the call
        onFinish && onFinish();
        return;
      }

      let len = value.byteLength;
      onProgress && onProgress(bytes += len);
      controller.enqueue(new Uint8Array(value));
    },

    cancel(reason) {
      onFinish && onFinish(reason);
      return iterator.return();
    }
  }, {
    highWaterMark: 2
  })
}
+4 -1
View File
@@ -40,8 +40,11 @@ const hasStandardBrowserWebWorkerEnv = (() => {
);
})();
const origin = hasBrowserEnv && window.location.href || 'http://localhost';
export {
hasBrowserEnv,
hasStandardBrowserWebWorkerEnv,
hasStandardBrowserEnv
hasStandardBrowserEnv,
origin
}
+7 -2
View File
@@ -209,6 +209,8 @@ const isFormData = (thing) => {
*/
const isURLSearchParams = kindOfTest('URLSearchParams');
const [isReadableStream, isRequest, isResponse, isHeaders] = ['ReadableStream', 'Request', 'Response', 'Headers'].map(kindOfTest);
/**
* Trim excess whitespace off the beginning and end of a string
*
@@ -597,8 +599,7 @@ const toObjectSet = (arrayOrString, delimiter) => {
const noop = () => {}
const toFiniteNumber = (value, defaultValue) => {
value = +value;
return Number.isFinite(value) ? value : defaultValue;
return value != null && Number.isFinite(value = +value) ? value : defaultValue;
}
const ALPHA = 'abcdefghijklmnopqrstuvwxyz'
@@ -679,6 +680,10 @@ export default {
isBoolean,
isObject,
isPlainObject,
isReadableStream,
isRequest,
isResponse,
isHeaders,
isUndefined,
isDate,
isFile,
+30
View File
@@ -19,6 +19,7 @@
"@commitlint/cli": "^17.8.1",
"@commitlint/config-conventional": "^17.8.1",
"@release-it/conventional-changelog": "^5.1.1",
"@rollup/plugin-alias": "^5.1.0",
"@rollup/plugin-babel": "^5.3.1",
"@rollup/plugin-commonjs": "^15.1.0",
"@rollup/plugin-json": "^4.1.0",
@@ -3238,6 +3239,26 @@
"release-it": "^15.4.1"
}
},
"node_modules/@rollup/plugin-alias": {
"version": "5.1.0",
"resolved": "https://registry.npmjs.org/@rollup/plugin-alias/-/plugin-alias-5.1.0.tgz",
"integrity": "sha512-lpA3RZ9PdIG7qqhEfv79tBffNaoDuukFDrmhLqg9ifv99u/ehn+lOg30x2zmhf8AQqQUZaMk/B9fZraQ6/acDQ==",
"dev": true,
"dependencies": {
"slash": "^4.0.0"
},
"engines": {
"node": ">=14.0.0"
},
"peerDependencies": {
"rollup": "^1.20.0||^2.0.0||^3.0.0||^4.0.0"
},
"peerDependenciesMeta": {
"rollup": {
"optional": true
}
}
},
"node_modules/@rollup/plugin-babel": {
"version": "5.3.1",
"resolved": "https://registry.npmjs.org/@rollup/plugin-babel/-/plugin-babel-5.3.1.tgz",
@@ -27077,6 +27098,15 @@
"semver": "7.3.8"
}
},
"@rollup/plugin-alias": {
"version": "5.1.0",
"resolved": "https://registry.npmjs.org/@rollup/plugin-alias/-/plugin-alias-5.1.0.tgz",
"integrity": "sha512-lpA3RZ9PdIG7qqhEfv79tBffNaoDuukFDrmhLqg9ifv99u/ehn+lOg30x2zmhf8AQqQUZaMk/B9fZraQ6/acDQ==",
"dev": true,
"requires": {
"slash": "^4.0.0"
}
},
"@rollup/plugin-babel": {
"version": "5.3.1",
"resolved": "https://registry.npmjs.org/@rollup/plugin-babel/-/plugin-babel-5.3.1.tgz",
+3 -2
View File
@@ -135,7 +135,8 @@
"stream-throttle": "^0.1.3",
"string-replace-async": "^3.0.2",
"terser-webpack-plugin": "^4.2.3",
"typescript": "^4.9.5"
"typescript": "^4.9.5",
"@rollup/plugin-alias": "^5.1.0"
},
"browser": {
"./lib/adapters/http.js": "./lib/helpers/null.js",
@@ -215,4 +216,4 @@
"@commitlint/config-conventional"
]
}
}
}
+22 -2
View File
@@ -4,7 +4,8 @@ import {terser} from "rollup-plugin-terser";
import json from '@rollup/plugin-json';
import { babel } from '@rollup/plugin-babel';
import autoExternal from 'rollup-plugin-auto-external';
import bundleSize from 'rollup-plugin-bundle-size'
import bundleSize from 'rollup-plugin-bundle-size';
import aliasPlugin from '@rollup/plugin-alias';
import path from 'path';
const lib = require("./package.json");
@@ -13,7 +14,7 @@ const name = "axios";
const namedInput = './index.js';
const defaultInput = './lib/axios.js';
const buildConfig = ({es5, browser = true, minifiedVersion = true, ...config}) => {
const buildConfig = ({es5, browser = true, minifiedVersion = true, alias, ...config}) => {
const {file} = config.output;
const ext = path.extname(file);
const basename = path.basename(file, ext);
@@ -29,9 +30,13 @@ const buildConfig = ({es5, browser = true, minifiedVersion = true, ...config}) =
file: `${path.dirname(file)}/${basename}.${(minified ? ['min', ...extArr] : extArr).join('.')}`
},
plugins: [
aliasPlugin({
entries: alias || []
}),
json(),
resolve({browser}),
commonjs(),
minified && terser(),
minified && bundleSize(),
...(es5 ? [babel({
@@ -69,6 +74,21 @@ export default async () => {
banner
}
}),
// browser ESM bundle for CDN with fetch adapter only
// Downsizing from 12.97 kB (gzip) to 12.23 kB (gzip)
/* ...buildConfig({
input: namedInput,
output: {
file: `dist/esm/${outputFileName}-fetch.js`,
format: "esm",
preferConst: true,
exports: "named",
banner
},
alias: [
{ find: './xhr.js', replacement: '../helpers/null.js' }
]
}),*/
// Browser UMD bundle for CDN
...buildConfig({
+117
View File
@@ -0,0 +1,117 @@
import http from "http";
import stream from "stream";
import getStream from "get-stream";
import {Throttle} from "stream-throttle";
import formidable from "formidable";
export const LOCAL_SERVER_URL = 'http://localhost:4444';
export const SERVER_HANDLER_STREAM_ECHO = (req, res) => req.pipe(res);
export const setTimeoutAsync = (ms) => new Promise(resolve=> setTimeout(resolve, ms));
/**
 * Starts a local HTTP test server.
 *
 * Accepts either a handler function or an options object (or both; explicit
 * `options` win via Object.assign). Without a custom handler it echoes the
 * request body back, optionally buffered in full first and/or throttled to
 * `rate` bytes/sec.
 *
 * @param {Function|Object} handlerOrOptions - request handler, or options
 *        {handler, useBuffering, rate, port, keepAlive}
 * @param {Object} [options] - overrides merged on top of the first argument
 * @returns {Promise<http.Server>} resolves with the listening server
 */
export const startHTTPServer = (handlerOrOptions, options) => {
  const {handler, useBuffering = false, rate = undefined, port = 4444, keepAlive = 1000} =
    Object.assign(typeof handlerOrOptions === 'function' ? {
      handler: handlerOrOptions
    } : handlerOrOptions || {}, options);

  return new Promise((resolve, reject) => {
    const server = http.createServer(handler || async function (req, res) {
      try {
        // Mirror the request's content-length so clients can track progress
        req.headers['content-length'] && res.setHeader('content-length', req.headers['content-length']);

        let dataStream = req;

        if (useBuffering) {
          // Consume the whole body first, then replay it as a fresh stream
          dataStream = stream.Readable.from(await getStream(req));
        }

        let streams = [dataStream];

        if (rate) {
          // Throttle the echo to simulate a slow network
          streams.push(new Throttle({rate}))
        }

        streams.push(res);

        stream.pipeline(streams, (err) => {
          err && console.log('Server warning: ' + err.message)
        });
      } catch (err){
        console.warn('HTTP server error:', err);
      }
    }).listen(port, function (err) {
      err ? reject(err) : resolve(this);
    });

    // Short keep-alive so tests shut down quickly
    server.keepAliveTimeout = keepAlive;
  });
}
/**
 * Stops a test server, force-closing open connections when supported and
 * giving close() at most `timeout` ms before giving up.
 *
 * @param {http.Server} server - server to stop; falsy values are ignored
 * @param {number} [timeout=10000] - max ms to wait for close
 */
export const stopHTTPServer = async (server, timeout = 10000) => {
  if (server) {
    // closeAllConnections is only available on newer Node versions
    if (typeof server.closeAllConnections === 'function') {
      server.closeAllConnections();
    }

    await Promise.race([new Promise(resolve => server.close(resolve)), setTimeoutAsync(timeout)]);
  }
}
/**
 * Parses a multipart/form-data request body with formidable.
 *
 * @param {http.IncomingMessage} req - incoming request
 * @returns {Promise<{fields: Object, files: Object}>} parsed form content
 */
export const handleFormData = (req) => {
  return new Promise((resolve, reject) => {
    const form = new formidable.IncomingForm();

    form.parse(req, (err, fields, files) => {
      if (err) {
        return reject(err);
      }

      resolve({fields, files});
    });
  });
}
export const nodeVersion = process.versions.node.split('.').map(v => parseInt(v, 10));
/**
 * Creates a Node Readable that emits `length` total bytes of zero-filled
 * data in chunks of `chunkSize`, optionally pausing `sleep` ms between
 * chunks to simulate a slow producer.
 *
 * @param {number} [length] - total bytes to emit
 * @param {number} [chunkSize] - bytes per chunk
 * @param {number} [sleep] - ms delay after each chunk (0 disables)
 * @returns {stream.Readable}
 */
export const generateReadable = (length = 1024 * 1024, chunkSize = 10 * 1024, sleep = 50) => {
  return stream.Readable.from(async function* (){
    let dataLength = 0;

    while(dataLength < length) {
      // Final chunk may be smaller than chunkSize
      const leftBytes = length - dataLength;

      const chunk = Buffer.alloc(leftBytes > chunkSize? chunkSize : leftBytes);

      dataLength += chunk.length;

      yield chunk;

      if (sleep) {
        await setTimeoutAsync(sleep);
      }
    }
  }());
}
/**
 * Creates a web ReadableStream that emits `chunk` a total of `n` times,
 * waiting `timeout` ms before each pull, then closes.
 *
 * @param {*} [chunk='chunk'] - value enqueued on every pull
 * @param {number} [n=10] - number of chunks to emit
 * @param {number} [timeout=100] - ms delay before each pull resolves
 * @returns {ReadableStream}
 */
export const makeReadableStream = (chunk = 'chunk', n = 10, timeout = 100) => {
  return new ReadableStream({
      async pull(controller) {
        await setTimeoutAsync(timeout);
        if (n--) {
          controller.enqueue(chunk);
        } else {
          controller.close();
        }
      }
    },
    {
      highWaterMark: 1
    }
  )
}
/**
 * Creates a WritableStream sink that optionally logs each written chunk.
 *
 * @param {boolean} [echo] - when truthy, log every chunk to the console
 * @returns {WritableStream}
 */
export const makeEchoStream = (echo) => new WritableStream({
  write(chunk) {
    if (echo) {
      console.log(`Echo chunk`, chunk);
    }
  }
})
+373
View File
@@ -0,0 +1,373 @@
import assert from 'assert';
import {
startHTTPServer,
stopHTTPServer,
LOCAL_SERVER_URL,
setTimeoutAsync,
makeReadableStream,
generateReadable,
makeEchoStream
} from '../../helpers/server.js';
import axios from '../../../index.js';
import stream from "stream";
import {AbortController} from "abortcontroller-polyfill/dist/cjs-ponyfill.js";
import util from "util";
const pipelineAsync = util.promisify(stream.pipeline);
const fetchAxios = axios.create({
baseURL: LOCAL_SERVER_URL,
adapter: 'fetch'
});
let server;
// End-to-end suite for the fetch adapter under Node.js. Each test starts a
// throw-away HTTP server (closed in afterEach) and drives it through the
// fetchAxios instance declared above.
describe('supports fetch with nodejs', function () {
  // Skip the whole suite when the runtime has no global fetch
  // (Node versions before the built-in fetch landed).
  before(function () {
    if (typeof fetch !== 'function') {
      this.skip();
    }
  })
  // Tear down whichever server the test started so ports are freed
  // between cases.
  afterEach(async function () {
    await stopHTTPServer(server);
    server = null;
  });
  // One test per responseType, asserting the adapter decodes the fetch
  // Response body into the matching JS value.
  // NOTE(review): mocha ignores a promise returned from a describe
  // callback, so the `async` here is harmless but unnecessary.
  describe('responses', async () => {
    it(`should support text response type`, async () => {
      const originalData = 'my data';
      server = await startHTTPServer((req, res) => res.end(originalData));
      const {data} = await fetchAxios.get('/', {
        responseType: 'text'
      });
      assert.deepStrictEqual(data, originalData);
    });
    it(`should support arraybuffer response type`, async () => {
      const originalData = 'my data';
      server = await startHTTPServer((req, res) => res.end(originalData));
      const {data} = await fetchAxios.get('/', {
        responseType: 'arraybuffer'
      });
      // Compare against the UTF-8 bytes of the payload as a plain ArrayBuffer.
      assert.deepStrictEqual(data, Uint8Array.from(await new TextEncoder().encode(originalData)).buffer);
    });
    it(`should support blob response type`, async () => {
      const originalData = 'my data';
      server = await startHTTPServer((req, res) => res.end(originalData));
      const {data} = await fetchAxios.get('/', {
        responseType: 'blob'
      });
      assert.deepStrictEqual(data, new Blob([originalData]));
    });
    it(`should support stream response type`, async () => {
      const originalData = 'my data';
      server = await startHTTPServer((req, res) => res.end(originalData));
      const {data} = await fetchAxios.get('/', {
        responseType: 'stream'
      });
      assert.ok(data instanceof ReadableStream, 'data is not instanceof ReadableStream');
      // Drain the stream through a Response wrapper to recover the text.
      let response = new Response(data);
      assert.deepStrictEqual(await response.text(), originalData);
    });
    it(`should support formData response type`, async function () {
      this.timeout(5000);
      const originalData = new FormData();
      originalData.append('x', '123');
      // Serialize the FormData via Response so the server replies with a
      // correctly delimited multipart body and matching Content-Type.
      server = await startHTTPServer(async (req, res) => {
        const response = await new Response(originalData);
        res.setHeader('Content-Type', response.headers.get('Content-Type'));
        res.end(await response.text());
      });
      const {data} = await fetchAxios.get('/', {
        responseType: 'formdata'
      });
      assert.ok(data instanceof FormData, 'data is not instanceof FormData');
      assert.deepStrictEqual(Object.fromEntries(data.entries()), Object.fromEntries(originalData.entries()));
    });
    it(`should support json response type`, async () => {
      const originalData = {x: 'my data'};
      server = await startHTTPServer((req, res) => res.end(JSON.stringify(originalData)));
      const {data} = await fetchAxios.get('/', {
        responseType: 'json'
      });
      assert.deepStrictEqual(data, originalData);
    });
  });
  describe("progress", () => {
    describe('upload', function () {
      it('should support upload progress capturing', async function () {
        this.timeout(15000);
        // Server throttled to 100 KiB/s so progress events are observable.
        server = await startHTTPServer({
          rate: 100 * 1024
        });
        let content = '';
        const count = 10;
        const chunk = "test";
        const chunkLength = Buffer.byteLength(chunk);
        const contentLength = count * chunkLength;
        // Slow request body: 10 four-byte chunks, 1.1 s apart, so each
        // chunk should surface as exactly one upload progress event.
        const readable = stream.Readable.from(async function* () {
          let i = count;
          while (i-- > 0) {
            await setTimeoutAsync(1100);
            content += chunk;
            yield chunk;
          }
        }());
        const samples = [];
        const {data} = await fetchAxios.post('/', readable, {
          onUploadProgress: ({loaded, total, progress, bytes, upload}) => {
            console.log(`Upload Progress ${loaded} from ${total} bytes (${(progress * 100).toFixed(1)}%)`);
            samples.push({
              loaded,
              total,
              progress,
              bytes,
              upload
            });
          },
          // Explicit Content-Length gives the adapter a known total for
          // computing the progress ratio of a streamed body.
          headers: {
            'Content-Length': contentLength
          },
          responseType: 'text'
        });
        // Grace period for any trailing progress callbacks to fire.
        await setTimeoutAsync(500);
        assert.strictEqual(data, content);
        // Expect one monotonically increasing sample per chunk.
        assert.deepStrictEqual(samples, Array.from(function* () {
          for (let i = 1; i <= 10; i++) {
            yield ({
              loaded: chunkLength * i,
              total: contentLength,
              progress: (chunkLength * i) / contentLength,
              bytes: 4,
              upload: true
            });
          }
        }()));
      });
      // A bodyless GET must not crash just because an upload handler exists.
      it('should not fail with get method', async() => {
        server = await startHTTPServer((req, res) => res.end('OK'));
        const {data} = await fetchAxios.get('/', {
          onUploadProgress() {
          }
        });
        assert.strictEqual(data, 'OK');
      });
    });
    describe('download', function () {
      // Mirror of the upload test above, observing onDownloadProgress
      // while the server echoes the throttled request body back.
      it('should support download progress capturing', async function () {
        this.timeout(15000);
        server = await startHTTPServer({
          rate: 100 * 1024
        });
        let content = '';
        const count = 10;
        const chunk = "test";
        const chunkLength = Buffer.byteLength(chunk);
        const contentLength = count * chunkLength;
        const readable = stream.Readable.from(async function* () {
          let i = count;
          while (i-- > 0) {
            await setTimeoutAsync(1100);
            content += chunk;
            yield chunk;
          }
        }());
        const samples = [];
        const {data} = await fetchAxios.post('/', readable, {
          onDownloadProgress: ({loaded, total, progress, bytes, download}) => {
            console.log(`Download Progress ${loaded} from ${total} bytes (${(progress * 100).toFixed(1)}%)`);
            samples.push({
              loaded,
              total,
              progress,
              bytes,
              download
            });
          },
          headers: {
            'Content-Length': contentLength
          },
          responseType: 'text',
          maxRedirects: 0
        });
        await setTimeoutAsync(500);
        assert.strictEqual(data, content);
        assert.deepStrictEqual(samples, Array.from(function* () {
          for (let i = 1; i <= 10; i++) {
            yield ({
              loaded: chunkLength * i,
              total: contentLength,
              progress: (chunkLength * i) / contentLength,
              bytes: 4,
              download: true
            });
          }
        }()));
      });
    });
  });
  // Credentials embedded in the URL must be rendered as a Basic auth header.
  // NOTE(review): this test uses the default `axios` instance and a
  // hard-coded port rather than `fetchAxios` — presumably because fetch
  // rejects URLs containing credentials; confirm this is intentional and
  // that port 4444 matches LOCAL_SERVER_URL.
  it('should support basic auth', async () => {
    server = await startHTTPServer((req, res) => res.end(req.headers.authorization));
    const user = 'foo';
    const headers = {Authorization: 'Bearer 1234'};
    const res = await axios.get('http://' + user + '@localhost:4444/', {headers: headers});
    const base64 = Buffer.from(user + ':', 'utf8').toString('base64');
    assert.equal(res.data, 'Basic ' + base64);
  });
  it("should support stream.Readable as a payload", async () => {
    server = await startHTTPServer();
    const {data} = await fetchAxios.post('/', stream.Readable.from('OK'));
    assert.strictEqual(data, 'OK');
  });
  describe('request aborting', function() {
    it('should be able to abort the request stream', async function () {
      // Buffered, heavily throttled server keeps the upload in flight long
      // enough for the mid-request abort to land.
      server = await startHTTPServer({
        rate: 100000,
        useBuffering: true
      });
      const controller = new AbortController();
      setTimeout(() => {
        controller.abort();
      }, 500);
      await assert.rejects(async () => {
        await fetchAxios.post('/', makeReadableStream(), {
          responseType: 'stream',
          signal: controller.signal
        });
      }, /CanceledError/);
    });
    it('should be able to abort the response stream', async function () {
      server = await startHTTPServer((req, res) => {
        pipelineAsync(generateReadable(10000, 10), res);
      });
      const controller = new AbortController();
      setTimeout(() => {
        controller.abort(new Error('test'));
      }, 800);
      // Headers arrive first, so the request itself resolves; the abort is
      // expected to surface later while the body stream is being consumed.
      const {data} = await fetchAxios.get('/', {
        responseType: 'stream',
        signal: controller.signal
      });
      await assert.rejects(async () => {
        await data.pipeTo(makeEchoStream());
      }, /^(AbortError|CanceledError):/);
    });
  });
  it('should support a timeout', async () => {
    // Server responds after 1 s; the 500 ms client timeout must fire first.
    server = await startHTTPServer(async(req, res) => {
      await setTimeoutAsync(1000);
      res.end('OK');
    });
    const timeout = 500;
    const ts = Date.now();
    await assert.rejects(async() => {
      await fetchAxios('/', {
        timeout
      })
    }, /timeout/);
    const passed = Date.now() - ts;
    // 5 ms slack absorbs timer jitter while still catching a premature abort.
    assert.ok(passed >= timeout - 5, `early cancellation detected (${passed} ms)`);
  });
  it('should combine baseURL and url', async () => {
    server = await startHTTPServer();
    const res = await fetchAxios('/foo');
    assert.equal(res.config.baseURL, LOCAL_SERVER_URL);
    assert.equal(res.config.url, '/foo');
  });
  // `params` must be serialized and appended to a URL that already has a query.
  it('should support params', async() => {
    server = await startHTTPServer((req, res) => res.end(req.url));
    const {data} = await fetchAxios.get('/?test=1', {
      params: {
        foo: 1,
        bar: 2
      }
    });
    assert.strictEqual(data, '/?test=1&foo=1&bar=2');
  });
});