Do proper proxying

This commit is contained in:
mrjvs 2024-01-04 20:57:54 +01:00
parent 882e26fa1b
commit 3d192e8bb8
3 changed files with 97 additions and 22 deletions

View File

@ -2,7 +2,7 @@ import { getBodyBuffer } from '@/utils/body';
import {
getProxyHeaders,
getAfterResponseHeaders,
cleanupHeadersBeforeProxy,
getBlacklistedHeaders,
} from '@/utils/headers';
import {
createTokenIfNeeded,
@ -39,8 +39,8 @@ export default defineEventHandler(async (event) => {
const token = await createTokenIfNeeded(event);
// proxy
cleanupHeadersBeforeProxy(event);
await proxyRequest(event, destination, {
await specificProxyRequest(event, destination, {
blacklistedHeaders: getBlacklistedHeaders(),
fetchOptions: {
redirect: 'follow',
headers: getProxyHeaders(event.headers),

View File

@ -1,4 +1,10 @@
import { H3Event } from 'h3';
const headerMap: Record<string, string> = {
'X-Cookie': 'Cookie',
'X-Referer': 'Referer',
'X-Origin': 'Origin',
'X-User-Agent': 'User-Agent',
'X-X-Real-Ip': 'X-Real-Ip',
};
const blacklistedHeaders = [
'cf-connecting-ip',
@ -11,7 +17,7 @@ const blacklistedHeaders = [
'x-forwarded-proto',
'forwarded',
'x-real-ip',
'user-agent',
...Object.keys(headerMap),
];
function copyHeader(
@ -33,13 +39,6 @@ export function getProxyHeaders(headers: Headers): Headers {
'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:93.0) Gecko/20100101 Firefox/93.0',
);
const headerMap: Record<string, string> = {
'X-Cookie': 'Cookie',
'X-Referer': 'Referer',
'X-Origin': 'Origin',
'X-User-Agent': 'User-Agent',
'X-X-Real-Ip': 'X-Real-Ip',
};
Object.entries(headerMap).forEach((entry) => {
copyHeader(headers, output, entry[0], entry[1]);
});
@ -64,14 +63,6 @@ export function getAfterResponseHeaders(
};
}
export function removeHeadersFromEvent(event: H3Event, key: string) {
const normalizedKey = key.toLowerCase();
if (event.node.req.headers[normalizedKey])
delete event.node.req.headers[normalizedKey];
}
export function cleanupHeadersBeforeProxy(event: H3Event) {
blacklistedHeaders.forEach((key) => {
removeHeadersFromEvent(event, key);
});
// Accessor for the module-level header blacklist so the proxy layer can strip
// these (case-insensitively) from outgoing requests instead of mutating the
// incoming event. Returns the shared array — callers must not mutate it.
export function getBlacklistedHeaders() {
return blacklistedHeaders;
}

84
src/utils/proxy.ts Normal file
View File

@ -0,0 +1,84 @@
import {
H3Event,
Duplex,
ProxyOptions,
getProxyRequestHeaders,
RequestHeaders,
} from 'h3';
// HTTP methods that may carry a request body and therefore need it forwarded.
// GET/HEAD are intentionally excluded; DELETE bodies are unusual but permitted.
const PayloadMethods = new Set(['PATCH', 'POST', 'PUT', 'DELETE']);
// Options accepted by specificProxyRequest on top of h3's ProxyOptions.
export interface ExtraProxyOptions {
// Header names (matched case-insensitively) to strip from the proxied request.
blacklistedHeaders?: string[];
}
/**
 * Merge header inputs into `defaults`, later inputs overriding earlier ones.
 *
 * Accepts every HeadersInit shape (a Headers object, an array of
 * [name, value] pairs, or a plain record) plus h3's RequestHeaders; entries
 * whose value is undefined are skipped rather than stringified.
 *
 * @param defaults base headers; returned untouched when there is nothing to merge
 * @param inputs   header sources applied in order, last writer wins
 * @returns a Headers instance, or `defaults` as-is when all inputs are empty
 */
function mergeHeaders(
  defaults: HeadersInit,
  ...inputs: (HeadersInit | RequestHeaders | undefined)[]
) {
  const presentInputs = inputs.filter(Boolean) as (
    | HeadersInit
    | RequestHeaders
  )[];
  if (presentInputs.length === 0) {
    return defaults;
  }
  const merged = new Headers(defaults);
  for (const input of presentInputs) {
    // Normalize every supported shape to [name, value] pairs. The previous
    // duck-typing on `input.entries` also matched plain arrays
    // (Array.prototype.entries) and produced bogus index/value pairs.
    const entries: Iterable<[string, string | undefined]> =
      input instanceof Headers || Array.isArray(input)
        ? (input as Iterable<[string, string]>)
        : Object.entries(input);
    for (const [key, value] of entries) {
      if (value !== undefined) {
        merged.set(key, value);
      }
    }
  }
  return merged;
}
/**
 * Proxy the incoming event to `target`, forwarding method, body and headers.
 *
 * Headers named in `opts.blacklistedHeaders` are stripped (case-insensitively)
 * from the forwarded request; `opts.fetchOptions.headers` and `opts.headers`
 * override anything copied from the incoming request.
 *
 * NOTE(review): getRequestWebStream / readRawBody / sendProxy are not in the
 * visible import list — presumably resolved via Nitro auto-imports; confirm.
 *
 * @param event  incoming h3 event to forward
 * @param target absolute URL the request is proxied to
 * @param opts   h3 ProxyOptions plus the blacklist extension
 * @returns the result of sendProxy (upstream response piped to the client)
 */
export async function specificProxyRequest(
  event: H3Event,
  target: string,
  opts: ProxyOptions & ExtraProxyOptions = {},
) {
  let body;
  let duplex: Duplex | undefined;
  // Only forward a body for methods that may carry one.
  if (PayloadMethods.has(event.method)) {
    if (opts.streamRequest) {
      // Pipe the body through; fetch requires duplex 'half' for stream bodies.
      body = getRequestWebStream(event);
      duplex = 'half';
    } else {
      // Best effort: an unreadable body is forwarded as no body at all.
      body = await readRawBody(event, false).catch(() => undefined);
    }
  }
  const method = opts.fetchOptions?.method || event.method;
  // Copy incoming headers, dropping blacklisted names case-insensitively.
  const oldHeaders = getProxyRequestHeaders(event);
  opts.blacklistedHeaders?.forEach((header) => {
    const headerLower = header.toLowerCase();
    Object.keys(oldHeaders)
      .filter((key) => key.toLowerCase() === headerLower)
      .forEach((key) => delete oldHeaders[key]);
  });
  const fetchHeaders = mergeHeaders(
    oldHeaders,
    opts.fetchOptions?.headers,
    opts.headers,
  );
  // (Removed leftover debug `fetchHeaders.forEach(console.log)`: it logged
  // every forwarded header — including cookies/tokens — on every request.)
  return sendProxy(event, target, {
    ...opts,
    fetchOptions: {
      method,
      body,
      duplex,
      ...opts.fetchOptions,
      headers: fetchHeaders,
    },
  });
}