feat: add OpenAI API mock service

- add proxy server to intercept OpenAI API requests
- implement mock responses for models, chat, and embeddings endpoints
- add self-signed certificate generation for HTTPS support
- include request forwarding for non-OpenAI endpoints
This commit is contained in:
Fanis Tharropoulos 2025-02-11 13:56:49 +02:00
parent e4280e1768
commit 80a1ba8136
No known key found for this signature in database
3 changed files with 498 additions and 0 deletions

View File

@ -34,6 +34,7 @@
"license": "GPL",
"devDependencies": {
"@eslint/js": "^9.17.0",
"@faker-js/faker": "^9.4.0",
"@ianvs/prettier-plugin-sort-imports": "^4.4.0",
"@types/command-line-args": "^5.2.3",
"@types/command-line-usage": "^5.0.4",
@ -43,6 +44,7 @@
"@types/node": "^22.10.2",
"@types/tar-stream": "^3.1.3",
"eslint": "^9.17.0",
"openai": "^4.83.0",
"prettier": "^3.4.2",
"tsup": "^8.3.5",
"type-fest": "^4.31.0",

144
benchmark/pnpm-lock.yaml generated
View File

@ -69,6 +69,9 @@ importers:
'@eslint/js':
specifier: ^9.17.0
version: 9.17.0
'@faker-js/faker':
specifier: ^9.4.0
version: 9.4.0
'@ianvs/prettier-plugin-sort-imports':
specifier: ^4.4.0
version: 4.4.0(prettier@3.4.2)
@ -96,6 +99,9 @@ importers:
eslint:
specifier: ^9.17.0
version: 9.17.0
openai:
specifier: ^4.83.0
version: 4.83.0(zod@3.24.1)
prettier:
specifier: ^3.4.2
version: 3.4.2
@ -479,6 +485,10 @@ packages:
resolution: {integrity: sha512-zSkKow6H5Kdm0ZUQUB2kV5JIXqoG0+uH5YADhaEHswm664N9Db8dXSi0nMJpacpMf+MyyglF1vnZohpEg5yUtg==}
engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0}
'@faker-js/faker@9.4.0':
resolution: {integrity: sha512-85+k0AxaZSTowL0gXp8zYWDIrWclTbRPg/pm/V0dSFZ6W6D4lhcG3uuZl4zLsEKfEvs69xDbLN2cHQudwp95JA==}
engines: {node: '>=18.0.0', npm: '>=9.0.0'}
'@grpc/grpc-js@1.12.5':
resolution: {integrity: sha512-d3iiHxdpg5+ZcJ6jnDSOT8Z0O0VMVGy34jAnYLUX8yd36b1qn8f1TwOA/Lc7TsOh03IkPJ38eGI5qD2EjNkoEA==}
engines: {node: '>=12.10.0'}
@ -820,6 +830,12 @@ packages:
'@types/k6@0.54.2':
resolution: {integrity: sha512-B5LPxeQm97JnUTpoKNE1UX9jFp+JiJCAXgZOa2P7aChxVoPQXKfWMzK+739xHq3lPkKj1aV+HeOxkP56g/oWBg==}
'@types/node-fetch@2.6.12':
resolution: {integrity: sha512-8nneRWKCg3rMtF69nLQJnOYUcbafYeFSjqkw3jCRLsqkWFlHaoQrr5mXmofFGOx3DKn7UfmBMyov8ySvLRVldA==}
'@types/node@18.19.75':
resolution: {integrity: sha512-UIksWtThob6ZVSyxcOqCLOUNg/dyO1Qvx4McgeuhrEtHTLFTf7BBhEazaE4K806FGTPtzd/2sE90qn4fVr7cyw==}
'@types/node@20.17.12':
resolution: {integrity: sha512-vo/wmBgMIiEA23A/knMfn/cf37VnuF52nZh5ZoW0GWt4e4sxNquibrMRJ7UQsA06+MBx9r/H1jsI9grYjQCQlw==}
@ -910,6 +926,10 @@ packages:
'@vitest/utils@3.0.2':
resolution: {integrity: sha512-Qu01ZYZlgHvDP02JnMBRpX43nRaZtNpIzw3C1clDXmn8eakgX6iQVGzTQ/NjkIr64WD8ioqOjkaYRVvHQI5qiw==}
abort-controller@3.0.0:
resolution: {integrity: sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==}
engines: {node: '>=6.5'}
acorn-jsx@5.3.2:
resolution: {integrity: sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==}
peerDependencies:
@ -920,6 +940,10 @@ packages:
engines: {node: '>=0.4.0'}
hasBin: true
agentkeepalive@4.6.0:
resolution: {integrity: sha512-kja8j7PjmncONqaTsB8fQ+wE2mSU2DJ9D4XKoJ5PFWIdRMa6SLSN1ff4mOr4jCbfRSsxR4keIiySJU0N9T5hIQ==}
engines: {node: '>= 8.0.0'}
ajv@6.12.6:
resolution: {integrity: sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==}
@ -1228,6 +1252,10 @@ packages:
resolution: {integrity: sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==}
engines: {node: '>=0.10.0'}
event-target-shim@5.0.1:
resolution: {integrity: sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==}
engines: {node: '>=6'}
execa@9.5.2:
resolution: {integrity: sha512-EHlpxMCpHWSAh1dgS6bVeoLAXGnJNdR93aabr4QCGbzOM73o5XmRfM/e5FUqsw3aagP8S8XEWUWFAxnRBnAF0Q==}
engines: {node: ^18.19.0 || >=20.5.0}
@ -1316,10 +1344,17 @@ packages:
resolution: {integrity: sha512-Ld2g8rrAyMYFXBhEqMz8ZAHBi4J4uS1i/CxGMDnjyFWddMXLVcDp051DZfu+t7+ab7Wv6SMqpWmyFIj5UbfFvg==}
engines: {node: '>=14'}
form-data-encoder@1.7.2:
resolution: {integrity: sha512-qfqtYan3rxrnCk1VYaA4H+Ms9xdpPqvLZa6xmMgFvhO32x7/3J/ExcTd6qpxM0vH2GdMI+poehyBZvqfMTto8A==}
form-data@4.0.1:
resolution: {integrity: sha512-tzN8e4TX8+kkxGPK8D5u0FNmjPUjw3lwC9lSLxxoB/+GtsJG91CO8bSWy73APlgAZzZbXEYZJuxjkHH2w+Ezhw==}
engines: {node: '>= 6'}
formdata-node@4.4.1:
resolution: {integrity: sha512-0iirZp3uVDjVGt9p49aTaqjk84TrglENEDuqfdlZQ1roC9CWlPk6Avf8EEnZNcAqPonwkG35x4n3ww/1THYAeQ==}
engines: {node: '>= 12.20'}
fs-extra@11.2.0:
resolution: {integrity: sha512-PmDi3uwK5nFuXh7XDTlVnS17xJS7vW36is2+w3xcv8SVxiB4NyATf4ctkVY5bkSjX0Y4nbvZCq1/EjtEyr9ktw==}
engines: {node: '>=14.14'}
@ -1382,6 +1417,9 @@ packages:
resolution: {integrity: sha512-/1/GPCpDUCCYwlERiYjxoczfP0zfvZMU/OWgQPMya9AbAE24vseigFdhAMObpc8Q4lc/kjutPfUddDYyAmejnA==}
engines: {node: '>=18.18.0'}
humanize-ms@1.2.1:
resolution: {integrity: sha512-Fl70vYtsAFb/C06PTS9dZBo7ihau+Tu/DNCk/OyHhea07S+aeMWpFFkUaXRa8fI+ScZbEI8dfSxwY7gxZ9SAVQ==}
iconv-lite@0.4.24:
resolution: {integrity: sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==}
engines: {node: '>=0.10.0'}
@ -1617,6 +1655,19 @@ packages:
resolution: {integrity: sha512-DpbZ/UDI0B+TxJB1JysXSfi1++3YK2xLBqQLTlRN0b4zxlZ2MoiB+dKKV8dMRzt1fAFjRKDknXOVJgpl+a4Amw==}
engines: {node: '>=18'}
node-domexception@1.0.0:
resolution: {integrity: sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==}
engines: {node: '>=10.5.0'}
node-fetch@2.7.0:
resolution: {integrity: sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==}
engines: {node: 4.x || >=6.0.0}
peerDependencies:
encoding: ^0.1.0
peerDependenciesMeta:
encoding:
optional: true
npm-run-path@6.0.0:
resolution: {integrity: sha512-9qny7Z9DsQU8Ou39ERsPU4OZQlSTP47ShQzuKZ6PRXpYLtIFgl/DEBYEXKlvcEa+9tHVcK8CF81Y2V72qaZhWA==}
engines: {node: '>=18'}
@ -1632,6 +1683,18 @@ packages:
resolution: {integrity: sha512-VXJjc87FScF88uafS3JllDgvAm+c/Slfz06lorj2uAY34rlUu0Nt+v8wreiImcrgAjjIHp1rXpTDlLOGw29WwQ==}
engines: {node: '>=18'}
openai@4.83.0:
resolution: {integrity: sha512-fmTsqud0uTtRKsPC7L8Lu55dkaTwYucqncDHzVvO64DKOpNTuiYwjbR/nVgpapXuYy8xSnhQQPUm+3jQaxICgw==}
hasBin: true
peerDependencies:
ws: ^8.18.0
zod: ^3.23.8
peerDependenciesMeta:
ws:
optional: true
zod:
optional: true
optionator@0.9.4:
resolution: {integrity: sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g==}
engines: {node: '>= 0.8.0'}
@ -1977,6 +2040,9 @@ packages:
resolution: {integrity: sha512-sf4i37nQ2LBx4m3wB74y+ubopq6W/dIzXg0FDGjsYnZHVa1Da8FH853wlL2gtUhg+xJXjfk3kUZS3BRoQeoQBQ==}
engines: {node: '>=6'}
tr46@0.0.3:
resolution: {integrity: sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==}
tr46@1.0.1:
resolution: {integrity: sha512-dTpowEjclQ7Kgx5SdBkqRzVhERQXov8/l9Ft9dVM9fmg0W0KQSVaXX9T4i6twCPNtYiZM53lpSSUAwJbFPOHxA==}
@ -2053,6 +2119,9 @@ packages:
resolution: {integrity: sha512-ya4mg/30vm+DOWfBg4YK3j2WD6TWtRkCbasOJr40CseYENzCUby/7rIvXA99JGsQHeNxLbnXdyLLxKSv3tauFw==}
engines: {node: '>=12.17'}
undici-types@5.26.5:
resolution: {integrity: sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==}
undici-types@6.19.8:
resolution: {integrity: sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw==}
@ -2134,9 +2203,19 @@ packages:
jsdom:
optional: true
web-streams-polyfill@4.0.0-beta.3:
resolution: {integrity: sha512-QW95TCTaHmsYfHDybGMwO5IJIM93I/6vTRk+daHTWFPhwh+C8Cg7j7XyKrwrj8Ib6vYXe0ocYNrmzY4xAAN6ug==}
engines: {node: '>= 14'}
webidl-conversions@3.0.1:
resolution: {integrity: sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==}
webidl-conversions@4.0.2:
resolution: {integrity: sha512-YQ+BmxuTgd6UXZW3+ICGfyqRyHXVlD5GtQr5+qjiNW7bF0cqrzX500HVXPBOvgXb5YnzDd+h0zqyv61KUD7+Sg==}
whatwg-url@5.0.0:
resolution: {integrity: sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==}
whatwg-url@7.1.0:
resolution: {integrity: sha512-WUu7Rg1DroM7oQvGWfOiAK21n74Gg+T4elXEQYkOhtyLeWiJFoOGLXPKI/9gzIie9CtwVLm8wtw6YJdKyxSjeg==}
@ -2445,6 +2524,8 @@ snapshots:
dependencies:
levn: 0.4.1
'@faker-js/faker@9.4.0': {}
'@grpc/grpc-js@1.12.5':
dependencies:
'@grpc/proto-loader': 0.7.13
@ -2772,6 +2853,15 @@ snapshots:
'@types/k6@0.54.2': {}
'@types/node-fetch@2.6.12':
dependencies:
'@types/node': 22.10.2
form-data: 4.0.1
'@types/node@18.19.75':
dependencies:
undici-types: 5.26.5
'@types/node@20.17.12':
dependencies:
undici-types: 6.19.8
@ -2913,12 +3003,20 @@ snapshots:
loupe: 3.1.2
tinyrainbow: 2.0.0
abort-controller@3.0.0:
dependencies:
event-target-shim: 5.0.1
acorn-jsx@5.3.2(acorn@8.14.0):
dependencies:
acorn: 8.14.0
acorn@8.14.0: {}
agentkeepalive@4.6.0:
dependencies:
humanize-ms: 1.2.1
ajv@6.12.6:
dependencies:
fast-deep-equal: 3.1.3
@ -3277,6 +3375,8 @@ snapshots:
esutils@2.0.3: {}
event-target-shim@5.0.1: {}
execa@9.5.2:
dependencies:
'@sindresorhus/merge-streams': 4.0.0
@ -3366,12 +3466,19 @@ snapshots:
cross-spawn: 7.0.6
signal-exit: 4.1.0
form-data-encoder@1.7.2: {}
form-data@4.0.1:
dependencies:
asynckit: 0.4.0
combined-stream: 1.0.8
mime-types: 2.1.35
formdata-node@4.4.1:
dependencies:
node-domexception: 1.0.0
web-streams-polyfill: 4.0.0-beta.3
fs-extra@11.2.0:
dependencies:
graceful-fs: 4.2.11
@ -3430,6 +3537,10 @@ snapshots:
human-signals@8.0.0: {}
humanize-ms@1.2.1:
dependencies:
ms: 2.1.3
iconv-lite@0.4.24:
dependencies:
safer-buffer: 2.1.2
@ -3614,6 +3725,12 @@ snapshots:
optionalDependencies:
'@rollup/rollup-linux-x64-gnu': 4.29.1
node-domexception@1.0.0: {}
node-fetch@2.7.0:
dependencies:
whatwg-url: 5.0.0
npm-run-path@6.0.0:
dependencies:
path-key: 4.0.0
@ -3629,6 +3746,20 @@ snapshots:
dependencies:
mimic-function: 5.0.1
openai@4.83.0(zod@3.24.1):
dependencies:
'@types/node': 18.19.75
'@types/node-fetch': 2.6.12
abort-controller: 3.0.0
agentkeepalive: 4.6.0
form-data-encoder: 1.7.2
formdata-node: 4.4.1
node-fetch: 2.7.0
optionalDependencies:
zod: 3.24.1
transitivePeerDependencies:
- encoding
optionator@0.9.4:
dependencies:
deep-is: 0.1.4
@ -3988,6 +4119,8 @@ snapshots:
totalist@3.0.1:
optional: true
tr46@0.0.3: {}
tr46@1.0.1:
dependencies:
punycode: 2.3.1
@ -4061,6 +4194,8 @@ snapshots:
typical@7.3.0: {}
undici-types@5.26.5: {}
undici-types@6.19.8: {}
undici-types@6.20.0: {}
@ -4138,8 +4273,17 @@ snapshots:
- supports-color
- terser
web-streams-polyfill@4.0.0-beta.3: {}
webidl-conversions@3.0.1: {}
webidl-conversions@4.0.2: {}
whatwg-url@5.0.0:
dependencies:
tr46: 0.0.3
webidl-conversions: 3.0.1
whatwg-url@7.1.0:
dependencies:
lodash.sortby: 4.7.0

View File

@ -0,0 +1,352 @@
import { execSync } from "child_process";
import { existsSync, readFileSync } from "fs";
import { createServer as createHttpServer, request as httpRequest } from "http";
import { createServer as createHttpsServer, request as httpsRequest } from "https";
import { Socket } from "net";
import { join } from "path";
import { URL } from "url";
import type { ErrorWithMessage } from "@/utils/error";
import type { IncomingMessage, RequestOptions, ServerResponse } from "http";
import type { RequestOptions as HttpsRequestOptions } from "https";
import type OpenAI from "openai";
import type { Ora } from "ora";
import { faker } from "@faker-js/faker";
import { ResultAsync } from "neverthrow";
import { toErrorWithMessage } from "@/utils/error";
import { logger } from "@/utils/logger";
// Response shapes re-used from the official OpenAI SDK so the mock payloads
// stay structurally compatible with what real clients expect.
type ModelsResponse = OpenAI.PageResponse<OpenAI.Models.Model>;
type ChatCompletionResponse = OpenAI.Chat.Completions.ChatCompletion;
type ChatCompletionChunkResponse = OpenAI.Chat.Completions.ChatCompletionChunk;
type EmbeddingCreateResponse = OpenAI.Embeddings.CreateEmbeddingResponse;
// Union of every payload the mock server can serve, keyed by endpoint path.
type MockResponse = ModelsResponse | ChatCompletionResponse | ChatCompletionChunkResponse | EmbeddingCreateResponse;
// Canned responses served by the mock server, keyed by request path.
// The /v1/embeddings entry is not listed here because its vector length
// depends on a runtime parameter (numDim); it is generated per instance.
const DEFAULT_MOCK_RESPONSES: Record<string, MockResponse> = {
  // Static model listing mirroring the shape of GET /v1/models.
  "/v1/models": {
    object: "list",
    data: [
      {
        id: "gpt-4-turbo",
        object: "model",
        created: 1687883450,
        owned_by: "openai",
      },
      {
        id: "gpt-4",
        object: "model",
        created: 1687882410,
        owned_by: "openai",
      },
      {
        id: "gpt-3.5-turbo",
        object: "model",
        created: 1677649963,
        owned_by: "openai",
      },
    ],
  },
  // Fixed chat completion; the `model` field is overwritten at request time
  // with whatever model the caller asked for (see handleChatCompletions).
  "/v1/chat/completions": {
    id: "chatcmpl-123",
    object: "chat.completion",
    created: 1677858242,
    model: "gpt-4-turbo",
    usage: {
      prompt_tokens: 13,
      completion_tokens: 7,
      total_tokens: 20,
    },
    choices: [
      {
        message: {
          role: "assistant",
          refusal: null,
          content: "Hello! How can I assist you today?",
        },
        logprobs: null,
        finish_reason: "stop",
        index: 0,
      },
    ],
  },
};
/**
 * Intercepts OpenAI API traffic and serves canned responses.
 *
 * Two servers cooperate:
 *  - a plain-HTTP proxy (`proxyPort`) that clients point at; it forwards or
 *    tunnels all traffic, but reroutes anything addressed to
 *    `api.openai.com` to
 *  - a local HTTPS mock server (`mockPort`) backed by a self-signed
 *    certificate, which answers with the configured mock payloads.
 *
 * Fixes vs. the original:
 *  - non-OpenAI plain-HTTP requests are now forwarded with `http.request`
 *    (the original used `https.request` against port 80, which attempts a
 *    TLS handshake with an HTTP endpoint and fails);
 *  - `stop()` no longer continues after rejecting, so the promise cannot
 *    settle twice;
 *  - the chat request body is typed as create-params rather than a
 *    completion response.
 */
export class OpenAIProxyService {
  private proxyServer;
  private mockServer;
  private readonly proxyPort: number;
  private readonly mockPort: number;
  // Mutable so callers can override individual endpoints via setMockResponse().
  private mockResponses: Record<string, MockResponse>;

  /**
   * @param spinner          Ora spinner used for progress/status output.
   * @param workingDirectory Directory where cert.pem/key.pem are generated and read.
   * @param numDim           Dimensionality of the fake embedding vectors.
   * @param proxyPort        Port for the HTTP proxy (default 8443).
   * @param mockPort         Port for the HTTPS mock server (default 8444).
   */
  constructor(
    private readonly spinner: Ora,
    private readonly workingDirectory: string,
    private readonly numDim: number,
    proxyPort = 8443,
    mockPort = 8444,
  ) {
    this.proxyPort = proxyPort;
    this.mockPort = mockPort;
    // numDim is a parameter property; no manual reassignment needed.
    this.mockResponses = { ...DEFAULT_MOCK_RESPONSES, ...this.generateEmbeddingsResponse() };
    const certificates = this.generateCertificates();
    this.mockServer = createHttpsServer(certificates, this.handleMockRequest.bind(this));
    this.proxyServer = createHttpServer(this.handleProxyRequest.bind(this));
    // HTTPS clients open a CONNECT tunnel; intercept it so OpenAI traffic
    // can be redirected to the local mock server.
    this.proxyServer.on("connect", this.handleConnect.bind(this));
  }

  /** Builds the /v1/embeddings mock with `numDim` random floats in [-1, 1]. */
  private generateEmbeddingsResponse(): { "/v1/embeddings": OpenAI.Embeddings.CreateEmbeddingResponse } {
    return {
      "/v1/embeddings": {
        model: "gpt-3.5-turbo",
        object: "list",
        usage: {
          prompt_tokens: 13,
          total_tokens: 13,
        },
        data: [
          {
            embedding: Array.from({ length: this.numDim }, () =>
              faker.number.float({ min: -1, max: 1, fractionDigits: 6 }),
            ),
            index: 0,
            object: "embedding",
          },
        ],
      },
    };
  }

  /**
   * Returns the TLS key/cert pair for the mock server, generating a
   * self-signed certificate with openssl on first use. The files are
   * cached in `workingDirectory` across runs.
   */
  private generateCertificates() {
    const certPath = join(this.workingDirectory, "cert.pem");
    const keyPath = join(this.workingDirectory, "key.pem");
    if (!existsSync(certPath) || !existsSync(keyPath)) {
      this.spinner.info("Generating self-signed certificate...");
      execSync(
        'openssl req -x509 -newkey rsa:2048 -keyout key.pem -out cert.pem -days 365 -nodes -subj "/CN=localhost"',
        {
          cwd: this.workingDirectory,
          stdio: "ignore",
        },
      );
    }
    return {
      key: readFileSync(keyPath),
      cert: readFileSync(certPath),
    };
  }

  /**
   * Dispatches a request hitting the mock HTTPS server: chat completions
   * get request-aware handling, everything else is answered from the
   * configured mock table; unknown paths get a 404.
   */
  private handleMockRequest(req: IncomingMessage, res: ServerResponse) {
    logger.debug(`[OpenAI Interceptor]: Handling mock request: ${req.method} ${req.url}`);
    if (!req.url) {
      res.writeHead(404);
      res.end(JSON.stringify({ error: { message: "Not found" } }));
      return;
    }
    const mockResponse = this.mockResponses[req.url];
    if (!mockResponse) {
      res.writeHead(404);
      res.end(JSON.stringify({ error: { message: "Not found" } }));
      return;
    }
    if (req.method === "POST" && req.url === "/v1/chat/completions") {
      this.handleChatCompletions(req, res, mockResponse);
    } else {
      this.sendMockResponse(res, mockResponse);
    }
  }

  /**
   * Buffers the chat-completion request body and echoes the caller's
   * requested model back in the canned response, so clients that assert on
   * the model name keep working. Malformed JSON yields a 400.
   */
  private handleChatCompletions(req: IncomingMessage, res: ServerResponse, mockResponse: MockResponse) {
    let body = "";
    req.on("data", (chunk: Buffer) => {
      body += chunk.toString();
    });
    req.on("end", () => {
      try {
        // The body is a *request* payload, so type it as create-params
        // (the original cast it to the ChatCompletion response type).
        const requestData = JSON.parse(body) as OpenAI.Chat.Completions.ChatCompletionCreateParams;
        logger.debug("[OpenAI Interceptor]: Received chat request:", requestData);
        if (!("model" in mockResponse && "model" in requestData)) {
          res.writeHead(400, { "Content-Type": "application/json" });
          res.end(JSON.stringify({ error: { message: "Missing model parameter" } }));
          return;
        }
        const response = {
          ...mockResponse,
          model: requestData.model || mockResponse.model,
        };
        this.sendMockResponse(res, response);
      } catch (error) {
        logger.error("Error parsing request body:", error);
        res.writeHead(400, { "Content-Type": "application/json" });
        res.end(JSON.stringify({ error: { message: "Invalid request body" } }));
      }
    });
  }

  /** Serializes a mock payload as a 200 JSON response (CORS-open). */
  private sendMockResponse(res: ServerResponse, response: MockResponse) {
    res.writeHead(200, {
      "Content-Type": "application/json",
      "Access-Control-Allow-Origin": "*",
    });
    res.end(JSON.stringify(response));
  }

  /**
   * Plain-HTTP proxy entry point: OpenAI requests are rerouted to the mock
   * server, anything else is transparently forwarded to its real
   * destination over HTTP.
   */
  private handleProxyRequest(req: IncomingMessage, res: ServerResponse) {
    if (!req.url || !req.headers.host) {
      res.writeHead(400);
      res.end("Bad Request");
      return;
    }
    const targetUrl = new URL(req.url.startsWith("http") ? req.url : `http://${req.headers.host}${req.url}`);
    if (targetUrl.hostname === "api.openai.com") {
      this.forwardToMock(req, res, targetUrl);
    } else {
      // Forward to original destination. These are plain-HTTP requests
      // (HTTPS traffic arrives via CONNECT, handled in handleConnect), so
      // use http.request — https.request against port 80 would attempt a
      // TLS handshake with a non-TLS endpoint and fail.
      const options: RequestOptions = {
        hostname: targetUrl.hostname,
        port: targetUrl.port || 80,
        path: targetUrl.pathname + targetUrl.search,
        method: req.method,
        headers: req.headers,
      };
      const proxyReq = httpRequest(options, (proxyRes: IncomingMessage) => {
        res.writeHead(proxyRes.statusCode ?? 500, proxyRes.headers);
        proxyRes.pipe(res);
      });
      proxyReq.on("error", (error: Error) => {
        logger.error("Proxy request error:", error);
        res.writeHead(500);
        res.end("Proxy Error");
      });
      req.pipe(proxyReq);
    }
  }

  /** Replays an intercepted OpenAI request against the local HTTPS mock. */
  private forwardToMock(req: IncomingMessage, res: ServerResponse, targetUrl: URL) {
    const options: HttpsRequestOptions = {
      hostname: "localhost",
      port: this.mockPort,
      path: targetUrl.pathname,
      method: req.method,
      headers: {
        ...req.headers,
        // Rewrite Host so the mock server accepts the request.
        host: "localhost",
      },
    };
    const proxyReq = httpsRequest(options, (proxyRes: IncomingMessage) => {
      res.writeHead(proxyRes.statusCode ?? 500, proxyRes.headers);
      proxyRes.pipe(res);
    });
    proxyReq.on("error", (error: Error) => {
      logger.error("Mock request error:", error);
      res.writeHead(500);
      res.end("Mock Server Error");
    });
    req.pipe(proxyReq);
  }

  /**
   * Handles HTTP CONNECT (HTTPS tunneling): tunnels to the local mock for
   * api.openai.com, or opens a raw TCP pipe to the real host otherwise.
   */
  private handleConnect(req: IncomingMessage, clientSocket: Socket, head: Buffer) {
    if (!req.url) {
      clientSocket.end();
      return;
    }
    // CONNECT target has the form "host:port"; default to 443.
    const [targetHost, targetPortStr] = req.url.split(":");
    const targetPort = targetPortStr ? parseInt(targetPortStr, 10) || 443 : 443;
    if (targetHost === "api.openai.com") {
      const mockSocket = new Socket();
      mockSocket.connect(this.mockPort, "localhost", () => {
        clientSocket.write("HTTP/1.1 200 Connection Established\r\n\r\n");
        mockSocket.write(head);
        mockSocket.pipe(clientSocket);
        clientSocket.pipe(mockSocket);
      });
      mockSocket.on("error", (error: Error) => {
        logger.error("Mock socket error:", error);
        clientSocket.end();
      });
    } else {
      if (!targetHost) {
        clientSocket.end();
        return;
      }
      const serverSocket = new Socket();
      serverSocket.connect(targetPort, targetHost, () => {
        clientSocket.write("HTTP/1.1 200 Connection Established\r\n\r\n");
        serverSocket.write(head);
        serverSocket.pipe(clientSocket);
        clientSocket.pipe(serverSocket);
      });
      serverSocket.on("error", (error: Error) => {
        logger.error("Server socket error:", error);
        clientSocket.end();
      });
    }
    clientSocket.on("error", (error: Error) => {
      logger.error("Client socket error:", error);
      clientSocket.end();
    });
  }

  /** Registers or replaces the mock payload served for `path`. */
  public setMockResponse(path: string, response: MockResponse): void {
    this.mockResponses[path] = response;
  }

  /** Starts the mock server, then the proxy; resolves when both listen. */
  public start(): ResultAsync<void, ErrorWithMessage> {
    return ResultAsync.fromPromise(
      new Promise<void>((resolve, reject) => {
        this.mockServer
          .listen(this.mockPort, () => {
            this.spinner.text = `\n  Mock HTTPS server running on port ${this.mockPort}`;
            this.proxyServer
              .listen(this.proxyPort, () => {
                this.spinner.succeed(`Proxy server running on port ${this.proxyPort}`);
                resolve();
              })
              .on("error", reject);
          })
          .on("error", reject);
      }),
      toErrorWithMessage,
    );
  }

  /** Stops both servers; rejects on the first close error. */
  public stop(): ResultAsync<void, ErrorWithMessage> {
    return ResultAsync.fromPromise(
      new Promise<void>((resolve, reject) => {
        this.mockServer.close((mockErr) => {
          if (mockErr) {
            // Return so the promise is not settled a second time below.
            reject(mockErr);
            return;
          }
          this.proxyServer.close((proxyErr) => {
            if (proxyErr) {
              reject(proxyErr);
              return;
            }
            this.spinner.succeed("Servers stopped");
            resolve();
          });
        });
      }),
      toErrorWithMessage,
    );
  }
}