feat: support socks proxy for OpenAI SDK (#175)

* feat: support socks proxy https://github.com/web-infra-dev/midscene-example/issues/14

* feat: show error for invalid json
This commit is contained in:
yuyutaotao 2024-12-10 09:24:21 +08:00 committed by GitHub
parent 91896c421a
commit 2a73bcfc20
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
7 changed files with 46 additions and 16 deletions

View File

@@ -11,18 +11,21 @@ Required:
export OPENAI_API_KEY="sk-abcdefghijklmnopqrstuvwxyz"
```
Optional:
Optional configs:
```bash
# optional, if you want to use a customized endpoint
# if you want to use a customized endpoint
export OPENAI_BASE_URL="https://..."
# optional, if you want to use Azure OpenAI Service
# if you want to use Azure OpenAI Service
export OPENAI_USE_AZURE="true"
# optional, if you want to specify a model name other than gpt-4o
# if you want to specify a model name other than gpt-4o
export MIDSCENE_MODEL_NAME='claude-3-opus-20240229';
# optional, if you want to pass customized JSON data to the `init` process of OpenAI SDK
# if you want to pass customized JSON data to the `init` process of OpenAI SDK
export MIDSCENE_OPENAI_INIT_CONFIG_JSON='{"baseURL":"....","defaultHeaders":{"key": "value"}}'
# if you want to use a proxy. Midscene uses `socks-proxy-agent` under the hood.
export MIDSCENE_OPENAI_SOCKS_PROXY="socks5://127.0.0.1:1080"
```

View File

@@ -25,4 +25,7 @@ export MIDSCENE_MODEL_NAME='claude-3-opus-20240229';
# 可选, 如果你想变更 SDK 的初始化参数
export MIDSCENE_OPENAI_INIT_CONFIG_JSON='{"baseURL":"....","defaultHeaders":{"key": "value"}}'
# 可选, 如果你想使用代理。Midscene 使用 `socks-proxy-agent` 作为底层库。
export MIDSCENE_OPENAI_SOCKS_PROXY="socks5://127.0.0.1:1080"
```

View File

@@ -37,9 +37,10 @@
"prepublishOnly": "npm run build"
},
"dependencies": {
"@midscene/shared": "workspace:*",
"openai": "4.57.1",
"optional": "0.1.4",
"@midscene/shared": "workspace:*",
"socks-proxy-agent": "8.0.4"
},
"devDependencies": {
"@modern-js/module-tools": "2.60.6",

View File

@@ -1,19 +1,22 @@
import assert from 'node:assert';
import { AIResponseFormat, type AIUsageInfo } from '@/types';
import { ifInBrowser } from '@midscene/shared/utils';
import OpenAI, { type ClientOptions, AzureOpenAI } from 'openai';
import OpenAI, { AzureOpenAI } from 'openai';
import type { ChatCompletionMessageParam } from 'openai/resources';
import { SocksProxyAgent } from 'socks-proxy-agent';
import {
MIDSCENE_DANGEROUSLY_PRINT_ALL_CONFIG,
MIDSCENE_DEBUG_AI_PROFILE,
MIDSCENE_LANGSMITH_DEBUG,
MIDSCENE_MODEL_NAME,
MIDSCENE_OPENAI_INIT_CONFIG_JSON,
MIDSCENE_OPENAI_SOCKS_PROXY,
OPENAI_API_KEY,
OPENAI_BASE_URL,
OPENAI_USE_AZURE,
allAIConfig,
getAIConfig,
getAIConfigInJson,
} from '../../env';
import { AIActionType } from '../common';
import { findElementSchema } from '../prompt/element_inspector';
@@ -40,12 +43,15 @@ export function getModelName() {
async function createOpenAI() {
let openai: OpenAI | AzureOpenAI;
const extraConfigString = getAIConfig(MIDSCENE_OPENAI_INIT_CONFIG_JSON);
const extraConfig = extraConfigString ? JSON.parse(extraConfigString) : {};
const extraConfig = getAIConfigInJson(MIDSCENE_OPENAI_INIT_CONFIG_JSON);
const socksProxy = getAIConfig(MIDSCENE_OPENAI_SOCKS_PROXY);
const socksAgent = socksProxy ? new SocksProxyAgent(socksProxy) : undefined;
if (getAIConfig(OPENAI_USE_AZURE)) {
openai = new AzureOpenAI({
baseURL: getAIConfig(OPENAI_BASE_URL),
apiKey: getAIConfig(OPENAI_API_KEY),
httpAgent: socksAgent,
...extraConfig,
dangerouslyAllowBrowser: true,
});
@@ -53,6 +59,7 @@ async function createOpenAI() {
openai = new OpenAI({
baseURL: getAIConfig(OPENAI_BASE_URL),
apiKey: getAIConfig(OPENAI_API_KEY),
httpAgent: socksAgent,
...extraConfig,
dangerouslyAllowBrowser: true,
});

View File

@@ -7,6 +7,7 @@ export const MIDSCENE_DEBUG_AI_PROFILE = 'MIDSCENE_DEBUG_AI_PROFILE';
export const MIDSCENE_DANGEROUSLY_PRINT_ALL_CONFIG =
'MIDSCENE_DANGEROUSLY_PRINT_ALL_CONFIG';
export const MIDSCENE_DEBUG_MODE = 'MIDSCENE_DEBUG_MODE';
export const MIDSCENE_OPENAI_SOCKS_PROXY = 'MIDSCENE_OPENAI_SOCKS_PROXY';
export const OPENAI_API_KEY = 'OPENAI_API_KEY';
export const OPENAI_BASE_URL = 'OPENAI_BASE_URL';
export const MIDSCENE_MODEL_TEXT_ONLY = 'MIDSCENE_MODEL_TEXT_ONLY';
@@ -36,6 +37,8 @@ const allConfigFromEnv = () => {
[MATCH_BY_POSITION]: process.env[MATCH_BY_POSITION] || undefined,
[MIDSCENE_REPORT_TAG_NAME]:
process.env[MIDSCENE_REPORT_TAG_NAME] || undefined,
[MIDSCENE_OPENAI_SOCKS_PROXY]:
process.env[MIDSCENE_OPENAI_SOCKS_PROXY] || undefined,
};
};
@@ -50,6 +53,20 @@ export const getAIConfig = (
return allConfigFromEnv()[configKey];
};
/**
 * Read the config value for `configKey` and parse it as JSON.
 *
 * @param configKey - key into the AI config (e.g. MIDSCENE_OPENAI_INIT_CONFIG_JSON)
 * @returns the parsed JSON value, or `undefined` when the config is unset/empty
 * @throws Error (with `cause` set to the parse error) when the value is not valid JSON,
 *   so callers see which key held the invalid JSON instead of a bare SyntaxError
 */
export const getAIConfigInJson = (configKey: keyof typeof userConfig) => {
  const config = getAIConfig(configKey);
  // unset or empty config: nothing to parse
  if (!config) return undefined;
  try {
    return JSON.parse(config);
  } catch (error: unknown) {
    // narrow before reading .message — thrown values are not guaranteed to be Error
    const message = error instanceof Error ? error.message : String(error);
    throw new Error(`Failed to parse json config: ${configKey}. ${message}`, {
      cause: error,
    });
  }
};
/**
 * Snapshot of the effective AI configuration: environment-derived values
 * overridden by any user-provided config.
 */
export const allAIConfig = () => ({ ...allConfigFromEnv(), ...userConfig });

View File

@@ -9,6 +9,7 @@ import {
MIDSCENE_DEBUG_MODE,
MIDSCENE_OPENAI_INIT_CONFIG_JSON,
getAIConfig,
getAIConfigInJson,
} from './env';
import type { Rect, ReportDumpWithAttributes } from './types';
@@ -226,16 +227,11 @@ function debugLog(...message: any[]) {
}
let lastReportedRepoUrl = '';
export function uploadTestInfoToServer({
testUrl,
}: {
testUrl: string;
}) {
export function uploadTestInfoToServer({ testUrl }: { testUrl: string }) {
let repoUrl = '';
let userEmail = '';
const extraConfigString = getAIConfig(MIDSCENE_OPENAI_INIT_CONFIG_JSON);
const extraConfig = extraConfigString ? JSON.parse(extraConfigString) : {};
const extraConfig = getAIConfigInJson(MIDSCENE_OPENAI_INIT_CONFIG_JSON);
const serverUrl = extraConfig?.REPORT_SERVER_URL;
try {

3
pnpm-lock.yaml generated
View File

@@ -151,6 +151,9 @@ importers:
optional:
specifier: 0.1.4
version: 0.1.4
socks-proxy-agent:
specifier: 8.0.4
version: 8.0.4
devDependencies:
'@modern-js/module-tools':
specifier: 2.60.6