Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add HTTP_PROXY variable and proxy process when access endpoints' api services #55

Open
wants to merge 4 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 7 additions & 0 deletions examples/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -89,6 +89,13 @@ AZURE_OPENAI_ENDPOINT=...
AZURE_OPENAI_API_KEY=...
```

Optional proxy setting: for some endpoints, a proxy is required to reach the service.
You can set your HTTP proxy environment variable like the following:
```
# For Proxy
HTTP_PROXY=http://host:port
```

## Step 4: Run the examples

Examples can be found in the `examples` directory.
Expand Down
69 changes: 69 additions & 0 deletions package-lock.json

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

1 change: 1 addition & 0 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,7 @@
],
"dependencies": {
"axios": "^1.4.0",
"https-proxy-agent": "^7.0.1",
"typescript": "^5.1.3"
},
"devDependencies": {
Expand Down
32 changes: 23 additions & 9 deletions src/model.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,8 @@
import axios from "axios";
import { Result, success, error } from "./result";

import { HttpsProxyAgent } from 'https-proxy-agent';

/**
* Represents a AI language model that can complete prompts. TypeChat uses an implementation of this
* interface to communicate with an AI service that can translate natural language requests to JSON
Expand Down Expand Up @@ -39,17 +41,19 @@ export interface TypeChatLanguageModel {
* @returns An instance of `TypeChatLanguageModel`.
*/
export function createLanguageModel(env: Record<string, string | undefined>): TypeChatLanguageModel {
    // Optional proxy URL, e.g. "http://host:port". Empty string means "no proxy";
    // the model factories below only build a proxy agent for a non-empty value.
    const httpProxy = env.HTTP_PROXY ?? "";

    if (env.OPENAI_API_KEY) {
        const apiKey = env.OPENAI_API_KEY ?? missingEnvironmentVariable("OPENAI_API_KEY");
        const model = env.OPENAI_MODEL ?? missingEnvironmentVariable("OPENAI_MODEL");
        const endPoint = env.OPENAI_ENDPOINT ?? "https://api.openai.com/v1/chat/completions";
        const org = env.OPENAI_ORGANIZATION ?? "";
        return createOpenAILanguageModel(apiKey, model, endPoint, org, httpProxy);
    }
    if (env.AZURE_OPENAI_API_KEY) {
        const apiKey = env.AZURE_OPENAI_API_KEY ?? missingEnvironmentVariable("AZURE_OPENAI_API_KEY");
        const endPoint = env.AZURE_OPENAI_ENDPOINT ?? missingEnvironmentVariable("AZURE_OPENAI_ENDPOINT");
        return createAzureOpenAILanguageModel(apiKey, endPoint, httpProxy);
    }
    missingEnvironmentVariable("OPENAI_API_KEY or AZURE_OPENAI_API_KEY");
}
Expand All @@ -59,15 +63,19 @@ export function createLanguageModel(env: Record<string, string | undefined>): Ty
* @param apiKey The OpenAI API key.
* @param model The model name.
* @param endPoint The URL of the OpenAI REST API endpoint. Defaults to "https://api.openai.com/v1/chat/completions".
 * @param org The OpenAI organization id.
 * @param org The OpenAI organization id.
 * @param httpProxy The HTTP proxy setting, e.g. "http://host:port".
* @returns An instance of `TypeChatLanguageModel`.
*/
export function createOpenAILanguageModel(apiKey: string, model: string, endPoint = "https://api.openai.com/v1/chat/completions", org = ""): TypeChatLanguageModel {
return createAxiosLanguageModel(endPoint, {
headers: {
export function createOpenAILanguageModel(apiKey: string, model: string, endPoint = "https://api.openai.com/v1/chat/completions", org = "", httpProxy = ""): TypeChatLanguageModel {
const agent = new HttpsProxyAgent(httpProxy);
return createAxiosLanguageModel(endPoint, {
headers: {
Authorization: `Bearer ${apiKey}`,
"OpenAI-Organization": org
}
},
httpAgent: agent,
httpsAgent: agent
}, { model });
}

Expand All @@ -77,10 +85,16 @@ export function createOpenAILanguageModel(apiKey: string, model: string, endPoin
* "https://{your-resource-name}.openai.azure.com/openai/deployments/{your-deployment-name}/chat/completions?api-version={API-version}".
* Example deployment names are "gpt-35-turbo" and "gpt-4". An example API versions is "2023-05-15".
* @param apiKey The Azure OpenAI API key.
 * @param httpProxy The HTTP proxy setting, e.g. "http://host:port".
* @returns An instance of `TypeChatLanguageModel`.
*/
export function createAzureOpenAILanguageModel(apiKey: string, endPoint: string,): TypeChatLanguageModel {
return createAxiosLanguageModel(endPoint, { headers: { "api-key": apiKey } }, {});
export function createAzureOpenAILanguageModel(apiKey: string, endPoint: string, httpProxy = ""): TypeChatLanguageModel {
const agent = new HttpsProxyAgent(httpProxy);
return createAxiosLanguageModel(endPoint, {
headers: { "api-key": apiKey },
httpAgent: agent,
httpsAgent: agent
}, {});
}

/**
Expand Down