Skip to content

Commit

Permalink
feat: allow setting OPENAI_API_BASE
Browse files · Browse the repository at this point in the history
  • Loading branch information
Manouchehri committed Apr 26, 2024
1 parent fab53a1 commit 455f511
Show file tree
Hide file tree
Showing 7 changed files with 12 additions and 1 deletion.
1 change: 1 addition & 0 deletions .env.dev.example
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,7 @@ S3_REGION=

# Prompt playground
OPENAI_API_KEY=""
OPENAI_API_BASE="https://api.openai.com/v1"
ANTHROPIC_API_KEY=""

# Set during docker build of application
Expand Down
1 change: 1 addition & 0 deletions .env.local.example
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@ SALT="salt"

# Prompt playground
OPENAI_API_KEY=""
OPENAI_API_BASE="https://api.openai.com/v1"
ANTHROPIC_API_KEY=""

# Redis
Expand Down
1 change: 1 addition & 0 deletions .env.prod.example
Original file line number Diff line number Diff line change
Expand Up @@ -103,6 +103,7 @@ LANGFUSE_CSP_ENFORCE_HTTPS="true"

# Prompt playground
# OPENAI_API_KEY=""
# OPENAI_API_BASE="https://api.openai.com/v1"
# ANTHROPIC_API_KEY=""

# Betterstack
Expand Down
6 changes: 5 additions & 1 deletion packages/shared/src/server/llm/fetchLLMCompletion.ts
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,7 @@ type LLMCompletionParams = {
functionCall?: LLMFunctionCall;
callbacks?: BaseCallbackHandler[];
apiKey?: string;
apiBase?: string;
};

type FetchLLMCompletionParams = LLMCompletionParams & {
Expand Down · Expand Up
@@ -57,7 +58,7 @@ export async function fetchLLMCompletion(
params: FetchLLMCompletionParams
): Promise<string | IterableReadableStream<Uint8Array> | unknown> {
// the apiKey must never be printed to the console
const { messages, modelParams, streaming, callbacks, apiKey } = params;
const { messages, modelParams, streaming, callbacks, apiKey, apiBase } = params;
const finalMessages = messages.map((message) => {
if (message.role === ChatMessageRole.User)
return new HumanMessage(message.content);
Expand All @@ -84,6 +85,9 @@ export async function fetchLLMCompletion(
temperature: modelParams.temperature,
maxTokens: modelParams.max_tokens,
topP: modelParams.top_p,
configuration: {
baseURL: apiBase,
},
callbacks,
});
} else {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -64,6 +64,7 @@ export default async function chatCompletionHandler(req: NextRequest) {
modelParams.provider === "openai"
? env.OPENAI_API_KEY
: env.ANTHROPIC_API_KEY,
apiBase: env.OPENAI_API_BASE,
});

return new StreamingTextResponse(stream);
Expand Down
2 changes: 2 additions & 0 deletions web/src/env.mjs
Original file line number Diff line number Diff line change
Expand Up @@ -78,6 +78,7 @@ export const env = createEnv({
LANGFUSE_WORKER_PASSWORD: z.string().optional(),
// Prompt playground
OPENAI_API_KEY: z.string().optional(),
OPENAI_API_BASE: z.string().optional(),
ANTHROPIC_API_KEY: z.string().optional(),
TURNSTILE_SECRET_KEY: z.string().optional(),
},
Expand Down Expand Up @@ -173,6 +174,7 @@ export const env = createEnv({
LANGFUSE_WORKER_PASSWORD: process.env.LANGFUSE_WORKER_PASSWORD,
// Prompt playground
OPENAI_API_KEY: process.env.OPENAI_API_KEY,
OPENAI_API_BASE: process.env.OPENAI_API_BASE,
ANTHROPIC_API_KEY: process.env.ANTHROPIC_API_KEY,
TURNSTILE_SECRET_KEY: process.env.TURNSTILE_SECRET_KEY,
NEXT_PUBLIC_TURNSTILE_SITE_KEY: process.env.NEXT_PUBLIC_TURNSTILE_SITE_KEY,
Expand Down
1 change: 1 addition & 0 deletions worker/src/eval-service.ts
Original file line number Diff line number Diff line change
Expand Up @@ -279,6 +279,7 @@ export const evaluate = async ({
completion = await fetchLLMCompletion({
streaming: false,
apiKey: decrypt(apiKey.secret_key), // decrypt the secret key
// apiBase: apiKey.api_base,
messages: [{ role: ChatMessageRole.System, content: prompt }],
modelParams: {
provider: provider,
Expand Down

0 comments on commit 455f511

Please sign in to comment.