Skip to content

Commit

Permalink
🐛 fix: fix client config
Browse files · Browse the repository at this point in the history
  • Loading branch information
arvinxx committed Sep 11, 2023
1 parent 554dd8a commit d62f1b3
Show file tree
Hide file tree
Showing 6 changed files with 5 additions and 23 deletions.
3 changes: 0 additions & 3 deletions next.config.mjs
Original file line number Diff line number Diff line change
Expand Up @@ -14,9 +14,6 @@ const nextConfig = {
reactStrictMode: true,
pageExtensions: ['page.tsx', 'api.ts'],
transpilePackages: ['@lobehub/ui'],
env: {
USE_AZURE_OPENAI: process.env.USE_AZURE_OPENAI === '1',
},
webpack(config) {
config.experiments = {
asyncWebAssembly: true,
Expand Down
14 changes: 0 additions & 14 deletions src/config/client.ts

This file was deleted.

2 changes: 2 additions & 0 deletions src/config/server.ts
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@ declare global {
AZURE_API_VERSION?: string;
OPENAI_API_KEY?: string;
OPENAI_PROXY_URL?: string;
USE_AZURE_OPENAI?: string;
}
}
}
Expand All @@ -25,5 +26,6 @@ export const getServerConfig = () => {

OPENAI_API_KEY: process.env.OPENAI_API_KEY,
OPENAI_PROXY_URL: process.env.OPENAI_PROXY_URL,
USE_AZURE_OPENAI: process.env.USE_AZURE_OPENAI === '1',
};
};
2 changes: 0 additions & 2 deletions src/const/settings.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,3 @@
import { getClientConfig } from '@/config/client';
import { DEFAULT_OPENAI_MODEL_LIST } from '@/const/llm';
import { DEFAULT_AGENT_META } from '@/const/meta';
import { LanguageModel } from '@/types/llm';
Expand Down Expand Up @@ -38,7 +37,6 @@ export const DEFAULT_LLM_CONFIG: GlobalLLMConfig = {
openAI: {
OPENAI_API_KEY: '',
models: DEFAULT_OPENAI_MODEL_LIST,
useAzure: getClientConfig().USE_AZURE_OPENAI,
},
};

Expand Down
4 changes: 2 additions & 2 deletions src/pages/api/openai/chat.api.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import OpenAI from 'openai';

import { getClientConfig } from '@/config/client';
import { getServerConfig } from '@/config/server';
import { getOpenAIAuthFromRequest } from '@/const/fetch';
import { ErrorType } from '@/types/fetch';
import { OpenAIStreamPayload } from '@/types/openai';
Expand All @@ -26,7 +26,7 @@ export default async function handler(req: Request) {

let openai: OpenAI;

const { USE_AZURE_OPENAI } = getClientConfig();
const { USE_AZURE_OPENAI } = getServerConfig();
const useAzureOpenAI = useAzure || USE_AZURE_OPENAI;

if (useAzureOpenAI) {
Expand Down
3 changes: 1 addition & 2 deletions src/pages/settings/features/Settings/LLM/index.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,6 @@ import { memo } from 'react';
import { Trans, useTranslation } from 'react-i18next';
import { Flexbox } from 'react-layout-kit';

import { getClientConfig } from '@/config/client';
import { FORM_STYLE } from '@/const/layoutTokens';
import { globalSelectors, useEffectAfterGlobalHydrated, useGlobalStore } from '@/store/global';

Expand Down Expand Up @@ -83,7 +82,7 @@ const LLM = memo(() => {
},
{
children: (
<Switch disabled={getClientConfig().USE_AZURE_OPENAI} />
<Switch />
// <Flexbox gap={4}>
// <div>
//
Expand Down

0 comments on commit d62f1b3

Please sign in to comment.