OpenAI 模型管理 (OpenAI model management)
Yurunsoft committed Sep 15, 2023 · 1 parent a35c05a · commit 7d08bbd
Showing 5 changed files with 110 additions and 8 deletions.
admin/src/views/config/components/config-openai.vue: 9 changes (7 additions, 2 deletions)
@@ -9,7 +9,10 @@
   >
     <n-space vertical>
       <n-card title="接口配置">
-        <api-manager v-model:apis="formData.apis" :models="props.value['config:openai'].config.models" />
+        <api-manager v-model:apis="formData.apis" :models="formData.models" />
       </n-card>
+      <n-card title="模型管理">
+        <models-manager v-model:model-configs="formData.models" />
+      </n-card>
     </n-space>
     <n-space class="w-full pt-16px" :size="24" justify="center">
@@ -24,12 +24,14 @@ import type { FormInst, FormRules } from 'naive-ui';
 import { defineConfigComponent } from '@/store';
 import type { ConfigComponentProps, ConfigComponentEmit } from '@/store';
 import ApiManager from './api-manager.vue';
+import ModelsManager from './models-manager.vue';
 const props = defineProps<ConfigComponentProps>();
 const emit = defineEmits<ConfigComponentEmit>();
 const rules: FormRules = {};
 const formData = ref({
-  apis: []
+  apis: [],
+  models: []
 });
 const form = ref<FormInst>();
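A note on the change above: the parent form now owns the model list as `formData.models` rather than reading it from `props.value['config:openai'].config.models`, so `api-manager` and the new `models-manager` operate on the same reactive array. Below is a minimal TypeScript sketch of that wiring outside the SFC; the `ModelConfig` field names are inferred from the table in `models-manager.vue` and the PHP defaults further down, not declared anywhere in this commit.

```ts
// Sketch only: one reactive formData object feeding both child components.
// The ModelConfig shape is an assumption inferred from the rest of the commit.
import { ref } from 'vue';

interface ModelConfig {
  model: string;
  inputTokenMultiple: number;
  outputTokenMultiple: number;
  maxTokens: number;
  paying?: boolean;
  tips?: string;
  enable?: boolean;
}

const formData = ref<{ apis: unknown[]; models: ModelConfig[] }>({
  apis: [],
  models: []
});

// `v-model:model-configs="formData.models"` expands to passing the prop plus
// handling the child's update event, roughly:
function onUpdateModelConfigs(models: ModelConfig[]): void {
  formData.value.models = models; // api-manager's :models binding sees the same data
}
```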
admin/src/views/config/components/models-manager.vue: 97 changes (97 additions, 0 deletions)
@@ -0,0 +1,97 @@
<template>
  <n-scrollbar x-scrollable>
    <n-table :single-line="false" striped class="w-max min-w-full">
      <thead>
        <tr class="text-center">
          <th width="240">模型名称</th>
          <th width="130">Token 输入倍率</th>
          <th width="130">Token 输出倍率</th>
          <th width="130">最大 Token</th>
          <th width="70">付费标志</th>
          <th>选中提示</th>
          <th width="70">启用</th>
          <th width="70">删除</th>
        </tr>
      </thead>
      <tbody>
        <tr v-for="(item, index) in modelConfigsData" :key="index">
          <td>
            <n-input v-model:value="item.model" />
          </td>
          <td>
            <n-input-number v-model:value="item.inputTokenMultiple" :min="0" />
          </td>
          <td>
            <n-input-number v-model:value="item.outputTokenMultiple" :min="0" />
          </td>
          <td><n-input-number v-model:value="item.maxTokens" :min="0" /></td>
          <td class="text-center"><n-switch v-model:value="item.paying" /></td>
          <td><n-input v-model:value="item.tips" type="textarea" :rows="2" /></td>
          <td class="text-center"><n-switch v-model:value="item.enable" /></td>
          <td>
            <n-popconfirm :on-positive-click="() => handleDeleteModel(index)">
              <template #default>确认删除?</template>
              <template #trigger>
                <n-button text block type="primary">删除</n-button>
              </template>
            </n-popconfirm>
          </td>
        </tr>
      </tbody>
    </n-table>
    <n-space justify="center" class="mt-2">
      <n-button type="primary" @click="handleAddModelConfig">
        <icon-ic-round-plus class="mr-4px text-20px" />
        增加一项
      </n-button>
    </n-space>
  </n-scrollbar>
</template>

<script setup lang="tsx">
import { watch, reactive } from 'vue';

export interface Props {
  modelConfigs: any[];
}

export interface Emit {
  (e: 'update:modelConfigs', modelConfigs: any[]): void;
}

const props = defineProps<Props>();
const emit = defineEmits<Emit>();

const modelConfigsData = reactive(
  (() => {
    return props.modelConfigs.map(item => {
      return {
        ...item,
        enable: item.enable === undefined ? true : item.enable,
        inputTokenMultiple: parseFloat(item.inputTokenMultiple),
        outputTokenMultiple: parseFloat(item.outputTokenMultiple)
      };
    });
  })()
);

watch(modelConfigsData, value => {
  emit('update:modelConfigs', value);
});

function handleDeleteModel(index: number) {
  modelConfigsData.splice(index, 1);
}

function handleAddModelConfig() {
  modelConfigsData.push({
    model: '',
    inputTokenMultiple: 1,
    outputTokenMultiple: 1,
    maxTokens: 4096,
    tips: '',
    enable: true
  });
}
</script>

<style scoped></style>
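One behavior worth noting in the new component: the incoming props are copied into a local `reactive` array, and a `watch` whose source is a reactive object is deep by default, so editing any single cell re-emits `update:modelConfigs` without the array ever being replaced. A standalone sketch of that mechanism, with illustrative values that are not part of the commit:

```ts
// Why per-cell edits propagate to the parent: watching a reactive array is
// implicitly deep, so nested mutations trigger the callback.
import { reactive, watch } from 'vue';

const rows = reactive([{ model: 'gpt-4', maxTokens: 8192, enable: false }]);

watch(rows, value => {
  // In models-manager.vue this line is: emit('update:modelConfigs', value);
  console.log('models changed:', JSON.stringify(value));
});

rows[0].maxTokens = 32768; // mutating one field fires the watcher
```

The trade-off is that every keystroke emits an update; if that ever becomes noisy, debouncing the watcher would be the usual fix.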
server/Module/Chat/Model/Redis/ChatConfig.php: 4 changes (2 additions, 2 deletions)
@@ -43,8 +43,8 @@ public function getModelConfigs(): array
         return $this->modelConfigs = [
             new ModelConfig(['model' => 'gpt-3.5-turbo', 'inputTokenMultiple' => '0.75', 'outputTokenMultiple' => '1.0', 'maxTokens' => 4096]),
             new ModelConfig(['model' => 'gpt-3.5-turbo-16k', 'inputTokenMultiple' => '1.5', 'outputTokenMultiple' => '2.0', 'maxTokens' => 16384]),
-            new ModelConfig(['model' => 'gpt-4', 'enable' => false, 'inputTokenMultiple' => '150', 'outputTokenMultiple' => '3.0', 'maxTokens' => 8192]),
-            new ModelConfig(['model' => 'gpt-4-32k', 'enable' => false, 'inputTokenMultiple' => '300', 'outputTokenMultiple' => '6.0', 'maxTokens' => 32768]),
+            new ModelConfig(['model' => 'gpt-4', 'enable' => false, 'inputTokenMultiple' => '15', 'outputTokenMultiple' => '30', 'maxTokens' => 8192]),
+            new ModelConfig(['model' => 'gpt-4-32k', 'enable' => false, 'inputTokenMultiple' => '30', 'outputTokenMultiple' => '60', 'maxTokens' => 32768]),
         ];
     }

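ChatConfig.php above and the Embedding and OpenAI configs below all carry the same fix to the default table: gpt-4 drops from multipliers of 150 / 3.0 to 15 / 30, and gpt-4-32k from 300 / 6.0 to 30 / 60. The commit does not say what a multiple of 1.0 means; the new numbers happen to line up with OpenAI's prices at the time if 1.0 corresponds to $0.002 per 1K tokens (gpt-3.5-turbo input 0.75 ≈ $0.0015, gpt-4 input 15 ≈ $0.03, gpt-4 output 30 ≈ $0.06), but treat that as an inference. A hedged sketch of how such a multiple is typically applied:

```ts
// Assumption: billed usage = raw tokens * the model's multiple. This formula is
// not defined in the commit; it is only a plausible reading of the config.
interface ModelConfig {
  model: string;
  inputTokenMultiple: number;
  outputTokenMultiple: number;
}

function billedTokens(cfg: ModelConfig, promptTokens: number, completionTokens: number): number {
  return promptTokens * cfg.inputTokenMultiple + completionTokens * cfg.outputTokenMultiple;
}

const gpt4: ModelConfig = { model: 'gpt-4', inputTokenMultiple: 15, outputTokenMultiple: 30 };

// 1,000 prompt tokens + 500 completion tokens:
//   old defaults (150 / 3.0): 150_000 + 1_500  = 151_500
//   new defaults (15 / 30):    15_000 + 15_000 =  30_000
console.log(billedTokens(gpt4, 1000, 500)); // 30000
```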
server/Module/Embedding/Model/Redis/EmbeddingConfig.php: 4 changes (2 additions, 2 deletions)
@@ -225,8 +225,8 @@ public function getChatModelConfigs(): ?array
         return $this->chatModelConfigs = [
             new ModelConfig(['model' => 'gpt-3.5-turbo', 'inputTokenMultiple' => '0.75', 'outputTokenMultiple' => '1.0', 'maxTokens' => 4096]),
             new ModelConfig(['model' => 'gpt-3.5-turbo-16k', 'inputTokenMultiple' => '1.5', 'outputTokenMultiple' => '2.0', 'maxTokens' => 16384]),
-            new ModelConfig(['model' => 'gpt-4', 'enable' => false, 'inputTokenMultiple' => '150', 'outputTokenMultiple' => '3.0', 'maxTokens' => 8192]),
-            new ModelConfig(['model' => 'gpt-4-32k', 'enable' => false, 'inputTokenMultiple' => '300', 'outputTokenMultiple' => '6.0', 'maxTokens' => 32768]),
+            new ModelConfig(['model' => 'gpt-4', 'enable' => false, 'inputTokenMultiple' => '15', 'outputTokenMultiple' => '30', 'maxTokens' => 8192]),
+            new ModelConfig(['model' => 'gpt-4-32k', 'enable' => false, 'inputTokenMultiple' => '30', 'outputTokenMultiple' => '60', 'maxTokens' => 32768]),
         ];
     }

server/Module/OpenAI/Model/Redis/OpenAIConfig.php: 4 changes (2 additions, 2 deletions)
@@ -95,8 +95,8 @@ public function getModels(): ?array
         return $this->models = [
             new ModelConfig(['model' => 'gpt-3.5-turbo', 'inputTokenMultiple' => '0.75', 'outputTokenMultiple' => '1.0', 'maxTokens' => 4096]),
             new ModelConfig(['model' => 'gpt-3.5-turbo-16k', 'inputTokenMultiple' => '1.5', 'outputTokenMultiple' => '2.0', 'maxTokens' => 16384]),
-            new ModelConfig(['model' => 'gpt-4', 'enable' => false, 'inputTokenMultiple' => '150', 'outputTokenMultiple' => '3.0', 'maxTokens' => 8192]),
-            new ModelConfig(['model' => 'gpt-4-32k', 'enable' => false, 'inputTokenMultiple' => '300', 'outputTokenMultiple' => '6.0', 'maxTokens' => 32768]),
+            new ModelConfig(['model' => 'gpt-4', 'enable' => false, 'inputTokenMultiple' => '15', 'outputTokenMultiple' => '30', 'maxTokens' => 8192]),
+            new ModelConfig(['model' => 'gpt-4-32k', 'enable' => false, 'inputTokenMultiple' => '30', 'outputTokenMultiple' => '60', 'maxTokens' => 32768]),
             new ModelConfig(['model' => 'text-embedding-ada-002', 'inputTokenMultiple' => '0.05', 'outputTokenMultiple' => '0.05', 'maxTokens' => 8191]),
         ];
     }
