Commit 2ec9e8f

refactor(llm): simplify LLM configuration and remove OpenAI-specific code #257
- Remove OpenAI-specific settings and code, focusing on custom LLM configuration.
- Update property labels for clarity and consistency.
- Simplify LLM factory to only support custom LLM providers.
- Remove unused enums and constants related to OpenAI.
1 parent 3f71f8e commit 2ec9e8f
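
With this refactor, the custom-engine fields of AutoDevSettingsState become the only LLM configuration the plugin reads. As a rough sketch of what a user now fills in — the server, token, and model values below are placeholder examples, not values shipped by the plugin; only the request/response formats mirror the defaults introduced in this commit:

// Hypothetical example values; property names follow AutoDevSettingsState.
val customEngineServer = "https://api.example.com/v1/chat/completions"  // placeholder endpoint
val customEngineToken = "<your-api-key>"                                // placeholder token
val customModel = "deepseek-chat"                                       // example model name
// Defaults added by this commit, used when the fields are left empty:
val customEngineRequestFormat =
    """{ "customFields": {"model": "deepseek-chat", "temperature": 0.0, "stream": true} }"""
val customEngineResponseFormat = "\$.choices[0].delta.content"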

8 files changed: +50 −136 lines


core/src/main/kotlin/cc/unitmesh/devti/llms/LlmFactory.kt

Lines changed: 1 addition & 10 deletions
@@ -2,9 +2,6 @@ package cc.unitmesh.devti.llms

 import cc.unitmesh.devti.llms.custom.CustomLLMProvider
 import cc.unitmesh.devti.llms.custom.InlayCustomLLMProvider
-import cc.unitmesh.devti.llms.openai.OpenAIProvider
-import cc.unitmesh.devti.settings.AIEngines
-import cc.unitmesh.devti.settings.AutoDevSettingsState
 import cc.unitmesh.devti.settings.coder.AutoDevCoderSettingService
 import com.intellij.openapi.components.Service
 import com.intellij.openapi.components.service
@@ -13,15 +10,9 @@ import com.intellij.openapi.project.Project

 @Service
 class LlmFactory {
-    private val aiEngine: AIEngines
-        get() = AIEngines.values()
-            .find { it.name.lowercase() == AutoDevSettingsState.getInstance().aiEngine.lowercase() } ?: AIEngines.OpenAI

     fun create(project: Project): LLMProvider {
-        return when (aiEngine) {
-            AIEngines.OpenAI -> project.getService(OpenAIProvider::class.java)
-            AIEngines.Custom -> project.getService(CustomLLMProvider::class.java)
-        }
+        return project.getService(CustomLLMProvider::class.java)
     }

     fun createForInlayCodeComplete(project: Project): LLMProvider {

core/src/main/kotlin/cc/unitmesh/devti/llms/custom/CustomLLMProvider.kt

Lines changed: 10 additions & 7 deletions
@@ -24,8 +24,15 @@ class CustomLLMProvider(val project: Project) : LLMProvider, CustomSSEProcessor(
     private val url get() = autoDevSettingsState.customEngineServer
     private val key get() = autoDevSettingsState.customEngineToken

-    override val requestFormat: String get() = autoDevSettingsState.customEngineRequestFormat
-    override val responseFormat get() = autoDevSettingsState.customEngineResponseFormat
+    private val modelName: String
+        get() = AutoDevSettingsState.getInstance().customModel
+
+    override val requestFormat: String get() = autoDevSettingsState.customEngineRequestFormat.ifEmpty {
+        """{ "customFields": {"model": "$modelName", "temperature": 0.0, "stream": true} }"""
+    }
+    override val responseFormat get() = autoDevSettingsState.customEngineResponseFormat.ifEmpty {
+        "\$.choices[0].delta.content"
+    }

     private var client = OkHttpClient()
     private val timeout = Duration.ofSeconds(defaultTimeout)
@@ -69,11 +76,7 @@ class CustomLLMProvider(val project: Project) : LLMProvider, CustomSSEProcessor(
             clearMessage()
         }

-        return if (autoDevSettingsState.customEngineResponseType == ResponseType.SSE.name) {
-            streamSSE(call, promptText, keepHistory, messages)
-        } else {
-            streamJson(call, promptText, messages)
-        }
+        return streamSSE(call, promptText, keepHistory, messages)
    }

    fun prompt(instruction: String, input: String): String {
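
To make the fallback above concrete, here is a small self-contained sketch (plain Kotlin, not the plugin's code) of what the two ifEmpty defaults evaluate to for an assumed model name, and the kind of OpenAI-compatible streamed chunk the default JsonPath is meant to pick the delta text from:

fun main() {
    val modelName = "deepseek-chat"  // assumed value of AutoDevSettingsState.customModel

    // Default request body when customEngineRequestFormat is empty:
    val requestFormat =
        """{ "customFields": {"model": "$modelName", "temperature": 0.0, "stream": true} }"""

    // Default response format when customEngineResponseFormat is empty:
    // a JsonPath selecting the streamed delta text of an OpenAI-compatible chunk.
    val responseFormat = "\$.choices[0].delta.content"

    // A typical chunk the JsonPath would be evaluated against:
    val sampleChunk = """{"choices":[{"delta":{"content":"Hello"}}]}"""

    println(requestFormat)
    println("$responseFormat applied to $sampleChunk selects \"Hello\"")
}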

core/src/main/kotlin/cc/unitmesh/devti/llms/openai/OpenAIProvider.kt

Lines changed: 1 addition & 8 deletions
@@ -9,7 +9,6 @@ import cc.unitmesh.devti.coder.recording.Recording
 import cc.unitmesh.devti.coder.recording.RecordingInstruction
 import cc.unitmesh.devti.settings.AutoDevSettingsState
 import cc.unitmesh.devti.settings.coder.coderSetting
-import cc.unitmesh.devti.settings.SELECT_CUSTOM_MODEL
 import com.intellij.openapi.components.Service
 import com.intellij.openapi.components.service
 import com.intellij.openapi.diagnostic.Logger
@@ -35,13 +34,7 @@ import java.time.Duration
 class OpenAIProvider(val project: Project) : LLMProvider {
     private val timeout = Duration.ofSeconds(defaultTimeout)
     private val openAiVersion: String
-        get() {
-            val model = AutoDevSettingsState.getInstance().openAiModel
-            if(model == SELECT_CUSTOM_MODEL) {
-                return AutoDevSettingsState.getInstance().customModel
-            }
-            return model
-        }
+        get() = AutoDevSettingsState.getInstance().customModel
     private val openAiKey: String
         get() = AutoDevSettingsState.getInstance().openAiKey

core/src/main/kotlin/cc/unitmesh/devti/settings/AutoDevSettingsState.kt

Lines changed: 2 additions & 12 deletions
@@ -15,21 +15,15 @@ import java.time.format.DateTimeFormatter
 @State(name = "cc.unitmesh.devti.settings.DevtiSettingsState", storages = [Storage("DevtiSettings.xml")])
 class AutoDevSettingsState : PersistentStateComponent<AutoDevSettingsState> {
     var openAiKey = ""
-    var openAiModel = DEFAULT_AI_MODEL
     var delaySeconds = ""

-    var aiEngine = DEFAULT_AI_ENGINE
     var customOpenAiHost = ""
     var customEngineServer = ""
     var customEngineToken = ""
     var customPrompts = ""
     var customModel = ""

-    var customEngineResponseType = ResponseType.SSE.name
-    /**
-     * should be a json path
-     */
-    var customEngineResponseFormat = ""
+    var customEngineResponseFormat = "\$.choices[0].delta.content"
     /**
      * should be a json
      * {
@@ -40,11 +34,7 @@ class AutoDevSettingsState : PersistentStateComponent<AutoDevSettingsState> {
      *
      * @see docs/custom-llm-server.md
      */
-    var customEngineRequestFormat = ""
-
-    @OptionTag(value = "lastCheckTime", converter = ZonedDateTimeConverter::class)
-    var lastCheck: ZonedDateTime? = null
-
+    var customEngineRequestFormat = """{ "customFields": {"model": "deepseek-chat", "temperature": 0.0, "stream": true} }"""

     var language = DEFAULT_HUMAN_LANGUAGE
     var maxTokenLength = MAX_TOKEN_LENGTH.toString()

core/src/main/kotlin/cc/unitmesh/devti/settings/Constants.kt

Lines changed: 0 additions & 7 deletions
@@ -1,20 +1,13 @@
 package cc.unitmesh.devti.settings

-val OPENAI_MODEL = arrayOf("gpt-3.5-turbo", "gpt-3.5-turbo-16k", "gpt-4", "custom")
 val AI_ENGINES = arrayOf("OpenAI", "Custom")

-enum class AIEngines {
-    OpenAI, Custom
-}
-
 enum class ResponseType {
     SSE, JSON;
 }

 val DEFAULT_AI_ENGINE = AI_ENGINES[0]

-val DEFAULT_AI_MODEL = OPENAI_MODEL[0]
-
 @Suppress("unused")
 enum class HUMAN_LANGUAGES(val abbr: String, val display: String) {
     ENGLISH("en", "English"),

core/src/main/kotlin/cc/unitmesh/devti/settings/LLMSettingComponent.kt

Lines changed: 30 additions & 76 deletions
@@ -8,32 +8,28 @@ import com.intellij.ide.actions.RevealFileAction
 import com.intellij.idea.LoggerFactory
 import com.intellij.openapi.project.ProjectManager
 import com.intellij.ui.EditorTextField
+import com.intellij.ui.JBColor
 import com.intellij.ui.dsl.builder.panel
 import com.intellij.util.ui.FormBuilder
 import javax.swing.JPanel

 class LLMSettingComponent(private val settings: AutoDevSettingsState) {
     // 以下 LLMParam 变量不要改名,因为这些变量名会被用作配置文件的 key
-    private val languageParam by LLMParam.creating({ LanguageChangedCallback.language = it}) {
-        ComboBox(settings.language, HUMAN_LANGUAGES.values().map { it.display }) }
-    private val aiEngineParam by LLMParam.creating(onChange = { onSelectedEngineChanged() }) {
-        ComboBox(settings.aiEngine, AIEngines.values().toList().map { it.name })
+    private val languageParam by LLMParam.creating({ LanguageChangedCallback.language = it }) {
+        ComboBox(settings.language, HUMAN_LANGUAGES.values().map { it.display })
     }
+
     private val delaySecondsParam by LLMParam.creating { Editable(settings.delaySeconds) }
     private val maxTokenLengthParam by LLMParam.creating { Editable(settings.maxTokenLength) }
-    private val openAIModelsParam by LLMParam.creating { ComboBox(settings.openAiModel, OPENAI_MODEL.toList()) }
-    private val openAIKeyParam by LLMParam.creating { Password(settings.openAiKey) }
     private val customModelParam: LLMParam by LLMParam.creating { Editable(settings.customModel) }
     private val customOpenAIHostParam: LLMParam by LLMParam.creating { Editable(settings.customOpenAiHost) }

     private val customEngineServerParam by LLMParam.creating { Editable(settings.customEngineServer) }
     private val customEngineTokenParam by LLMParam.creating { Password(settings.customEngineToken) }

-    private val customEngineResponseTypeParam by LLMParam.creating { ComboBox(settings.customEngineResponseType, ResponseType.values().map { it.name }.toList()) }
     private val customEngineResponseFormatParam by LLMParam.creating { Editable(settings.customEngineResponseFormat) }
     private val customEngineRequestBodyFormatParam by LLMParam.creating { Editable(settings.customEngineRequestFormat) }

-
     val project = ProjectManager.getInstance().openProjects.firstOrNull()
     private val customEnginePrompt: EditorTextField by lazy {
         JsonLanguageField(
@@ -44,33 +40,14 @@ class LLMSettingComponent(private val settings: AutoDevSettingsState) {
         ).apply { LanguageChangedCallback.placeholder("autodev.custom.prompt.placeholder", this, 1) }
     }

-    private val llmGroups = mapOf<AIEngines, List<LLMParam>>(
-        AIEngines.OpenAI to listOf(
-            openAIModelsParam,
-            openAIKeyParam,
-            customModelParam,
-            customOpenAIHostParam,
-        ),
-        AIEngines.Custom to listOf(
-            customEngineResponseTypeParam,
-            customEngineServerParam,
-            customEngineTokenParam,
-            customEngineResponseFormatParam,
-            customEngineRequestBodyFormatParam,
-        ),
-    )
-
-
-    private val onSelectedEngineChanged: () -> Unit = {
-        applySettings(settings, updateParams = false)
-    }
-    private val _currentSelectedEngine: AIEngines
-        get() = AIEngines.values().firstOrNull { it.name.lowercase() == aiEngineParam.value.lowercase() } ?: AIEngines.OpenAI
-
     private val currentLLMParams: List<LLMParam>
         get() {
-            return llmGroups[_currentSelectedEngine]
-                ?: throw IllegalStateException("Unknown engine: ${aiEngineParam.value}")
+            return listOf(
+                customEngineServerParam,
+                customEngineTokenParam,
+                customEngineResponseFormatParam,
+                customEngineRequestBodyFormatParam,
+            )
         }

     private fun FormBuilder.addLLMParams(llmParams: List<LLMParam>): FormBuilder = apply {
@@ -111,38 +88,31 @@ class LLMSettingComponent(private val settings: AutoDevSettingsState) {


     fun applySettings(settings: AutoDevSettingsState, updateParams: Boolean = false) {
-
-        if (updateParams && engineChanged(settings).also { updateParams(settings) }) {
-            return
-        }
         panel.removeAll()

         formBuilder
-                .addLLMParam(languageParam)
-                .addSeparator()
-                .addTooltip("For Custom LLM, config Custom Engine Server & Custom Engine Token & Custom Response Format")
-                .addLLMParam(aiEngineParam)
-                .addLLMParam(maxTokenLengthParam)
-                .addLLMParam(delaySecondsParam)
+            .addLLMParam(languageParam)
             .addSeparator()
-                .addComponent(panel {
-                    row {
-                        comment("For OpenAI LLM, config OpenAI Key & OpenAI Model & Custom OpenAI Host <a>Open Log for Debug</a>") {
-                            RevealFileAction.openFile(LoggerFactory.getLogFilePath())
-                        }
-                    }
-                })
-                .addLLMParams(currentLLMParams)
-                .addComponent(panel {
-                    if (project != null) {
-                        testLLMConnection(project)
+            .addLLMParams(currentLLMParams)
+            .addLLMParam(maxTokenLengthParam)
+            .addLLMParam(delaySecondsParam)
+            .addSeparator()
+            .addComponent(panel {
+                if (project != null) {
+                    testLLMConnection(project)
+                }
+
+                row {
+                    text(AutoDevBundle.message("settings.autodev.coder.testConnectionButton.tips")).apply {
+                        this.component.foreground = JBColor.RED
                     }
-                })
-                .addVerticalGap(2)
-                .addSeparator()
-                .addLabeledComponent(jBLabel("settings.autodev.coder.customEnginePrompt", 1), customEnginePrompt, 1, true)
-                .addComponentFillVertically(JPanel(), 0)
-                .panel
+                }
+            })
+            .addVerticalGap(2)
+            .addSeparator()
+            .addLabeledComponent(jBLabel("settings.autodev.coder.customEnginePrompt", 1), customEnginePrompt, 1, true)
+            .addComponentFillVertically(JPanel(), 0)
+            .panel

         panel.invalidate()
         panel.repaint()
@@ -151,15 +121,11 @@ class LLMSettingComponent(private val settings: AutoDevSettingsState) {
     private fun updateParams(settings: AutoDevSettingsState) {
         settings.apply {
             maxTokenLengthParam.value = maxTokenLength
-            openAIKeyParam.value = openAiKey
             customModelParam.value = customModel
             customOpenAIHostParam.value = customOpenAiHost
             customEngineServerParam.value = customEngineServer
-            customEngineResponseTypeParam.value = customEngineResponseType
             customEngineTokenParam.value = customEngineToken
-            openAIModelsParam.value = openAiModel
             languageParam.value = language
-            aiEngineParam.value = aiEngine
             customEnginePrompt.text = customPrompts
             customEngineResponseFormatParam.value = customEngineResponseFormat
             customEngineRequestBodyFormatParam.value = customEngineRequestFormat
@@ -170,16 +136,12 @@ class LLMSettingComponent(private val settings: AutoDevSettingsState) {
     fun exportSettings(destination: AutoDevSettingsState) {
         destination.apply {
             maxTokenLength = maxTokenLengthParam.value
-            openAiKey = openAIKeyParam.value
             customModel = customModelParam.value
             customOpenAiHost = customOpenAIHostParam.value
-            aiEngine = aiEngineParam.value
             language = languageParam.value
             customEngineServer = customEngineServerParam.value
-            customEngineResponseType = customEngineResponseTypeParam.value
             customEngineToken = customEngineTokenParam.value
             customPrompts = customEnginePrompt.text
-            openAiModel = openAIModelsParam.value
             customEngineResponseFormat = customEngineResponseFormatParam.value
             customEngineRequestFormat = customEngineRequestBodyFormatParam.value
             delaySeconds = delaySecondsParam.value
@@ -188,25 +150,17 @@ class LLMSettingComponent(private val settings: AutoDevSettingsState) {

     fun isModified(settings: AutoDevSettingsState): Boolean {
         return settings.maxTokenLength != maxTokenLengthParam.value ||
-                settings.openAiKey != openAIKeyParam.value ||
                 settings.customModel != customModelParam.value ||
-                settings.aiEngine != aiEngineParam.value ||
                 settings.language != languageParam.value ||
                 settings.customEngineServer != customEngineServerParam.value ||
-                settings.customEngineResponseType != customEngineResponseTypeParam.value ||
                 settings.customEngineToken != customEngineTokenParam.value ||
                 settings.customPrompts != customEnginePrompt.text ||
-                settings.openAiModel != openAIModelsParam.value ||
                 settings.customOpenAiHost != customOpenAIHostParam.value ||
                 settings.customEngineResponseFormat != customEngineResponseFormatParam.value ||
                 settings.customEngineRequestFormat != customEngineRequestBodyFormatParam.value ||
                 settings.delaySeconds != delaySecondsParam.value
     }

-    private fun engineChanged(settings: AutoDevSettingsState): Boolean {
-        return settings.aiEngine != aiEngineParam.value
-    }
-
     init {
         applySettings(settings)
         LanguageChangedCallback.language = AutoDevSettingsState.getInstance().language

core/src/main/resources/messages/AutoDevBundle_en.properties

Lines changed: 3 additions & 8 deletions
@@ -52,19 +52,14 @@ label.submit.issue=<a href="https://github.com/unit-mesh/auto-dev/issues">Want n

 # don't remove the following line and don't rename them unless change [LLMSettingCompoent] class
 settings.languageParam=Language
-settings.customOpenAIHostParam=Custom OpenAI Host
-settings.openAIKeyParam=OpenAI API key
-settings.customEngineTokenParam=Custom Engine Token
-settings.openAIModelsParam=OpenAI Models
 settings.gitTypeParam=Git Type
 settings.gitLabUrlParam=URL of Gitlab Server
 settings.gitLabTokenParam=Gitlab token
 settings.gitHubTokenParam=GitHub token
 settings.maxTokenLengthParam=Max token length
-settings.customEngineServerParam=Custom Engine Server
-settings.customAIKeyParam=Custom AI Engine Key
-settings.aiEngineParam=AI Engine
-settings.customModelParam= Custom Model
+settings.customEngineServerParam=LLM Server Address
+settings.customModelParam=Model Name
+settings.customEngineTokenParam=LLM Key

 settings.delaySecondsParam=Quest Delay Seconds
 settings.customEngineResponseFormatParam=Custom Response Format (Json Path)

core/src/main/resources/messages/AutoDevBundle_zh.properties

Lines changed: 3 additions & 8 deletions
@@ -52,19 +52,14 @@ label.submit.issue=<a href="https://github.com/unit-mesh/auto-dev/issues">想要

 # 请勿删除以下行,也不要重命名它们,除非更改 [LLMSettingCompoent] 类
 settings.languageParam=语言
-settings.customOpenAIHostParam=自定义 OpenAI Host
-settings.openAIKeyParam=OpenAI API 密钥
-settings.customEngineTokenParam=自定义引擎令牌
-settings.openAIModelsParam=OpenAI 模型
 settings.gitTypeParam=Git 类型
 settings.gitLabUrlParam=Gitlab 服务器 URL
 settings.gitLabTokenParam=Gitlab 令牌
 settings.gitHubTokenParam=GitHub 令牌
 settings.maxTokenLengthParam=最大 token 长度
-settings.customEngineServerParam=自定义 LLM 服务器
-settings.customAIKeyParam=自定义 LLM 服务器密钥
-settings.aiEngineParam=LLM 服务器
-settings.customModelParam=自定义模型
+settings.customEngineServerParam=LLM 服务器 URL
+settings.customModelParam=模型名称
+settings.customEngineTokenParam=LLM 服务器密钥

 settings.delaySecondsParam=请求延迟秒数
 settings.customEngineResponseFormatParam=自定义响应格式(Json 路径)

0 commit comments
