rename llms to providers & fix config change detection & use official ollama4j

master
io42630 1 week ago
parent 7e7bd03970
commit 656001f2b2

@ -1,24 +1,25 @@
<component name="ProjectRunConfigurationManager">
<configuration default="false" name="Run Plugin" type="GradleRunConfiguration" factoryName="Gradle">
<log_file alias="idea.log" path="$PROJECT_DIR$/build/idea-sandbox/system/log/idea.log"/>
<configuration default="false" name="dev" type="GradleRunConfiguration" factoryName="Gradle">
<log_file alias="idea.log" path="$PROJECT_DIR$/build/idea-sandbox/system/log/idea.log" />
<ExternalSystemSettings>
<option name="executionName"/>
<option name="externalProjectPath" value="$PROJECT_DIR$"/>
<option name="externalSystemIdString" value="GRADLE"/>
<option name="scriptParameters" value=""/>
<option name="executionName" />
<option name="externalProjectPath" value="$PROJECT_DIR$" />
<option name="externalSystemIdString" value="GRADLE" />
<option name="scriptParameters" value="--debug" />
<option name="taskDescriptions">
<list/>
<list />
</option>
<option name="taskNames">
<list>
<option value="runIde"/>
<option value="runIde" />
</list>
</option>
<option name="vmOptions" value=""/>
<option name="vmOptions" value="" />
</ExternalSystemSettings>
<ExternalSystemDebugServerProcess>true</ExternalSystemDebugServerProcess>
<ExternalSystemReattachDebugProcess>true</ExternalSystemReattachDebugProcess>
<DebugAllEnabled>false</DebugAllEnabled>
<method v="2"/>
<RunAsTest>false</RunAsTest>
<method v="2" />
</configuration>
</component>

@ -13,7 +13,7 @@ repositories {
dependencies {
compileOnly("org.projectlombok:lombok:1.18.38")
implementation("io.github.amithkoujalgi:ollama4j:1.0.70")
implementation("io.github.ollama4j:ollama4j:1.0.100")
implementation("org.slf4j:slf4j-jdk14:2.1.0-alpha1")
implementation("com.google.guava:guava:33.4.8-jre")
implementation("dev.langchain4j:langchain4j:0.36.2")

@ -1,23 +1,36 @@
package com.plexworlds.l3.config
import com.plexworlds.l3.llm.Ollama
import com.plexworlds.l3.llm.provider.Ollama
import com.intellij.openapi.options.SearchableConfigurable
import com.plexworlds.l3.llm.LLMs
import com.plexworlds.l3.llm.provider.Providers
import javax.swing.JComponent
import org.slf4j.Logger
import org.slf4j.LoggerFactory
class L3Config : SearchableConfigurable {
private var panel: L3SettingsPanel? = null
private val logger: Logger = LoggerFactory.getLogger(L3Config::class.java)
override fun createComponent(): JComponent {
return L3SettingsPanel().also { panel = it }.mainPanel
val panel = L3SettingsPanel().also { this.panel = it }
return panel.mainPanel
}
override fun isModified(): Boolean {
val panel = this.panel ?: return false
if (panel == null) {
logger.error("L3SettingsPanel is null")
return false
}
val l3PersistentState = L3PersistentState.getInstance()
return panel.modelField.text != l3PersistentState.model
// || panel.urlField.text != l3State.url
val isProviderModified = panel?.providerComboBox?.selectedItem != l3PersistentState.provider
val isModelModified = panel?.modelField?.text != l3PersistentState.model;
val isUrlModified = panel?.urlField?.text != l3PersistentState.url
val isModified = isProviderModified || isModelModified || isUrlModified;
logger.debug("isModified: $isModified")
return isModified
}
override fun reset() {
@ -31,7 +44,7 @@ class L3Config : SearchableConfigurable {
val panel = this.panel ?: return
val state = L3PersistentState.getInstance()
val llm = LLMs.valueOf(panel.providerComboBox.selectedItem as String).llm
val llm = Providers.valueOf(panel.providerComboBox.selectedItem as String).provider
val model = panel.modelField.text
llm.changeModel(model)
@ -46,6 +59,6 @@ class L3Config : SearchableConfigurable {
this.panel = null
}
override fun getDisplayName() = "AI code completion idea"
override fun getDisplayName() = "Local Llama Link"
override fun getId() = "com.plexworlds.l3.config.L3Config"
}

@ -1,14 +1,17 @@
package com.plexworlds.l3.config
import com.plexworlds.l3.llm.Ollama
import com.plexworlds.l3.llm.provider.Ollama
import com.intellij.openapi.application.ApplicationManager
import com.intellij.openapi.components.PersistentStateComponent
import com.intellij.openapi.components.Storage
import com.intellij.openapi.components.State
import com.intellij.util.xmlb.XmlSerializerUtil
import com.plexworlds.l3.llm.Dummy
import com.plexworlds.l3.llm.LLM
import com.plexworlds.l3.llm.provider.Dummy
import com.plexworlds.l3.llm.provider.Provider
/**
* Presumably used to store the state of the L3 plugin between sessions.
*/
@State(
name = "com.plexworlds.l3.config.L3PersistentState",
storages = [Storage("local-llama-link-plugin.xml")]
@ -22,7 +25,7 @@ class L3PersistentState : PersistentStateComponent<L3PersistentState> {
var url: String = "http://localhost:11434"
@JvmField
var provider: LLM = Dummy
var provider: Provider = Dummy
override fun getState(): L3PersistentState = this

@ -1,7 +1,7 @@
package com.plexworlds.l3.config;
import com.intellij.ui.IdeBorderFactory
import com.plexworlds.l3.llm.LLMs
import com.plexworlds.l3.llm.provider.Providers
import javax.swing.JComboBox
import javax.swing.JPanel
import javax.swing.JTextField
@ -15,7 +15,7 @@ class L3SettingsPanel {
init {
mainPanel.border = IdeBorderFactory.createTitledBorder("Plugin Settings")
providerComboBox.addItem(LLMs.OLLAMA.name)
providerComboBox.addItem(LLMs.DUMMY.name)
providerComboBox.addItem(Providers.OLLAMA.name)
providerComboBox.addItem(Providers.DUMMY.name)
}
}

@ -1,7 +0,0 @@
package com.plexworlds.l3.llm
enum class LLMs(val llm: LLM ) {
OLLAMA(Ollama),
DUMMY(Dummy)
}

@ -1,7 +1,7 @@
package com.plexworlds.l3.llm
package com.plexworlds.l3.llm.provider
object Dummy : LLM {
object Dummy : Provider {
private var model = ""

@ -1,11 +1,11 @@
package com.plexworlds.l3.llm
package com.plexworlds.l3.llm.provider
import io.github.amithkoujalgi.ollama4j.core.OllamaAPI
import io.github.amithkoujalgi.ollama4j.core.utils.Options
import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder
import io.github.ollama4j.OllamaAPI
import io.github.ollama4j.utils.Options
import io.github.ollama4j.utils.OptionsBuilder
import java.net.http.HttpTimeoutException
object Ollama : LLM {
object Ollama : Provider {
private var model = "codellama:7b-code"
@ -17,9 +17,10 @@ object Ollama : LLM {
for (i in 0..<RETRY_COUNT) {
val suggestion = try {
val lama = OllamaAPI(HOST)
OllamaAPI(HOST).apply {
setRequestTimeoutSeconds(4)
}.generate(model, "<PRE> $prefix <SUF>$suffix <MID>", options).response.let {
}.generate(model, "<PRE> $prefix <SUF>$suffix <MID>", null).response.let {
if (it.endsWith(END)) it.substring(0, it.length - END.length).trim(' ', '\t', '\n') else it
}
} catch (e: HttpTimeoutException) {

@ -1,10 +1,10 @@
package com.plexworlds.l3.llm
package com.plexworlds.l3.llm.provider
/**
* This is an interface for a Large Language Model (LLM).
* This is an interface for a Large Language Model (Provider).
* It provides a method to generate a completion suggestion based on a given prefix and suffix.
*/
interface LLM {
interface Provider {
/**
* This method generates a completion suggestion based on a given prefix and suffix.
*
@ -15,7 +15,7 @@ interface LLM {
fun call(prefix: String, suffix: String): String
/**
* This method changes the current model used by the LLM.
* This method changes the current model used by the Provider.
*
* @param model The name of the model to be used.
*/

@ -0,0 +1,7 @@
package com.plexworlds.l3.llm.provider
/**
 * Registry of the available [Provider] implementations, keyed by enum name.
 *
 * The settings UI populates its combo box from these names and resolves the
 * selected entry back to its singleton via [provider]
 * (e.g. `Providers.valueOf(selected).provider`).
 *
 * @property provider the singleton implementation backing this entry.
 */
enum class Providers(val provider: Provider) {
OLLAMA(Ollama),
DUMMY(Dummy)
}
Loading…
Cancel
Save