diff --git a/.github/workflows/publish.yaml b/.github/workflows/publish.yaml
new file mode 100644
index 0000000..9e4b853
--- /dev/null
+++ b/.github/workflows/publish.yaml
@@ -0,0 +1,61 @@
+name: Release
+
+# trigger on git push for release branch
+on:
+ workflow_dispatch: # allow manual trigger
+ push:
+ branches:
+ - release
+
+env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+
+jobs:
+ run:
+ name: Build and publish vsix package
+ strategy:
+ matrix:
+ java-version: [17]
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout current branch
+ uses: actions/checkout@v4
+ with:
+ fetch-depth: 0
+ # cache sbt dependencies
+ - uses: coursier/cache-action@v6
+
+ # install java, sbt and node
+ - name: Setup Java and sbt
+ uses: actions/setup-java@v4
+ with:
+ distribution: temurin
+ java-version: ${{ matrix.java-version }}
+ cache: sbt
+ - uses: sbt/setup-sbt@v1
+
+ - name: setup node
+ uses: actions/setup-node@v4
+
+ # install dependencies and build package
+ - name: compile Scala and build js to out/ folder
+ run:
+ sbt buildDebug
+
+ - name: package extension inside out/ folder
+ run: |
+ cd out/
+ ls -la
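+        # vsce reads the package.json copied into out/ and produces functorcoder-<version>.vsix,
+        # which the release step below uploads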
+ npx vsce package
+
+ - name: Get upload url
+ id: get_upload_url
+ run: |
+ URL=$(curl --silent "https://api.github.com/repos/doofin/functorcoder/releases/latest" | jq -r '.upload_url')
+        echo "UPLOAD_URL=$URL" >> "$GITHUB_OUTPUT"
+
+ - name: Upload VSIX package to github release
+ uses: softprops/action-gh-release@v2
+ # if: startsWith(github.ref, 'refs/tags/')
+ with:
+ files: out/functorcoder-0.0.1.vsix
\ No newline at end of file
diff --git a/.github/workflows/scala.yml b/.github/workflows/scala.yml
index bde01d3..42c95c1 100644
--- a/.github/workflows/scala.yml
+++ b/.github/workflows/scala.yml
@@ -1,4 +1,4 @@
-name: Continuous Integration
+name: CI
on:
push:
@@ -8,7 +8,7 @@ env:
jobs:
run:
- name: Build and Run
+ name: Compile
strategy:
matrix:
java-version: [17]
@@ -26,6 +26,9 @@ jobs:
java-version: ${{ matrix.java-version }}
cache: sbt
+ # cache sbt dependencies
+ - uses: coursier/cache-action@v6
+
- uses: sbt/setup-sbt@v1
- name: compile
diff --git a/.gitignore b/.gitignore
index 1257c9f..36a6d9a 100644
--- a/.gitignore
+++ b/.gitignore
@@ -38,3 +38,5 @@ out
/project/target/
/.bsp/
+*.worksheet.sc
+.sbtopts
\ No newline at end of file
diff --git a/README.md b/README.md
index cecdcb8..d121b9f 100644
--- a/README.md
+++ b/README.md
@@ -1,109 +1,112 @@
-# VSCode Extension in Scala.js
-Write vscode extensions in Scala.js! This is a collection of examples and templates to get you started, with convenient sbt tasks to build and run your extension.
+# functorcoder
+**functorcoder** is an open-source AI coding assistant built on LLMs (Large Language Models), with an algebraic and modular design, written in Scala.js. It aims to provide a clean and extensible architecture for AI coding assistants, which also makes it a useful reference for understanding the basic mechanics if you want to build your own.
+
-contains:
-- commands from the vscode command palette
-- inline completion like github copilot
-- language server protocol client
-- code actions (when pressing Alt+Enter at a code location)
+Currently implemented features:
+- auto completion as you type
+- add documentation quick fix action
-### Setup
-Requirements:
- - [Sbt](https://www.scala-sbt.org/download.html)
+
+Next important features to be implemented:
+- generate multiple files and folders
+- disable/enable auto completion
+
+Features planned for the longer term:
+- code generation: completion, documentation
+- code modification: refactoring, optimization, bug fixing
+- code analysis: code understanding, code review, code quality
-Run the vscode extension:
-* Clone this project
-* Open the project in VSCode, run the `import build` task with Metals (it should display a popup automatically).
+
+## Motivation
+As a Copilot user, I was recently encouraged by the open-source model DeepSeek and wanted to contribute something to the open-source community. There are already several AI plugins, such as Copilot, Tabnine, and Cursor, as well as open-source ones such as Continue. Looking at Continue's code, I found its design rather complex and the code not very concise. Copilot's experience is decent, but it is very closed: many things cannot be customized, such as the length of code completions or the choice of model. The open-source plugins, in turn, have quite a few stability problems and bugs.
+
-* run below command, which will open a new VSCode window with the extension loaded(first time it will take some time for scalable typed to convert typescript to scala.js):
-```bash
-sbt open
-```
+So, as a Scala enthusiast who also wants to deepen my understanding of LLM applications, I decided to implement a simple AI assistant myself in Scala.js.
+
-After the new VSCode (extension development host) window opens:
-* Run the Hello World command from the Command Palette (`⇧⌘P`) in the new VSCode window.
-* Type `hello` and select `Hello World`.
- * You should see a Notification _Hello World!_.
+## Getting Started
+Visit [vscode-scalajs-hello](https://github.com/doofin/vscode-scalajs-hello) to learn how to use Scala.js for VSCode extension development; sbt is used to build the project and run the extension. There you will learn about:
+- setting up the development environment
+- building the project and running the extension
+- packaging the extension
-### Use it as a template
-click on the `Use this template` button to create a new repository with the same structure in github.
+
+Before loading the extension, add the following to your VSCode user settings, providing an OpenAI-compatible API key and URL. Here is an example:
-### Use it as sbt dependency
-In your `build.sbt` add the following:
-```scala
-lazy val vsc = RootProject(uri("https://github.com/doofin/vscode-scalajs-hello.git"))
-lazy val root = Project("root", file(".")) dependsOn(vsc)
+```json
+"functorcoder": {
+ "apiKey": "somekey",
+ "apiUrl": "https://api.openai.com/v1/chat/completions",
+ "maxTokens": 512,
+  "model": "gpt-4o-mini"
+}
```
-### Use it as a library
-**Currently not working** due to jitpack missing npm! Welcome to contribute to fix it.
+## Project Structure
+The project is divided into two main parts: the core module and the VSCode extension module, located under `/src/main/scala/functorcoder` and `/src/main/scala/vscextension` respectively.
+
-You can use this project as a library in your project by adding the following to your `build.sbt`:
-```scala
-resolvers += Resolver.bintrayRepo("jitpack", "https://jitpack.io")
-libraryDependencies += "com.github.doofin" % "vscode-scalajs-hello" % "master-SNAPSHOT" // might be wrong
-```
+**To get started**, read the file `extensionMain.scala` in the VSCode extension module. It is the main entry point for the extension.
-You can find the latest version on
-[jitpack.](https://jitpack.io/#doofin/vscode-scalajs-hello)
+
+The first part is the core module, which we aim to keep concise. It contains the main logic of the AI coding assistant:
+- Large Language Model (LLM) integration
+- sending prompts to the LLM and getting the response
+
-Note:
- - I recommend using the Metals extension for Scala in VSCode.
- - If you have any issues, please open an issue on this repository.
+The second part is the VSCode extension module, which integrates the core module with the VSCode editor. It contains:
+- commands: commands to be executed in the editor
+- code actions: quick fix actions
+- code completion: auto completion
+- editor ui: status bar, notifications, etc.
+
+It is adapted from the [vscode-scalajs-hello](https://github.com/doofin/vscode-scalajs-hello) project; refer to it to get started with VSCode extension development in Scala.js.
-## Project structure
-The project file structure in src/main/scala is as follows:
+
+Project file structure for the core module:
```bash
-src/main/scala
-├── extensionMain.scala // main entry point for the extension
-├── commands.scala, codeActions.scala,etc // files for different extension features
-│ ├── facade // facade for vscode api
-│ ├── io // file and network io functions
+/functorcoder
+├── /src/main/scala/functorcoder
+│ ├── /llm # Integration with LLM (e.g., OpenAI API)
+│ ├── /actions
+│ │ ├── CodeGen.scala # Code completion, generation, and documentation
+│ │ ├── Commands.scala # Commands from functorcoder
+│ │ └── Debug.scala # Debugging module
+│ ├── /types # Types for code, context, and user actions
+│ ├── /editorUI # Integration with the editor (e.g., VSCode)
+│ └── /tests # Unit tests for core modules
+└── /docs # Documentation
```
+The project file structure for the VSCode extension module:
+```bash
+/vscextension
+├── /src/main/scala/vscextension
+│ ├── extensionMain.scala # Main entry point for the extension
+│ ├── commands.scala # Command definitions
+│ ├── codeActions.scala # Code action definitions
+...
+```
-The project uses the following tools:
-* [SBT] build tool for building the project
-* [Scala.js] for general coding
-* [Scalably Typed] for JavaScript facades
-* [scalajs-bundler] for bundling the JavaScript dependencies
-
-SBT is configured with the `build.sbt` file. Scala.js, ScalablyTyped and the bundler are SBT plugins. With these, SBT manages your JavaScript `npm` dependencies. You should never have to run `npm` directly, simply edit the `npmDependencies` settings in `build.sbt`.
-
-[accessible-scala]: https://marketplace.visualstudio.com/items?itemName=scala-center.accessible-scala
-[helloworld-minimal-sample]: https://github.com/Microsoft/vscode-extension-samples/tree/master/helloworld-minimal-sample
-[Scalably Typed]: https://github.com/ScalablyTyped/Converter
-[SBT]: https://www.scala-sbt.org
-[ScalaJS]: http://www.scala-js.org
-[scalajs-bundler]: https://github.com/scalacenter/scalajs-bundler
-## How to code in Scala js?
+## Design principles
+I aim to design the system with mathematics, algebra, and functional programming principles in mind. The system is designed to be modular and extensible, allowing new features and components to be added easily.
+
-In general, javascript functions and classes can be used in the same way as in JS/TS!
-If the typechecker disagrees, you can insert casts with `.asInstanceOf[Type]`.
+ Input = {Query, CodeSnippet, Spec}: The set of all possible input types (queries, code snippets, or requirements/specifications).
+
-The JS types (like `js.Array`) are available from
-```scala
-import scala.scalajs.js
-```
+ Output = {Code, Explanation, Transformation, DebugSuggestion}: The set of all possible outputs.
+
-The VSCode classes and functions are available from
-```scala
-import typings.vscode.mod as vscode
+The types and objects for Input:
+- code snippet or code file: a piece of code
+- code context: a code snippet with its surrounding code
+- query: natural language query
+- specification: natural language specification
-vscode.window.showInformationMessage("Hello World!")
-```
+
+The Output:
+- code snippet or code file: a piece of code, including completion, refactoring, optimization, bug fixing
+- explanation: a natural language explanation
+- transformation: the transformation of the input code
+- suggestion: a suggestion for debugging, improvement, or refactoring
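+
+A minimal sketch of how these sets could be modeled as algebraic data types in Scala (the names below are illustrative, not the actual types under `functorcoder.types`):
+
+```scala
+// hypothetical ADT sketch of the Input/Output sets described above
+sealed trait Input
+object Input {
+  case class Query(text: String) extends Input
+  case class CodeSnippet(code: String) extends Input
+  case class Spec(requirement: String) extends Input
+}
+
+sealed trait Output
+object Output {
+  case class Code(code: String) extends Output
+  case class Explanation(text: String) extends Output
+  case class Transformation(before: String, after: String) extends Output
+  case class DebugSuggestion(text: String) extends Output
+}
+```
+An assistant action can then be viewed as a function from `Input` to `Future[Output]`; the core module's `llmAgent.sendPrompt` has a similar shape (a `Prompt` in, a `Future[String]` out).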
-Some additional types are available in the `anon` subpackage, for example:
-```scala
-import typings.vscode.anon.Dispose
-// register a command. The cast is necessary due to typescript conversion limitations.
-vscode.commands.registerCommand(name, fun).asInstanceOf[Dispose]
-```
+# feedback
+features to be implemented:
+- refactoring
+- specify which LLM to use
+- RAG (retrieval-augmented generation) to understand the whole code base
+- MCP (Model Context Protocol) to interact with the environment, such as external tools
-You can find more information and tutorials on the [Scala.js website](https://www.scala-js.org/).
# references:
- updated from [vscode-scalajs-hello](https://github.com/pme123/vscode-scalajs-hello) with Scala 3.3.3 and sbt.version=1.9.7.
diff --git a/build.sbt b/build.sbt
index 1d30292..d6c3607 100644
--- a/build.sbt
+++ b/build.sbt
@@ -4,6 +4,7 @@ import org.scalajs.linker.interface.{ModuleKind, ModuleInitializer, ModuleSplitS
val outdir = "out" // output directory for the extension
// open command in sbt
lazy val open = taskKey[Unit]("open vscode")
+lazy val buildDebug = taskKey[Unit]("build debug")
lazy val root = project
.in(file("."))
@@ -12,31 +13,46 @@ lazy val root = project
ScalaJSBundlerPlugin,
ScalablyTypedConverterPlugin
)
- .configs(IntegrationTest)
- .settings(Defaults.itSettings: _*)
- .settings(inConfig(IntegrationTest)(ScalaJSPlugin.testConfigSettings): _*)
+ // .configs(IntegrationTest)
+ // .settings(Defaults.itSettings: _*)
+ // .settings(inConfig(IntegrationTest)(ScalaJSPlugin.testConfigSettings): _*)
.settings(
moduleName := "vscextension",
organization := "com.doofin",
scalaVersion := "3.3.4",
// warn unused imports and vars
scalacOptions ++= Seq(
- "-Wunused:all"
+ "-Wunused:all",
+ "-no-indent"
),
// check if it is running in test
// testOptions += Tests.Setup(_ => sys.props("testing") = "true"),
Compile / fastOptJS / artifactPath := baseDirectory.value / "out" / "extension.js",
Compile / fullOptJS / artifactPath := baseDirectory.value / "out" / "extension.js",
+ resolvers ++= Seq(
+ Resolver.jcenterRepo,
+ "jitpack" at "https://jitpack.io",
+ "Sonatype OSS Snapshots" at "https://oss.sonatype.org/content/repositories/snapshots"
+ ),
libraryDependencies ++= Seq(
// "com.lihaoyi" %%% "utest" % "0.8.2" % "test",
+ "org.latestbit" %%% "circe-tagged-adt-codec" % "0.11.0",
+ "com.github.doofin.stdScala" %%% "stdscala" % "387b33df3a",
+
+ // test dependencies
"org.scalameta" %%% "munit" % "0.7.29" % Test
),
Compile / npmDependencies ++=
Seq(
- "@types/vscode" -> "1.96.0", //
+ // vscode dependencies
+ "@types/vscode" -> "1.96.0",
+ // "@vscode/dts" -> "0.4.1", // it's just a utility to download sources
+        "vscode-languageclient" -> "9.0.1", // working with a manually created facade
+
+ // other dependencies
"@types/node" -> "16.11.7", // ts 3.7
- "@types/node-fetch" -> "2.5.12", // ts 3.7,compile error for scalablytyped
- "vscode-languageclient" -> "9.0.1" // working with manuallly created facade
+ "@types/node-fetch" -> "2.5.12" // ts 3.7,compile error for scalablytyped
+
),
/* ++ // check if it is running in test
(if (sys.props.get("testing") != Some("true"))
@@ -46,48 +62,47 @@ lazy val root = project
else Seq.empty), */
stIgnore ++= List( // don't generate types with scalablytyped
),
- open := openVSCodeTask().dependsOn(Compile / fastOptJS).value
- // open := openVSCodeTask.dependsOn(Compile / fastOptJS / webpack).value,
- // testFrameworks += new TestFramework("utest.runner.Framework")
- // publishMarketplace := publishMarketplaceTask.dependsOn(fullOptJS in Compile).value
+ open := openVSCodeTask().dependsOn(Compile / fastOptJS).value,
+ buildDebug := openVSCodeTask(openVscode = false).dependsOn(Compile / fastOptJS).value
)
+
addCommandAlias("compile", ";fastOptJS")
-addCommandAlias("dev", "~fastOptJS")
+addCommandAlias("dev", "~buildDebug")
addCommandAlias("fix", ";scalafixEnable;scalafixAll;")
-
+// open and buildDebug are additional custom tasks defined below
+/** prepare the extension and open vscode in extensionDevelopmentPath
+ *
+ * @param openVscode
+ * whether to open vscode or not. If false, it will just prepare the extension
+ * @return
+ */
def openVSCodeTask(openVscode: Boolean = true): Def.Initialize[Task[Unit]] =
Def
.task[Unit] {
- val base = (ThisProject / baseDirectory).value
- val log = (ThisProject / streams).value.log
+ val baseDir = (ThisProject / baseDirectory).value
+ val baseDirPath = baseDir.getCanonicalPath
+ val logger = (ThisProject / streams).value.log
- val path = base.getCanonicalPath
+ printlnOrange("[compiling] extension")
+ val _ = (Compile / fastOptJS).value
// install deps to out dir
- // print info with orange color
- println("\u001b[33m" + "[copying] package.json to out dir" + "\u001b[0m")
- s"cp package.json ${outdir}/package.json" ! log
- if (!(base / outdir / "node_modules").exists) {
- println("\u001b[33m" + "[installing] dependencies into out dir with npm" + "\u001b[0m")
- s"npm install --prefix ${outdir}" ! log
+ printlnOrange("[copying] package.json to out dir")
+ s"cp package.json ${outdir}/package.json" ! logger
+ if (!(baseDir / outdir / "node_modules").exists) {
+ printlnOrange("[installing] dependencies into out dir with npm")
+ s"npm install --prefix ${outdir}" ! logger
} else {
- println("\u001b[33m" + "[skipping] dependencies installation" + "\u001b[0m")
+ printlnOrange("[skipping] dependencies installation")
}
// launch vscode
if (openVscode) {
- println("\u001b[33m" + "[opening] vscode" + "\u001b[0m")
- s"code --extensionDevelopmentPath=$path" ! log
+ val extensionPath = s"${baseDirPath}/${outdir}"
+        printlnOrange(s"[opening] vscode with extensionDevelopmentPath=${extensionPath}")
+ s"code --extensionDevelopmentPath=$extensionPath" ! logger
}
()
}
-/* lazy val installDependencies = Def.task[Unit] {
- val base = (ThisProject / baseDirectory).value
- val log = (ThisProject / streams).value.log
- if (!(base / "node_module").exists) {
- val pb =
- new java.lang.ProcessBuilder("npm", "install")
- .directory(base)
- .redirectErrorStream(true)
- pb ! log
- }
-} */
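+// print info with orange ansi color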
+def printlnOrange(msg: Any): Unit = {
+ println("\u001b[33m" + msg + "\u001b[0m")
+}
diff --git a/package.json b/package.json
index 8d11506..88f0446 100644
--- a/package.json
+++ b/package.json
@@ -1,16 +1,20 @@
{
- "name": "vscode-scalajs-hello",
- "displayName": "vscode-scalajs-hello",
- "description": "",
+ "name": "functorcoder",
+ "displayName": "functorcoder",
+  "description": "an AI coding assistant",
+ "repository": {
+ "type": "git",
+ "url": "https://github.com/doofin/functorcoder/"
+ },
"version": "0.0.1",
- "publisher": "test",
+ "publisher": "functorcoder.com",
"categories": [
"Other"
],
"activationEvents": [
"*"
],
- "main": "./out/extension",
+ "main": "./extension",
"engines": {
"vscode": "^1.84.0"
},
@@ -22,22 +26,40 @@
"@types/vscode": "^1.73.0"
},
"contributes": {
+ "configuration": {
+ "type": "object",
+ "title": "functorcoder",
+ "properties": {
+ "openaiApiKey": {
+ "type": "string",
+ "default": ""
+ },
+ "openaiUrl": {
+ "type": "string",
+ "default": ""
+ }
+ }
+ },
"commands": [
{
- "command": "extension.helloWorld",
- "title": "Hello World"
+ "command": "functorcoder.menu",
+ "title": "functorcoder main menu"
+ },
+ {
+ "command": "functorcoder.createFiles",
+ "title": "create files and folders"
}
],
"menus": {
"file/newFile": [
{
- "command": "extension.helloWorld",
+ "command": "functorcoder.menu",
"group": "navigation"
}
],
"editor/context": [
{
- "command": "extension.helloWorld",
+ "command": "functorcoder.menu",
"group": "1_modification"
}
]
diff --git a/src/main/scala/functorcoder/actions/CodeGen.scala b/src/main/scala/functorcoder/actions/CodeGen.scala
new file mode 100644
index 0000000..848b576
--- /dev/null
+++ b/src/main/scala/functorcoder/actions/CodeGen.scala
@@ -0,0 +1,52 @@
+package functorcoder.actions
+
+import functorcoder.llm.llmMain.llmAgent
+import functorcoder.llm.llmPrompt
+import scala.concurrent.Future
+import vscextension.editorAPI
+
+object CodeGen {
+
+ /** Generates a code completion suggestion by sending a prompt to a language model.
+ *
+ * @param codeBefore
+ * The code snippet preceding the hole where completion is required.
+ * @param codeAfter
+ * The code snippet following the hole where completion is required.
+ * @param llm
+ * The language model agent used to generate the completion.
+ * @return
+ * A `Future` containing the generated code completion as a `String`.
+ */
+ def getCompletion(
+ codeBefore: String, // code before the hole
+ codeAfter: String, // code after the hole
+ llm: llmAgent
+ ): Future[String] = {
+
+ val prompt = llmPrompt
+ .Completion(codeWithHole = s"$codeBefore${llmPrompt.promptText.hole}$codeAfter")
+
+ // assistantMessage: String = promptText.prompt1
+ llm.sendPrompt(prompt)
+ }
+
+ def getDocumentation(
+ selectedCode: String,
+ llm: llmAgent
+ ) = {
+ val language = editorAPI.getLanguage()
+ val llmResponse =
+ llm.sendPrompt(
+ llmPrompt.Modification(
+ code = selectedCode, //
+ taskRequirement = llmPrompt.generateDocs(language)
+ )
+ )
+
+ val commandName = functorcoder.actions.Commands.cmdAddDocs._1
+
+ (llmResponse, commandName)
+ }
+
+}
diff --git a/src/main/scala/functorcoder/actions/Commands.scala b/src/main/scala/functorcoder/actions/Commands.scala
new file mode 100644
index 0000000..66ba8f1
--- /dev/null
+++ b/src/main/scala/functorcoder/actions/Commands.scala
@@ -0,0 +1,135 @@
+package functorcoder.actions
+
+import scala.concurrent.ExecutionContext.Implicits.global
+import scala.collection.immutable
+import scala.scalajs.js
+import scala.concurrent.Future
+
+import typings.vscode.mod as vscode
+
+import vscextension.quickPick
+import vscextension.facade.vscodeUtils.showMessageAndLog
+
+import functorcoder.types.editorTypes.codeActionParam
+import functorcoder.llm.llmMain.llmAgent
+import functorcoder.algo.treeParse
+import vscextension.statusBar
+import vscextension.editorAPI
+
+/** Commands are actions that a user can invoke in the vscode extension with command palette (ctrl+shift+p).
+ */
+object Commands {
+ type CommandT = Any => Any
+ // all the commands here
+ val cmdShowMenu =
+ ("functorcoder.menu", quickPick.showMainMenu)
+ val cmdAddDocs =
+ ("functorcoder.addDocumentation", addDocumentation)
+
+ val cmdCreateFiles =
+ ("functorcoder.createFiles", createFilesCmd)
+
+ // list of all commands to be registered
+ def commandList(llm: llmAgent): Seq[(String, CommandT)] =
+ Seq(
+ (cmdShowMenu._1, cmdShowMenu._2(llm)),
+ cmdAddDocs,
+ (cmdCreateFiles._1, cmdCreateFiles._2(llm))
+ )
+
+ // individual command handlers
+ def addDocumentation(arg: Any) = {
+ val param =
+ arg.asInstanceOf[codeActionParam[Future[String]]]
+ val llmResponse = param.param
+
+ statusBar.showSpininngStatusBarItem("functorcoder", llmResponse)
+
+ llmResponse.foreach { response =>
+ // apply the changes to the document
+ vscode.window.activeTextEditor.toOption match {
+ case None =>
+ showMessageAndLog("no active editor!")
+ case Some(ed) =>
+ ed.insertSnippet(
+ new vscode.SnippetString(response + "\n"), //
+ param.range.start // insert at the start of the selection
+ )
+ }
+
+ }
+
+ }
+
+ def createFilesCmd(llm: llmAgent)(arg: Any) = {
+ val currDir = editorAPI.getCurrentDirectory()
+
+ currDir match {
+ case None =>
+ showMessageAndLog("no current directory, please open a file")
+ case Some(value) =>
+ // split the path
+ val pathParts = value.split("/")
+        // generate parent paths for 1 to 5 levels up, e.g. /a/b/c/file.scala -> /a/b/c, /a/b, ...
+ val parentPaths =
+ (1 to 5).map { i =>
+ pathParts.take(pathParts.length - i).mkString("/")
+ }
+
+ quickPick.createQuickPick(
+ title = "create files/folders",
+ placeHolder = "select a parent folder",
+ items = parentPaths.map { path =>
+ (
+ path,
+ "",
+ { () =>
+ // create the files and folders according to the tree
+ showMessageAndLog("creating files in: " + path)
+ quickPick.createInputBox(
+ title = "Create files/folders under " + path,
+ placeHolder = "describe your project",
+ onInput = { input =>
+ val respFuture = llm.sendPrompt(functorcoder.llm.llmPrompt.CreateFiles(input))
+ respFuture.foreach { response =>
+ // parse the response to a tree of files and folders
+ val treeOpt = treeParse.parse(response)
+ val filesList = treeOpt.map(createFiles.tree2list).getOrElse(Seq()).mkString(", ")
+
+ quickPick.createQuickPick(
+ title = "Files and Folders",
+ placeHolder = "select to apply creating files and folders",
+ items = Seq(
+ (
+ s"create $filesList",
+ "",
+ { () =>
+ treeOpt match {
+ case scala.util.Success(tree) =>
+ createFiles.createFilesAndFolders(
+ tree,
+ path
+ )
+ case scala.util.Failure(exception) =>
+ showMessageAndLog(
+ s"Failed to parse tree: ${treeOpt.toString}, exception: ${exception.getMessage}"
+ )
+ }
+ // create the files and folders according to the tree
+
+ // showMessageAndLog("files created!")
+ }
+ )
+ )
+ )
+ }
+ }
+ )
+
+ }
+ )
+ }
+ )
+ }
+ }
+}
diff --git a/src/main/scala/functorcoder/actions/Debug.scala b/src/main/scala/functorcoder/actions/Debug.scala
new file mode 100644
index 0000000..b247841
--- /dev/null
+++ b/src/main/scala/functorcoder/actions/Debug.scala
@@ -0,0 +1,2 @@
+package functorcoder.actions
+object Debug {}
diff --git a/src/main/scala/functorcoder/actions/createFiles.scala b/src/main/scala/functorcoder/actions/createFiles.scala
new file mode 100644
index 0000000..da4e317
--- /dev/null
+++ b/src/main/scala/functorcoder/actions/createFiles.scala
@@ -0,0 +1,76 @@
+package functorcoder.actions
+
+import com.doofin.stdScala.dataTypes.Tree.TreeNode
+import functorcoder.algo.treeParse
+import vscextension.facade.vscodeUtils.showMessageAndLog
+import pprint.PPrinter.BlackWhite
+
+/** create files and folders according to the prompt
+ */
+object createFiles {
+
+ /** parse the prompt response to list of files and folders
+ *
+ * The prompt response is like: [(dir1/file1,"content1"), (dir2/file2,"content2")]
+ *
+ * it should return list like: List((dir1/file1, "content1"), (dir2/file2, "content2"))
+ * @param promptResponse
+ * the response from the prompt
+ */
+ def parseFilesList(promptResponse: String, retry: Int = 3): Unit = {}
+
+ /** parse the prompt response to tree of files and folders
+ *
+ * The prompt response is like: (root [(folder1 [(file1 file2) folder2]) folder3])
+ *
+ * assumes the prompt response is one of tree representation
+ *
+ * @param promptResponse
+ * the response from the prompt
+ */
+ def parseFilesTree(promptResponse: String, retry: Int = 3): Unit = {
+ println("Creating files and folders")
+
+ treeParse.parse(promptResponse) match {
+ case scala.util.Success(tree) =>
+ createFilesAndFolders(tree, "")
+ case scala.util.Failure(exception) =>
+ showMessageAndLog(s"Trying again with $retry retries left")
+ if (retry > 0) {
+ println(s"Retrying with retry=$retry")
+ parseFilesTree(promptResponse, retry - 1)
+ }
+ }
+ }
+
+ /** create files and folders according to the tree
+ *
+ * @param tree
+ * the tree of files and folders
+ */
+ def createFilesAndFolders(tree: TreeNode[String], parentPath0: String): Unit = {
+ // recursively create files and folders
+ // mkdir -p src/main/scala/functorcoder/types
+ // touch src/main/scala/functorcoder/types/InputTypes.scala
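+    // NOTE: at the moment this only logs the computed paths while walking the tree;
+    // the actual filesystem writes (e.g. via the vscode workspace fs api) are not implemented here yet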
+
+ val treeStr = BlackWhite.tokenize(tree).map(_.render).mkString("\n")
+ showMessageAndLog(s"Files and folders tree: $treeStr")
+ val TreeNode(root, children) = tree
+ val parentPath: String = parentPath0 + "/" + root
+ showMessageAndLog(s"Creating file in $parentPath, file: $root")
+
+ children.toSeq.foreach { child =>
+ createFilesAndFolders(child, parentPath)
+ }
+ }
+
+ def tree2list(tree: TreeNode[String]): Seq[String] = {
+ val TreeNode(root, children) = tree
+ val childList = children.flatMap(tree2list)
+ if (childList.isEmpty) {
+ Seq(root)
+ } else {
+ Seq(root) ++ childList
+ }
+ }
+}
diff --git a/src/main/scala/functorcoder/algo/treeParse.scala b/src/main/scala/functorcoder/algo/treeParse.scala
new file mode 100644
index 0000000..bc0ded7
--- /dev/null
+++ b/src/main/scala/functorcoder/algo/treeParse.scala
@@ -0,0 +1,97 @@
+package functorcoder.algo
+
+import scala.collection.mutable.ArrayBuffer
+import com.doofin.stdScala.dataTypes.Tree.TreeNode
+import scala.util.Try
+
+object treeParse {
+ val exampleInput = "(root [(folder1 [(file1 file2) folder2]) folder3])"
+ val exampleSyntax = "tree := (string [tree tree ...])"
+
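+  // usage sketch: parse("(root [a b])") yields Success of TreeNode("root", [TreeNode("a"), TreeNode("b")]);
+  // plain tokens become leaf nodes, and a "(value [children ...])" form becomes an inner node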
+ def parse(input: String): Try[TreeNode[String]] = Try {
+ val tokens = tokenize(input)
+ val (node, remaining) = parseNode(tokens)
+ if (remaining.nonEmpty)
+ println(s"Unconsumed tokens: $remaining")
+ node
+ }
+
+ private def tokenize(input: String): List[String] = {
+ val tokenPattern = """(\(|\)|\[|\]|[^\s\(\)\[\]]+)""".r
+ tokenPattern.findAllIn(input).toList
+ }
+
+ // Parse a node. A node is expected to start with a "(",
+ // followed by a value token and then either a bracketed children list
+ // or inline children (if any), and finally a ")".
+ private def parseNode(tokens: List[String]): (TreeNode[String], List[String]) = tokens match {
+ case "(" :: rest =>
+ rest match {
+ case value :: afterValue =>
+ // If the next token is "[", we parse a bracketed children list.
+ if (afterValue.nonEmpty && afterValue.head == "[") {
+ val (children, afterBracket) = parseChildrenUntil(afterValue.tail, "]")
+ afterBracket match {
+ case ")" :: tail => (TreeNode(value, children), tail)
+ case _ => throw new RuntimeException("Expected ) after children list")
+ }
+ } else {
+ // Otherwise, if the next token is not ")", then we assume inline children.
+ if (afterValue.nonEmpty && afterValue.head == ")") {
+ // No children case.
+ (TreeNode(value), afterValue.tail)
+ } else {
+ val (children, afterInline) = parseChildrenUntilInline(afterValue)
+ (TreeNode(value, children), afterInline)
+ }
+ }
+ case Nil =>
+ throw new RuntimeException("Expected node value after (")
+ }
+ // When not starting with "(", treat the token as a leaf.
+ case token :: rest =>
+ (TreeNode(token), rest)
+ case Nil =>
+ throw new RuntimeException("Unexpected end of tokens")
+ }
+
+ // Helper: parse children until we reach the given terminator ("]" for bracketed lists).
+ // Returns the children (as TreeNode[String]) and the remaining tokens (after dropping the terminator).
+ private def parseChildrenUntil(
+ tokens: List[String],
+ terminator: String
+ ): (ArrayBuffer[TreeNode[String]], List[String]) = {
+ val children = ArrayBuffer[TreeNode[String]]()
+ var rem = tokens
+ while (rem.nonEmpty && rem.head != terminator) {
+ if (rem.head == "(") {
+ val (child, newRem) = parseNode(rem)
+ children += child
+ rem = newRem
+ } else {
+ // A plain token becomes a leaf node.
+ children += TreeNode(rem.head)
+ rem = rem.tail
+ }
+ }
+ if (rem.isEmpty) throw new RuntimeException(s"Expected terminator $terminator")
+ (children, rem.tail) // drop the terminator
+ }
+
+ private def parseChildrenUntilInline(tokens: List[String]): (ArrayBuffer[TreeNode[String]], List[String]) = {
+ val children = ArrayBuffer[TreeNode[String]]()
+ var rem = tokens
+ while (rem.nonEmpty && rem.head != ")") {
+ if (rem.head == "(") {
+ val (child, newRem) = parseNode(rem)
+ children += child
+ rem = newRem
+ } else {
+ children += TreeNode(rem.head)
+ rem = rem.tail
+ }
+ }
+ if (rem.isEmpty) throw new RuntimeException("Expected ) at end of inline children list")
+ (children, rem.tail) // drop the closing ")"
+ }
+}
diff --git a/src/main/scala/functorcoder/editorUI/diffEdit.scala b/src/main/scala/functorcoder/editorUI/diffEdit.scala
new file mode 100644
index 0000000..32f4a0a
--- /dev/null
+++ b/src/main/scala/functorcoder/editorUI/diffEdit.scala
@@ -0,0 +1,69 @@
+package functorcoder.editorUI
+
+import scala.concurrent.ExecutionContext.Implicits.global
+
+import functorcoder.llm.llmMain
+import functorcoder.llm.llmPrompt
+
+/** This abstracts the editor behavior for inline editing
+ *
+ * Scenario: the user wants to edit a selected snippet of code in the editor; the coding assistant provides a modified
+ * version of the snippet with the changes highlighted, and the user can choose to accept or reject the changes.
+ */
+object diffEdit {
+
+ case class CursorPosition(
+ line: Int,
+ character: Int
+ )
+
+ case class DiffRequest(
+ oldText: String,
+ cursorPosition: CursorPosition,
+ task: String // the task to perform
+ )
+
+ /** The result of the diff operation
+ *
+ * @param oldText
+ * the old text
+ * @param newText
+ * the new text
+ * @param cursorPosition
+ * the cursor position
+ * @param difference
+ * the difference between the old and new text
+ */
+ case class DiffResult(
+ oldText: String,
+ newText: String,
+ cursorPosition: CursorPosition
+ // difference: Seq[String]
+ )
+
+ /** action to modify the code, like adding new code
+ *
+ * @param llmAgent
+ * the agent to perform the diff operation
+ * @param diffReq
+ * the diff request
+ * @return
+ * the result of the diff operation
+ */
+ def diff(llmAgent: llmMain.llmAgent, diffReq: DiffRequest) = {
+ val prompt = llmPrompt.Modification(
+ code = diffReq.oldText,
+      taskRequirement = diffReq.task // pass the requested task through to the prompt
+ )
+
+ val llmResponse = llmAgent.sendPrompt(prompt)
+
+ llmResponse.map(txt =>
+ DiffResult(
+ oldText = diffReq.oldText,
+ newText = txt,
+ cursorPosition = diffReq.cursorPosition
+ )
+ )
+ }
+}
diff --git a/src/main/scala/functorcoder/editorUI/editorConfig.scala b/src/main/scala/functorcoder/editorUI/editorConfig.scala
new file mode 100644
index 0000000..443772e
--- /dev/null
+++ b/src/main/scala/functorcoder/editorUI/editorConfig.scala
@@ -0,0 +1,14 @@
+package functorcoder.editorUI
+
+import functorcoder.llm.openaiReq
+
+// https://code.visualstudio.com/api/references/contribution-points#contributes.configuration
+object editorConfig {
+ case class Config(
+ openaiApiKey: String, //
+ openaiUrl: String,
+ maxTokens: Int,
+ model: String = openaiReq.models.gpt4o
+ )
+
+}
diff --git a/src/main/scala/functorcoder/editorUI/menu.scala b/src/main/scala/functorcoder/editorUI/menu.scala
new file mode 100644
index 0000000..385ea1a
--- /dev/null
+++ b/src/main/scala/functorcoder/editorUI/menu.scala
@@ -0,0 +1,27 @@
+package functorcoder.editorUI
+
+import vscextension.facade.vscodeUtils.*
+import functorcoder.llm.llmMain.llmAgent
+import functorcoder.actions.Commands
+
+object menu {
+ case class Menu(
+ title: String, //
+ menuItems: Seq[(String, () => Unit)]
+ )
+
+ // the main menu
+ def getMainMenu(llm: llmAgent) = {
+ val mainMenuItems: Seq[(String, () => Unit)] = Seq(
+ "create files" -> { () =>
+ // invoke the create files command directly as function
+ val _: Unit = Commands.cmdCreateFiles._2(llm)(())
+ },
+ "disable autocomplete" -> { () => showMessageAndLog("disable autocomplete") }
+ )
+ Menu(
+ title = "functorcoder menu",
+ menuItems = mainMenuItems
+ )
+ }
+}
diff --git a/src/main/scala/functorcoder/llm/llmMain.scala b/src/main/scala/functorcoder/llm/llmMain.scala
new file mode 100644
index 0000000..09eb5fe
--- /dev/null
+++ b/src/main/scala/functorcoder/llm/llmMain.scala
@@ -0,0 +1,122 @@
+package functorcoder.llm
+
+import scala.scalajs.concurrent.JSExecutionContext.Implicits.queue
+import scala.scalajs.js
+
+import scala.scalajs.js.Thenable.Implicits.*
+import scala.concurrent.Future
+
+import typings.nodeFetch.mod as nodeFetch
+
+import vscextension.facade.vscodeUtils.*
+import openaiReq.*
+import functorcoder.editorUI.editorConfig
+
+/** large language model (LLM) main entry
+ */
+object llmMain {
+
+  /** render the prompt into an openai request body (json string)
+   *
+   * the model and max_tokens are taken from the editor configuration
+   *
+   * @param editorCfg
+   *   the editor configuration (model, max tokens, etc.)
+   * @param inputPrompt
+   *   the prompt object to send
+   * @return
+   *   the request body as a json string
+   */
+ def prompt2str(editorCfg: editorConfig.Config, inputPrompt: llmPrompt.Prompt) = {
+ // showMessageAndLog(s"prompt: ${inputPrompt}")
+
+ val openAiRequest = openaiReq
+ .OpenAiRequest(
+ List(
+ openaiReq.Message(roles.system, inputPrompt.getSysMessage),
+ openaiReq.Message(roles.user, inputPrompt.generatePrompt)
+ ),
+ editorCfg.model,
+ max_tokens = Some(editorCfg.maxTokens)
+ )
+
+ // showMessageAndLog(s"openai request: ${openAiRequest}")
+ openAiRequest.toJson
+ }
+
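+  // illustrative usage (the real wiring lives in the vscextension module):
+  //   val agent = llmAgent(editorConfig.Config(openaiApiKey = "...", openaiUrl = "...", maxTokens = 512))
+  //   agent.sendPrompt(llmPrompt.Completion("val x = {{HOLE}}")).foreach(println)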
+ /** llm agent to send request to openai api
+ *
+ * @param editorCfg
+ * the editor configuration
+ */
+ case class llmAgent(editorCfg: editorConfig.Config) {
+
+ val url = editorCfg.openaiUrl
+ val apiKey = editorCfg.openaiApiKey
+
+    /** send a prompt to the openai-compatible api asynchronously
+      *
+      * @param input
+      *   the prompt to send, e.g. a Completion prompt whose code contains the hole marker to fill
+      * @return
+      *   a Future with the text content of the first choice in the response
+      */
+ def sendPrompt(input: llmPrompt.Prompt) = {
+
+ val requestStr = prompt2str(editorCfg, input)
+
+ val requestOptions = getRequestOptions(requestStr)
+
+ val responseFuture =
+ nodeFetch.default(url, requestOptions)
+
+ getResponseText(responseFuture)
+ }
+
+ private def getRequestOptions(requestStr: String) = {
+ new nodeFetch.RequestInit {
+ method = "POST"
+ headers = new nodeFetch.Headers {
+ append("Content-Type", "application/json")
+ append("Authorization", s"Bearer $apiKey")
+ }
+ body = requestStr
+ }
+ }
+
+ /** get the response text from ai api, only the content of the first choice
+ *
+ * it parses the response json and returns the first choice
+ *
+ * @param responseFuture
+ * the response future
+ * @return
+ * the response text
+ */
+ private def getResponseText(responseFuture: Future[nodeFetch.Response]) = {
+ for {
+ res <- responseFuture
+ body <- res
+ .json()
+ .toFuture
+ .asInstanceOf[Future[js.Object]]
+ .map(x => js.JSON.stringify(x))
+ } yield {
+ // the body of the response
+ // showMessageAndLog(s"openai response: $body")
+ val decodedResponse =
+ openAIResponse.decodeOpenAIResponse(body)
+ decodedResponse match {
+ case Left(err) =>
+ // return an empty string if failed
+ showMessageAndLog(s"error parsing openai response: $err")
+ ""
+ case Right(resp) =>
+ // return the first choice
+ resp.choices.headOption match {
+ case Some(choice) => choice.message.content
+ case None => ""
+ }
+ }
+ }
+ }
+ }
+
+}
diff --git a/src/main/scala/functorcoder/llm/llmPrompt.scala b/src/main/scala/functorcoder/llm/llmPrompt.scala
new file mode 100644
index 0000000..72f629e
--- /dev/null
+++ b/src/main/scala/functorcoder/llm/llmPrompt.scala
@@ -0,0 +1,149 @@
+package functorcoder.llm
+
+/** prompts for the llm
+ *
+ * for completion, code generation, etc.
+ */
+object llmPrompt {
+
+ /** tags, placeholders and templates used in the prompt
+ *
+ * for code completion
+ */
+ case class QueryTags(
+ hole: String, //
+ queryStart: String,
+ queryEnd: String,
+ task: String
+ )
+
+ val tagsInUse =
+ QueryTags(
+ hole = "{{HOLE}}", //
+ queryStart = "{{QUERY_START}}",
+ queryEnd = "{{QUERY_END}}",
+ task = "{{TASK}}"
+ )
+
+ // trait will have undefined value, so we use abstract class
+ sealed abstract class Prompt(val ctrlMsg: String) {
+ def generatePrompt: String
+ def getSysMessage: String = ctrlMsg
+ }
+
+ /** code completion prompt
+ *
+ * https://github.com/continuedev/continue/blob/main/core/autocomplete/templating/AutocompleteTemplate.ts
+ *
+ * @param codeWithHole
+ * code with a hole to fill like {{FILL_HERE}}
+ * @param taskRequirement
+ * like "Fill the {{FILL_HERE}} hole."
+ * @param assistantMessage
+ * like "always give scala code examples." or
+ *
+ * You are a HOLE FILLER. You are provided with a file containing holes, formatted as '{{HOLE_NAME}}'. Your TASK is
+ * to complete with a string to replace this hole with, inside a XML tag, including context-aware
+ * indentation, if needed
+ *
+ * You will complete code strings with a hole {{FILL_HERE}}, you only return the code for the hole.
+ */
+ case class Completion(
+ codeWithHole: String, // code with a hole to fill like {{FILL_HERE}}
+ // taskRequirement: String, // like "Fill the {{FILL_HERE}} hole."
+ ctrlMessage: String = promptText.promptComp3
+ ) extends Prompt(ctrlMessage) {
+ def generatePrompt = {
+
+ codeWithHole
+ }
+
+ }
+
+ /** modify code snippet
+ *
+ * @param code
+ * code snippet
+ * @param taskRequirement
+ * like "Fill the {{FILL_HERE}} hole."
+ * @param assistantMessage
+ * like "always give scala code examples."
+ */
+ case class Modification(
+ code: String,
+ taskRequirement: String,
+ assistantMessage: String =
+ "You are given a text or code snippet wrapped in tag and a TASK requirement. " +
+ "You are going to return the new snippet according to the TASK requirement. "
+ ) extends Prompt(assistantMessage) {
+ def generatePrompt = {
+ s"""
+ |${code}
+ |
+ |TASK: ${taskRequirement}
+ |""".stripMargin
+ }
+ }
+ case class CreateFiles(
+ userRequest: String,
+ assistantMessage: String =
+ s"an input is wrapped in ${tagsInUse.queryStart} and ${tagsInUse.queryEnd}, and the requirement is inside ${tagsInUse.task}. " +
+        "From the input and the requirement, you return the code snippet"
+ ) extends Prompt(assistantMessage) {
+ def generatePrompt = {
+ import functorcoder.algo.treeParse
+
+ val task =
+ s" return tree of files and folders in the format: ${treeParse.exampleSyntax}. An example input is: ${treeParse.exampleInput}. return the tree data structure in that format."
+
+ s"""${tagsInUse.queryStart}
+ |${userRequest}
+ |${tagsInUse.queryEnd}
+ |${tagsInUse.task} : ${task}
+ |""".stripMargin
+ }
+ }
+
+  /** prompt engineering
+   *
+   * more art than science: just try different prompts and see what works best
+ */
+ object promptText {
+ val hole = "{{HOLE}}"
+ val promptComp1 =
+ "You are a code or text autocompletion assistant. " +
+ s"In the provided input, missing code or text are marked as $hole. " +
+        s"In the provided input, missing code or text is marked as $hole. " +
+        "Your task is to output only the snippet that replaces the placeholder, " +
+
+ val promptComp2 =
+ "You are a hole filler." +
+ "Given a string with a hole: " + s"$hole in the string, " +
+ "you replace this hole with your reply." +
+ "only return the string for the hole with indentation, without any quotes"
+
+    val promptComp3 = s"Fill in the missing text specified by $hole. Only return the string which replaces the hole. " +
+ "Don't wrap it with backticks or any other tags"
+ }
+
+ def generateDocs(language: String) = {
+    s"Generate short documentation for the input code in language: $language. " +
+      "Return only the documentation; " +
+      "the documentation must conform to the conventions of that language. " +
+ "Don't wrap it with backticks or any other tags."
+ }
+}
+
+/* example:
+
+function sum_evens(lim) {
+ var sum = 0;
+ for (var i = 0; i < lim; ++i) {
+ {{FILL_HERE}}
+ }
+ return sum;
+}
+
+
+TASK: Fill the {{FILL_HERE}} hole.
+ */
diff --git a/src/main/scala/functorcoder/llm/openAIResponse.scala b/src/main/scala/functorcoder/llm/openAIResponse.scala
new file mode 100644
index 0000000..bdf7f58
--- /dev/null
+++ b/src/main/scala/functorcoder/llm/openAIResponse.scala
@@ -0,0 +1,84 @@
+package functorcoder.llm
+
+import io.circe._
+import io.circe.generic.semiauto._
+import io.circe.parser.*
+
+/** response from openAI API
+ *
+ * https://platform.openai.com/docs/api-reference/making-requests
+ */
+object openAIResponse {
+
+  def decodeOpenAIResponse(json: String): Either[Error, OpenAiResponse] =
+    decode[OpenAiResponse](escapeJsonString(json))
+
+ def escapeJsonString(str: String): String = {
+    // strip literal newline, tab and carriage-return characters so the stringified response parses cleanly
+ str
+ .replace("\n", "")
+ .replace("\t", "")
+ .replace("\r", "")
+ }
+
+  // placeholder: the replacement targets are empty strings, so this does not restore the stripped
+  // characters (String.replace with an empty target would insert between every character)
+  def reverseEscapeJsonString(str: String): String = {
+    str
+ .replace("", "\n")
+ .replace("", "\t")
+ .replace("", "\r")
+ }
+
+ case class OpenAiResponse(
+ id: String,
+ `object`: String,
+ created: Long,
+ model: String,
+ usage: Usage,
+ choices: List[Choice]
+ )
+
+ case class Choice(
+ message: Message,
+ logprobs: Option[String],
+ finish_reason: String,
+ index: Int
+ )
+
+ case class Message(
+ role: String, // the role
+ content: String // the content from llm
+ )
+
+ case class CompletionTokensDetails(
+ reasoning_tokens: Int,
+ accepted_prediction_tokens: Int,
+ rejected_prediction_tokens: Int
+ )
+
+ case class Usage(
+ prompt_tokens: Int,
+ completion_tokens: Int,
+ total_tokens: Int,
+ completion_tokens_details: CompletionTokensDetails
+ )
+
+// encode and decode
+
+ object Choice {
+ implicit val encoder: Encoder[Choice] = deriveEncoder[Choice]
+ implicit val decoder: Decoder[Choice] = deriveDecoder[Choice]
+ }
+
+ object OpenAiResponse {
+ implicit val encoder: Encoder[OpenAiResponse] = deriveEncoder[OpenAiResponse]
+ implicit val decoder: Decoder[OpenAiResponse] = deriveDecoder[OpenAiResponse]
+ }
+
+}
diff --git a/src/main/scala/functorcoder/llm/openaiReq.scala b/src/main/scala/functorcoder/llm/openaiReq.scala
new file mode 100644
index 0000000..0139336
--- /dev/null
+++ b/src/main/scala/functorcoder/llm/openaiReq.scala
@@ -0,0 +1,151 @@
+package functorcoder.llm
+
+import io.circe.generic.auto._
+
+import io.circe.*
+import io.circe.syntax.*
+
+/* openAI API request
+https://platform.openai.com/docs/api-reference/chat
+'{
+ "model": "gpt-4o",
+ "messages": [
+ {
+ "role": "system",
+ "content": "You are a helpful assistant."
+ },
+ {
+ "role": "user",
+ "content": "Hello!"
+ }
+ ]
+ }'
+ */
+object openaiReq {
+
+ /** https://platform.openai.com/docs/models/model-endpoint-compatibility
+ *
+ * All GPT-4o, GPT-4o-mini, GPT-4, and GPT-3.5 Turbo models and their dated releases. chatgpt-4o-latest dynamic
+ * model. Fine-tuned versions of gpt-4o, gpt-4o-mini, gpt-4, and gpt-3.5-turbo.
+ */
+ object models {
+ val gpt4o = "gpt-4o" // price is 2.5 per 1M tokens
+ /** gpt-4o-mini is a smaller version of the GPT-4o
+ */
+ val gpt4oMini = "gpt-4o-mini" // price is 0.15 per 1M tokens
+ val gpt4 = "gpt-4"
+ val gpt35Turbo = "gpt-3.5-turbo"
+ val o3mini = "o3-mini" // reasoning model,1.1 per 1M tokens
+ }
+
+ object roles {
+ val user = "user" // user input in request
+ val system = "system" // control messages in request
+ val assistant = "assistant" // the response from the model
+ }
+
+ /** Represents a message in a conversation.
+ *
+ * @param role
+ * The role of the speaker. Must be either "user" or "system".
+ * @param content
+ * The content of the message. This field is required.
+ */
+ case class Message(role: String, content: String)
+
+ /** Represents the request body for interacting with the API.
+ *
+ * @param messages
+ * A list of messages comprising the conversation so far. This field is required.
+ * @param model
+ * The ID of the model to use. This field is required. Refer to the model endpoint compatibility table for details
+ * on which models work with the Chat API.
+ *
+ * below are optional fields
+ *
+ * @param frequency_penalty
+ * Optional. A number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency
+ * in the text so far, decreasing the model's likelihood to repeat the same line verbatim. Defaults to None.
+ * @param logit_bias
+ * Optional. A map that modifies the likelihood of specified tokens appearing in the completion. Accepts a JSON
+ * object that maps tokens (specified by their token ID in the tokenizer) to an associated bias value from -100 to
+ * 100. The bias is added to the logits generated by the model prior to sampling. The exact effect varies per
+ * model. Defaults to None.
+ * @param logprobs
+ * Optional. Whether to return log probabilities of the output tokens. If true, returns the log probabilities of
+ * each output token returned in the content of the message. Defaults to None.
+ * @param top_logprobs
+ * Optional. An integer between 0 and 20 specifying the number of most likely tokens to return at each token
+ * position, each with an associated log probability. Must be set if `logprobs` is true. Defaults to None.
+ * @param max_tokens
+ * Optional. The maximum number of tokens that can be generated in the chat completion. The total length of input
+ * tokens and generated tokens is limited by the model's context length. Defaults to None.
+ * @param n
+ * Optional. How many chat completion choices to generate for each input message. Note that you will be charged
+ * based on the number of generated tokens across all choices. Defaults to None.
+ * @param presence_penalty
+ * Optional. A number between -2.0 and 2.0. Positive values penalize new tokens based on whether they appear in the
+ * text so far, increasing the model's likelihood to talk about new topics. Defaults to None.
+ * @param response_format
+ * Optional. An object specifying the format that the model must output. Compatible with GPT-4o, GPT-4o mini, GPT-4
+ * Turbo, and all GPT-3.5 Turbo models newer than gpt-3.5-turbo-1106. Defaults to None.
+ * @param seed
+ * Optional. If specified, the system will make a best effort to sample deterministically. Repeated requests with
+ * the same seed and parameters should return the same result. Determinism is not guaranteed. Defaults to None.
+ * @param service_tier
+ * Optional. Specifies the latency tier to use for processing the request. If set to 'auto', the system will
+ * utilize scale tier credits until they are exhausted. If set to 'default', the request will be processed using
+ * the default service tier with a lower uptime SLA and no latency guarantee. When not set, the default behavior is
+ * 'auto'. Defaults to None.
+ * @param stop
+ * Optional. Up to 4 sequences where the API will stop generating further tokens. Defaults to None.
+ * @param stream
+ * Optional. If set, partial message deltas will be sent as data-only server-sent events as they become available,
+ * with the stream terminated by a `data: [DONE]` message. Defaults to None.
+ * @param stream_options
+ * Optional. Options for streaming response. Only set this when you set `stream` to true. Defaults to None.
+ * @param temperature
+ * Optional. The sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more
+ * random, while lower values like 0.2 will make it more focused and deterministic. Defaults to None.
+ * @param top_p
+ * Optional. An alternative to sampling with temperature, called nucleus sampling, where the model considers the
+ * results of the tokens with top_p probability mass. Defaults to None.
+ * @param tools
+ * Optional. A list of tools the model may call. Currently, only functions are supported as a tool. A max of 128
+ * functions are supported. Defaults to None.
+ * @param tool_choice
+ * Optional. Controls which (if any) tool is called by the model. Defaults to `none` when no tools are present, and
+ * `auto` if tools are present. Defaults to None.
+ * @param parallel_tool_calls
+ * Optional. Whether to enable parallel function calling during tool use. Defaults to None.
+ * @param user
+ * Optional. A unique identifier representing your end-user, which can help monitor and detect abuse. Defaults to
+ * None.
+ */
+ case class OpenAiRequest(
+ messages: Seq[Message],
+ model: String,
+ frequency_penalty: Option[Double] = None,
+ logit_bias: Option[Map[String, Int]] = None,
+ logprobs: Option[Boolean] = None,
+ top_logprobs: Option[Int] = None,
+ max_tokens: Option[Int] = None
+ // n: Option[Int] = None,
+ // presence_penalty: Option[Double] = None,
+ // response_format: Option[String] = None,
+ // seed: Option[Int] = None,
+ // service_tier: Option[String] = None,
+ // stop: Option[Either[String, Seq[String]]] = None,
+ // stream: Option[Boolean] = None,
+ // stream_options: Option[String] = None,
+ // temperature: Option[Double] = None,
+ // top_p: Option[Double] = None,
+ // tools: Option[String] = None,
+ // tool_choice: Option[String] = None,
+ // parallel_tool_calls: Option[Boolean] = None,
+ // user: Option[String] = None
+ ) {
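+    // note: circe's auto-derived encoder serializes unset Option fields as explicit nulls;
+    // if the target api rejects null fields, `this.asJson.deepDropNullValues.noSpaces` would omit them instead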
+ def toJson: String = this.asJson.noSpaces
+ }
+
+}
diff --git a/src/main/scala/functorcoder/types/editorTypes.scala b/src/main/scala/functorcoder/types/editorTypes.scala
new file mode 100644
index 0000000..6482839
--- /dev/null
+++ b/src/main/scala/functorcoder/types/editorTypes.scala
@@ -0,0 +1,39 @@
+package functorcoder.types
+import scala.scalajs.js
+
+object editorTypes {
+
+ /** the context of the editor
+ *
+ * @param language
+ * the programming language of the file
+ */
+ case class EditorContext(
+ language: String
+ )
+
+ class codeActionParam[T](
+ val documentUri: String, //
+ val range: typings.vscode.mod.Selection,
+ val param: T
+ ) extends js.Object
+
+ /* .Command(
+ command = functorcoder.actions.Commands.cmdAddDocs._1, //
+ title = "add documentation" //
+ )
+ .setArguments(
+ js.Array(
+ new codeActionParam(
+ document.uri.toString(),
+ range,
+ llmResponse
+ )
+ )
+ ) */
+ case class commandData[Param](
+ commandName: String,
+ title: String,
+ arguments: Param
+ )
+}
diff --git a/src/main/scala/vscextension/CodeActions.scala b/src/main/scala/vscextension/CodeActions.scala
index 2c75bac..d69108b 100644
--- a/src/main/scala/vscextension/CodeActions.scala
+++ b/src/main/scala/vscextension/CodeActions.scala
@@ -1,8 +1,14 @@
package vscextension
-import typings.vscode.mod as vscode
+
import scala.scalajs.js
+import scala.concurrent.Future
+
+import typings.vscode.mod as vscode
import facade.vscodeUtils.*
+import functorcoder.llm.llmMain.llmAgent
+import functorcoder.types.editorTypes.*
+import functorcoder.actions.CodeGen
/** Code actions are commands provided at the cursor in the editor, so users can
*
@@ -12,90 +18,97 @@ import facade.vscodeUtils.*
*/
object CodeActions {
- def registerCodeActions(context: vscode.ExtensionContext) = {
+ def registerCodeActions(context: vscode.ExtensionContext, llm: llmAgent) = {
// https://scalablytyped.org/docs/encoding#jsnative
// we need to manually create the js object and cast it
val mActionProvider =
new js.Object {
+ def createCodeAction(
+ document: vscode.TextDocument,
+ range: vscode.Selection,
+ context: vscode.CodeActionContext
+ ) = {
+ val selectedCode = document.getText(range)
+
+ val addDocsItem =
+ new vscode.CodeAction(
+ title = "add documentation for selected code",
+ kind = vscode.CodeActionKind.QuickFix
+ ) {
+ isPreferred = true // show it first
+ val language = editorAPI.getLanguage()
+ val (llmResponse, commandName) =
+ CodeGen.getDocumentation(
+ selectedCode,
+ llm
+ )
+
+ statusBar.showSpininngStatusBarItem(s"functorcoder($language)", llmResponse)
+
+ // there are no onSelect events for code actions
+ // so we need to create a command and set it here
+ // edit = new vscode.WorkspaceEdit() {
+ // showMessageAndLog("creating edit") // triggered immediately
+ // }
+ // invoke command
+
+ command = vscode
+ .Command(
+ command = commandName, //
+ title = "add documentation" //
+ )
+ .setArguments(
+ js.Array(
+ new codeActionParam(
+ document.uri.toString(),
+ range,
+ llmResponse
+ )
+ )
+ )
+
+ }
+ // can return array or promise of array
+
+ js.Array(addDocsItem)
+ }
+
def provideCodeActions(
document: vscode.TextDocument,
range: vscode.Selection,
context: vscode.CodeActionContext,
token: vscode.CancellationToken
- ): js.Array[vscode.CodeAction] = {
+ ): vscode.ProviderResult[js.Array[vscode.CodeAction]] = {
// check who triggers the code action, since vscode may trigger it automatically
context.triggerKind match {
case vscode.CodeActionTriggerKind.Invoke =>
// triggered by user
- showMessageAndLog("selected code: " + document.getText(range))
+
+ // showMessageAndLog("selected code: " + document.getText(range))
+ createCodeAction(document, range, context)
+
case _ =>
- }
- createCodeAction(document, range, context)
+ // vscode triggered it automatically, just return an empty array
+ js.Array()
- // show an underline for compiler issues
- /* context.diagnostics.map { diagnostic =>
- val codeAction = createCodeAction()
- codeAction
- } */
+ }
}
}.asInstanceOf[vscode.CodeActionProvider[vscode.CodeAction]]
- val registration: vscode.Disposable = vscode.languages.registerCodeActionsProvider(
- selector = "*",
- provider = mActionProvider,
- metadata = vscode
- .CodeActionProviderMetadata()
- .setProvidedCodeActionKinds(
- js.Array(vscode.CodeActionKind.QuickFix)
- )
- )
-
- context.pushDisposable(registration)
- showMessageAndLog("registered code actions")
- }
-
- def createCodeAction(document: vscode.TextDocument, range: vscode.Selection, context: vscode.CodeActionContext) = {
-
- // create quick fix action item
- val codeActionFix1 =
- new vscode.CodeAction(
- title = "My Code Action- replace with hello string",
- kind = vscode.CodeActionKind.QuickFix
- ) {
- isPreferred = true
- edit = new vscode.WorkspaceEdit() {
- replace(
- uri = document.uri,
- range = range,
- newText = "hello"
+ val registration: vscode.Disposable =
+ vscode.languages.registerCodeActionsProvider(
+ selector = "*",
+ provider = mActionProvider,
+ metadata = vscode
+ .CodeActionProviderMetadata()
+ .setProvidedCodeActionKinds(
+ js.Array(vscode.CodeActionKind.QuickFix)
)
- }
- // optional command to run when the code action is selected
- // command = vscode
- // .Command(
- // title = "My Code Action",
- // command = "myextension.myCodeAction.replaceWithHello"
- // )
- // .setTooltip("This is my code action")
- }
-
- // the code action for learn more
- val suggestedActionMore =
- new vscode.CodeAction(
- title = "learn more",
- kind = vscode.CodeActionKind.Empty
- ) {
- command = vscode
- .Command(
- title = "My Code Action",
- command = "myextension.myCodeAction.learnMore"
- )
- .setTooltip("This will show you more information")
- }
+ )
- js.Array(codeActionFix1, suggestedActionMore)
+ context.pushDisposable(registration)
}
}
diff --git a/src/main/scala/vscextension/commands.scala b/src/main/scala/vscextension/commands.scala
deleted file mode 100644
index abae9c7..0000000
--- a/src/main/scala/vscextension/commands.scala
+++ /dev/null
@@ -1,40 +0,0 @@
-package vscextension
-
-import typings.vscode.mod as vscode
-
-import scala.collection.immutable
-import scala.scalajs.js
-
-import facade.vscodeUtils.*
-
-/** Commands are actions that a user can invoke in the vscode extension with command palette (ctrl+shift+p).
- *
- * This object registers all the commands in the extension.
- */
-object commands {
- // Store all the commands here
- def registerAllCommands(context: vscode.ExtensionContext) = {
- val cmds =
- Seq(
- ("extension.helloWorld", showHello)
- )
-
- // register the commands
- cmds foreach { (name, fun) =>
- context.pushDisposable(
- vscode.commands.registerCommand(name, fun)
- )
- }
- }
-
- /** Example command. VSCode commands can take an argument of any type, hence the `Any` here.
- *
- * @param arg
- * the argument (we don't use, but could be useful for other commands)
- */
- def showHello(arg: Any): Unit = {
- // show a message box when the command is executed in command palette
- // by typing hello
- vscode.window.showInformationMessage(s"Hello World! How are you ?")
- }
-}
diff --git a/src/main/scala/vscextension/diffInlineEdit.scala b/src/main/scala/vscextension/diffInlineEdit.scala
new file mode 100644
index 0000000..dfa4962
--- /dev/null
+++ b/src/main/scala/vscextension/diffInlineEdit.scala
@@ -0,0 +1,34 @@
+package vscextension
+
+import functorcoder.editorUI.diffEdit.*
+
+/** This object provides the diff edits for the vscode extension.
+ *
+ * currently, the inline edit API is locked by vscode/microsoft as an insider (proposed) feature.
+ *
+ * to work around this, modify the product.json file so that the "proposed" API is enabled for this extension.
+ *
+ * https://github.com/microsoft/vscode/issues/190239
+ * https://stackoverflow.com/questions/77202394/what-vs-code-api-can-i-use-to-create-an-in-editor-chat-box-like-in-github-copilo
+ * https://stackoverflow.com/questions/76783624/vscode-extension-how-can-i-add-custom-ui-inside-the-editor
+ *
+ * https://github.com/microsoft/vscode/blob/main/src/vscode-dts/vscode.proposed.inlineEdit.d.ts
+ *
+ * related: source for vscode
+ * https://github.com/microsoft/vscode/blob/main/src/vs/workbench/services/extensions/browser/extensionService.ts#L226
+ *
+ * the product.json location has changed several times; a common path is /opt/visual-studio-code/resources/app/product.json
+ *
+ * use `strace -f code 2>&1 | grep product` to find the location of the product.json
+ *
+ * for more info: https://github.com/VSCodium/vscodium/blob/master/docs/index.md
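+ *
+ * a minimal sketch of the product.json entry, assuming the `extensionEnabledApiProposals` key used by recent
+ * vscode builds and a hypothetical `<publisher>.functorcoder` extension id:
+ * {{{
+ *   "extensionEnabledApiProposals": { "<publisher>.functorcoder": ["inlineEdit"] }
+ * }}}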
+ */
+object diffInlineEdit {
+
+ /** create a diff edit in the editor
+ *
+ * @param diffResult
+ * the diff result
+ */
+ def createDiffEdit(diffResult: DiffResult): Unit = {}
+}
diff --git a/src/main/scala/vscextension/documentProps.scala b/src/main/scala/vscextension/editorAPI.scala
similarity index 66%
rename from src/main/scala/vscextension/documentProps.scala
rename to src/main/scala/vscextension/editorAPI.scala
index da86ee3..15cf660 100644
--- a/src/main/scala/vscextension/documentProps.scala
+++ b/src/main/scala/vscextension/editorAPI.scala
@@ -5,22 +5,30 @@ import typings.vscode.mod.TextEditor
import facade.vscodeUtils.*
-object documentProps {
+object editorAPI {
/** Shows various properties of the current document and editor
*
* like the language of the document, the project root, etc.
*/
def showProps = {
+ showMessageAndLog("document language: " + getLanguage())
+
+ val projectRoot = vscode.workspace.rootPath.getOrElse("")
+ showMessageAndLog("project root: " + projectRoot)
+
+ }
+
+ def getLanguage() = {
vscode.window.activeTextEditor.toOption match {
case None =>
- showMessageAndLog("no active editor")
+ ""
case Some(editor) =>
- showMessageAndLog("current language: " + editor.document.languageId)
+ editor.document.languageId
}
+ }
- val projectRoot = vscode.workspace.rootPath.getOrElse("")
- showMessageAndLog("project root: " + projectRoot)
-
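+
+ // note: returns the full path of the active document (uri.path), not its parent directory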
+ def getCurrentDirectory() = {
+ vscode.window.activeTextEditor.toOption.map(_.document.uri.path)
}
}
diff --git a/src/main/scala/vscextension/extensionMain.scala b/src/main/scala/vscextension/extensionMain.scala
index f711bfc..9b7a39d 100644
--- a/src/main/scala/vscextension/extensionMain.scala
+++ b/src/main/scala/vscextension/extensionMain.scala
@@ -5,43 +5,43 @@ import scala.scalajs.js.annotation.JSExportTopLevel
import typings.vscode.mod as vscode
-import facade.vscodeUtils.*
-
object extensionMain {
/** The main entry for the extension, called when activated first time.
*/
@JSExportTopLevel("activate") // Exports the function to javascript so that VSCode can load it
def activate(context: vscode.ExtensionContext): Unit = {
- showMessageAndLog("congrats, your scala.js vscode extension is loaded")
+ // showMessageAndLog("congrats, your scala.js vscode extension is loaded")
- val projectRoot = vscode.workspace.rootPath.getOrElse("")
+ // vscode.workspace.rootPath.getOrElse("")
+ val cfg = vscConfig.readConfig()
+ val llm = functorcoder.llm.llmMain.llmAgent(cfg)
+ // showMessageAndLog(s"config loaded: ${cfg.toString()}")
// register all commands
- commands.registerAllCommands(context)
+ vscCommands.registerAllCommands(context, llm)
+ // show the status bar
+ statusBar.createStatusBarItem(context, llm)
+ // statusBarItem.text = "functorcoder ok"
// show the current language of the document
- documentProps.showProps
+ // documentProps.showProps
// register inline completions like github copilot
- inlineCompletions.registerInlineCompletions()
+ inlineCompletions.registerInlineCompletions(llm)
// quick pick palette, like command palette
// quickPick.showQuickPick()
// code actions like quick fixes
- CodeActions.registerCodeActions(context)
-
- // network requests
- val url = "https://github.com/"
- io.network.httpGet(url)
- io.network.httpGetTyped(url)
+ CodeActions.registerCodeActions(context, llm)
+ // functorcoder.llm.llmAIMain.test
// file operations
- io.fileIO.createFile(projectRoot)
+ // io.fileIO.createFile(projectRoot)
// load configuration
- val cfg = io.config.loadConfig(projectRoot + "/.vscode/settings.json")
- showMessageAndLog(s"config loaded: $cfg")
+ // val cfg = io.config.loadConfig(projectRoot + "/.vscode/settings.json")
+ // showMessageAndLog(s"config loaded: $cfg")
// language server client
// lsp.startLsp()
@@ -49,6 +49,8 @@ object extensionMain {
// webview
// webview.showWebviewPanel()
+ // editor config
+
}
}
diff --git a/src/main/scala/vscextension/facade/InlineEdit.scala b/src/main/scala/vscextension/facade/InlineEdit.scala
new file mode 100644
index 0000000..45f02a1
--- /dev/null
+++ b/src/main/scala/vscextension/facade/InlineEdit.scala
@@ -0,0 +1,54 @@
+package vscextension.facade
+
+import scala.scalajs.js.annotation.JSImport
+import scala.scalajs.js
+
+import typings.vscode.mod as vscode
+import typings.vscode.mod.Command
+
+/** a dialog in the editor that users can accept or reject
+ *
+ * part of the proposed inline edit API:
+ *
+ * https://github.com/microsoft/vscode/blob/main/src/vscode-dts/vscode.proposed.inlineEdit.d.ts
+ */
+
+object InlineEdit {
+
+ @js.native
+ @JSImport("vscode", "InlineEdit")
+ class InlineEdit extends js.Object {
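+ // note: for a @js.native facade the Scala constructor body is never executed;
+ // the parameters below only mirror the shape of the underlying JS constructor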
+ def this(text: String, range: vscode.Selection) = this()
+ val text: String = js.native
+ val range: vscode.Selection = js.native
+
+ val showRange: vscode.Range = js.native // vscode.Range (an unqualified Range would resolve to scala.Range)
+ val accepted: Command = js.native
+ val rejected: Command = js.native
+ val shown: Command = js.native
+ val commands: Command = js.native
+ val action: Command = js.native
+ }
+
+ @js.native
+ trait InlineEditContext extends js.Object {
+ val triggerKind: vscode.CodeActionTriggerKind = js.native
+ }
+
+// @js.native
+ trait InlineEditProvider extends js.Object {
+ def provideInlineEdits(
+ document: vscode.TextDocument,
+ content: InlineEditContext,
+ token: vscode.CancellationToken
+ ): js.Promise[js.Array[InlineEdit]]
+ }
+
+ @JSImport("vscode", "languages")
+ @js.native
+ object languages extends js.Object {
+ def registerInlineEditProvider(selector: vscode.DocumentSelector, provider: InlineEditProvider): vscode.Disposable =
+ js.native
+ }
+
+}
diff --git a/src/main/scala/vscextension/inlineCompletions.scala b/src/main/scala/vscextension/inlineCompletions.scala
index 88b76d7..debc11a 100644
--- a/src/main/scala/vscextension/inlineCompletions.scala
+++ b/src/main/scala/vscextension/inlineCompletions.scala
@@ -1,10 +1,15 @@
package vscextension
+
import typings.vscode.mod as vscode
+import scala.concurrent.ExecutionContext.Implicits.global
import scala.scalajs.js
import scala.scalajs.js.JSConverters.*
+import scala.scalajs.js.Promise
-import facade.vscodeUtils.*
+import functorcoder.llm.llmMain.llmAgent
+import functorcoder.actions.CodeGen
+import vscextension.facade.vscodeUtils.showMessageAndLog
/** demonstrates how to provide inline completions in the editor. like the github copilot
* https://github.com/microsoft/vscode-extension-samples/tree/main/inline-completions
@@ -12,8 +17,8 @@ import facade.vscodeUtils.*
*/
object inlineCompletions {
- def createCompletionProvider(): vscode.InlineCompletionItemProvider = {
- new vscode.InlineCompletionItemProvider {
+ def registerInlineCompletions(llm: llmAgent) = {
+ val mCompletionProvider = new vscode.InlineCompletionItemProvider {
override def provideInlineCompletionItems(
document: vscode.TextDocument, // the current document
position: vscode.Position, // the position of the cursor
@@ -21,29 +26,23 @@ object inlineCompletions {
token: vscode.CancellationToken // to cancel the completion
) = {
- val offset = 0
- // get the line before the current line
- val lineBefore =
- if !(position.line - offset < 0)
- then document.lineAt(position.line - offset).text
- else ""
-
- // the whole line before the cursor
- showMessage(s"line before cursor: $lineBefore")
-
- // always return a list of items, but only first item will be displayed
- val items = List("foo", "bar", "baz")
- // .filter(_.startsWith(word)) // often need to do some filtering
- .map { str =>
- new vscode.InlineCompletionItem(
- insertText = str, // text to insert
- range = new vscode.Range(position, position)
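+ // use the document text before and after the cursor as context for the LLM completion request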
+ val codeBefore = document.getText(new vscode.Range(new vscode.Position(0, 0), position))
+ val codeAfter = document.getText(new vscode.Range(position, document.positionAt(document.getText().length)))
+
+ val promptResponseF = CodeGen.getCompletion(codeBefore, codeAfter, llm)
+
+ val providerResultF: Promise[scala.scalajs.js.Array[vscode.InlineCompletionItem]] =
+ promptResponseF.map { completionText =>
+ // showMessageAndLog(s"completionText: $completionText")
+ js.Array(
+ new vscode.InlineCompletionItem(
+ insertText = completionText, // text to insert
+ range = new vscode.Range(position, position)
+ )
)
- }
+ }.toJSPromise
- // return a promise of the items, useful for async but not needed here
- val providerResultF =
- jsUtils.newJsPromise(items.toJSArray)
+ statusBar.showSpininngStatusBarItem(s"functorcoder(${editorAPI.getLanguage()})", providerResultF)
providerResultF.asInstanceOf[typings.vscode.mod.ProviderResult[
scala.scalajs.js.Array[typings.vscode.mod.InlineCompletionItem] | typings.vscode.mod.InlineCompletionList
@@ -51,9 +50,7 @@ object inlineCompletions {
}
}
- }
-
- def registerInlineCompletions() = {
- vscode.languages.registerInlineCompletionItemProvider(selector = "*", provider = createCompletionProvider())
+ vscode.languages
+ .registerInlineCompletionItemProvider(selector = "*", provider = mCompletionProvider)
}
}
diff --git a/src/main/scala/vscextension/quickPick.scala b/src/main/scala/vscextension/quickPick.scala
index 54f2a66..53ffc85 100644
--- a/src/main/scala/vscextension/quickPick.scala
+++ b/src/main/scala/vscextension/quickPick.scala
@@ -1,10 +1,11 @@
package vscextension
-import scala.concurrent.ExecutionContext.Implicits.global
import scala.scalajs.js
-import typings.vscode.mod as vscode
+import scala.scalajs.js.JSConverters._
+import scala.concurrent.ExecutionContext.Implicits.global
-import facade.vscodeUtils.*
+import typings.vscode.mod as vscode
+import functorcoder.llm.llmMain.llmAgent
/** Show a quick pick palette to select items in multiple steps
*
@@ -12,55 +13,94 @@ import facade.vscodeUtils.*
*/
object quickPick {
- def showQuickPick(): Unit = {
- val items =
- js.Array("item1", "item2", "item3")
- js.Dynamic.literal(
- placeHolder = "pick one item"
- )
+ /** create a quick pick with a list of items
+ *
+ * @param title
+ * the title of the quick pick
+ * @param placeHolder
+ * the placeholder text
+ * @param items
+ * (label, description, function) a list of items to show in the quick pick
+ * @param modifieF
+ * a function to modify the quick pick (e.g. add buttons)
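+ *
+ * usage sketch (labels and handler are hypothetical):
+ * {{{
+ *   createQuickPick("functorcoder", "pick an action", Seq(("add docs", "generate documentation", () => ())))
+ * }}}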
+ */
+ def createQuickPick(
+ title: String, //
+ placeHolder: String,
+ items: Seq[(String, String, () => Unit)],
+ modifieF: vscode.QuickPick[vscode.QuickPickItem] => Unit = { _ => }
+ ) = {
val quickPick: vscode.QuickPick[vscode.QuickPickItem] =
vscode.window.createQuickPick()
- var steps = 0 // steps for the quick pick
- quickPick.title = "Quick Pick"
- quickPick.placeholder = "pick one item"
- quickPick.totalSteps = 3
+ quickPick.title = title
+ quickPick.placeholder = placeHolder
+ // to customize the quick pick
+ modifieF(quickPick)
quickPick.buttons = js.Array(vscode.QuickInputButtons.Back)
- // option items for user to pick
- quickPick.items = items.map { itemStr => //
+ quickPick.items = items.map { (itemStr, itemDesc, _) => //
vscode
.QuickPickItem(itemStr)
.setAlwaysShow(true)
.setButtons(js.Array(vscode.QuickInputButtons.Back))
- .setDescription(itemStr + " description")
- .setDetail(itemStr + " detail")
- }
+ .setDetail(itemDesc)
+ }.toJSArray
quickPick.onDidChangeSelection { selection =>
println(s"selected: ${selection(0).label}")
- steps += 1
- quickPick.setStep(steps)
- if (selection(0).label == "item1") {
- println(s"selected: ${selection(0).label}")
+ // execute the function associated with the selected item
+ val selected = items.find(_._1 == selection(0).label)
+ selected.foreach { (_, _, fun) =>
+ fun()
+ quickPick.hide()
+ }
+ }
- // show another input box after selecting item1
- val inputBoxOptions =
- vscode
- .InputBoxOptions()
- .setTitle("Input Box")
- .setPlaceHolder("type something")
+ quickPick.onDidHide({ _ =>
+ // dispose the quick pick once it is dismissed, so its resources are released
+ quickPick.dispose()
+ })
- vscode.window.showInputBox(inputBoxOptions).toFuture.foreach { input =>
- showMessage("input: " + input)
- }
+ quickPick.show()
+ quickPick
+ }
+
+ def showMainMenu(llm: llmAgent)(arg: Any): Unit = {
+ val mMenu = functorcoder.editorUI.menu.getMainMenu(llm)
+
+ createQuickPick(
+ title = mMenu.title,
+ placeHolder = "select an action",
+ items = mMenu.menuItems.map(x => (x._1, x._1, x._2)).toSeq
+ )
+ }
+
+ /** create an input box for string
+ *
+ * @param title
+ * the title of the input box
+ * @param placeHolder
+ * the placeholder text
+ * @param onInput
+ * the function to call when input is received
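+ *
+ * usage sketch (title, placeholder and handler are hypothetical):
+ * {{{
+ *   createInputBox("functorcoder", "describe the change", input => println(input))
+ * }}}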
+ */
+ def createInputBox(
+ title: String,
+ placeHolder: String,
+ onInput: String => Unit
+ ) = {
+ val inputBoxOptions =
+ vscode
+ .InputBoxOptions()
+ .setTitle(title)
+ .setPlaceHolder(placeHolder)
+ vscode.window.showInputBox(inputBoxOptions).toFuture.foreach { inputO =>
+ inputO.toOption match {
+ case None =>
+ case Some(input) => onInput(input)
}
}
- /* quickPick.onDidHide({ () =>
- quickPick.dispose()
- }) */
- quickPick.show()
}
}
diff --git a/src/main/scala/vscextension/statusBar.scala b/src/main/scala/vscextension/statusBar.scala
new file mode 100644
index 0000000..2458350
--- /dev/null
+++ b/src/main/scala/vscextension/statusBar.scala
@@ -0,0 +1,45 @@
+package vscextension
+import scala.scalajs.js
+import typings.vscode.mod as vscode
+
+import vscextension.facade.vscodeUtils.*
+import functorcoder.actions.Commands
+import scala.concurrent.Future
+import scala.scalajs.js.JSConverters.JSRichFutureNonThenable
+import scala.concurrent.ExecutionContext.Implicits.global
+import functorcoder.llm.llmMain.llmAgent
+
+object statusBar {
+
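+ /** create the status bar item (right aligned); clicking it runs the show-menu command from `Commands.cmdShowMenu` */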
+ def createStatusBarItem(context: vscode.ExtensionContext, llm: llmAgent) = {
+ val statusBarItem =
+ vscode.window.createStatusBarItem(vscode.StatusBarAlignment.Right)
+
+ val name = "functor"
+ statusBarItem.text = name
+ statusBarItem.name = name
+ statusBarItem.command = Commands.cmdShowMenu._1
+ statusBarItem.show()
+
+ context.pushDisposable(statusBarItem.asInstanceOf[vscode.Disposable])
+ statusBarItem
+ }
+
+ /** Show a spinning status bar item while loading
+ * @param text
+ * the text to show
+ * @param promise
+ * the promise to wait for
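+ *
+ * the "$(sync~spin)" prefix used below is a vscode codicon that renders as a spinning icon
+ * until the promise settles and the message is hidden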
+ */
+ def showSpininngStatusBarItem(text: String, promise: js.Promise[Any]): vscode.Disposable = {
+ // show a spinner while loading
+ vscode.window.setStatusBarMessage(
+ "$(sync~spin)" + text,
+ hideWhenDone = promise.asInstanceOf[typings.std.PromiseLike[Any]]
+ )
+ }
+
+ def showSpininngStatusBarItem(text: String, future: Future[Any]): vscode.Disposable = {
+ showSpininngStatusBarItem(text, future.toJSPromise)
+ }
+}
diff --git a/src/main/scala/vscextension/vscCommands.scala b/src/main/scala/vscextension/vscCommands.scala
new file mode 100644
index 0000000..6ce49d4
--- /dev/null
+++ b/src/main/scala/vscextension/vscCommands.scala
@@ -0,0 +1,33 @@
+package vscextension
+
+import typings.vscode.mod as vscode
+
+import scala.scalajs.js
+
+import facade.vscodeUtils.*
+import functorcoder.llm.llmMain.llmAgent
+
+/** Commands are actions that a user can invoke in the vscode extension with command palette (ctrl+shift+p).
+ *
+ * This object registers all the commands in the extension.
+ */
+object vscCommands {
+
+ /** Register all the commands in the extension.
+ *
+ * @param context
+ * the vscode extension context
+ */
+ def registerAllCommands(context: vscode.ExtensionContext, llm: llmAgent) = {
+
+ val allCommands =
+ functorcoder.actions.Commands.commandList(llm)
+ // register the commands
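+ // each entry pairs a command id with its handler, e.g. something like
+ // ("functorcoder.showMenu", quickPick.showMainMenu(llm)) -- the actual ids are defined in functorcoder.actions.Commands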
+ allCommands foreach { (name, fun) =>
+ context.pushDisposable(
+ vscode.commands.registerCommand(name, fun)
+ )
+ }
+ }
+
+}
diff --git a/src/main/scala/vscextension/vscConfig.scala b/src/main/scala/vscextension/vscConfig.scala
new file mode 100644
index 0000000..2a283f5
--- /dev/null
+++ b/src/main/scala/vscextension/vscConfig.scala
@@ -0,0 +1,48 @@
+package vscextension
+
+import typings.vscode.mod as vscode
+import functorcoder.editorUI.editorConfig.Config
+
+object vscConfig {
+
+ /** read the configuration from the vscode settings.json
+ *
+ * https://code.visualstudio.com/api/references/contribution-points#contributes.configuration
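+ *
+ * the keys read below live under the "functorcoder" section of settings.json, e.g.:
+ * {{{
+ *   "functorcoder.apiKey": "...",
+ *   "functorcoder.apiUrl": "https://api.openai.com/v1/chat/completions",
+ *   "functorcoder.maxTokens": 1000,
+ *   "functorcoder.model": "gpt-4o"
+ * }}}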
+ */
+ def readConfig() = {
+ val config = vscode.workspace.getConfiguration("functorcoder")
+
+ // get the key values from vscode settings json
+ val apiKey =
+ config.getStringOrEmpty("apiKey")
+
+ val apiEndpointUrl =
+ config.getStringOrEmpty(key = "apiUrl", default = "https://api.openai.com/v1/chat/completions")
+
+ val maxTokens = config.get[Int]("maxTokens").getOrElse(1000)
+ val model = config.getStringOrEmpty("model", default = "gpt-4o")
+
+ Config(apiKey, apiEndpointUrl, maxTokens, model)
+ }
+
+ extension (config: vscode.WorkspaceConfiguration) {
+
+ /** get a string from the configuration, falling back to `default` when the key is missing or its value is empty
+ *
+ * @param key
+ * the configuration key to read
+ * @param default
+ * the fallback value (an empty string if not provided)
+ * @return
+ * the configured string, or the fallback
+ */
+ def getStringOrEmpty(key: String, default: String = "") = {
+ // if the key is found but the value is empty, return the default
+
+ config.get[String](key).toOption match {
+ case None => default
+ case Some(value) => if (value.isEmpty) default else value
+ }
+ }
+ }
+}