From 106e62827b6f98424b59a2353f4fdcb54e848692 Mon Sep 17 00:00:00 2001 From: Donnie Adams Date: Fri, 11 Oct 2024 10:18:05 -0400 Subject: [PATCH 01/18] chore: remove windows test workflow --- .github/workflows/run_tests.yaml | 22 ---------------------- 1 file changed, 22 deletions(-) diff --git a/.github/workflows/run_tests.yaml b/.github/workflows/run_tests.yaml index 64f3a71..1d959aa 100644 --- a/.github/workflows/run_tests.yaml +++ b/.github/workflows/run_tests.yaml @@ -37,25 +37,3 @@ jobs: NODE_GPTSCRIPT_SKIP_INSTALL_BINARY: true run: npm test - test-windows: - runs-on: windows-latest - steps: - - uses: actions/checkout@v4 - with: - fetch-depth: 1 - ref: ${{ github.event.pull_request.head.sha }} - - uses: actions/setup-node@v4 - with: - node-version: 21 - - name: Install gptscript - run: | - curl https://get.gptscript.ai/releases/default_windows_amd64_v1/gptscript.exe -o gptscript.exe - - name: Install dependencies - run: npm install - - name: Run Tests - env: - GPTSCRIPT_BIN: .\gptscript.exe - OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }} - ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }} - NODE_GPTSCRIPT_SKIP_INSTALL_BINARY: true - run: npm test From 96d37241a06f844789ce54b279973348e25364c1 Mon Sep 17 00:00:00 2001 From: Grant Linville Date: Mon, 14 Oct 2024 10:38:53 -0400 Subject: [PATCH 02/18] feat: add dataset functions (#95) Signed-off-by: Grant Linville --- src/gptscript.ts | 105 ++++++++++++++++++++++++++++++++++++++++ tests/gptscript.test.ts | 81 ++++++++++++++++++++++++++++++- 2 files changed, 185 insertions(+), 1 deletion(-) diff --git a/src/gptscript.ts b/src/gptscript.ts index 4f1cf6e..7067a84 100644 --- a/src/gptscript.ts +++ b/src/gptscript.ts @@ -12,6 +12,7 @@ export interface GlobalOpts { BaseURL?: string DefaultModel?: string DefaultModelProvider?: string + DatasetToolRepo?: string Env?: string[] } @@ -390,6 +391,84 @@ export class GPTScript { await r.text() } + // Dataset methods + + async listDatasets(workspace: string): Promise> { + if (workspace == "") { + workspace = process.env.GPTSCRIPT_WORKSPACE_DIR ?? "" + } + + const r: Run = new RunSubcommand("datasets", "", {URL: this.opts.URL, Token: this.opts.Token}) + r.request({input: "{}", workspace: workspace, datasetToolRepo: this.opts.DatasetToolRepo ?? ""}) + const result = await r.text() + return JSON.parse(result) as Array + } + + async createDataset(workspace: string, name: string, description: string): Promise { + if (workspace == "") { + workspace = process.env.GPTSCRIPT_WORKSPACE_DIR ?? "" + } + + const r: Run = new RunSubcommand("datasets/create", "", {URL: this.opts.URL, Token: this.opts.Token}) + r.request({ + input: JSON.stringify({datasetName: name, datasetDescription: description}), + workspace: workspace, + datasetToolRepo: this.opts.DatasetToolRepo ?? "" + }) + const result = await r.text() + return JSON.parse(result) as Dataset + } + + async addDatasetElement(workspace: string, datasetID: string, elementName: string, elementDescription: string, elementContent: string): Promise { + if (workspace == "") { + workspace = process.env.GPTSCRIPT_WORKSPACE_DIR ?? "" + } + + const r: Run = new RunSubcommand("datasets/add-element", "", {URL: this.opts.URL, Token: this.opts.Token}) + r.request({ + input: JSON.stringify({ + datasetID, + elementName, + elementDescription, + elementContent + }), + workspace: workspace, + datasetToolRepo: this.opts.DatasetToolRepo ?? 
"" + }) + const result = await r.text() + return JSON.parse(result) as DatasetElementMeta + } + + async listDatasetElements(workspace: string, datasetID: string): Promise> { + if (workspace == "") { + workspace = process.env.GPTSCRIPT_WORKSPACE_DIR ?? "" + } + + const r: Run = new RunSubcommand("datasets/list-elements", "", {URL: this.opts.URL, Token: this.opts.Token}) + r.request({ + input: JSON.stringify({datasetID}), + workspace: workspace, + datasetToolRepo: this.opts.DatasetToolRepo ?? "" + }) + const result = await r.text() + return JSON.parse(result) as Array + } + + async getDatasetElement(workspace: string, datasetID: string, elementName: string): Promise { + if (workspace == "") { + workspace = process.env.GPTSCRIPT_WORKSPACE_DIR ?? "" + } + + const r: Run = new RunSubcommand("datasets/get-element", "", {URL: this.opts.URL, Token: this.opts.Token}) + r.request({ + input: JSON.stringify({datasetID, element: elementName}), + workspace: workspace, + datasetToolRepo: this.opts.DatasetToolRepo ?? "" + }) + const result = await r.text() + return JSON.parse(result) as DatasetElement + } + /** * Helper method to handle the common logic for loading. * @@ -1103,3 +1182,29 @@ function jsonToCredential(cred: string): Credential { refreshToken: c.refreshToken } } + +// Dataset types + +export interface DatasetElementMeta { + name: string + description: string +} + +export interface DatasetElement { + name: string + description: string + contents: string +} + +export interface DatasetMeta { + id: string + name: string + description: string +} + +export interface Dataset { + id: string + name: string + description: string + elements: Record +} diff --git a/tests/gptscript.test.ts b/tests/gptscript.test.ts index 5703985..94cc0bd 100644 --- a/tests/gptscript.test.ts +++ b/tests/gptscript.test.ts @@ -1,7 +1,7 @@ import * as gptscript from "../src/gptscript" import { ArgumentSchemaType, - CredentialType, + CredentialType, Dataset, getEnv, PropertyType, RunEventType, @@ -13,6 +13,7 @@ import path from "path" import {fileURLToPath} from "url" import * as fs from "node:fs" import {randomBytes} from "node:crypto" +import {tmpdir} from "node:os"; let gFirst: gptscript.GPTScript let g: gptscript.GPTScript @@ -885,4 +886,82 @@ describe("gptscript module", () => { throw new Error("failed to verify deletion: " + e) } }, 20000) + + test("dataset operations", async () => { + const datasetName = "test-" + randomBytes(10).toString("hex") + const workspace = fs.mkdtempSync(path.join(tmpdir(), "node-gptscript-")) + let datasetID: string + + // Create + try { + const dataset = await g.createDataset(workspace, datasetName, "a test dataset") + expect(dataset).toBeDefined() + expect(dataset.name).toEqual(datasetName) + expect(dataset.description).toEqual("a test dataset") + expect(dataset.id.length).toBeGreaterThan(0) + expect(dataset.elements).toEqual({}) + datasetID = dataset.id + } catch (e) { + throw new Error("failed to create dataset: " + e) + } + + // Add elements + try { + const e1 = await g.addDatasetElement( + workspace, + datasetID, + "element1", + "", + "this is element 1 contents" + ) + expect(e1.name).toEqual("element1") + expect(e1.description).toEqual("") + + const e2 = await g.addDatasetElement( + workspace, + datasetID, + "element2", + "a description", + "this is element 2 contents" + ) + expect(e2.name).toEqual("element2") + expect(e2.description).toEqual("a description") + } catch (e) { + throw new Error("failed to add elements: " + e) + } + + // Get elements + try { + const e1 = await 
g.getDatasetElement(workspace, datasetID, "element1") + expect(e1.name).toEqual("element1") + expect(e1.description).toBeUndefined() + expect(e1.contents).toEqual("this is element 1 contents") + + const e2 = await g.getDatasetElement(workspace, datasetID, "element2") + expect(e2.name).toEqual("element2") + expect(e2.description).toEqual("a description") + expect(e2.contents).toEqual("this is element 2 contents") + } catch (e) { + throw new Error("failed to get elements: " + e) + } + + // List the elements in the dataset + try { + const elements = await g.listDatasetElements(workspace, datasetID) + expect(elements.length).toEqual(2) + expect(elements.map(e => e.name)).toContain("element1") + expect(elements.map(e => e.name)).toContain("element2") + } catch (e) { + throw new Error("failed to list elements: " + e) + } + + // List datasets + try { + const datasets = await g.listDatasets(workspace) + expect(datasets.length).toBeGreaterThan(0) + expect(datasets.map(d => d.name)).toContain(datasetName) + } catch (e) { + throw new Error("failed to list datasets: " + e) + } + }, 20000) }) From ce1d9f1e186f1aec397181c2333255f65d1bc7f4 Mon Sep 17 00:00:00 2001 From: Donnie Adams Date: Thu, 24 Oct 2024 14:34:38 -0400 Subject: [PATCH 03/18] feat: add workspace API Signed-off-by: Donnie Adams --- src/gptscript.ts | 90 ++++++++++++++++++++++++++- tests/gptscript.test.ts | 135 ++++++++++++++++++++++++++++++++++++---- 2 files changed, 211 insertions(+), 14 deletions(-) diff --git a/src/gptscript.ts b/src/gptscript.ts index 7067a84..1e5ade5 100644 --- a/src/gptscript.ts +++ b/src/gptscript.ts @@ -13,6 +13,7 @@ export interface GlobalOpts { DefaultModel?: string DefaultModelProvider?: string DatasetToolRepo?: string + WorkspaceTool?: string Env?: string[] } @@ -140,9 +141,12 @@ export class GPTScript { if (!this.opts.URL) { this.opts.URL = GPTScript.serverURL } + if (this.opts.URL !== "" && !this.opts.URL.startsWith("http://") && !this.opts.URL.startsWith("https://")) { + this.opts.URL = "http://" + this.opts.URL + } if (!this.opts.Env) { - this.opts.Env = [] + this.opts.Env = Object.entries(process.env).map(([k, v]) => `${k}=${v}`) } if (this.opts.URL) { this.opts.Env.push(`GPTSCRIPT_URL=${this.opts.URL}`) @@ -469,6 +473,90 @@ export class GPTScript { return JSON.parse(result) as DatasetElement } + async createWorkspace(providerType: string, ...fromWorkspaces: string[]): Promise { + const out = await this.runBasicCommand("workspaces/create", { + providerType: providerType, + fromWorkspaceIDs: fromWorkspaces, + workspaceTool: this.opts.WorkspaceTool, + env: this.opts.Env, + }) + return out.trim() + } + + async deleteWorkspace(workspaceID?: string): Promise { + if (!workspaceID) { + workspaceID = process.env.GPTSCRIPT_WORKSPACE_ID ?? "" + } + await this.runBasicCommand("workspaces/delete", { + id: workspaceID, + workspaceTool: this.opts.WorkspaceTool, + env: this.opts.Env, + }) + } + + async listFilesInWorkspace(prefix?: string, workspaceID?: string): Promise> { + if (!workspaceID) { + workspaceID = process.env.GPTSCRIPT_WORKSPACE_ID ?? "" + } + const out = await this.runBasicCommand("workspaces/list", { + id: workspaceID, + prefix: prefix, + workspaceTool: this.opts.WorkspaceTool, + env: this.opts.Env, + }) + return JSON.parse(out) + } + + async removeAll(withPrefix?: string, workspaceID?: string): Promise { + if (!workspaceID) { + workspaceID = process.env.GPTSCRIPT_WORKSPACE_ID ?? 
"" + } + await this.runBasicCommand("workspaces/remove-all-with-prefix", { + id: workspaceID, + prefix: withPrefix, + workspaceTool: this.opts.WorkspaceTool, + env: this.opts.Env, + }) + } + + async writeFileInWorkspace(filePath: string, content: ArrayBuffer, workspaceID?: string): Promise { + if (!workspaceID) { + workspaceID = process.env.GPTSCRIPT_WORKSPACE_ID ?? "" + } + await this.runBasicCommand("workspaces/write-file", { + id: workspaceID, + filePath: filePath, + contents: Buffer.from(content).toString("base64"), + workspaceTool: this.opts.WorkspaceTool, + env: this.opts.Env, + }) + } + + async deleteFileInWorkspace(filePath: string, workspaceID?: string): Promise { + if (!workspaceID) { + workspaceID = process.env.GPTSCRIPT_WORKSPACE_ID ?? "" + } + await this.runBasicCommand("workspaces/delete-file", { + id: workspaceID, + filePath: filePath, + workspaceTool: this.opts.WorkspaceTool, + env: this.opts.Env, + }) + } + + async readFileInWorkspace(filePath: string, workspaceID?: string): Promise { + if (!workspaceID) { + workspaceID = process.env.GPTSCRIPT_WORKSPACE_ID ?? "" + } + const out = await this.runBasicCommand("workspaces/read-file", { + id: workspaceID, + filePath: filePath, + workspaceTool: this.opts.WorkspaceTool, + env: this.opts.Env, + }) + return Buffer.from(out.trim(), "base64") + } + /** * Helper method to handle the common logic for loading. * diff --git a/tests/gptscript.test.ts b/tests/gptscript.test.ts index 94cc0bd..52de79f 100644 --- a/tests/gptscript.test.ts +++ b/tests/gptscript.test.ts @@ -1,7 +1,7 @@ import * as gptscript from "../src/gptscript" import { ArgumentSchemaType, - CredentialType, Dataset, + CredentialType, getEnv, PropertyType, RunEventType, @@ -13,7 +13,7 @@ import path from "path" import {fileURLToPath} from "url" import * as fs from "node:fs" import {randomBytes} from "node:crypto" -import {tmpdir} from "node:os"; +import {tmpdir} from "node:os" let gFirst: gptscript.GPTScript let g: gptscript.GPTScript @@ -908,21 +908,21 @@ describe("gptscript module", () => { // Add elements try { const e1 = await g.addDatasetElement( - workspace, - datasetID, - "element1", - "", - "this is element 1 contents" + workspace, + datasetID, + "element1", + "", + "this is element 1 contents" ) expect(e1.name).toEqual("element1") expect(e1.description).toEqual("") const e2 = await g.addDatasetElement( - workspace, - datasetID, - "element2", - "a description", - "this is element 2 contents" + workspace, + datasetID, + "element2", + "a description", + "this is element 2 contents" ) expect(e2.name).toEqual("element2") expect(e2.description).toEqual("a description") @@ -963,5 +963,114 @@ describe("gptscript module", () => { } catch (e) { throw new Error("failed to list datasets: " + e) } - }, 20000) + }, 60000) + + test("create and delete workspace", async () => { + if (!process.env.AWS_ACCESS_KEY_ID || !process.env.AWS_SECRET_ACCESS_KEY) { + console.log("AWS credentials not set, skipping test") + return + } + + const workspaceID = await g.createWorkspace("directory") + expect(workspaceID).toBeDefined() + await g.deleteWorkspace(workspaceID) + }, 60000) + + test("write, read, and delete file", async () => { + if (!process.env.AWS_ACCESS_KEY_ID || !process.env.AWS_SECRET_ACCESS_KEY) { + console.log("AWS credentials not set, skipping test") + return + } + + const workspaceID = await g.createWorkspace("directory") + expect(workspaceID).toBeDefined() + + await g.writeFileInWorkspace("test.txt", Buffer.from("test"), workspaceID) + const content = await 
g.readFileInWorkspace("test.txt", workspaceID) + expect(content.toString()).toEqual("test") + await g.deleteWorkspace(workspaceID) + }, 60000) + + test("test complex ls", async () => { + if (!process.env.AWS_ACCESS_KEY_ID || !process.env.AWS_SECRET_ACCESS_KEY) { + console.log("AWS credentials not set, skipping test") + return + } + + const workspaceID = await g.createWorkspace("directory") + + // Write files in the workspace + await g.writeFileInWorkspace("test/test1.txt", Buffer.from("hello1"), workspaceID) + await g.writeFileInWorkspace("test1/test2.txt", Buffer.from("hello2"), workspaceID) + await g.writeFileInWorkspace("test1/test3.txt", Buffer.from("hello3"), workspaceID) + await g.writeFileInWorkspace(".hidden.txt", Buffer.from("hidden"), workspaceID) + + let content = await g.listFilesInWorkspace(undefined, workspaceID) + expect(content.length).toEqual(4) + expect(content).toContain("test1/test2.txt") + expect(content).toContain("test1/test3.txt") + expect(content).toContain("test/test1.txt") + expect(content).toContain(".hidden.txt") + + content = await g.listFilesInWorkspace("test1", workspaceID) + expect(content.length).toEqual(2) + expect(content).toContain("test1/test2.txt") + expect(content).toContain("test1/test3.txt") + + await g.removeAll("test1", workspaceID) + + content = await g.listFilesInWorkspace("", workspaceID) + expect(content.length).toEqual(2) + expect(content).toContain("test/test1.txt") + expect(content).toContain(".hidden.txt") + + await g.deleteWorkspace(workspaceID) + }, 60000) + + test("create and delete workspace in s3", async () => { + const workspaceID = await g.createWorkspace("s3") + expect(workspaceID).toBeDefined() + await g.deleteWorkspace(workspaceID) + }, 60000) + + test("write, read, and delete file in s3", async () => { + const workspaceID = await g.createWorkspace("s3") + expect(workspaceID).toBeDefined() + + await g.writeFileInWorkspace("test.txt", Buffer.from("test"), workspaceID) + const content = await g.readFileInWorkspace("test.txt", workspaceID) + expect(content.toString()).toEqual("test") + await g.deleteWorkspace(workspaceID) + }, 60000) + + test("test complex ls in s3", async () => { + const workspaceID = await g.createWorkspace("s3") + + // Write files in the workspace + await g.writeFileInWorkspace("test/test1.txt", Buffer.from("hello1"), workspaceID) + await g.writeFileInWorkspace("test1/test2.txt", Buffer.from("hello2"), workspaceID) + await g.writeFileInWorkspace("test1/test3.txt", Buffer.from("hello3"), workspaceID) + await g.writeFileInWorkspace(".hidden.txt", Buffer.from("hidden"), workspaceID) + + let content = await g.listFilesInWorkspace(undefined, workspaceID) + expect(content.length).toEqual(4) + expect(content).toContain("test1/test2.txt") + expect(content).toContain("test1/test3.txt") + expect(content).toContain("test/test1.txt") + expect(content).toContain(".hidden.txt") + + content = await g.listFilesInWorkspace("test1", workspaceID) + expect(content.length).toEqual(2) + expect(content).toContain("test1/test2.txt") + expect(content).toContain("test1/test3.txt") + + await g.removeAll("test1", workspaceID) + + content = await g.listFilesInWorkspace("", workspaceID) + expect(content.length).toEqual(2) + expect(content).toContain("test/test1.txt") + expect(content).toContain(".hidden.txt") + + await g.deleteWorkspace(workspaceID) + }, 60000) }) From 4deabc35dd6b1ba9ceb364d3a2b06da7901952ce Mon Sep 17 00:00:00 2001 From: Donnie Adams Date: Thu, 24 Oct 2024 15:03:18 -0400 Subject: [PATCH 04/18] chore: make credential calls 
use runBasicCommand Signed-off-by: Donnie Adams --- src/gptscript.ts | 38 ++++++++++++++++---------------------- 1 file changed, 16 insertions(+), 22 deletions(-) diff --git a/src/gptscript.ts b/src/gptscript.ts index 1e5ade5..6a81ead 100644 --- a/src/gptscript.ts +++ b/src/gptscript.ts @@ -366,33 +366,24 @@ export class GPTScript { } async createCredential(credential: Credential): Promise { - if (!this.opts.URL) { - await this.testGPTScriptURL(20) - } - - const r: Run = new RunSubcommand("credentials/create", "", {URL: this.opts.URL, Token: this.opts.Token}) - r.request({content: credentialToJSON(credential)}) - await r.text() + await this.runBasicCommand("credentials/create", { + content: credentialToJSON(credential) + }) } async revealCredential(context: Array, name: string): Promise { - if (!this.opts.URL) { - await this.testGPTScriptURL(20) - } - - const r: Run = new RunSubcommand("credentials/reveal", "", {URL: this.opts.URL, Token: this.opts.Token}) - r.request({context, name}) - return jsonToCredential(await r.text()) + const resp = await this.runBasicCommand("credentials/reveal", { + context, + name + }) + return jsonToCredential(resp) } async deleteCredential(context: string, name: string): Promise { - if (!this.opts.URL) { - await this.testGPTScriptURL(20) - } - - const r: Run = new RunSubcommand("credentials/delete", "", {URL: this.opts.URL, Token: this.opts.Token}) - r.request({context: [context], name}) - await r.text() + await this.runBasicCommand("credentials/delete", { + context: [context], + name + }) } // Dataset methods @@ -782,7 +773,10 @@ export class Run { fetch(req).then(resp => { return resp.json() }).then(res => { - resolve(res.stdout) + if (typeof res.stdout === "string") { + resolve(res.stdout) + } + resolve(JSON.stringify(res.stdout)) }).catch(e => { reject(new Error(e)) }) From b42172c6b4f89b6bda6e7d422c2e486dcee1e99a Mon Sep 17 00:00:00 2001 From: Donnie Adams Date: Thu, 24 Oct 2024 15:03:41 -0400 Subject: [PATCH 05/18] chore: update datasets API and use runBasicCommand Signed-off-by: Donnie Adams --- src/gptscript.ts | 81 ++++++++++++++++++++--------------------- tests/gptscript.test.ts | 51 +++++++++++++------------- 2 files changed, 65 insertions(+), 67 deletions(-) diff --git a/src/gptscript.ts b/src/gptscript.ts index 6a81ead..14695e7 100644 --- a/src/gptscript.ts +++ b/src/gptscript.ts @@ -388,79 +388,78 @@ export class GPTScript { // Dataset methods - async listDatasets(workspace: string): Promise> { - if (workspace == "") { - workspace = process.env.GPTSCRIPT_WORKSPACE_DIR ?? "" + async listDatasets(workspaceID: string): Promise> { + if (workspaceID == "") { + workspaceID = process.env.GPTSCRIPT_WORKSPACE_ID ?? "" } - const r: Run = new RunSubcommand("datasets", "", {URL: this.opts.URL, Token: this.opts.Token}) - r.request({input: "{}", workspace: workspace, datasetToolRepo: this.opts.DatasetToolRepo ?? ""}) - const result = await r.text() + const result = await this.runBasicCommand("datasets", { + workspaceID: workspaceID, + datasetToolRepo: this.opts.DatasetToolRepo ?? "", + env: this.opts.Env + }) return JSON.parse(result) as Array } - async createDataset(workspace: string, name: string, description: string): Promise { - if (workspace == "") { - workspace = process.env.GPTSCRIPT_WORKSPACE_DIR ?? "" + async createDataset(workspaceID: string, name: string, description: string): Promise { + if (workspaceID == "") { + workspaceID = process.env.GPTSCRIPT_WORKSPACE_ID ?? 
"" } - const r: Run = new RunSubcommand("datasets/create", "", {URL: this.opts.URL, Token: this.opts.Token}) - r.request({ + const result = await this.runBasicCommand("datasets/create", { input: JSON.stringify({datasetName: name, datasetDescription: description}), - workspace: workspace, - datasetToolRepo: this.opts.DatasetToolRepo ?? "" + workspaceID: workspaceID, + datasetToolRepo: this.opts.DatasetToolRepo ?? "", + env: this.opts.Env }) - const result = await r.text() return JSON.parse(result) as Dataset } - async addDatasetElement(workspace: string, datasetID: string, elementName: string, elementDescription: string, elementContent: string): Promise { - if (workspace == "") { - workspace = process.env.GPTSCRIPT_WORKSPACE_DIR ?? "" + async addDatasetElement(workspaceID: string, datasetID: string, elementName: string, elementDescription: string, elementContent: string): Promise { + if (workspaceID == "") { + workspaceID = process.env.GPTSCRIPT_WORKSPACE_ID ?? "" } - const r: Run = new RunSubcommand("datasets/add-element", "", {URL: this.opts.URL, Token: this.opts.Token}) - r.request({ + const result = await this.runBasicCommand("datasets/add-element", { input: JSON.stringify({ datasetID, - elementName, - elementDescription, - elementContent + elementName: elementName, + elementDescription: elementDescription, + elementContent: elementContent }), - workspace: workspace, - datasetToolRepo: this.opts.DatasetToolRepo ?? "" + workspaceID: workspaceID, + datasetToolRepo: this.opts.DatasetToolRepo ?? "", + env: this.opts.Env }) - const result = await r.text() return JSON.parse(result) as DatasetElementMeta } - async listDatasetElements(workspace: string, datasetID: string): Promise> { - if (workspace == "") { - workspace = process.env.GPTSCRIPT_WORKSPACE_DIR ?? "" + async listDatasetElements(workspaceID: string, datasetID: string): Promise> { + if (workspaceID == "") { + workspaceID = process.env.GPTSCRIPT_WORKSPACE_ID ?? "" } - const r: Run = new RunSubcommand("datasets/list-elements", "", {URL: this.opts.URL, Token: this.opts.Token}) - r.request({ + + const result = await this.runBasicCommand("datasets/list-elements", { input: JSON.stringify({datasetID}), - workspace: workspace, - datasetToolRepo: this.opts.DatasetToolRepo ?? "" + workspaceID: workspaceID, + datasetToolRepo: this.opts.DatasetToolRepo ?? "", + env: this.opts.Env }) - const result = await r.text() return JSON.parse(result) as Array } - async getDatasetElement(workspace: string, datasetID: string, elementName: string): Promise { - if (workspace == "") { - workspace = process.env.GPTSCRIPT_WORKSPACE_DIR ?? "" + async getDatasetElement(workspaceID: string, datasetID: string, elementName: string): Promise { + if (workspaceID == "") { + workspaceID = process.env.GPTSCRIPT_WORKSPACE_ID ?? "" } - const r: Run = new RunSubcommand("datasets/get-element", "", {URL: this.opts.URL, Token: this.opts.Token}) - r.request({ + const result = await this.runBasicCommand("datasets/get-element", { input: JSON.stringify({datasetID, element: elementName}), - workspace: workspace, - datasetToolRepo: this.opts.DatasetToolRepo ?? "" + workspaceID: workspaceID, + datasetToolRepo: this.opts.DatasetToolRepo ?? 
"", + env: this.opts.Env }) - const result = await r.text() return JSON.parse(result) as DatasetElement } diff --git a/tests/gptscript.test.ts b/tests/gptscript.test.ts index 52de79f..b0ac6bd 100644 --- a/tests/gptscript.test.ts +++ b/tests/gptscript.test.ts @@ -13,7 +13,6 @@ import path from "path" import {fileURLToPath} from "url" import * as fs from "node:fs" import {randomBytes} from "node:crypto" -import {tmpdir} from "node:os" let gFirst: gptscript.GPTScript let g: gptscript.GPTScript @@ -660,7 +659,7 @@ describe("gptscript module", () => { tools: ["sys.exec"] } - const commands = [`"ls"`, `"dir"`] + const commands = [`ls`, `dir`] let confirmCallCount = 0 const run = await g.evaluate(t, {confirm: true}) run.on(gptscript.RunEventType.CallConfirm, async (data: gptscript.CallFrame) => { @@ -683,7 +682,7 @@ describe("gptscript module", () => { } const run = await g.evaluate(t, {confirm: true}) run.on(gptscript.RunEventType.CallConfirm, async (data: gptscript.CallFrame) => { - expect(data.input).toContain(`"ls"`) + expect(data.input).toContain(`ls`) confirmFound = true await g.confirm({id: data.id, accept: false, message: "I will not allow it!"}) }) @@ -889,12 +888,12 @@ describe("gptscript module", () => { test("dataset operations", async () => { const datasetName = "test-" + randomBytes(10).toString("hex") - const workspace = fs.mkdtempSync(path.join(tmpdir(), "node-gptscript-")) + const workspaceID = await g.createWorkspace("directory") let datasetID: string // Create try { - const dataset = await g.createDataset(workspace, datasetName, "a test dataset") + const dataset = await g.createDataset(workspaceID, datasetName, "a test dataset") expect(dataset).toBeDefined() expect(dataset.name).toEqual(datasetName) expect(dataset.description).toEqual("a test dataset") @@ -908,7 +907,7 @@ describe("gptscript module", () => { // Add elements try { const e1 = await g.addDatasetElement( - workspace, + workspaceID, datasetID, "element1", "", @@ -918,7 +917,7 @@ describe("gptscript module", () => { expect(e1.description).toEqual("") const e2 = await g.addDatasetElement( - workspace, + workspaceID, datasetID, "element2", "a description", @@ -932,12 +931,12 @@ describe("gptscript module", () => { // Get elements try { - const e1 = await g.getDatasetElement(workspace, datasetID, "element1") + const e1 = await g.getDatasetElement(workspaceID, datasetID, "element1") expect(e1.name).toEqual("element1") expect(e1.description).toBeUndefined() expect(e1.contents).toEqual("this is element 1 contents") - const e2 = await g.getDatasetElement(workspace, datasetID, "element2") + const e2 = await g.getDatasetElement(workspaceID, datasetID, "element2") expect(e2.name).toEqual("element2") expect(e2.description).toEqual("a description") expect(e2.contents).toEqual("this is element 2 contents") @@ -947,7 +946,7 @@ describe("gptscript module", () => { // List the elements in the dataset try { - const elements = await g.listDatasetElements(workspace, datasetID) + const elements = await g.listDatasetElements(workspaceID, datasetID) expect(elements.length).toEqual(2) expect(elements.map(e => e.name)).toContain("element1") expect(elements.map(e => e.name)).toContain("element2") @@ -957,7 +956,7 @@ describe("gptscript module", () => { // List datasets try { - const datasets = await g.listDatasets(workspace) + const datasets = await g.listDatasets(workspaceID) expect(datasets.length).toBeGreaterThan(0) expect(datasets.map(d => d.name)).toContain(datasetName) } catch (e) { @@ -966,22 +965,12 @@ describe("gptscript module", () 
=> { }, 60000) test("create and delete workspace", async () => { - if (!process.env.AWS_ACCESS_KEY_ID || !process.env.AWS_SECRET_ACCESS_KEY) { - console.log("AWS credentials not set, skipping test") - return - } - const workspaceID = await g.createWorkspace("directory") expect(workspaceID).toBeDefined() await g.deleteWorkspace(workspaceID) }, 60000) test("write, read, and delete file", async () => { - if (!process.env.AWS_ACCESS_KEY_ID || !process.env.AWS_SECRET_ACCESS_KEY) { - console.log("AWS credentials not set, skipping test") - return - } - const workspaceID = await g.createWorkspace("directory") expect(workspaceID).toBeDefined() @@ -992,11 +981,6 @@ describe("gptscript module", () => { }, 60000) test("test complex ls", async () => { - if (!process.env.AWS_ACCESS_KEY_ID || !process.env.AWS_SECRET_ACCESS_KEY) { - console.log("AWS credentials not set, skipping test") - return - } - const workspaceID = await g.createWorkspace("directory") // Write files in the workspace @@ -1028,12 +1012,22 @@ describe("gptscript module", () => { }, 60000) test("create and delete workspace in s3", async () => { + if (!process.env.AWS_ACCESS_KEY_ID || !process.env.AWS_SECRET_ACCESS_KEY) { + console.log("AWS credentials not set, skipping test") + return + } + const workspaceID = await g.createWorkspace("s3") expect(workspaceID).toBeDefined() await g.deleteWorkspace(workspaceID) }, 60000) test("write, read, and delete file in s3", async () => { + if (!process.env.AWS_ACCESS_KEY_ID || !process.env.AWS_SECRET_ACCESS_KEY) { + console.log("AWS credentials not set, skipping test") + return + } + const workspaceID = await g.createWorkspace("s3") expect(workspaceID).toBeDefined() @@ -1044,6 +1038,11 @@ describe("gptscript module", () => { }, 60000) test("test complex ls in s3", async () => { + if (!process.env.AWS_ACCESS_KEY_ID || !process.env.AWS_SECRET_ACCESS_KEY) { + console.log("AWS credentials not set, skipping test") + return + } + const workspaceID = await g.createWorkspace("s3") // Write files in the workspace From 8ef041284cbecd0949263515f0cbde937e485fff Mon Sep 17 00:00:00 2001 From: Donnie Adams Date: Mon, 28 Oct 2024 07:24:26 -0400 Subject: [PATCH 06/18] chore: make workspace ID required when deleting workspaces Signed-off-by: Donnie Adams --- src/gptscript.ts | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/gptscript.ts b/src/gptscript.ts index 14695e7..40ca832 100644 --- a/src/gptscript.ts +++ b/src/gptscript.ts @@ -473,10 +473,11 @@ export class GPTScript { return out.trim() } - async deleteWorkspace(workspaceID?: string): Promise { + async deleteWorkspace(workspaceID: string): Promise { if (!workspaceID) { - workspaceID = process.env.GPTSCRIPT_WORKSPACE_ID ?? 
"" + return Promise.reject("workspace ID cannot be empty") } + await this.runBasicCommand("workspaces/delete", { id: workspaceID, workspaceTool: this.opts.WorkspaceTool, From 306861b8b5aea2ea0481c180067a1687d2726530 Mon Sep 17 00:00:00 2001 From: Donnie Adams Date: Mon, 28 Oct 2024 07:48:02 -0400 Subject: [PATCH 07/18] feat: add ability to stat files in workspace API Signed-off-by: Donnie Adams --- src/gptscript.ts | 21 +++++++++++++++++++++ tests/gptscript.test.ts | 16 ++++++++++++++++ 2 files changed, 37 insertions(+) diff --git a/src/gptscript.ts b/src/gptscript.ts index 40ca832..431e48d 100644 --- a/src/gptscript.ts +++ b/src/gptscript.ts @@ -548,6 +548,20 @@ export class GPTScript { return Buffer.from(out.trim(), "base64") } + async statFileInWorkspace(filePath: string, workspaceID?: string): Promise { + if (!workspaceID) { + workspaceID = process.env.GPTSCRIPT_WORKSPACE_ID ?? "" + } + const out = await this.runBasicCommand("workspaces/stat-file", { + id: workspaceID, + filePath: filePath, + workspaceTool: this.opts.WorkspaceTool, + env: this.opts.Env, + }) + + return JSON.parse(out) + } + /** * Helper method to handle the common logic for loading. * @@ -590,6 +604,13 @@ export class GPTScript { } } +export interface FileInfo { + workspaceID: string + name: string + size: number + modTime: string +} + export class Run { public readonly id: string public readonly opts: RunOpts diff --git a/tests/gptscript.test.ts b/tests/gptscript.test.ts index b0ac6bd..4ac834c 100644 --- a/tests/gptscript.test.ts +++ b/tests/gptscript.test.ts @@ -977,6 +977,14 @@ describe("gptscript module", () => { await g.writeFileInWorkspace("test.txt", Buffer.from("test"), workspaceID) const content = await g.readFileInWorkspace("test.txt", workspaceID) expect(content.toString()).toEqual("test") + + const fileInfo = await g.statFileInWorkspace("test.txt", workspaceID) + expect(fileInfo.size).toEqual(4) + expect(fileInfo.name).toEqual("test.txt") + expect(fileInfo.workspaceID).toEqual(workspaceID) + expect(fileInfo.modTime).toBeDefined() + + await g.deleteFileInWorkspace("test.txt", workspaceID) await g.deleteWorkspace(workspaceID) }, 60000) @@ -1034,6 +1042,14 @@ describe("gptscript module", () => { await g.writeFileInWorkspace("test.txt", Buffer.from("test"), workspaceID) const content = await g.readFileInWorkspace("test.txt", workspaceID) expect(content.toString()).toEqual("test") + + const fileInfo = await g.statFileInWorkspace("test.txt", workspaceID) + expect(fileInfo.size).toEqual(4) + expect(fileInfo.name).toEqual("test.txt") + expect(fileInfo.workspaceID).toEqual(workspaceID) + expect(fileInfo.modTime).toBeDefined() + + await g.deleteFileInWorkspace("test.txt", workspaceID) await g.deleteWorkspace(workspaceID) }, 60000) From ea42fed5208e0c8371d652204790965fa3c0a9ea Mon Sep 17 00:00:00 2001 From: Grant Linville Date: Wed, 30 Oct 2024 14:38:59 -0400 Subject: [PATCH 08/18] chore: add addDatasetElements function (#99) Signed-off-by: Grant Linville --- src/gptscript.ts | 13 +++++++++++++ tests/gptscript.test.ts | 36 +++++++++++++++++++++++++++++++++++- 2 files changed, 48 insertions(+), 1 deletion(-) diff --git a/src/gptscript.ts b/src/gptscript.ts index 431e48d..e581d39 100644 --- a/src/gptscript.ts +++ b/src/gptscript.ts @@ -434,6 +434,19 @@ export class GPTScript { return JSON.parse(result) as DatasetElementMeta } + async addDatasetElements(workspaceID: string, datasetID: string, elements: Array) { + if (workspaceID === "") { + workspaceID = process.env.GPTSCRIPT_WORKSPACE_ID ?? 
"" + } + + return await this.runBasicCommand("datasets/add-elements", { + input: JSON.stringify({datasetID, elements}), + workspaceID: workspaceID, + datasetToolRepo: this.opts.DatasetToolRepo ?? "", + env: this.opts.Env, + }) + } + async listDatasetElements(workspaceID: string, datasetID: string): Promise> { if (workspaceID == "") { workspaceID = process.env.GPTSCRIPT_WORKSPACE_ID ?? "" diff --git a/tests/gptscript.test.ts b/tests/gptscript.test.ts index 4ac834c..eb8b0bf 100644 --- a/tests/gptscript.test.ts +++ b/tests/gptscript.test.ts @@ -929,6 +929,28 @@ describe("gptscript module", () => { throw new Error("failed to add elements: " + e) } + // Add two elements at once. + try { + await g.addDatasetElements( + workspaceID, + datasetID, + [ + { + name: "element3", + description: "a description", + contents: "this is element 3 contents" + }, + { + name: "element4", + description: "a description", + contents: "this is element 4 contents" + } + ] + ) + } catch (e) { + throw new Error("failed to add elements: " + e) + } + // Get elements try { const e1 = await g.getDatasetElement(workspaceID, datasetID, "element1") @@ -940,6 +962,16 @@ describe("gptscript module", () => { expect(e2.name).toEqual("element2") expect(e2.description).toEqual("a description") expect(e2.contents).toEqual("this is element 2 contents") + + const e3 = await g.getDatasetElement(workspaceID, datasetID, "element3") + expect(e3.name).toEqual("element3") + expect(e3.description).toEqual("a description") + expect(e3.contents).toEqual("this is element 3 contents") + + const e4 = await g.getDatasetElement(workspaceID, datasetID, "element4") + expect(e4.name).toEqual("element4") + expect(e4.description).toEqual("a description") + expect(e4.contents).toEqual("this is element 4 contents") } catch (e) { throw new Error("failed to get elements: " + e) } @@ -947,9 +979,11 @@ describe("gptscript module", () => { // List the elements in the dataset try { const elements = await g.listDatasetElements(workspaceID, datasetID) - expect(elements.length).toEqual(2) + expect(elements.length).toEqual(4) expect(elements.map(e => e.name)).toContain("element1") expect(elements.map(e => e.name)).toContain("element2") + expect(elements.map(e => e.name)).toContain("element3") + expect(elements.map(e => e.name)).toContain("element4") } catch (e) { throw new Error("failed to list elements: " + e) } From b30db3cf4fe87b1ee09a20752baf2263b1e5c7be Mon Sep 17 00:00:00 2001 From: Grant Linville Date: Fri, 1 Nov 2024 10:36:54 -0400 Subject: [PATCH 09/18] enhance: use ArrayBuffer for dataset element contents (#100) Signed-off-by: Grant Linville --- src/gptscript.ts | 25 +++++++++++++++++++------ tests/gptscript.test.ts | 16 ++++++++-------- 2 files changed, 27 insertions(+), 14 deletions(-) diff --git a/src/gptscript.ts b/src/gptscript.ts index e581d39..c9f32bf 100644 --- a/src/gptscript.ts +++ b/src/gptscript.ts @@ -415,7 +415,7 @@ export class GPTScript { return JSON.parse(result) as Dataset } - async addDatasetElement(workspaceID: string, datasetID: string, elementName: string, elementDescription: string, elementContent: string): Promise { + async addDatasetElement(workspaceID: string, datasetID: string, elementName: string, elementDescription: string, elementContent: ArrayBuffer): Promise { if (workspaceID == "") { workspaceID = process.env.GPTSCRIPT_WORKSPACE_ID ?? 
"" } @@ -425,7 +425,7 @@ export class GPTScript { datasetID, elementName: elementName, elementDescription: elementDescription, - elementContent: elementContent + elementContent: Buffer.from(elementContent).toString("base64") }), workspaceID: workspaceID, datasetToolRepo: this.opts.DatasetToolRepo ?? "", @@ -439,8 +439,16 @@ export class GPTScript { workspaceID = process.env.GPTSCRIPT_WORKSPACE_ID ?? "" } + const serializableElements = elements.map(e => { + return { + name: e.name, + description: e.description, + contents: Buffer.from(e.contents).toString("base64") + } + }) + return await this.runBasicCommand("datasets/add-elements", { - input: JSON.stringify({datasetID, elements}), + input: JSON.stringify({datasetID, elements: serializableElements}), workspaceID: workspaceID, datasetToolRepo: this.opts.DatasetToolRepo ?? "", env: this.opts.Env, @@ -452,7 +460,6 @@ export class GPTScript { workspaceID = process.env.GPTSCRIPT_WORKSPACE_ID ?? "" } - const result = await this.runBasicCommand("datasets/list-elements", { input: JSON.stringify({datasetID}), workspaceID: workspaceID, @@ -473,7 +480,13 @@ export class GPTScript { datasetToolRepo: this.opts.DatasetToolRepo ?? "", env: this.opts.Env }) - return JSON.parse(result) as DatasetElement + + const element = JSON.parse(result) + return { + name: element.name, + description: element.description, + contents: Buffer.from(element.contents, "base64") + } } async createWorkspace(providerType: string, ...fromWorkspaces: string[]): Promise { @@ -1309,7 +1322,7 @@ export interface DatasetElementMeta { export interface DatasetElement { name: string description: string - contents: string + contents: ArrayBuffer } export interface DatasetMeta { diff --git a/tests/gptscript.test.ts b/tests/gptscript.test.ts index eb8b0bf..fa3dec9 100644 --- a/tests/gptscript.test.ts +++ b/tests/gptscript.test.ts @@ -911,7 +911,7 @@ describe("gptscript module", () => { datasetID, "element1", "", - "this is element 1 contents" + Buffer.from("this is element 1 contents") ) expect(e1.name).toEqual("element1") expect(e1.description).toEqual("") @@ -921,7 +921,7 @@ describe("gptscript module", () => { datasetID, "element2", "a description", - "this is element 2 contents" + Buffer.from("this is element 2 contents") ) expect(e2.name).toEqual("element2") expect(e2.description).toEqual("a description") @@ -938,12 +938,12 @@ describe("gptscript module", () => { { name: "element3", description: "a description", - contents: "this is element 3 contents" + contents: Buffer.from("this is element 3 contents") }, { name: "element4", description: "a description", - contents: "this is element 4 contents" + contents: Buffer.from("this is element 4 contents") } ] ) @@ -956,22 +956,22 @@ describe("gptscript module", () => { const e1 = await g.getDatasetElement(workspaceID, datasetID, "element1") expect(e1.name).toEqual("element1") expect(e1.description).toBeUndefined() - expect(e1.contents).toEqual("this is element 1 contents") + expect(e1.contents).toEqual(Buffer.from("this is element 1 contents")) const e2 = await g.getDatasetElement(workspaceID, datasetID, "element2") expect(e2.name).toEqual("element2") expect(e2.description).toEqual("a description") - expect(e2.contents).toEqual("this is element 2 contents") + expect(e2.contents).toEqual(Buffer.from("this is element 2 contents")) const e3 = await g.getDatasetElement(workspaceID, datasetID, "element3") expect(e3.name).toEqual("element3") expect(e3.description).toEqual("a description") - expect(e3.contents).toEqual("this is element 3 
contents") + expect(e3.contents).toEqual(Buffer.from("this is element 3 contents")) const e4 = await g.getDatasetElement(workspaceID, datasetID, "element4") expect(e4.name).toEqual("element4") expect(e4.description).toEqual("a description") - expect(e4.contents).toEqual("this is element 4 contents") + expect(e4.contents).toEqual(Buffer.from("this is element 4 contents")) } catch (e) { throw new Error("failed to get elements: " + e) } From 56135fadee9445c452f54547b72db864ebae40a9 Mon Sep 17 00:00:00 2001 From: Donnie Adams Date: Fri, 1 Nov 2024 20:23:26 -0400 Subject: [PATCH 10/18] fix: the parent call frame should be of "no" tool category Signed-off-by: Donnie Adams --- src/gptscript.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/gptscript.ts b/src/gptscript.ts index c9f32bf..406f702 100644 --- a/src/gptscript.ts +++ b/src/gptscript.ts @@ -960,7 +960,7 @@ export class Run { } } else if ((f.type as string).startsWith("call")) { f = f as CallFrame - if (!f.parentID && this.parentCallId === "") { + if (!f.parentID && this.parentCallId === "" && (f.toolCategory || ToolCategory.NoCategory) === ToolCategory.NoCategory) { this.parentCallId = f.id } this.calls[f.id] = f From d4b222c34e757b62429dc43ae9cc44aa3f6c9700 Mon Sep 17 00:00:00 2001 From: Grant Linville Date: Wed, 6 Nov 2024 17:07:34 -0500 Subject: [PATCH 11/18] chore: update for dataset rewrite (#102) Signed-off-by: Grant Linville --- src/gptscript.ts | 112 +++++++++++----------------------------- tests/gptscript.test.ts | 104 ++++++++++++++----------------------- 2 files changed, 67 insertions(+), 149 deletions(-) diff --git a/src/gptscript.ts b/src/gptscript.ts index 406f702..d241cb0 100644 --- a/src/gptscript.ts +++ b/src/gptscript.ts @@ -12,7 +12,7 @@ export interface GlobalOpts { BaseURL?: string DefaultModel?: string DefaultModelProvider?: string - DatasetToolRepo?: string + DatasetTool?: string WorkspaceTool?: string Env?: string[] } @@ -386,98 +386,51 @@ export class GPTScript { }) } - // Dataset methods - - async listDatasets(workspaceID: string): Promise> { - if (workspaceID == "") { - workspaceID = process.env.GPTSCRIPT_WORKSPACE_ID ?? "" - } - + // returns an array of dataset IDs + async listDatasets(): Promise> { const result = await this.runBasicCommand("datasets", { - workspaceID: workspaceID, - datasetToolRepo: this.opts.DatasetToolRepo ?? "", + input: "{}", + datasetTool: this.opts.DatasetTool ?? "", env: this.opts.Env }) return JSON.parse(result) as Array } - async createDataset(workspaceID: string, name: string, description: string): Promise { - if (workspaceID == "") { - workspaceID = process.env.GPTSCRIPT_WORKSPACE_ID ?? "" - } - - const result = await this.runBasicCommand("datasets/create", { - input: JSON.stringify({datasetName: name, datasetDescription: description}), - workspaceID: workspaceID, - datasetToolRepo: this.opts.DatasetToolRepo ?? "", - env: this.opts.Env - }) - return JSON.parse(result) as Dataset - } - - async addDatasetElement(workspaceID: string, datasetID: string, elementName: string, elementDescription: string, elementContent: ArrayBuffer): Promise { - if (workspaceID == "") { - workspaceID = process.env.GPTSCRIPT_WORKSPACE_ID ?? "" - } - - const result = await this.runBasicCommand("datasets/add-element", { - input: JSON.stringify({ - datasetID, - elementName: elementName, - elementDescription: elementDescription, - elementContent: Buffer.from(elementContent).toString("base64") - }), - workspaceID: workspaceID, - datasetToolRepo: this.opts.DatasetToolRepo ?? 
"", - env: this.opts.Env - }) - return JSON.parse(result) as DatasetElementMeta - } - - async addDatasetElements(workspaceID: string, datasetID: string, elements: Array) { - if (workspaceID === "") { - workspaceID = process.env.GPTSCRIPT_WORKSPACE_ID ?? "" - } - + async addDatasetElements(elements: Array, opts: {name?: string, description?: string, datasetID?: string}): Promise { const serializableElements = elements.map(e => { return { name: e.name, description: e.description, - contents: Buffer.from(e.contents).toString("base64") + contents: e.contents, + binaryContents: Buffer.from(e.binaryContents ?? Buffer.from("")).toString("base64") } }) return await this.runBasicCommand("datasets/add-elements", { - input: JSON.stringify({datasetID, elements: serializableElements}), - workspaceID: workspaceID, - datasetToolRepo: this.opts.DatasetToolRepo ?? "", - env: this.opts.Env, + input: JSON.stringify({ + name: opts.name ?? "", + description: opts.description ?? "", + datasetID: opts.datasetID ?? "", + elements: serializableElements + }), + datasetTool: this.opts.DatasetTool ?? "", + env: this.opts.Env }) } - async listDatasetElements(workspaceID: string, datasetID: string): Promise> { - if (workspaceID == "") { - workspaceID = process.env.GPTSCRIPT_WORKSPACE_ID ?? "" - } - + async listDatasetElements(datasetID: string): Promise> { const result = await this.runBasicCommand("datasets/list-elements", { input: JSON.stringify({datasetID}), - workspaceID: workspaceID, - datasetToolRepo: this.opts.DatasetToolRepo ?? "", + datasetTool: this.opts.DatasetTool ?? "", env: this.opts.Env }) return JSON.parse(result) as Array } - async getDatasetElement(workspaceID: string, datasetID: string, elementName: string): Promise { - if (workspaceID == "") { - workspaceID = process.env.GPTSCRIPT_WORKSPACE_ID ?? "" - } - + async getDatasetElement(datasetID: string, elementName: string): Promise { const result = await this.runBasicCommand("datasets/get-element", { - input: JSON.stringify({datasetID, element: elementName}), - workspaceID: workspaceID, - datasetToolRepo: this.opts.DatasetToolRepo ?? "", + input: JSON.stringify({datasetID, name: elementName}), + datasetTool: this.opts.DatasetTool ?? "", env: this.opts.Env }) @@ -485,7 +438,8 @@ export class GPTScript { return { name: element.name, description: element.description, - contents: Buffer.from(element.contents, "base64") + contents: element.contents, + binaryContents: Buffer.from(element.binaryContents ?? 
"", "base64") } } @@ -1312,28 +1266,20 @@ function jsonToCredential(cred: string): Credential { } } -// Dataset types - -export interface DatasetElementMeta { - name: string - description: string -} - -export interface DatasetElement { +export interface DatasetMeta { + id: string name: string description: string - contents: ArrayBuffer } -export interface DatasetMeta { - id: string +export interface DatasetElementMeta { name: string description: string } -export interface Dataset { - id: string +export interface DatasetElement { name: string description: string - elements: Record + contents?: string + binaryContents?: ArrayBuffer } diff --git a/tests/gptscript.test.ts b/tests/gptscript.test.ts index fa3dec9..077107c 100644 --- a/tests/gptscript.test.ts +++ b/tests/gptscript.test.ts @@ -887,112 +887,84 @@ describe("gptscript module", () => { }, 20000) test("dataset operations", async () => { - const datasetName = "test-" + randomBytes(10).toString("hex") - const workspaceID = await g.createWorkspace("directory") + process.env.GPTSCRIPT_WORKSPACE_ID = await g.createWorkspace("directory") + + const client = new gptscript.GPTScript({ + APIKey: process.env.OPENAI_API_KEY, + Env: Object.entries(process.env).map(([k, v]) => `${k}=${v}`) + }) + let datasetID: string - // Create + // Create and add two elements try { - const dataset = await g.createDataset(workspaceID, datasetName, "a test dataset") - expect(dataset).toBeDefined() - expect(dataset.name).toEqual(datasetName) - expect(dataset.description).toEqual("a test dataset") - expect(dataset.id.length).toBeGreaterThan(0) - expect(dataset.elements).toEqual({}) - datasetID = dataset.id + datasetID = await client.addDatasetElements([ + { + name: "element1", + description: "", + contents: "this is element 1 contents" + }, + { + name: "element2", + description: "a description", + binaryContents: Buffer.from("this is element 2 contents") + } + ], {name: "test-dataset", description: "a test dataset"}) } catch (e) { throw new Error("failed to create dataset: " + e) } - // Add elements - try { - const e1 = await g.addDatasetElement( - workspaceID, - datasetID, - "element1", - "", - Buffer.from("this is element 1 contents") - ) - expect(e1.name).toEqual("element1") - expect(e1.description).toEqual("") - - const e2 = await g.addDatasetElement( - workspaceID, - datasetID, - "element2", - "a description", - Buffer.from("this is element 2 contents") - ) - expect(e2.name).toEqual("element2") - expect(e2.description).toEqual("a description") - } catch (e) { - throw new Error("failed to add elements: " + e) - } - - // Add two elements at once. 
+ // Add another element try { - await g.addDatasetElements( - workspaceID, - datasetID, - [ + await client.addDatasetElements([ { - name: "element3", - description: "a description", - contents: Buffer.from("this is element 3 contents") - }, - { - name: "element4", - description: "a description", - contents: Buffer.from("this is element 4 contents") + name: "element3", + description: "a description", + contents: "this is element 3 contents" } - ] - ) + ], {datasetID: datasetID}) } catch (e) { throw new Error("failed to add elements: " + e) } // Get elements try { - const e1 = await g.getDatasetElement(workspaceID, datasetID, "element1") + const e1 = await client.getDatasetElement(datasetID, "element1") expect(e1.name).toEqual("element1") expect(e1.description).toBeUndefined() - expect(e1.contents).toEqual(Buffer.from("this is element 1 contents")) + expect(e1.contents).toEqual("this is element 1 contents") - const e2 = await g.getDatasetElement(workspaceID, datasetID, "element2") + const e2 = await client.getDatasetElement(datasetID, "element2") expect(e2.name).toEqual("element2") expect(e2.description).toEqual("a description") - expect(e2.contents).toEqual(Buffer.from("this is element 2 contents")) + expect(e2.binaryContents).toEqual(Buffer.from("this is element 2 contents")) - const e3 = await g.getDatasetElement(workspaceID, datasetID, "element3") + const e3 = await client.getDatasetElement(datasetID, "element3") expect(e3.name).toEqual("element3") expect(e3.description).toEqual("a description") - expect(e3.contents).toEqual(Buffer.from("this is element 3 contents")) - - const e4 = await g.getDatasetElement(workspaceID, datasetID, "element4") - expect(e4.name).toEqual("element4") - expect(e4.description).toEqual("a description") - expect(e4.contents).toEqual(Buffer.from("this is element 4 contents")) + expect(e3.contents).toEqual("this is element 3 contents") } catch (e) { throw new Error("failed to get elements: " + e) } // List the elements in the dataset try { - const elements = await g.listDatasetElements(workspaceID, datasetID) - expect(elements.length).toEqual(4) + const elements = await client.listDatasetElements(datasetID) + expect(elements.length).toEqual(3) expect(elements.map(e => e.name)).toContain("element1") expect(elements.map(e => e.name)).toContain("element2") expect(elements.map(e => e.name)).toContain("element3") - expect(elements.map(e => e.name)).toContain("element4") } catch (e) { throw new Error("failed to list elements: " + e) } // List datasets try { - const datasets = await g.listDatasets(workspaceID) + const datasets = await client.listDatasets() expect(datasets.length).toBeGreaterThan(0) - expect(datasets.map(d => d.name)).toContain(datasetName) + expect(datasets[0].id).toEqual(datasetID) + expect(datasets[0].name).toEqual("test-dataset") + expect(datasets[0].description).toEqual("a test dataset") } catch (e) { throw new Error("failed to list datasets: " + e) } From 74e083d3ee93b62530007e03247e4c929de1eb56 Mon Sep 17 00:00:00 2001 From: Grant Linville Date: Mon, 16 Dec 2024 14:40:37 -0500 Subject: [PATCH 12/18] enhance: add functions for daemon tools to do mTLS (#103) Signed-off-by: Grant Linville --- src/gptscript.ts | 46 ++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 46 insertions(+) diff --git a/src/gptscript.ts b/src/gptscript.ts index d241cb0..bea6175 100644 --- a/src/gptscript.ts +++ b/src/gptscript.ts @@ -3,6 +3,7 @@ import path from "path" import child_process from "child_process" import {fileURLToPath} from "url" import {gunzipSync} 
from "zlib" +import https from "https" export interface GlobalOpts { URL?: string @@ -1283,3 +1284,48 @@ export interface DatasetElement { contents?: string binaryContents?: ArrayBuffer } + +// Functions for use in daemon tools: + +export function createServer(listener: http.RequestListener): https.Server { + const certB64 = process.env.CERT + const privateKeyB64 = process.env.PRIVATE_KEY + const gptscriptCertB64 = process.env.GPTSCRIPT_CERT + + if (!certB64) { + console.log('Missing CERT env var') + process.exit(1) + } else if (!privateKeyB64) { + console.log('Missing PRIVATE_KEY env var') + process.exit(1) + } else if (!gptscriptCertB64) { + console.log('Missing GPTSCRIPT_CERT env var') + process.exit(1) + } + + const cert = Buffer.from(certB64, 'base64').toString('utf-8') + const privateKey = Buffer.from(privateKeyB64, 'base64').toString('utf-8') + const gptscriptCert = Buffer.from(gptscriptCertB64, 'base64').toString('utf-8') + + const options = { + key: privateKey, + cert: cert, + ca: gptscriptCert, + requestCert: true, + rejectUnauthorized: true, + } + + return https.createServer(options, listener) +} + +export function startServer(server: https.Server) { + const port = process.env.PORT + if (!port) { + console.log('Missing PORT env var') + process.exit(1) + } + + server.listen(port, () => { + console.log(`Server listening on port ${port}`) + }) +} From 8596761bce32e4eb70596ce382a2e752158ded6d Mon Sep 17 00:00:00 2001 From: Grant Linville Date: Mon, 16 Dec 2024 15:24:54 -0500 Subject: [PATCH 13/18] fix: daemons: start server on localhost (#104) Signed-off-by: Grant Linville --- src/gptscript.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/gptscript.ts b/src/gptscript.ts index bea6175..baa8a15 100644 --- a/src/gptscript.ts +++ b/src/gptscript.ts @@ -1325,7 +1325,7 @@ export function startServer(server: https.Server) { process.exit(1) } - server.listen(port, () => { + server.listen(parseInt(port, 10), '127.0.0.1', () => { console.log(`Server listening on port ${port}`) }) } From 3248d6ec38ea43957efdb461309fd6c10afcedd9 Mon Sep 17 00:00:00 2001 From: Grant Linville Date: Thu, 19 Dec 2024 09:46:31 -0500 Subject: [PATCH 14/18] enhance: get more information about models (#105) Signed-off-by: Grant Linville --- src/gptscript.ts | 33 +++++++++++++++++++++++++++++++-- tests/gptscript.test.ts | 16 ++++++++-------- 2 files changed, 39 insertions(+), 10 deletions(-) diff --git a/src/gptscript.ts b/src/gptscript.ts index baa8a15..8b8fbf9 100644 --- a/src/gptscript.ts +++ b/src/gptscript.ts @@ -168,18 +168,19 @@ export class GPTScript { } } - listModels(providers?: string[], credentialOverrides?: string[]): Promise { + async listModels(providers?: string[], credentialOverrides?: string[]): Promise> { if (this.opts.DefaultModelProvider) { if (!providers) { providers = [] } providers.push(this.opts.DefaultModelProvider) } - return this.runBasicCommand("list-models", { + const result = await this.runBasicCommand("list-models", { "providers": providers, "env": this.opts.Env, "credentialOverrides": credentialOverrides }) + return await JSON.parse(result) as Array } version(): Promise { @@ -1229,6 +1230,34 @@ export type Credential = { refreshToken?: string | undefined } +// Types for OpenAI API-compatible models + +export type Permission = { + created: number, + id: string, + object: string, + allow_create_engine: boolean, + allow_sampling: boolean, + allow_logprobs: boolean, + allow_search_indices: boolean, + allow_view: boolean, + allow_fine_tuning: boolean, + organization: 
string, + group: any, + is_blocking: boolean, +} + +export type Model = { + created: number, + id: string, + object: string, + owned_by: string, + permission: Array, + root: string, + parent: string, + metadata: Record, +} + // for internal use only type cred = { context: string diff --git a/tests/gptscript.test.ts b/tests/gptscript.test.ts index 077107c..6cc0cfb 100644 --- a/tests/gptscript.test.ts +++ b/tests/gptscript.test.ts @@ -51,12 +51,12 @@ describe("gptscript module", () => { return } - let models = await g.listModels(["github.com/gptscript-ai/claude3-anthropic-provider"], ["github.com/gptscript-ai/claude3-anthropic-provider/credential:ANTHROPIC_API_KEY"]) + const models = await g.listModels(["github.com/gptscript-ai/claude3-anthropic-provider"], ["github.com/gptscript-ai/claude3-anthropic-provider/credential:ANTHROPIC_API_KEY"]) expect(models).toBeDefined() - for (let model of models.split("\n")) { + for (const model of models) { expect(model).toBeDefined() - expect(model.startsWith("claude-3-")).toBe(true) - expect(model.endsWith("from github.com/gptscript-ai/claude3-anthropic-provider")).toBe(true) + expect(model.id.startsWith("claude-3-")).toBe(true) + expect(model.id.endsWith("from github.com/gptscript-ai/claude3-anthropic-provider")).toBe(true) } }, 60000) @@ -67,12 +67,12 @@ describe("gptscript module", () => { const newg = new gptscript.GPTScript({DefaultModelProvider: "github.com/gptscript-ai/claude3-anthropic-provider"}) try { - let models = await newg.listModels(undefined, ["github.com/gptscript-ai/claude3-anthropic-provider/credential:ANTHROPIC_API_KEY"]) + const models = await newg.listModels(undefined, ["github.com/gptscript-ai/claude3-anthropic-provider/credential:ANTHROPIC_API_KEY"]) expect(models).toBeDefined() - for (let model of models.split("\n")) { + for (const model of models) { expect(model).toBeDefined() - expect(model.startsWith("claude-3-")).toBe(true) - expect(model.endsWith("from github.com/gptscript-ai/claude3-anthropic-provider")).toBe(true) + expect(model.id.startsWith("claude-3-")).toBe(true) + expect(model.id.endsWith("from github.com/gptscript-ai/claude3-anthropic-provider")).toBe(true) } } finally { newg.close() From 1f83238053487cb95ab705b941c31e2aa30ea4e1 Mon Sep 17 00:00:00 2001 From: Donnie Adams Date: Tue, 4 Feb 2025 12:20:51 -0500 Subject: [PATCH 15/18] chore: update prompt types for more granular configuration Signed-off-by: Donnie Adams --- src/gptscript.ts | 30 ++++++++++++++++++++---------- tests/gptscript.test.ts | 33 +++++++++++++++++---------------- 2 files changed, 37 insertions(+), 26 deletions(-) diff --git a/src/gptscript.ts b/src/gptscript.ts index 8b8fbf9..283169e 100644 --- a/src/gptscript.ts +++ b/src/gptscript.ts @@ -398,7 +398,11 @@ export class GPTScript { return JSON.parse(result) as Array } - async addDatasetElements(elements: Array, opts: {name?: string, description?: string, datasetID?: string}): Promise { + async addDatasetElements(elements: Array, opts: { + name?: string, + description?: string, + datasetID?: string + }): Promise { const serializableElements = elements.map(e => { return { name: e.name, @@ -1136,11 +1140,17 @@ export interface PromptFrame { type: RunEventType.Prompt time: string message: string - fields: string[] + fields: Field[] sensitive: boolean metadata: Record } +export interface Field { + name: string + description?: string + sensitive?: boolean +} + export type Frame = RunFrame | CallFrame | PromptFrame export interface AuthResponse { @@ -1322,19 +1332,19 @@ export function 
createServer(listener: http.RequestListener { + server.listen(parseInt(port, 10), "127.0.0.1", () => { console.log(`Server listening on port ${port}`) }) } diff --git a/tests/gptscript.test.ts b/tests/gptscript.test.ts index 6cc0cfb..83789e4 100644 --- a/tests/gptscript.test.ts +++ b/tests/gptscript.test.ts @@ -116,7 +116,7 @@ describe("gptscript module", () => { expect(run).toBeDefined() expect(await run.text()).toContain("Understood.") - }) + }, 10000) test("evaluate executes and streams a prompt correctly", async () => { let out = "" @@ -129,7 +129,7 @@ describe("gptscript module", () => { } const run = await g.evaluate(t, opts) - run.on(gptscript.RunEventType.CallProgress, (data: gptscript.CallFrame) => { + run.on(gptscript.RunEventType.CallFinish, data => { for (let output of data.output) out += `system: ${output.content}` }) @@ -210,10 +210,11 @@ describe("gptscript module", () => { } const run = await g.run(testGptPath, opts) - run.on(gptscript.RunEventType.CallProgress, data => { + run.on(gptscript.RunEventType.CallFinish, data => { for (let output of data.output) out += `system: ${output.content}` }) - await run.text() + + expect(await run.text()).toContain("Calvin Coolidge") err = run.err for (let c in run.calls) { @@ -231,7 +232,6 @@ describe("gptscript module", () => { test("run executes and streams a file with global tools correctly", async () => { let out = "" - let err = undefined const testGptPath = path.join(__dirname, "fixtures", "global-tools.gpt") const opts = { disableCache: true, @@ -239,15 +239,14 @@ describe("gptscript module", () => { } const run = await g.run(testGptPath, opts) - run.on(gptscript.RunEventType.CallProgress, data => { + run.on(gptscript.RunEventType.CallFinish, data => { for (let output of data.output) out += `system: ${output.content}` }) - await run.text() - err = run.err + expect(await run.text()).toContain("Hello!") + expect(run.err).toEqual("") expect(out).toContain("Hello!") - expect(err).toEqual("") - }, 30000) + }, 60000) test("aborting a run is reported correctly", async () => { let errMessage = "" @@ -627,7 +626,7 @@ describe("gptscript module", () => { expect(await run.text()).toContain("Lake Huron") expect(run.err).toEqual("") expect(run.state).toEqual(gptscript.RunState.Continue) - }, 10000) + }, 15000) test("nextChat on tool providing chat state", async () => { const t = { @@ -651,7 +650,7 @@ describe("gptscript module", () => { expect(await run.text()).toContain("Austin") expect(run.err).toEqual("") expect(run.state).toEqual(gptscript.RunState.Continue) - }, 10000) + }, 15000) test("confirm", async () => { const t = { @@ -702,11 +701,11 @@ describe("gptscript module", () => { run.on(gptscript.RunEventType.Prompt, async (data: gptscript.PromptFrame) => { expect(data.message).toContain("first name") expect(data.fields.length).toEqual(1) - expect(data.fields[0]).toEqual("first name") + expect(data.fields[0].name).toEqual("first name") expect(data.sensitive).toBeFalsy() promptFound = true - await g.promptResponse({id: data.id, responses: {[data.fields[0]]: "Clicky"}}) + await g.promptResponse({id: data.id, responses: {[data.fields[0].name]: "Clicky"}}) }) expect(await run.text()).toContain("Clicky") @@ -722,12 +721,12 @@ describe("gptscript module", () => { }) run.on(gptscript.RunEventType.Prompt, async (data: gptscript.PromptFrame) => { expect(data.fields.length).toEqual(1) - expect(data.fields[0]).toEqual("first name") + expect(data.fields[0].name).toEqual("first name") expect(data.metadata).toEqual({key: "value"}) 
expect(data.sensitive).toBeFalsy() promptFound = true - await g.promptResponse({id: data.id, responses: {[data.fields[0]]: "Clicky"}}) + await g.promptResponse({id: data.id, responses: {[data.fields[0].name]: "Clicky"}}) }) expect(await run.text()).toContain("Clicky") @@ -968,6 +967,8 @@ describe("gptscript module", () => { } catch (e) { throw new Error("failed to list datasets: " + e) } + + client.close() }, 60000) test("create and delete workspace", async () => { From 0aef7159bf6f0298c1da7d14f0e4350e2262fb3f Mon Sep 17 00:00:00 2001 From: Donnie Adams Date: Thu, 24 Apr 2025 16:24:36 -0400 Subject: [PATCH 16/18] chore: add credential check param field Signed-off-by: Donnie Adams --- src/gptscript.ts | 8 ++++++-- tests/gptscript.test.ts | 3 +++ 2 files changed, 9 insertions(+), 2 deletions(-) diff --git a/src/gptscript.ts b/src/gptscript.ts index 283169e..f139d17 100644 --- a/src/gptscript.ts +++ b/src/gptscript.ts @@ -1238,6 +1238,7 @@ export type Credential = { ephemeral: boolean expiresAt?: Date | undefined refreshToken?: string | undefined + checkParam?: string | undefined } // Types for OpenAI API-compatible models @@ -1277,6 +1278,7 @@ type cred = { ephemeral: boolean expiresAt: string | undefined refreshToken: string | undefined + checkParam: string | undefined } export function credentialToJSON(c: Credential): string { @@ -1289,7 +1291,8 @@ export function credentialToJSON(c: Credential): string { env: c.env, ephemeral: c.ephemeral, expiresAt: expiresAt, - refreshToken: c.refreshToken + refreshToken: c.refreshToken, + checkParam: c.checkParam } as cred) } @@ -1302,7 +1305,8 @@ function jsonToCredential(cred: string): Credential { env: c.env, ephemeral: c.ephemeral, expiresAt: c.expiresAt ? new Date(c.expiresAt) : undefined, - refreshToken: c.refreshToken + refreshToken: c.refreshToken, + checkParam: c.checkParam } } diff --git a/tests/gptscript.test.ts b/tests/gptscript.test.ts index 83789e4..ea2f153 100644 --- a/tests/gptscript.test.ts +++ b/tests/gptscript.test.ts @@ -843,6 +843,7 @@ describe("gptscript module", () => { ephemeral: false, expiresAt: new Date(Date.now() + 5000), // 5 seconds from now type: CredentialType.Tool, + checkParam: "my-check-param", }) } catch (e) { throw new Error("failed to create credential: " + e) @@ -856,6 +857,8 @@ describe("gptscript module", () => { const result = await g.revealCredential(["default"], name) expect(result.env["TEST"]).toEqual(value) expect(result.expiresAt!.valueOf()).toBeLessThan(new Date().valueOf()) + expect(result.type).toEqual(CredentialType.Tool) + expect(result.checkParam).toEqual("my-check-param") } catch (e) { throw new Error("failed to reveal credential: " + e) } From 880f3f22b3268b42f3a5254afa4cf63ae06e291c Mon Sep 17 00:00:00 2001 From: acorn-io-bot <105877126+acorn-io-bot@users.noreply.github.com> Date: Tue, 4 Nov 2025 20:16:10 +0000 Subject: [PATCH 17/18] Automated GPTScript Version Update --- package-lock.json | 4 ++-- package.json | 2 +- scripts/install-binary.js | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/package-lock.json b/package-lock.json index 74a8c5e..1370df7 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "@gptscript-ai/gptscript", - "version": "v0.9.5", + "version": "v0.9.6", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "@gptscript-ai/gptscript", - "version": "v0.9.5", + "version": "v0.9.6", "hasInstallScript": true, "license": "Apache-2.0", "dependencies": { diff --git a/package.json b/package.json index ccc8437..a29b264 
100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@gptscript-ai/gptscript", - "version": "v0.9.5", + "version": "v0.9.6", "description": "Run gptscript in node.js", "source": "src/gptscript.ts", "main": "dist/gptscript.js", diff --git a/scripts/install-binary.js b/scripts/install-binary.js index 9ed7bf1..0ad9151 100644 --- a/scripts/install-binary.js +++ b/scripts/install-binary.js @@ -72,7 +72,7 @@ if (process.platform === 'win32') { const gptscript_info = { name: "gptscript", url: "https://github.com/gptscript-ai/gptscript/releases/download/", - version: "v0.9.5" + version: "v0.9.6" } const pltfm = { From d72114b20913609a1b65f1734eb2ce013aa6c1e4 Mon Sep 17 00:00:00 2001 From: acorn-io-bot <105877126+acorn-io-bot@users.noreply.github.com> Date: Wed, 5 Nov 2025 17:24:12 +0000 Subject: [PATCH 18/18] Automated GPTScript Version Update --- package-lock.json | 4 ++-- package.json | 2 +- scripts/install-binary.js | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/package-lock.json b/package-lock.json index 1370df7..aac3a0c 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "@gptscript-ai/gptscript", - "version": "v0.9.6", + "version": "v0.9.7", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "@gptscript-ai/gptscript", - "version": "v0.9.6", + "version": "v0.9.7", "hasInstallScript": true, "license": "Apache-2.0", "dependencies": { diff --git a/package.json b/package.json index a29b264..600db13 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@gptscript-ai/gptscript", - "version": "v0.9.6", + "version": "v0.9.7", "description": "Run gptscript in node.js", "source": "src/gptscript.ts", "main": "dist/gptscript.js", diff --git a/scripts/install-binary.js b/scripts/install-binary.js index 0ad9151..eacb302 100644 --- a/scripts/install-binary.js +++ b/scripts/install-binary.js @@ -72,7 +72,7 @@ if (process.platform === 'win32') { const gptscript_info = { name: "gptscript", url: "https://github.com/gptscript-ai/gptscript/releases/download/", - version: "v0.9.6" + version: "v0.9.7" } const pltfm = {
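For reference, a minimal usage sketch (not part of the applied diffs): it shows how a daemon tool might consume the createServer and startServer helpers added and adjusted in the patches above, assuming they are exported from the @gptscript-ai/gptscript entry point and that the CERT, PRIVATE_KEY, GPTSCRIPT_CERT, and PORT environment variables the helpers check are present at runtime.

// Hypothetical daemon tool entry point; any name not shown in the diffs above is an assumption.
import {createServer, startServer} from "@gptscript-ai/gptscript"

// createServer wraps https.createServer with the key, cert, and CA read from the
// CERT, PRIVATE_KEY, and GPTSCRIPT_CERT env vars, and requires a client certificate.
const server = createServer((req, res) => {
    res.writeHead(200, {"Content-Type": "text/plain"})
    res.end("hello from a daemon tool")
})

// startServer reads PORT and, per the localhost fix above, binds to 127.0.0.1.
startServer(server)

Binding to 127.0.0.1 keeps the daemon reachable only by the local gptscript process rather than on every interface, which is the intent of the "start server on localhost" fix in this series.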