From 58b884ae673d3b8368826f08ece2566f261d6ecd Mon Sep 17 00:00:00 2001
From: Bryan Kyle
Date: Thu, 22 Feb 2024 12:55:29 -0800
Subject: [PATCH] Add commands to pause, resume, and toggle inferencing

---
 package.json            | 18 +++++++++++++++++
 src/extension.ts        | 17 +++++++++++++---
 src/prompts/provider.ts | 44 +++++++++++++++++++++++++++++++++++++----
 3 files changed, 72 insertions(+), 7 deletions(-)

diff --git a/package.json b/package.json
index bc30b49..bcc5c48 100644
--- a/package.json
+++ b/package.json
@@ -33,6 +33,24 @@
   "extensionKind": ["ui"],
   "main": "./out/extension.js",
   "contributes": {
+    "commands": [
+      {
+        "command": "llama.openSettings",
+        "title": "Llama Coder: Open Settings"
+      },
+      {
+        "command": "llama.pause",
+        "title": "Llama Coder: Pause"
+      },
+      {
+        "command": "llama.resume",
+        "title": "Llama Coder: Resume"
+      },
+      {
+        "command": "llama.toggle",
+        "title": "Llama Coder: Toggle"
+      }
+    ],
     "configuration": [
       {
         "title": "Llama coder",
diff --git a/src/extension.ts b/src/extension.ts
index 6571022..3f69875 100644
--- a/src/extension.ts
+++ b/src/extension.ts
@@ -9,12 +9,12 @@ export function activate(context: vscode.ExtensionContext) {
     info('Llama Coder is activated.');
 
     // Create status bar
-    const openSettings = 'llama.openSettings';
-    context.subscriptions.push(vscode.commands.registerCommand(openSettings, () => {
+    context.subscriptions.push(vscode.commands.registerCommand('llama.openSettings', () => {
         vscode.commands.executeCommand('workbench.action.openSettings', '@ext:ex3ndr.llama-coder');
     }));
+
     let statusBarItem = vscode.window.createStatusBarItem(vscode.StatusBarAlignment.Right, 100);
-    statusBarItem.command = openSettings;
+    statusBarItem.command = 'llama.toggle';
     statusBarItem.text = `$(chip) Llama Coder`;
     statusBarItem.show();
     context.subscriptions.push(statusBarItem);
@@ -23,6 +23,17 @@ export function activate(context: vscode.ExtensionContext) {
     const provider = new PromptProvider(statusBarItem, context);
     let disposable = vscode.languages.registerInlineCompletionItemProvider({ pattern: '**', }, provider);
     context.subscriptions.push(disposable);
+
+    context.subscriptions.push(vscode.commands.registerCommand('llama.pause', () => {
+        provider.paused = true;
+    }));
+    context.subscriptions.push(vscode.commands.registerCommand('llama.resume', () => {
+        provider.paused = false;
+    }));
+    context.subscriptions.push(vscode.commands.registerCommand('llama.toggle', () => {
+        provider.paused = !provider.paused;
+    }));
+
 }
 
 export function deactivate() {
diff --git a/src/prompts/provider.ts b/src/prompts/provider.ts
index 2c78daf..7820b04 100644
--- a/src/prompts/provider.ts
+++ b/src/prompts/provider.ts
@@ -9,16 +9,49 @@
 import { ollamaCheckModel } from '../modules/ollamaCheckModel';
 import { ollamaDownloadModel } from '../modules/ollamaDownloadModel';
 import { config } from '../config';
 
+type Status = {
+    icon: string;
+    text: string;
+};
+
 export class PromptProvider implements vscode.InlineCompletionItemProvider {
     lock = new AsyncLock();
     statusbar: vscode.StatusBarItem;
     context: vscode.ExtensionContext;
+    private _paused: boolean = false;
+    private _status: Status = { icon: "chip", text: "Llama Coder" };
 
     constructor(statusbar: vscode.StatusBarItem, context: vscode.ExtensionContext) {
         this.statusbar = statusbar;
         this.context = context;
     }
+
+    public set paused(value: boolean) {
+        this._paused = value;
+        this.update();
+    }
+
+    public get paused(): boolean {
+        return this._paused;
+    }
+
+    private update(icon?: string, text?: string): void {
+        this._status.icon = icon ? icon : this._status.icon;
+        this._status.text = text ? text : this._status.text;
+
+        let statusText = '';
+        let statusTooltip = '';
+        if (this._paused) {
+            statusText = `$(sync-ignored) ${this._status.text}`;
+            statusTooltip = `${this._status.text} (Paused)`;
+        } else {
+            statusText = `$(${this._status.icon}) ${this._status.text}`;
+            statusTooltip = `${this._status.text}`;
+        }
+        this.statusbar.text = statusText;
+        this.statusbar.tooltip = statusTooltip;
+    }
 
     async delayCompletion(delay: number, token: vscode.CancellationToken): Promise<boolean> {
         if (config.inference.delay < 0) {
@@ -37,6 +70,9 @@ export class PromptProvider implements vscode.InlineCompletionItemProvider {
         }
 
         try {
+            if (this.paused) {
+                return;
+            }
 
             // Ignore unsupported documents
             if (!isSupported(document)) {
@@ -82,7 +118,7 @@ export class PromptProvider implements vscode.InlineCompletionItemProvider {
                 let inferenceConfig = config.inference;
 
                 // Update status
-                this.statusbar.text = `$(sync~spin) Llama Coder`;
+                this.update('sync~spin', 'Llama Coder');
                 try {
 
                     // Check model exists
@@ -110,9 +146,9 @@ export class PromptProvider implements vscode.InlineCompletionItemProvider {
                     }
 
                     // Perform download
-                    this.statusbar.text = `$(sync~spin) Downloading`;
+                    this.update('sync~spin', 'Downloading');
                     await ollamaDownloadModel(inferenceConfig.endpoint, inferenceConfig.modelName);
-                    this.statusbar.text = `$(sync~spin) Llama Coder`;
+                    this.update('sync~spin', 'Llama Coder');
                 }
                 if (token.isCancellationRequested) {
                     info(`Canceled after AI completion.`);
@@ -141,7 +177,7 @@ export class PromptProvider implements vscode.InlineCompletionItemProvider {
                         value: res
                     });
                 } finally {
-                    this.statusbar.text = `$(chip) Llama Coder`;
+                    this.update('chip', 'Llama Coder');
                 }
             } else {
                 if (cached !== null) {
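
Note on exercising the new commands (not part of the patch): since llama.pause, llama.resume, and llama.toggle are registered with vscode.commands.registerCommand and ship with no default keybindings, they are reachable from the Command Palette or programmatically. A minimal sketch of driving them from an extension integration test; the file name and helper function below are illustrative assumptions, not code from this patch:

    // src/test/suite/pause.test.ts (hypothetical test file; not part of this patch)
    import * as vscode from 'vscode';

    // Exercises the pause/resume/toggle commands end to end. After this
    // sequence the provider should be paused again (pause -> resume -> toggle).
    export async function exercisePauseCommands(): Promise<void> {
        await vscode.commands.executeCommand('llama.pause');   // paused = true
        await vscode.commands.executeCommand('llama.resume');  // paused = false
        await vscode.commands.executeCommand('llama.toggle');  // paused = true
    }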