From fd0f553738b34d358e976fa117f94e0fbc29085f Mon Sep 17 00:00:00 2001
From: Nathan Hedge <23344786+10Nates@users.noreply.github.com>
Date: Wed, 20 Dec 2023 16:06:47 -0600
Subject: [PATCH] moved command function outside of activate (cleanup)

---
 src/extension.ts | 181 ++++++++++++++++++++++++-----------------------
 1 file changed, 92 insertions(+), 89 deletions(-)

diff --git a/src/extension.ts b/src/extension.ts
index 3a64a47..79a3d9e 100644
--- a/src/extension.ts
+++ b/src/extension.ts
@@ -11,6 +11,97 @@ if (apiSystemMessage == "DEFAULT") apiSystemMessage = undefined;
 const numPredict: number = VSConfig.get("max-tokens-predicted") || 500;
 const promptWindowSize: number = VSConfig.get("prompt-window-size") || 2000;
 
+// Function called on ollama-coder.autocomplete
+async function autocompleteCommand(document: vscode.TextDocument, position: vscode.Position, prompt: string, cancellationToken: vscode.CancellationToken) {
+	// Show a progress message
+	vscode.window.withProgress(
+		{
+			location: vscode.ProgressLocation.Notification,
+			title: "Getting a completion from Ollama...",
+			cancellable: true,
+		},
+		async (progress, progressCancellationToken) => {
+			try {
+				// Make a request to the ollama.ai REST API
+				const response = await axios.post(apiEndpoint, {
+					model: apiModel, // Change this to the model you want to use
+					prompt: prompt,
+					stream: true,
+					system: apiSystemMessage,
+					options: {
+						num_predict: numPredict
+					},
+				}, {
+					cancelToken: new axios.CancelToken((c) => {
+						const cancelPost = function () {
+							c("Autocompletion request terminated");
+						};
+						cancellationToken.onCancellationRequested(cancelPost);
+						progressCancellationToken.onCancellationRequested(cancelPost);
+						vscode.workspace.onDidCloseTextDocument(cancelPost);
+					}),
+					responseType: 'stream'
+				}
+				);
+
+				//tracker
+				let currentPosition = position;
+
+				response.data.on('data', async (d: Uint8Array) => {
+					// Get a completion from the response
+					const completion: string = JSON.parse(d.toString()).response;
+
+					//complete edit for token
+					const edit = new vscode.WorkspaceEdit();
+					const range = new vscode.Range(
+						currentPosition.line,
+						currentPosition.character,
+						currentPosition.line,
+						currentPosition.character
+					);
+					edit.replace(document.uri, range, completion);
+					await vscode.workspace.applyEdit(edit);
+
+					// Move the cursor to the end of the completion
+					const completionLines = completion.split("\n");
+					const newPosition = position.with(
+						currentPosition.line + completionLines.length,
+						(completionLines.length > 0 ? 0 : currentPosition.character) + completionLines[completionLines.length - 1].length
+					);
+					const newSelection = new vscode.Selection(
+						newPosition,
+						newPosition
+					);
+					currentPosition = newPosition;
+
+					// completion bar
+					progress.report({ increment: 1 / (numPredict/100) });
+
+					// move cursor
+					const editor = vscode.window.activeTextEditor;
+					if (editor) editor.selection = newSelection;
+				});
+
+				// Keep cancel window available
+				const finished = new Promise((resolve) => {
+					response.data.on('end', () => {
+						progress.report({ message: "Ollama completion finished." });
+						resolve(true);
+					});
+				});
+
+				await finished;
+
+			} catch (err: any) {
+				// Show an error message
+				vscode.window.showErrorMessage(
+					"Ollama encountered an error: " + err.message
+				);
+			}
+		}
+	);
+}
+
 // This method is called when your extension is activated
 function activate(context: vscode.ExtensionContext) {
 	// Register a completion provider for JavaScript files
@@ -47,95 +138,7 @@ function activate(context: vscode.ExtensionContext) {
 	// Register a command for getting a completion from Ollama
 	const disposable = vscode.commands.registerCommand(
 		"ollama-coder.autocomplete",
-		async function (document: vscode.TextDocument, position: vscode.Position, prompt: string, cancellationToken: vscode.CancellationToken) {
-			// Show a progress message
-			vscode.window.withProgress(
-				{
-					location: vscode.ProgressLocation.Notification,
-					title: "Getting a completion from Ollama...",
-					cancellable: true,
-				},
-				async (progress, progressCancellationToken) => {
-					try {
-						// Make a request to the ollama.ai REST API
-						const response = await axios.post(apiEndpoint, {
-							model: apiModel, // Change this to the model you want to use
-							prompt: prompt,
-							stream: true,
-							system: apiSystemMessage,
-							options: {
-								num_predict: numPredict
-							},
-						}, {
-							cancelToken: new axios.CancelToken((c) => {
-								const cancelPost = function () {
-									c("Autocompletion request terminated");
-								};
-								cancellationToken.onCancellationRequested(cancelPost);
-								progressCancellationToken.onCancellationRequested(cancelPost);
-								vscode.workspace.onDidCloseTextDocument(cancelPost);
-							}),
-							responseType: 'stream'
-						}
-						);
-
-						//tracker
-						let currentPosition = position;
-
-						response.data.on('data', async (d: Uint8Array) => {
-							// Get a completion from the response
-							const completion: string = JSON.parse(d.toString()).response;
-
-							//complete edit for token
-							const edit = new vscode.WorkspaceEdit();
-							const range = new vscode.Range(
-								currentPosition.line,
-								currentPosition.character,
-								currentPosition.line,
-								currentPosition.character
-							);
-							edit.replace(document.uri, range, completion);
-							await vscode.workspace.applyEdit(edit);
-
-							// Move the cursor to the end of the completion
-							const completionLines = completion.split("\n");
-							const newPosition = position.with(
-								currentPosition.line + completionLines.length,
-								(completionLines.length > 0 ? 0 : currentPosition.character) + completionLines[completionLines.length - 1].length
-							);
-							const newSelection = new vscode.Selection(
-								newPosition,
-								newPosition
-							);
-							currentPosition = newPosition;
-
-							// completion bar
-							progress.report({ increment: 1 / (numPredict/100) });
-
-							// move cursor
-							const editor = vscode.window.activeTextEditor;
-							if (editor) editor.selection = newSelection;
-						});
-
-						// Keep cancel window available
-						const finished = new Promise((resolve) => {
-							response.data.on('end', () => {
-								progress.report({ message: "Ollama completion finished." });
-								resolve(true);
-							});
-						});
-
-						await finished;
-
-					} catch (err: any) {
-						// Show an error message
-						vscode.window.showErrorMessage(
-							"Ollama encountered an error: " + err.message
-						);
-					}
-				}
-			);
-		}
+		autocompleteCommand
 	);
 
 	// Add the command to the context