removed the cursor-follows option because it was silly, added a toggle for ask-on-space, exposed the autocomplete command to the command palette

Nathan Hedge 2023-12-21 00:33:07 -06:00
parent b095396220
commit ee97715d9b
No known key found for this signature in database
GPG Key ID: 1ADBA36D6E304C5C
3 changed files with 58 additions and 45 deletions

View File

@@ -10,7 +10,8 @@ A simple to use Ollama autocompletion engine with options exposed and streaming
 ## How to Use
-1. In a text document, press space or go to a new line. The option `Autocomplete with Ollama` will appear. Press enter to start generation.
+1. In a text document, press space. The option `Autocomplete with Ollama` will appear. Press `enter` to start generation.
+   - Alternatively, you can run the `Autocomplete with Ollama` command from the command palette (or set a keybind).
 2. After startup, the tokens will be streamed to your cursor.
 3. To stop the generation early, press the "Cancel" button on the "Ollama Autocoder" notification
 4. Once generation stops, the notification will disappear.
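For anyone following the "set a keybind" suggestion in step 1, a minimal keybindings.json sketch for the newly exposed command could look like this. The command ID and title come from this commit; the key chord itself is only an illustrative placeholder, not something the extension ships.

[
	{
		// hypothetical chord - use whatever is free on your setup
		"key": "ctrl+alt+a",
		"command": "ollama-autocoder.autocomplete",
		"when": "editorTextFocus"
	}
]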

View File

@@ -67,13 +67,19 @@
 					"default": 2000,
 					"description": "The size of the prompt in characters. NOT tokens, so can be set about 1.5-2x the max tokens of the model (varies)."
 				},
-				"ollama-autocoder.cursor-follows": {
+				"ollama-autocoder.ask-on-space": {
 					"type": "boolean",
 					"default": true,
-					"description": "The user's cursor will follow along with the generation. Disabling this can cause unintended effects when typing above/in front of the generation point, but could boost user productivity."
+					"description": "Have the option `Autocomplete with Ollama` appear when you press space. **RELOAD REQUIRED**"
 				}
 			}
-		}
+		},
+		"commands": [
+			{
+				"command": "ollama-autocoder.autocomplete",
+				"title": "Autocomplete with Ollama"
+			}
+		]
 	},
 	"scripts": {
 		"vscode:prepublish": "npm run compile",

View File

@@ -9,18 +9,18 @@ let apiModel: string;
 let apiSystemMessage: string | undefined;
 let numPredict: number;
 let promptWindowSize: number;
-let rawInput: boolean;
-let cursorFollows: boolean | undefined;
+let rawInput: boolean | undefined;
+let askOnSpace: boolean | undefined;
 
 function updateVSConfig() {
 	VSConfig = vscode.workspace.getConfiguration("ollama-autocoder");
 	apiEndpoint = VSConfig.get("apiEndpoint") || "http://localhost:11434/api/generate";
-	apiModel = VSConfig.get("model") || "openhermes2.5-mistral:7b-q4_K_M";
+	apiModel = VSConfig.get("model") || "openhermes2.5-mistral:7b-q4_K_M"; // The model I tested with
 	apiSystemMessage = VSConfig.get("system-message");
 	numPredict = VSConfig.get("max-tokens-predicted") || 500;
 	promptWindowSize = VSConfig.get("prompt-window-size") || 2000;
-	rawInput = VSConfig.get("raw-input") || false;
-	cursorFollows = VSConfig.get("cursor-follows");
+	rawInput = VSConfig.get("raw-input");
+	askOnSpace = VSConfig.get("ask-on-space"); // not actually changeable, requires reload
 	if (apiSystemMessage == "DEFAULT" || rawInput) apiSystemMessage = undefined;
 }
@@ -30,8 +30,12 @@ updateVSConfig();
 // No need for restart for any of these settings
 vscode.workspace.onDidChangeConfiguration(updateVSConfig);
 
-// Function called on ollama-autocoder.autocomplete
-async function autocompleteCommand(document: vscode.TextDocument, position: vscode.Position, prompt: string, cancellationToken: vscode.CancellationToken) {
+// internal function for autocomplete, not directly exposed
+async function autocompleteCommand(document: vscode.TextDocument, position: vscode.Position, cancellationToken?: vscode.CancellationToken) {
+	// Get the current prompt
+	let prompt = document.getText(new vscode.Range(document.lineAt(0).range.start, position));
+	prompt = prompt.substring(Math.max(0, prompt.length - promptWindowSize), prompt.length);
+
 	// Show a progress message
 	vscode.window.withProgress(
 		{
@@ -58,7 +62,7 @@ async function autocompleteCommand(document: vscode.TextDocument, position: vsco
 				const cancelPost = function () {
 					c("Autocompletion request terminated");
 				};
-				cancellationToken.onCancellationRequested(cancelPost);
+				if (cancellationToken) cancellationToken.onCancellationRequested(cancelPost);
 				progressCancellationToken.onCancellationRequested(cancelPost);
 				vscode.workspace.onDidCloseTextDocument(cancelPost);
 			}),
@@ -100,10 +104,8 @@ async function autocompleteCommand(document: vscode.TextDocument, position: vsco
 				progress.report({ message: "Generating...", increment: 1 / (numPredict / 100) });
 
 				// move cursor
-				if (cursorFollows) {
-					const editor = vscode.window.activeTextEditor;
-					if (editor) editor.selection = newSelection;
-				}
+				const editor = vscode.window.activeTextEditor;
+				if (editor) editor.selection = newSelection;
 			});
 
 			// Keep cancel window available
@@ -130,44 +132,48 @@ async function autocompleteCommand(document: vscode.TextDocument, position: vsco
 // This method is called when extension is activated
 function activate(context: vscode.ExtensionContext) {
 	// Register a completion provider for JavaScript files
-	const provider = vscode.languages.registerCompletionItemProvider("*", {
+	const completionProvider = vscode.languages.registerCompletionItemProvider("*", {
 		async provideCompletionItems(document, position, cancellationToken) {
-			// Get the current prompt
-			let prompt = document.getText(new vscode.Range(document.lineAt(0).range.start, position));
-			prompt = prompt.substring(Math.max(0, prompt.length - promptWindowSize), prompt.length);
-			// Check if the prompt is not empty and ends with a dot
-			if (prompt) {
-				// Create a completion item
-				const item = new vscode.CompletionItem("Autocomplete with Ollama");
-				// Set the insert text to a placeholder
-				item.insertText = new vscode.SnippetString('${1:}');
-				// Set the documentation to a message
-				item.documentation = new vscode.MarkdownString('Press `Enter` to get a completion from Ollama');
-				// Set the command to trigger the completion
-				item.command = {
-					command: 'ollama-autocoder.autocomplete',
-					title: 'Ollama',
-					arguments: [document, position, prompt, cancellationToken]
-				};
-				// Return the completion item
-				return [item];
-			}
+			// Create a completion item
+			const item = new vscode.CompletionItem("Autocomplete with Ollama");
+			// Set the insert text to a placeholder
+			item.insertText = new vscode.SnippetString('${1:}');
+			// Set the documentation to a message
+			item.documentation = new vscode.MarkdownString('Press `Enter` to get a completion from Ollama');
+			// Set the command to trigger the completion
+			item.command = {
+				command: 'ollama-autocoder.autocomplete-internal',
+				title: 'Ollama',
+				arguments: [document, position, cancellationToken]
+			};
+			// Return the completion item
+			return [item];
 		},
 	},
-		"\n", " "
+		" "
 	);
-
-	// Add the completion provider to the context
-	context.subscriptions.push(provider);
 
 	// Register a command for getting a completion from Ollama
-	const disposable = vscode.commands.registerCommand(
-		"ollama-autocoder.autocomplete",
+	const internalAutocompleteCommand = vscode.commands.registerCommand(
+		"ollama-autocoder.autocomplete-internal",
 		autocompleteCommand
 	);
 
-	// Add the command to the context
-	context.subscriptions.push(disposable);
+	// Register a command for getting a completion from Ollama through command/keybind
+	const externalAutocompleteCommand = vscode.commands.registerTextEditorCommand(
+		"ollama-autocoder.autocomplete",
+		(textEditor) => {
+			// no cancellation token from here
+			autocompleteCommand(textEditor.document, textEditor.selection.active);
+		}
+	);
+
+	// Add the commands to the context
+	// Add the completion provider to the context
+	if (askOnSpace) context.subscriptions.push(completionProvider);
+	context.subscriptions.push(internalAutocompleteCommand);
+	context.subscriptions.push(externalAutocompleteCommand);
 }
 
 // This method is called when extension is deactivated