{
  "name": "ollama-autocoder",
  "displayName": "Ollama Autocoder",
  "description": "A simple to use Ollama autocompletion engine with options exposed and streaming functionality",
  "version": "0.0.3",
  "icon": "icon.png",
  "publisher": "10nates",
  "license": "MIT",
  "bugs": {
    "url": "https://github.com/10Nates/ollama-autocoder/issues"
  },
  "sponsor": {
    "url": "https://ko-fi.com/natehedge"
  },
  "repository": {
    "type": "git",
    "url": "https://github.com/10Nates/ollama-autocoder"
  },
  "engines": {
    "vscode": "^1.73.0"
  },
  "categories": [
    "Machine Learning",
    "Snippets",
    "Programming Languages"
  ],
  "keywords": [
    "llama",
    "ollama",
    "gpt",
    "coding",
    "autocomplete",
    "open source",
    "assistant",
    "ai",
    "llm"
  ],
  "activationEvents": [
    "onStartupFinished"
  ],
  "main": "./out/extension.js",
  "contributes": {
    "configuration": {
      "title": "Ollama Autocoder",
      "properties": {
        "ollama-autocoder.endpoint": {
          "type": "string",
          "default": "http://localhost:11434/api/generate",
          "description": "The endpoint of the ollama REST API"
        },
        "ollama-autocoder.model": {
          "type": "string",
          "default": "openhermes2.5-mistral:7b-q4_K_M",
          "description": "The model to use for generating completions"
        },
        "ollama-autocoder.raw input": {
          "type": "boolean",
          "default": false,
          "description": "Prompt the model without formatting. Disables system message. Turn this on if you are having trouble with a model falling out of coding mode."
        },
        "ollama-autocoder.system message": {
          "type": "string",
          "default": "You are a code autocompletion engine. Respond with a continuation of the code provided and nothing else. Code should not be in a code block. Anything that is not code should be written as a code comment.",
          "description": "The system message to use for code completions. Type DEFAULT for Makefile."
        },
        "ollama-autocoder.max tokens predicted": {
          "type": "integer",
          "default": 500,
          "description": "The maximum number of tokens generated by the model."
        },
        "ollama-autocoder.prompt window size": {
          "type": "integer",
          "default": 2000,
          "description": "The size of the prompt in characters. NOT tokens, so can be set about 1.5-2x the max tokens of the model (varies)."
        }
      }
    },
    "commands": [
      {
        "command": "ollama-autocoder.autocomplete",
        "title": "Autocomplete with Ollama"
      }
    ]
  },
  "scripts": {
    "vscode:prepublish": "npm run compile",
    "compile": "tsc -p ./",
    "lint": "eslint \"src/**/*.ts\"",
    "watch": "tsc -watch -p ./"
  },
  "devDependencies": {
    "@types/node": "^16.18.34",
    "@types/vscode": "^1.73.0",
    "@typescript-eslint/eslint-plugin": "^6.7.0",
    "@typescript-eslint/parser": "^6.7.0",
    "eslint": "^8.26.0",
    "typescript": "^5.3.2"
  },
  "dependencies": {
    "axios": "^1.6.2"
  }
}