{
"name" : "ollama-coder" ,
"displayName" : "Ollama Coder" ,
"version" : "0.0.1" ,
"publisher" : "10nates" ,
"license" : "MIT" ,
"repository" : {
"type" : "git" ,
"url" : "https://github.com/10Nates/ollama-coder"
} ,
"engines" : {
"vscode" : "^1.73.0"
} ,
"categories" : [
"Other"
] ,
"activationEvents" : [
"*"
] ,
"main" : "./out/extension.js" ,
"contributes" : {
"configuration" : {
"title" : "Ollama Coder" ,
"properties" : {
"ollama-coder.endpoint" : {
"type" : "string" ,
"default" : "http://localhost:11434/api/generate" ,
"description" : "The endpoint of the ollama REST API"
} ,
"ollama-coder.model" : {
"type" : "string" ,
"default" : "deepseek-coder" ,
"description" : "The model to use for generating completions"
} ,
"ollama-coder.system-message" : {
"type" : "string" ,
"default" : "You are a code autocompletion engine. Respond with a continuation of the code provided and nothing else. Code should not be in a code block. Anything that is not code should be written as a code comment." ,
"description" : "The system message to use for code completions. Type DEFAULT for Makefile."
} ,
"ollama-coder.max-tokens-predicted" : {
"type" : "integer" ,
"default" : 500 ,
"description" : "The maximum number of tokens generated by the model."
} ,
"ollama-coder.prompt-window-size" : {
"type" : "integer" ,
"default" : 2000 ,
"description" : "The size of the prompt in characters. NOT tokens, so can be set about 1.5-2x the max tokens of the model (varies)."
}
}
}
} ,
"scripts" : {
"vscode:prepublish" : "npm run compile" ,
"compile" : "tsc -p ./" ,
"lint" : "eslint \"src/**/*.ts\"" ,
"watch" : "tsc -watch -p ./"
} ,
"devDependencies" : {
"@types/node" : "^16.18.34" ,
"@types/vscode" : "^1.73.0" ,
"@typescript-eslint/eslint-plugin" : "^6.7.0" ,
"@typescript-eslint/parser" : "^6.7.0" ,
"eslint" : "^8.26.0" ,
"typescript" : "^5.3.2"
} ,
"dependencies" : {
"axios" : "^1.6.2"
}
}