
Commit a00523c

feat(completion): move to codegemma completion (#32)
1 parent dce7758 commit a00523c

File tree

5 files changed (+25, -28 lines):
- package.json
- src/common/completion/index.ts
- src/common/download/index.ts
- src/common/prompt/promptCompletion.ts
- src/common/server/index.ts


package.json

+4 -8

@@ -113,13 +113,11 @@
         "markdownDescription": "Select a model for the automatic mode based on your system specifications.",
         "enum": [
           "base-small",
-          "base-medium",
-          "base-large"
+          "base-medium"
         ],
         "enumDescriptions": [
           "Use if you have only CPU.",
-          "Use if you have standard GPU.",
-          "Use if you have enterprise GPU."
+          "Use if you have standard GPU."
         ]
       },
       "firecoder.completion.manuallyMode": {
@@ -128,13 +126,11 @@
         "markdownDescription": "Select a model for the manually mode based on your system specifications.",
         "enum": [
           "base-small",
-          "base-medium",
-          "base-large"
+          "base-medium"
         ],
         "enumDescriptions": [
           "Use if you have only CPU.",
-          "Use if you have standard GPU.",
-          "Use if you have enterprise GPU."
+          "Use if you have standard GPU."
        ]
      },
      "firecoder.experimental.useopentabs": {

src/common/completion/index.ts

+2 -2

@@ -82,11 +82,11 @@ export const getInlineCompletionProvider = (
   const parameters = triggerAuto
     ? {
         n_predict: 128,
-        stop: ["\n"],
+        stop: ["\n", "<|file_separator|>"],
       }
     : {
         n_predict: 512,
-        stop: [],
+        stop: ["<|file_separator|>"],
         temperature: 0.5,
       };
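
The added `<|file_separator|>` stop sequence matches CodeGemma's end-of-file token, so generation halts when the model tries to move past the current file. Below is a minimal sketch of how parameters like these could be sent to a local llama.cpp HTTP server's /completion endpoint; the port, the use of fetch, and the function name are illustrative assumptions rather than code from this commit:

// Illustrative request to a llama.cpp server: n_predict, stop and
// temperature go in the JSON body, and the generated text comes back
// in the "content" field of the response.
async function requestCompletion(prompt: string, triggerAuto: boolean): Promise<string> {
  const parameters = triggerAuto
    ? { n_predict: 128, stop: ["\n", "<|file_separator|>"] }
    : { n_predict: 512, stop: ["<|file_separator|>"], temperature: 0.5 };

  const response = await fetch("http://localhost:39720/completion", {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ prompt, ...parameters }),
  });
  const { content } = (await response.json()) as { content: string };
  return content;
}
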

src/common/download/index.ts

+5 -10

@@ -155,19 +155,14 @@ const getModelInfo = async (
 ): Promise<ResourceInfo | null> => {
   const models: Record<TypeModel, { url: string; checksum: string }> = {
     "base-small": {
-      url: "https://huggingface.co/TheBloke/deepseek-coder-1.3b-base-GGUF/resolve/main/deepseek-coder-1.3b-base.Q8_0.gguf",
+      url: "https://huggingface.co/lmstudio-community/codegemma-2b-GGUF/resolve/main/codegemma-2b-Q5_K_M.gguf",
       checksum:
-        "9fcdcb283ef5b1d80ec7365b307c1ceab0c0f8ea079b49969f7febc06a11bccd",
+        "95f06d59cbf697da2fe9aa00b019c4f2e464718a956352ccbbb1cb436b98a2a7",
     },
     "base-medium": {
-      url: "https://huggingface.co/TheBloke/deepseek-coder-6.7B-base-GGUF/resolve/main/deepseek-coder-6.7b-base.Q8_0.gguf",
+      url: "https://huggingface.co/lmstudio-community/codegemma-7b-GGUF/resolve/main/codegemma-7b-Q5_K_M.gguf",
       checksum:
-        "a2f82242ac5e465037cbf1ed754f04f0be044ee196e1589905f9e4dcd0e6559d",
-    },
-    "base-large": {
-      url: "https://huggingface.co/TheBloke/deepseek-coder-33B-base-GGUF/resolve/main/deepseek-coder-33b-base.Q8_0.gguf",
-      checksum:
-        "9b9210b7de8c26d94773146613ee86844a714aae997223355bb520927627feff",
+        "eb00372705e7d5d30442750e8a7c72919c8e243bee52e1cce97fcfc1008c6143",
     },
     "chat-small": {
       url: "https://huggingface.co/TheBloke/deepseek-coder-1.3b-instruct-GGUF/resolve/main/deepseek-coder-1.3b-instruct.Q8_0.gguf",
@@ -263,7 +258,7 @@ export const downloadModel = async (typeModel: TypeModel) => {
   const modelFileInfo = await getModelInfo(typeModel);
 
   if (modelFileInfo === null) {
-    throw new Error("Server file info not found");
+    throw new Error("Model file info not found");
   }
 
   Logger.info(`Got model ${typeModel} file info`, {
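
The base models now point at the lmstudio-community CodeGemma GGUF builds (codegemma-2b and codegemma-7b, Q5_K_M quantization), and the 33B "base-large" entry is dropped. The 64-character hex checksums look like SHA-256 digests; a minimal sketch of verifying a downloaded file against one with Node's crypto module follows (the function itself is an assumption, not taken from this repository):

import { createHash } from "node:crypto";
import { createReadStream } from "node:fs";

// Stream the downloaded GGUF file through SHA-256 and compare the hex
// digest with the expected checksum from the models table.
async function verifyChecksum(filePath: string, expected: string): Promise<boolean> {
  const hash = createHash("sha256");
  for await (const chunk of createReadStream(filePath)) {
    hash.update(chunk as Buffer);
  }
  return hash.digest("hex") === expected.toLowerCase();
}
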

src/common/prompt/promptCompletion.ts

+14 -4

@@ -96,7 +96,7 @@ const processingDocumentWithPosition = async ({
     tokenDifference <= maxDifference
   ) {
     return {
-      documentText: `${textBeforeSlice}<|fim▁hole|>${textAfterSlice}`,
+      documentText: `${textBeforeSlice}<|fim_suffix|>${textAfterSlice}`,
       documentTokens: tokens,
     };
   }
@@ -244,17 +244,27 @@ export const getPromptCompletion = async ({
       );
 
       additionalDocumentsText +=
-        "\n" + getRelativePath(document.uri) + "\n" + documentText;
+        "/" +
+        getRelativePath(document.uri) +
+        "\n" +
+        "<|fim_prefix|>" +
+        documentText +
+        "<|fim_middle|>" +
+        "<|file_separator|>";
       restTokens -= documentTokens;
     }
   }
 
   const activeDocumentFileName =
     additionalDocumentsText === ""
       ? ""
-      : "\n" + getRelativePath(activeDocument.uri) + "\n";
+      : "\n" +
+        "/" +
+        getRelativePath(activeDocument.uri) +
+        "\n" +
+        "<|fim_prefix|>";
 
-  const prompt = `<|fim▁begin|>${additionalDocumentsText}${activeDocumentFileName}${activeDocumentText}<|fim▁end|>`;
+  const prompt = `${additionalDocumentsText}${activeDocumentFileName}${activeDocumentText}<|fim_middle|>`;
 
   return prompt;
 };
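
The prompt builder switches from DeepSeek Coder's FIM markers (`<|fim▁begin|>`, `<|fim▁hole|>`, `<|fim▁end|>`) to CodeGemma's (`<|fim_prefix|>`, `<|fim_suffix|>`, `<|fim_middle|>`), with `<|file_separator|>` closing each additional document. For reference, the single-file shape of a CodeGemma fill-in-the-middle prompt looks roughly like the sketch below; the helper name and example strings are illustrative, not part of the commit:

// Simplified single-file FIM prompt in the CodeGemma format: text before
// the cursor follows <|fim_prefix|>, text after the cursor follows
// <|fim_suffix|>, and the model generates the missing middle after
// <|fim_middle|>.
function buildFimPrompt(textBeforeCursor: string, textAfterCursor: string): string {
  return `<|fim_prefix|>${textBeforeCursor}<|fim_suffix|>${textAfterCursor}<|fim_middle|>`;
}

// Example: ask the model to fill in the body of a function.
const prompt = buildFimPrompt(
  "function add(a: number, b: number) {\n  ",
  "\n}\n"
);
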

src/common/server/index.ts

-4

@@ -14,9 +14,6 @@ const modelsBase = {
   "base-medium": {
     port: 39721,
   },
-  "base-large": {
-    port: 39722,
-  },
 };
 export type TypeModelsBase = keyof typeof modelsBase;
 
@@ -310,7 +307,6 @@ class Server {
 export const servers = {
   "base-small": new Server("base-small"),
   "base-medium": new Server("base-medium"),
-  "base-large": new Server("base-large"),
   "chat-small": new Server("chat-small"),
   "chat-medium": new Server("chat-medium"),
   "chat-large": new Server("chat-large"),
