Skip to content

Commit 460d4d9

Browse files
authored
Merge pull request #22 from lowcoding/feat/v1.7.9
✨ feat: getEnv return extensionContext
2 parents 10b38f0 + 5153dac commit 460d4d9

File tree

5 files changed

+16
-15
lines changed

5 files changed

+16
-15
lines changed

package.json

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,7 @@
33
"displayName": "lowcode",
44
"description": "lowcode tool, support ChatGPT and other LLM",
55
"author": "wjkang <[email protected]>",
6-
"version": "1.7.8",
6+
"version": "1.7.9",
77
"icon": "asset/icon.png",
88
"publisher": "wjkang",
99
"repository": "https://github.com/lowcoding/lowcode-vscode",

src/utils/emitter.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -125,7 +125,7 @@ function mitt<Events extends Record<EventType, unknown>>(
125125

126126
type Events = {
127127
clipboardImage: string;
128-
chatGPTChunck: { text?: string; hasMore: boolean };
128+
chatGPTChunck: { text?: string };
129129
chatGPTComplete: string;
130130
};
131131

src/utils/llm.ts

Lines changed: 8 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -4,11 +4,13 @@ import { createChatCompletion as openaiCreateChatCompletion } from './openai';
44
import { emitter } from './emitter';
55
import { getSyncFolder } from './config';
66
import { showChatGPTView } from '../webview';
7+
import { getEnv } from './vscodeEnv';
78

89
const LLMScript: {
910
createChatCompletion?: (options: {
1011
messages: { role: 'system' | 'user' | 'assistant'; content: string }[];
11-
handleChunk?: (data: { text?: string; hasMore: boolean }) => void;
12+
handleChunk?: (data: { text?: string }) => void;
13+
lowcodeContext: object;
1214
}) => Promise<string>;
1315
} = {};
1416

@@ -27,7 +29,7 @@ if (syncFolder) {
2729

2830
export const createChatCompletion = async (options: {
2931
messages: { role: 'system' | 'user' | 'assistant'; content: string }[];
30-
handleChunk?: (data: { text?: string; hasMore: boolean }) => void;
32+
handleChunk?: (data: { text?: string }) => void;
3133
}) => {
3234
if (LLMScript.createChatCompletion) {
3335
const res = await LLMScript.createChatCompletion({
@@ -38,6 +40,9 @@ export const createChatCompletion = async (options: {
3840
emitter.emit('chatGPTChunck', data);
3941
}
4042
},
43+
lowcodeContext: {
44+
env: getEnv(),
45+
},
4146
});
4247
emitter.emit('chatGPTComplete', res);
4348
return res;
@@ -57,7 +62,7 @@ export const createChatCompletion = async (options: {
5762

5863
export const createChatCompletionForScript = (options: {
5964
messages: { role: 'system' | 'user' | 'assistant'; content: string }[];
60-
handleChunk?: (data: { text?: string; hasMore: boolean }) => void;
65+
handleChunk?: (data: { text?: string }) => void;
6166
showWebview?: boolean;
6267
}) => {
6368
if (!options.showWebview) {

src/utils/openai.ts

Lines changed: 4 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -1,11 +1,10 @@
11
import * as https from 'https';
22
import { TextDecoder } from 'util';
33
import { getChatGPTConfig } from './config';
4-
import { showChatGPTView } from '../webview';
54

65
export const createChatCompletion = (options: {
76
messages: { role: 'system' | 'user' | 'assistant'; content: string }[];
8-
handleChunk?: (data: { text?: string; hasMore: boolean }) => void;
7+
handleChunk?: (data: { text?: string }) => void;
98
}) =>
109
new Promise<string>((resolve) => {
1110
let combinedResult = '';
@@ -41,15 +40,15 @@ export const createChatCompletion = (options: {
4140
if (element.includes('data: ')) {
4241
if (element.includes('[DONE]')) {
4342
if (options.handleChunk) {
44-
options.handleChunk({ hasMore: true, text: '' });
43+
options.handleChunk({ text: '' });
4544
}
4645
return;
4746
}
4847
// remove 'data: '
4948
const data = JSON.parse(element.replace('data: ', ''));
5049
if (data.finish_reason === 'stop') {
5150
if (options.handleChunk) {
52-
options.handleChunk({ hasMore: true, text: '' });
51+
options.handleChunk({ text: '' });
5352
}
5453
return;
5554
}
@@ -58,15 +57,13 @@ export const createChatCompletion = (options: {
5857
if (options.handleChunk) {
5958
options.handleChunk({
6059
text: openaiRes.replaceAll('\\n', '\n'),
61-
hasMore: true,
6260
});
6361
}
6462
combinedResult += openaiRes;
6563
}
6664
} else {
6765
if (options.handleChunk) {
6866
options.handleChunk({
69-
hasMore: true,
7067
text: element,
7168
});
7269
}
@@ -84,7 +81,6 @@ export const createChatCompletion = (options: {
8481
res.on('error', (e) => {
8582
if (options.handleChunk) {
8683
options.handleChunk({
87-
hasMore: true,
8884
text: e.toString(),
8985
});
9086
}
@@ -95,7 +91,6 @@ export const createChatCompletion = (options: {
9591
if (error !== '发生错误:') {
9692
if (options.handleChunk) {
9793
options.handleChunk({
98-
hasMore: true,
9994
text: error,
10095
});
10196
}
@@ -111,8 +106,7 @@ export const createChatCompletion = (options: {
111106
max_tokens: config.maxTokens,
112107
};
113108
request.on('error', (error) => {
114-
options.handleChunk &&
115-
options.handleChunk({ hasMore: true, text: error.toString() });
109+
options.handleChunk && options.handleChunk({ text: error.toString() });
116110
resolve(error.toString());
117111
});
118112
request.write(JSON.stringify(body));

src/utils/vscodeEnv.ts

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
import * as path from 'path';
22
import { workspace } from 'vscode';
33
import { getSyncFolder } from './config';
4+
import { getExtensionContext } from '../context';
45

56
export const rootPath = path.join(workspace.rootPath || '');
67

@@ -39,6 +40,7 @@ export const getEnv = () => ({
3940
blockMaterialsPath,
4041
snippetMaterialsPath,
4142
privateMaterialsPath: getSyncFolder(),
43+
extensionContext: getExtensionContext(),
4244
});
4345

4446
export const checkRootPath = () => {

0 commit comments

Comments (0)