change build messages for qwen in client

lloydzhou
2024-07-11 00:48:58 +08:00
parent 844025ec14
commit 32b82b9cb3
2 changed files with 36 additions and 38 deletions


@@ -32,19 +32,25 @@ export interface OpenAIListModelResponse {
   }>;
 }
-interface RequestPayload {
+interface RequestInput {
   messages: {
     role: "system" | "user" | "assistant";
     content: string | MultimodalContent[];
   }[];
-  stream?: boolean;
-  model: string;
+}
+interface RequestParam {
+  result_format: string;
+  incremental_output?: boolean;
   temperature: number;
-  presence_penalty: number;
-  frequency_penalty: number;
+  repetition_penalty: number;
   top_p: number;
   max_tokens?: number;
 }
+interface RequestPayload {
+  model: string;
+  input: RequestInput;
+  parameters: RequestParam;
+}
 export class QwenApi implements LLMApi {
   path(path: string): string {
@@ -91,17 +97,21 @@ export class QwenApi implements LLMApi {
       },
     };
+    const shouldStream = !!options.config.stream;
     const requestPayload: RequestPayload = {
-      messages,
-      stream: options.config.stream,
       model: modelConfig.model,
-      temperature: modelConfig.temperature,
-      presence_penalty: modelConfig.presence_penalty,
-      frequency_penalty: modelConfig.frequency_penalty,
-      top_p: modelConfig.top_p,
+      input: {
+        messages,
+      },
+      parameters: {
+        result_format: "message",
+        incremental_output: shouldStream,
+        temperature: modelConfig.temperature,
+        // max_tokens: modelConfig.max_tokens,
+        top_p: modelConfig.top_p === 1 ? 0.99 : modelConfig.top_p, // qwen top_p should be < 1
+      },
     };
-    const shouldStream = !!options.config.stream;
     const controller = new AbortController();
     options.onController?.(controller);
@@ -111,7 +121,10 @@ export class QwenApi implements LLMApi {
       method: "POST",
       body: JSON.stringify(requestPayload),
       signal: controller.signal,
-      headers: getHeaders(),
+      headers: {
+        ...getHeaders(),
+        "X-DashScope-SSE": shouldStream ? "enable" : "disable",
+      },
     };
     // make a fetch request
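
For reference, a minimal standalone sketch (not part of the commit) of the DashScope-style request this change builds, assuming the public text-generation endpoint and bearer-token auth. The endpoint constant, the apiKey parameter, and the chat() wrapper below are illustrative only; the real client resolves the URL through its path() helper and the headers through getHeaders().

// Shapes mirror the interfaces introduced in this commit (simplified to string-only content).
interface RequestInput {
  messages: { role: "system" | "user" | "assistant"; content: string }[];
}
interface RequestParam {
  result_format: string;
  incremental_output?: boolean;
  temperature: number;
  top_p: number;
}
interface RequestPayload {
  model: string;
  input: RequestInput;
  parameters: RequestParam;
}

// Assumed DashScope endpoint; the client derives its own URL via path().
const DASHSCOPE_URL =
  "https://dashscope.aliyuncs.com/api/v1/services/aigc/text-generation/generation";

// Uses the global fetch available in browsers and Node 18+.
async function chat(apiKey: string, prompt: string, stream = true): Promise<Response> {
  const payload: RequestPayload = {
    model: "qwen-turbo",
    input: { messages: [{ role: "user", content: prompt }] },
    parameters: {
      result_format: "message", // return OpenAI-style message objects
      incremental_output: stream, // emit deltas only when streaming
      temperature: 0.7,
      top_p: 0.99, // qwen requires top_p < 1
    },
  };

  return fetch(DASHSCOPE_URL, {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      Authorization: `Bearer ${apiKey}`,
      // streaming is toggled via this header instead of a `stream` field in the body
      "X-DashScope-SSE": stream ? "enable" : "disable",
    },
    body: JSON.stringify(payload),
  });
}

Note how streaming is controlled by the X-DashScope-SSE header together with the incremental_output parameter rather than a stream field in the request body, which is why the commit computes shouldStream before building the payload.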