
Commit c4649cd

Formatting in deepSeekService.ts

1 parent 2732ab6

File tree

1 file changed: +6 -7 lines changed

src/llm/llmServices/deepSeek/deepSeekService.ts (+6 -7)
@@ -1,5 +1,6 @@
 import OpenAI from "openai";
 
+import { asErrorOrRethrow } from "../../../utils/errorsUtils";
 import { illegalState } from "../../../utils/throwErrors";
 import { ProofGenerationContext } from "../../proofGenerationContext";
 import { DeepSeekUserModelParams } from "../../userModelParams";
@@ -16,7 +17,6 @@ import { DeepSeekModelParams } from "../modelParams";
 import { toO1CompatibleChatHistory } from "../utils/o1ClassModels";
 
 import { DeepSeekModelParamsResolver } from "./deepSeekModelParamsResolver";
-import { asErrorOrRethrow } from "../../../utils/errorsUtils";
 
 export class DeepSeekService extends LLMServiceImpl<
     DeepSeekUserModelParams,
@@ -121,7 +121,6 @@ class DeepSeekServiceInternal extends LLMServiceInternal<
             );
         }
         const content = completion.choices[0].message.content;
-        completion.usage
         if (content === null) {
             illegalState("response message content is null");
         }
@@ -155,9 +154,8 @@ class DeepSeekServiceInternal extends LLMServiceInternal<
         );
     }
 
-    private accumulateTokenMetrics(
-        tokenUsages: TokenMetrics[]
-    ): TokenMetrics {
+    /* eslint-disable @typescript-eslint/naming-convention */
+    private accumulateTokenMetrics(tokenUsages: TokenMetrics[]): TokenMetrics {
         const availableTokenUsages = tokenUsages.filter(
             (usage): usage is OpenAI.Completions.CompletionUsage =>
                 usage !== undefined
@@ -170,7 +168,8 @@ class DeepSeekServiceInternal extends LLMServiceInternal<
         return availableTokenUsages.reduce(
            (acc, usage) => {
                return {
-                    completion_tokens: acc.completion_tokens + usage.completion_tokens,
+                    completion_tokens:
+                        acc.completion_tokens + usage.completion_tokens,
                    prompt_tokens: acc.prompt_tokens + usage.prompt_tokens,
                    total_tokens: acc.total_tokens + usage.total_tokens,
                };
@@ -216,4 +215,4 @@ class DeepSeekServiceInternal extends LLMServiceInternal<
     }
 }
 
-type TokenMetrics = OpenAI.Completions.CompletionUsage | undefined;
+type TokenMetrics = OpenAI.Completions.CompletionUsage | undefined;
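Aside from moving the asErrorOrRethrow import and dropping a stray `completion.usage` expression statement, the commit mainly touches the token-accumulation helper. Below is a minimal standalone sketch of that logic, reconstructed from the hunks above; the free-function form, the empty-input behaviour, and the reduce seed are assumptions, since only part of the method is visible in the diff. The eslint-disable is presumably needed because CompletionUsage exposes snake_case fields (completion_tokens, prompt_tokens, total_tokens) that clash with the naming-convention rule.

import OpenAI from "openai";

// Same alias as at the bottom of deepSeekService.ts: usage data may be
// absent when a response carries no usage record.
type TokenMetrics = OpenAI.Completions.CompletionUsage | undefined;

/* eslint-disable @typescript-eslint/naming-convention */
// Sketch of the accumulation performed by
// DeepSeekServiceInternal.accumulateTokenMetrics: filter out undefined
// entries, then sum the three token counters field by field.
function accumulateTokenMetrics(tokenUsages: TokenMetrics[]): TokenMetrics {
    const availableTokenUsages = tokenUsages.filter(
        (usage): usage is OpenAI.Completions.CompletionUsage =>
            usage !== undefined
    );
    if (availableTokenUsages.length === 0) {
        // Assumption: no usage data at all is reported as undefined.
        return undefined;
    }
    // Without an explicit seed, reduce starts from the first usage record;
    // the actual seed used in the file is not visible in the diff.
    return availableTokenUsages.reduce((acc, usage) => {
        return {
            completion_tokens:
                acc.completion_tokens + usage.completion_tokens,
            prompt_tokens: acc.prompt_tokens + usage.prompt_tokens,
            total_tokens: acc.total_tokens + usage.total_tokens,
        };
    });
}
/* eslint-enable @typescript-eslint/naming-convention */

For example, accumulateTokenMetrics([undefined, usageA, usageB]) skips the undefined entry and returns the field-wise sum of usageA and usageB.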
