Skip to content

Commit 305eb0b

Browse files
authored
add jsDocs (#88)
- refactored return statements in some functions - added jsDocs
1 parent 0a5f7de commit 305eb0b

File tree

4 files changed

+199
-56
lines changed

4 files changed

+199
-56
lines changed

.gitignore

+6
Original file line numberDiff line numberDiff line change
@@ -128,3 +128,9 @@ dist
128128
.yarn/build-state.yml
129129
.yarn/install-state.gz
130130
.pnp.*
131+
132+
# IDEs
133+
.idea
134+
135+
# macOS
136+
.DS_Store

src/browser.ts

+93-26
Original file line numberDiff line numberDiff line change
@@ -2,25 +2,25 @@ import * as utils from './utils.js'
22
import 'whatwg-fetch'
33

44
import type {
5-
Fetch,
5+
ChatRequest,
6+
ChatResponse,
67
Config,
7-
GenerateRequest,
8-
PullRequest,
9-
PushRequest,
8+
CopyRequest,
9+
CreateRequest,
10+
DeleteRequest,
1011
EmbeddingsRequest,
11-
GenerateResponse,
1212
EmbeddingsResponse,
13+
ErrorResponse,
14+
Fetch,
15+
GenerateRequest,
16+
GenerateResponse,
1317
ListResponse,
1418
ProgressResponse,
15-
ErrorResponse,
16-
StatusResponse,
17-
DeleteRequest,
18-
CopyRequest,
19-
ShowResponse,
19+
PullRequest,
20+
PushRequest,
2021
ShowRequest,
21-
ChatRequest,
22-
ChatResponse,
23-
CreateRequest,
22+
ShowResponse,
23+
StatusResponse,
2424
} from './interfaces.js'
2525

2626
export class Ollama {
@@ -50,6 +50,17 @@ export class Ollama {
5050
this.abortController = new AbortController()
5151
}
5252

53+
/**
54+
* Processes a request to the Ollama server. If the request is streamable, it will return an
55+
* AsyncGenerator that yields the response messages. Otherwise, it will return the response
56+
* object.
57+
* @param endpoint {string} - The endpoint to send the request to.
58+
* @param request {object} - The request object to send to the endpoint.
59+
* @protected - Returns the response object or an AsyncGenerator that yields
60+
* response messages.
61+
* @throws {Error} - If the response body is missing or if the response is an error.
62+
* @returns {Promise<T | AsyncGenerator<T>>} - The response object or an AsyncGenerator that yields the streamed response.
63+
*/
5364
protected async processStreamableRequest<T extends object>(
5465
endpoint: string,
5566
request: { stream?: boolean } & Record<string, any>,
@@ -94,13 +105,17 @@ export class Ollama {
94105
}
95106
}
96107

108+
/**
109+
* Encodes an image to base64 if it is a Uint8Array.
110+
* @param image {Uint8Array | string} - The image to encode.
111+
* @returns {Promise<string>} - The base64 encoded image.
112+
*/
97113
async encodeImage(image: Uint8Array | string): Promise<string> {
98114
if (typeof image !== 'string') {
99115
// image is Uint8Array convert it to base64
100116
const uint8Array = new Uint8Array(image)
101117
const numberArray = Array.from(uint8Array)
102-
const base64String = btoa(String.fromCharCode.apply(null, numberArray))
103-
return base64String
118+
return btoa(String.fromCharCode.apply(null, numberArray))
104119
}
105120
// the string may be base64 encoded
106121
return image
@@ -110,7 +125,12 @@ export class Ollama {
110125
request: GenerateRequest & { stream: true },
111126
): Promise<AsyncGenerator<GenerateResponse>>
112127
generate(request: GenerateRequest & { stream?: false }): Promise<GenerateResponse>
113-
128+
/**
129+
* Generates a response from a text prompt.
130+
* @param request {GenerateRequest} - The request object.
131+
* @returns {Promise<GenerateResponse | AsyncGenerator<GenerateResponse>>} - The response object or
132+
* an AsyncGenerator that yields response messages.
133+
*/
114134
async generate(
115135
request: GenerateRequest,
116136
): Promise<GenerateResponse | AsyncGenerator<GenerateResponse>> {
@@ -122,7 +142,14 @@ export class Ollama {
122142

123143
chat(request: ChatRequest & { stream: true }): Promise<AsyncGenerator<ChatResponse>>
124144
chat(request: ChatRequest & { stream?: false }): Promise<ChatResponse>
125-
145+
/**
146+
* Chats with the model. The request object can contain messages with images that are either
147+
* Uint8Arrays or base64 encoded strings. The images will be base64 encoded before sending the
148+
* request.
149+
* @param request {ChatRequest} - The request object.
150+
* @returns {Promise<ChatResponse | AsyncGenerator<ChatResponse>>} - The response object or an
151+
* AsyncGenerator that yields response messages.
152+
*/
126153
async chat(request: ChatRequest): Promise<ChatResponse | AsyncGenerator<ChatResponse>> {
127154
if (request.messages) {
128155
for (const message of request.messages) {
@@ -140,7 +167,11 @@ export class Ollama {
140167
request: CreateRequest & { stream: true },
141168
): Promise<AsyncGenerator<ProgressResponse>>
142169
create(request: CreateRequest & { stream?: false }): Promise<ProgressResponse>
143-
170+
/**
171+
* Creates a new model from a stream of data.
172+
* @param request {CreateRequest} - The request object.
173+
* @returns {Promise<ProgressResponse | AsyncGenerator<ProgressResponse>>} - The response object or a stream of progress responses.
174+
*/
144175
async create(
145176
request: CreateRequest,
146177
): Promise<ProgressResponse | AsyncGenerator<ProgressResponse>> {
@@ -154,7 +185,13 @@ export class Ollama {
154185

155186
pull(request: PullRequest & { stream: true }): Promise<AsyncGenerator<ProgressResponse>>
156187
pull(request: PullRequest & { stream?: false }): Promise<ProgressResponse>
157-
188+
/**
189+
* Pulls a model from the Ollama registry. The request object can contain a stream flag to indicate whether the
190+
* response should be streamed.
191+
* @param request {PullRequest} - The request object.
192+
* @returns {Promise<ProgressResponse | AsyncGenerator<ProgressResponse>>} - The response object or
193+
* an AsyncGenerator that yields response messages.
194+
*/
158195
async pull(
159196
request: PullRequest,
160197
): Promise<ProgressResponse | AsyncGenerator<ProgressResponse>> {
@@ -167,7 +204,13 @@ export class Ollama {
167204

168205
push(request: PushRequest & { stream: true }): Promise<AsyncGenerator<ProgressResponse>>
169206
push(request: PushRequest & { stream?: false }): Promise<ProgressResponse>
170-
207+
/**
208+
* Pushes a model to the Ollama registry. The request object can contain a stream flag to indicate whether the
209+
* response should be streamed.
210+
* @param request {PushRequest} - The request object.
211+
* @returns {Promise<ProgressResponse | AsyncGenerator<ProgressResponse>>} - The response object or
212+
* an AsyncGenerator that yields response messages.
213+
*/
171214
async push(
172215
request: PushRequest,
173216
): Promise<ProgressResponse | AsyncGenerator<ProgressResponse>> {
@@ -178,38 +221,62 @@ export class Ollama {
178221
})
179222
}
180223

224+
/**
225+
* Deletes a model from the server. The request object should contain the name of the model to
226+
* delete.
227+
* @param request {DeleteRequest} - The request object.
228+
* @returns {Promise<StatusResponse>} - The response object.
229+
*/
181230
async delete(request: DeleteRequest): Promise<StatusResponse> {
182231
await utils.del(this.fetch, `${this.config.host}/api/delete`, {
183232
name: request.model,
184233
})
185234
return { status: 'success' }
186235
}
187236

237+
/**
238+
* Copies a model from one name to another. The request object should contain the name of the
239+
* model to copy and the new name.
240+
* @param request {CopyRequest} - The request object.
241+
* @returns {Promise<StatusResponse>} - The response object.
242+
*/
188243
async copy(request: CopyRequest): Promise<StatusResponse> {
189244
await utils.post(this.fetch, `${this.config.host}/api/copy`, { ...request })
190245
return { status: 'success' }
191246
}
192247

248+
/**
249+
* Lists the models on the server.
250+
* @returns {Promise<ListResponse>} - The response object.
251+
* @throws {Error} - If the response body is missing.
252+
*/
193253
async list(): Promise<ListResponse> {
194254
const response = await utils.get(this.fetch, `${this.config.host}/api/tags`)
195-
const listResponse = (await response.json()) as ListResponse
196-
return listResponse
255+
return (await response.json()) as ListResponse
197256
}
198257

258+
/**
259+
* Shows the metadata of a model. The request object should contain the name of the model.
260+
* @param request {ShowRequest} - The request object.
261+
* @returns {Promise<ShowResponse>} - The response object.
262+
*/
199263
async show(request: ShowRequest): Promise<ShowResponse> {
200264
const response = await utils.post(this.fetch, `${this.config.host}/api/show`, {
201265
...request,
202266
})
203-
const showResponse = (await response.json()) as ShowResponse
204-
return showResponse
267+
return (await response.json()) as ShowResponse
205268
}
206269

270+
/**
271+
* Embeds a text prompt into a vector.
272+
* @param request {EmbeddingsRequest} - The request object.
273+
* @returns {Promise<EmbeddingsResponse>} - The response object.
274+
*/
207275
async embeddings(request: EmbeddingsRequest): Promise<EmbeddingsResponse> {
208276
const response = await utils.post(this.fetch, `${this.config.host}/api/embeddings`, {
209277
...request,
210278
})
211-
const embeddingsResponse = (await response.json()) as EmbeddingsResponse
212-
return embeddingsResponse
279+
return (await response.json()) as EmbeddingsResponse
213280
}
214281
}
215282

src/index.ts

+21-4
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
import * as utils from './utils.js'
2-
import fs, { promises, createReadStream } from 'fs'
3-
import { join, resolve, dirname } from 'path'
2+
import fs, { createReadStream, promises } from 'fs'
3+
import { dirname, join, resolve } from 'path'
44
import { createHash } from 'crypto'
55
import { homedir } from 'os'
66
import { Ollama as OllamaBrowser } from './browser.js'
@@ -11,8 +11,7 @@ export class Ollama extends OllamaBrowser {
1111
async encodeImage(image: Uint8Array | Buffer | string): Promise<string> {
1212
if (typeof image !== 'string') {
1313
// image is Uint8Array or Buffer, convert it to base64
14-
const result = Buffer.from(image).toString('base64')
15-
return result
14+
return Buffer.from(image).toString('base64')
1615
}
1716
try {
1817
if (fs.existsSync(image)) {
@@ -27,6 +26,12 @@ export class Ollama extends OllamaBrowser {
2726
return image
2827
}
2928

29+
/**
30+
* Parses the modelfile and replaces the FROM and ADAPTER commands with the corresponding blob hashes.
31+
* @param modelfile {string} - The modelfile content
32+
* @param mfDir {string} - The directory of the modelfile
33+
* @private @internal
34+
*/
3035
private async parseModelfile(
3136
modelfile: string,
3237
mfDir: string = process.cwd(),
@@ -49,13 +54,25 @@ export class Ollama extends OllamaBrowser {
4954
return out.join('\n')
5055
}
5156

57+
/**
58+
* Resolves the path to an absolute path.
59+
* @param inputPath {string} - The input path
60+
* @param mfDir {string} - The directory of the modelfile
61+
* @private @internal
62+
*/
5263
private resolvePath(inputPath, mfDir) {
5364
if (inputPath.startsWith('~')) {
5465
return join(homedir(), inputPath.slice(1))
5566
}
5667
return resolve(mfDir, inputPath)
5768
}
5869

70+
/**
71+
* Checks whether a file exists.
72+
* @param path {string} - The path to the file
73+
* @private @internal
74+
* @returns {Promise<boolean>} - Whether the file exists or not
75+
*/
5976
private async fileExists(path: string): Promise<boolean> {
6077
try {
6178
await promises.access(path)

0 commit comments

Comments
 (0)