Commit cdec2ad

Merge pull request #31 from ollama/keepalive
add keep_alive
2 parents 4a81fa4 + fbb6553 commit cdec2ad

File tree: 2 files changed, +36 -2 lines changed

ollama/_client.py

Lines changed: 24 additions & 2 deletions
@@ -93,6 +93,7 @@ def generate(
     format: Literal['', 'json'] = '',
     images: Optional[Sequence[AnyStr]] = None,
     options: Optional[Options] = None,
+    keep_alive: Optional[Union[float, str]] = None,
   ) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
     """
     Create a response using the requested model.
@@ -121,6 +122,7 @@ def generate(
         'images': [_encode_image(image) for image in images or []],
         'format': format,
         'options': options or {},
+        'keep_alive': keep_alive,
       },
       stream=stream,
     )
@@ -132,6 +134,7 @@ def chat(
     stream: bool = False,
     format: Literal['', 'json'] = '',
     options: Optional[Options] = None,
+    keep_alive: Optional[Union[float, str]] = None,
   ) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
     """
     Create a chat response using the requested model.
@@ -165,18 +168,26 @@ def chat(
         'stream': stream,
         'format': format,
         'options': options or {},
+        'keep_alive': keep_alive,
       },
       stream=stream,
     )

-  def embeddings(self, model: str = '', prompt: str = '', options: Optional[Options] = None) -> Sequence[float]:
+  def embeddings(
+    self,
+    model: str = '',
+    prompt: str = '',
+    options: Optional[Options] = None,
+    keep_alive: Optional[Union[float, str]] = None,
+  ) -> Sequence[float]:
     return self._request(
       'POST',
       '/api/embeddings',
       json={
         'model': model,
         'prompt': prompt,
         'options': options or {},
+        'keep_alive': keep_alive,
       },
     ).json()
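The synchronous Client methods above (generate, chat, embeddings) now forward keep_alive to the Ollama server, which controls how long the model stays loaded in memory after the request. A minimal usage sketch, not part of this commit; the model name and durations are illustrative, and the value semantics (duration string, number of seconds, 0 to unload immediately) follow the Ollama server documentation:

from ollama import Client

client = Client()

# Keep the model loaded for five minutes after this call.
response = client.generate(model='llama2', prompt='Why is the sky blue?', keep_alive='5m')

# A numeric keep_alive is read as seconds; 0 asks the server to unload the model right away.
reply = client.chat(
  model='llama2',
  messages=[{'role': 'user', 'content': 'Hello!'}],
  keep_alive=0,
)

# Leaving keep_alive unset sends None, so the server falls back to its own default.
result = client.embeddings(model='llama2', prompt='The sky is blue.')

The asynchronous client, changed in the hunks below, accepts the same parameter.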

@@ -364,6 +375,7 @@ async def generate(
     format: Literal['', 'json'] = '',
     images: Optional[Sequence[AnyStr]] = None,
     options: Optional[Options] = None,
+    keep_alive: Optional[Union[float, str]] = None,
   ) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
     """
     Create a response using the requested model.
@@ -391,6 +403,7 @@ async def generate(
         'images': [_encode_image(image) for image in images or []],
         'format': format,
         'options': options or {},
+        'keep_alive': keep_alive,
       },
       stream=stream,
     )
@@ -402,6 +415,7 @@ async def chat(
     stream: bool = False,
     format: Literal['', 'json'] = '',
     options: Optional[Options] = None,
+    keep_alive: Optional[Union[float, str]] = None,
   ) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
     """
     Create a chat response using the requested model.
@@ -434,18 +448,26 @@ async def chat(
         'stream': stream,
         'format': format,
         'options': options or {},
+        'keep_alive': keep_alive,
       },
       stream=stream,
     )

-  async def embeddings(self, model: str = '', prompt: str = '', options: Optional[Options] = None) -> Sequence[float]:
+  async def embeddings(
+    self,
+    model: str = '',
+    prompt: str = '',
+    options: Optional[Options] = None,
+    keep_alive: Optional[Union[float, str]] = None,
+  ) -> Sequence[float]:
     response = await self._request(
       'POST',
       '/api/embeddings',
       json={
         'model': model,
         'prompt': prompt,
         'options': options or {},
+        'keep_alive': keep_alive,
      },
     )
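The AsyncClient changes mirror the synchronous ones. A short sketch of the async path, again with an illustrative model name and keep_alive value:

import asyncio

from ollama import AsyncClient


async def main() -> None:
  client = AsyncClient()
  # Keep the model resident for an hour after this request.
  response = await client.generate(model='llama2', prompt='Hello', keep_alive='1h')
  print(response['response'])


asyncio.run(main())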

tests/test_client.py

Lines changed: 12 additions & 0 deletions
@@ -29,6 +29,7 @@ def test_client_chat(httpserver: HTTPServer):
       'stream': False,
       'format': '',
       'options': {},
+      'keep_alive': None,
     },
   ).respond_with_json(
     {
@@ -75,6 +76,7 @@ def generate():
       'stream': True,
       'format': '',
       'options': {},
+      'keep_alive': None,
     },
   ).respond_with_handler(stream_handler)

@@ -103,6 +105,7 @@ def test_client_chat_images(httpserver: HTTPServer):
       'stream': False,
       'format': '',
       'options': {},
+      'keep_alive': None,
     },
   ).respond_with_json(
     {
@@ -139,6 +142,7 @@ def test_client_generate(httpserver: HTTPServer):
       'images': [],
       'format': '',
       'options': {},
+      'keep_alive': None,
     },
   ).respond_with_json(
     {
@@ -183,6 +187,7 @@ def generate():
       'images': [],
       'format': '',
       'options': {},
+      'keep_alive': None,
     },
   ).respond_with_handler(stream_handler)

@@ -210,6 +215,7 @@ def test_client_generate_images(httpserver: HTTPServer):
       'images': ['iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAIAAACQd1PeAAAADElEQVR4nGNgYGAAAAAEAAH2FzhVAAAAAElFTkSuQmCC'],
       'format': '',
       'options': {},
+      'keep_alive': None,
     },
   ).respond_with_json(
     {
@@ -513,6 +519,7 @@ async def test_async_client_chat(httpserver: HTTPServer):
       'stream': False,
       'format': '',
       'options': {},
+      'keep_alive': None,
     },
   ).respond_with_json({})

@@ -550,6 +557,7 @@ def generate():
       'stream': True,
       'format': '',
       'options': {},
+      'keep_alive': None,
     },
   ).respond_with_handler(stream_handler)

@@ -579,6 +587,7 @@ async def test_async_client_chat_images(httpserver: HTTPServer):
       'stream': False,
       'format': '',
       'options': {},
+      'keep_alive': None,
     },
   ).respond_with_json({})

@@ -606,6 +615,7 @@ async def test_async_client_generate(httpserver: HTTPServer):
       'images': [],
       'format': '',
       'options': {},
+      'keep_alive': None,
     },
   ).respond_with_json({})

@@ -645,6 +655,7 @@ def generate():
       'images': [],
       'format': '',
       'options': {},
+      'keep_alive': None,
     },
   ).respond_with_handler(stream_handler)

@@ -673,6 +684,7 @@ async def test_async_client_generate_images(httpserver: HTTPServer):
       'images': ['iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAIAAACQd1PeAAAADElEQVR4nGNgYGAAAAAEAAH2FzhVAAAAAElFTkSuQmCC'],
       'format': '',
       'options': {},
+      'keep_alive': None,
     },
   ).respond_with_json({})
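The updated tests only pin the default: when the caller does not pass keep_alive, the client serializes None into the request body. Below is a sketch of an additional case asserting that an explicit value is forwarded, following the same pytest-httpserver pattern; the test name, the '10m' value, and the exact field set (mirrored from test_client_chat above) are assumptions, not part of this commit:

from pytest_httpserver import HTTPServer

from ollama import Client


def test_client_chat_keep_alive(httpserver: HTTPServer):
  # Expect the explicit keep_alive value to appear verbatim in the request body.
  httpserver.expect_ordered_request(
    '/api/chat',
    method='POST',
    json={
      'model': 'dummy',
      'messages': [{'role': 'user', 'content': 'Why is the sky blue?'}],
      'stream': False,
      'format': '',
      'options': {},
      'keep_alive': '10m',
    },
  ).respond_with_json({})

  client = Client(httpserver.url_for('/'))
  response = client.chat(
    'dummy',
    messages=[{'role': 'user', 'content': 'Why is the sky blue?'}],
    keep_alive='10m',
  )
  assert response == {}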
