add support for thinking

To support https://github.com/ollama/ollama/pull/10584
This commit is contained in:
Devon Rifkin
2025-05-27 00:35:28 -07:00
parent 5d7c63fae1
commit eaad6df5ef
3 changed files with 37 additions and 0 deletions

13
examples/thinking.py Normal file
View File

@@ -0,0 +1,13 @@
from ollama import chat

# Demo: ask a reasoning-capable model a question with thinking mode enabled.
# The single user turn is built as a standalone dict and wrapped at the call site.
prompt = {
  'role': 'user',
  'content': 'What is 10 + 23?',
}

# think=True asks the server to return the model's reasoning trace separately
# from the final answer (supported by thinking models such as deepseek-r1).
resp = chat('deepseek-r1', messages=[prompt], think=True)

# The reasoning trace and the answer arrive as distinct message fields.
print('Thinking:\n========\n\n' + resp.message.thinking)
print('\nResponse:\n========\n\n' + resp.message.content)

View File

@@ -270,6 +270,7 @@ class Client(BaseClient):
*, *,
tools: Optional[Sequence[Union[Mapping[str, Any], Tool, Callable]]] = None, tools: Optional[Sequence[Union[Mapping[str, Any], Tool, Callable]]] = None,
stream: Literal[False] = False, stream: Literal[False] = False,
think: Optional[bool] = None,
format: Optional[Union[Literal['', 'json'], JsonSchemaValue]] = None, format: Optional[Union[Literal['', 'json'], JsonSchemaValue]] = None,
options: Optional[Union[Mapping[str, Any], Options]] = None, options: Optional[Union[Mapping[str, Any], Options]] = None,
keep_alive: Optional[Union[float, str]] = None, keep_alive: Optional[Union[float, str]] = None,
@@ -283,6 +284,7 @@ class Client(BaseClient):
*, *,
tools: Optional[Sequence[Union[Mapping[str, Any], Tool, Callable]]] = None, tools: Optional[Sequence[Union[Mapping[str, Any], Tool, Callable]]] = None,
stream: Literal[True] = True, stream: Literal[True] = True,
think: Optional[bool] = None,
format: Optional[Union[Literal['', 'json'], JsonSchemaValue]] = None, format: Optional[Union[Literal['', 'json'], JsonSchemaValue]] = None,
options: Optional[Union[Mapping[str, Any], Options]] = None, options: Optional[Union[Mapping[str, Any], Options]] = None,
keep_alive: Optional[Union[float, str]] = None, keep_alive: Optional[Union[float, str]] = None,
@@ -295,6 +297,7 @@ class Client(BaseClient):
*, *,
tools: Optional[Sequence[Union[Mapping[str, Any], Tool, Callable]]] = None, tools: Optional[Sequence[Union[Mapping[str, Any], Tool, Callable]]] = None,
stream: bool = False, stream: bool = False,
think: Optional[bool] = None,
format: Optional[Union[Literal['', 'json'], JsonSchemaValue]] = None, format: Optional[Union[Literal['', 'json'], JsonSchemaValue]] = None,
options: Optional[Union[Mapping[str, Any], Options]] = None, options: Optional[Union[Mapping[str, Any], Options]] = None,
keep_alive: Optional[Union[float, str]] = None, keep_alive: Optional[Union[float, str]] = None,
@@ -341,6 +344,7 @@ class Client(BaseClient):
messages=list(_copy_messages(messages)), messages=list(_copy_messages(messages)),
tools=list(_copy_tools(tools)), tools=list(_copy_tools(tools)),
stream=stream, stream=stream,
think=think,
format=format, format=format,
options=options, options=options,
keep_alive=keep_alive, keep_alive=keep_alive,
@@ -694,6 +698,7 @@ class AsyncClient(BaseClient):
template: str = '', template: str = '',
context: Optional[Sequence[int]] = None, context: Optional[Sequence[int]] = None,
stream: Literal[False] = False, stream: Literal[False] = False,
think: Optional[bool] = None,
raw: bool = False, raw: bool = False,
format: Optional[Union[Literal['', 'json'], JsonSchemaValue]] = None, format: Optional[Union[Literal['', 'json'], JsonSchemaValue]] = None,
images: Optional[Sequence[Union[str, bytes, Image]]] = None, images: Optional[Sequence[Union[str, bytes, Image]]] = None,
@@ -712,6 +717,7 @@ class AsyncClient(BaseClient):
template: str = '', template: str = '',
context: Optional[Sequence[int]] = None, context: Optional[Sequence[int]] = None,
stream: Literal[True] = True, stream: Literal[True] = True,
think: Optional[bool] = None,
raw: bool = False, raw: bool = False,
format: Optional[Union[Literal['', 'json'], JsonSchemaValue]] = None, format: Optional[Union[Literal['', 'json'], JsonSchemaValue]] = None,
images: Optional[Sequence[Union[str, bytes, Image]]] = None, images: Optional[Sequence[Union[str, bytes, Image]]] = None,
@@ -729,6 +735,7 @@ class AsyncClient(BaseClient):
template: Optional[str] = None, template: Optional[str] = None,
context: Optional[Sequence[int]] = None, context: Optional[Sequence[int]] = None,
stream: bool = False, stream: bool = False,
think: Optional[bool] = None,
raw: Optional[bool] = None, raw: Optional[bool] = None,
format: Optional[Union[Literal['', 'json'], JsonSchemaValue]] = None, format: Optional[Union[Literal['', 'json'], JsonSchemaValue]] = None,
images: Optional[Sequence[Union[str, bytes, Image]]] = None, images: Optional[Sequence[Union[str, bytes, Image]]] = None,
@@ -756,6 +763,7 @@ class AsyncClient(BaseClient):
template=template, template=template,
context=context, context=context,
stream=stream, stream=stream,
think=think,
raw=raw, raw=raw,
format=format, format=format,
images=list(_copy_images(images)) if images else None, images=list(_copy_images(images)) if images else None,
@@ -773,6 +781,7 @@ class AsyncClient(BaseClient):
*, *,
tools: Optional[Sequence[Union[Mapping[str, Any], Tool, Callable]]] = None, tools: Optional[Sequence[Union[Mapping[str, Any], Tool, Callable]]] = None,
stream: Literal[False] = False, stream: Literal[False] = False,
think: Optional[bool] = None,
format: Optional[Union[Literal['', 'json'], JsonSchemaValue]] = None, format: Optional[Union[Literal['', 'json'], JsonSchemaValue]] = None,
options: Optional[Union[Mapping[str, Any], Options]] = None, options: Optional[Union[Mapping[str, Any], Options]] = None,
keep_alive: Optional[Union[float, str]] = None, keep_alive: Optional[Union[float, str]] = None,
@@ -786,6 +795,7 @@ class AsyncClient(BaseClient):
*, *,
tools: Optional[Sequence[Union[Mapping[str, Any], Tool, Callable]]] = None, tools: Optional[Sequence[Union[Mapping[str, Any], Tool, Callable]]] = None,
stream: Literal[True] = True, stream: Literal[True] = True,
think: Optional[bool] = None,
format: Optional[Union[Literal['', 'json'], JsonSchemaValue]] = None, format: Optional[Union[Literal['', 'json'], JsonSchemaValue]] = None,
options: Optional[Union[Mapping[str, Any], Options]] = None, options: Optional[Union[Mapping[str, Any], Options]] = None,
keep_alive: Optional[Union[float, str]] = None, keep_alive: Optional[Union[float, str]] = None,
@@ -798,6 +808,7 @@ class AsyncClient(BaseClient):
*, *,
tools: Optional[Sequence[Union[Mapping[str, Any], Tool, Callable]]] = None, tools: Optional[Sequence[Union[Mapping[str, Any], Tool, Callable]]] = None,
stream: bool = False, stream: bool = False,
think: Optional[bool] = None,
format: Optional[Union[Literal['', 'json'], JsonSchemaValue]] = None, format: Optional[Union[Literal['', 'json'], JsonSchemaValue]] = None,
options: Optional[Union[Mapping[str, Any], Options]] = None, options: Optional[Union[Mapping[str, Any], Options]] = None,
keep_alive: Optional[Union[float, str]] = None, keep_alive: Optional[Union[float, str]] = None,
@@ -845,6 +856,7 @@ class AsyncClient(BaseClient):
messages=list(_copy_messages(messages)), messages=list(_copy_messages(messages)),
tools=list(_copy_tools(tools)), tools=list(_copy_tools(tools)),
stream=stream, stream=stream,
think=think,
format=format, format=format,
options=options, options=options,
keep_alive=keep_alive, keep_alive=keep_alive,

View File

@@ -207,6 +207,9 @@ class GenerateRequest(BaseGenerateRequest):
images: Optional[Sequence[Image]] = None images: Optional[Sequence[Image]] = None
'Image data for multimodal models.' 'Image data for multimodal models.'
think: Optional[bool] = None
'Enable thinking mode (for thinking models).'
class BaseGenerateResponse(SubscriptableBaseModel): class BaseGenerateResponse(SubscriptableBaseModel):
model: Optional[str] = None model: Optional[str] = None
@@ -248,6 +251,9 @@ class GenerateResponse(BaseGenerateResponse):
response: str response: str
'Response content. When streaming, this contains a fragment of the response.' 'Response content. When streaming, this contains a fragment of the response.'
thinking: Optional[str] = None
'Thinking content. Only present when thinking is enabled.'
context: Optional[Sequence[int]] = None context: Optional[Sequence[int]] = None
'Tokenized history up to the point of the response.' 'Tokenized history up to the point of the response.'
@@ -263,6 +269,9 @@ class Message(SubscriptableBaseModel):
content: Optional[str] = None content: Optional[str] = None
'Content of the message. Response messages contains message fragments when streaming.' 'Content of the message. Response messages contains message fragments when streaming.'
thinking: Optional[str] = None
'Thinking content. Only present when thinking is enabled.'
images: Optional[Sequence[Image]] = None images: Optional[Sequence[Image]] = None
""" """
Optional list of image data for multimodal models. Optional list of image data for multimodal models.
@@ -345,6 +354,9 @@ class ChatRequest(BaseGenerateRequest):
tools: Optional[Sequence[Tool]] = None tools: Optional[Sequence[Tool]] = None
'Tools to use for the chat.' 'Tools to use for the chat.'
think: Optional[bool] = None
'Enable thinking mode (for thinking models).'
class ChatResponse(BaseGenerateResponse): class ChatResponse(BaseGenerateResponse):
""" """