Skip to content

types: enable passing messages with arbitrary role #462

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 5 commits into from
Mar 20, 2025
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion ollama/_types.py
Original file line number Diff line number Diff line change
Expand Up @@ -256,7 +256,7 @@ class Message(SubscriptableBaseModel):
Chat message.
"""

role: Literal['user', 'assistant', 'system', 'tool']
role: str
"Assumed role of the message. Response messages have the role 'assistant' or 'tool'."

content: Optional[str] = None
Expand Down
33 changes: 32 additions & 1 deletion tests/test_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,14 +4,16 @@
import re
import tempfile
from pathlib import Path
from typing import Any

import pytest
from httpx import Response as httpxResponse
from pydantic import BaseModel, ValidationError
from pytest_httpserver import HTTPServer, URIPattern
from werkzeug.wrappers import Request, Response

from ollama._client import CONNECTION_ERROR_MESSAGE, AsyncClient, Client, _copy_tools
from ollama._types import Image
from ollama._types import Image, Message

PNG_BASE64 = 'iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAIAAACQd1PeAAAADElEQVR4nGNgYGAAAAAEAAH2FzhVAAAAAElFTkSuQmCC'
PNG_BYTES = base64.b64decode(PNG_BASE64)
Expand Down Expand Up @@ -1181,3 +1183,32 @@ async def test_async_client_connection_error():
with pytest.raises(ConnectionError) as exc_info:
await client.show('model')
assert str(exc_info.value) == 'Failed to connect to Ollama. Please check that Ollama is downloaded, running and accessible. https://ollama.com/download'


def test_arbitrary_roles_accepted_in_message():
    """Message.role is a plain str, so roles outside the built-in set must validate.

    The original test only constructed the model and discarded it; it would pass
    even if the role were silently coerced. Assert the values round-trip.
    """
    message = Message(role='somerandomrole', content="I'm ok with you adding any role message now!")

    # The arbitrary role must be stored verbatim, not rejected or rewritten.
    assert message.role == 'somerandomrole'
    assert message.content == "I'm ok with you adding any role message now!"


def _mock_request(*args: Any, **kwargs: Any) -> httpxResponse:
    """Stub for ``Client._request`` that short-circuits the HTTP round trip.

    Fixes two defects in the original:
    - the return annotation named werkzeug's ``Response`` while the function
      actually returns an ``httpx.Response`` (imported here as ``httpxResponse``);
    - the canned body used single quotes, which is not valid JSON.
    """
    return httpxResponse(status_code=200, content='{"response": "Hello world!"}')


def test_arbitrary_roles_accepted_in_message_request(monkeypatch: pytest.MonkeyPatch):
    """chat() must forward messages whose role is not one of the built-in roles."""
    # Replace the transport layer so no real Ollama server is needed.
    monkeypatch.setattr(Client, '_request', _mock_request)

    history = [
        {'role': 'somerandomrole', 'content': "I'm ok with you adding any role message now!"},
        {'role': 'user', 'content': 'Hello world!'},
    ]

    # Would raise a validation error if Message still restricted `role`.
    Client().chat(model='llama3.1', messages=history)


async def _mock_request_async(*args: Any, **kwargs: Any) -> httpxResponse:
    """Async stub for ``AsyncClient._request`` that short-circuits the HTTP round trip.

    Fixes two defects in the original:
    - the return annotation named werkzeug's ``Response`` while the function
      actually returns an ``httpx.Response`` (imported here as ``httpxResponse``);
    - the canned body used single quotes, which is not valid JSON.
    """
    return httpxResponse(status_code=200, content='{"response": "Hello world!"}')


@pytest.mark.asyncio
async def test_arbitrary_roles_accepted_in_message_request_async(monkeypatch: pytest.MonkeyPatch):
    """Async chat() must forward messages whose role is not one of the built-in roles."""
    # Replace the transport layer so no real Ollama server is needed.
    monkeypatch.setattr(AsyncClient, '_request', _mock_request_async)

    history = [
        {'role': 'somerandomrole', 'content': "I'm ok with you adding any role message now!"},
        {'role': 'user', 'content': 'Hello world!'},
    ]

    # Would raise a validation error if Message still restricted `role`.
    await AsyncClient().chat(model='llama3.1', messages=history)