def exists(self, model: str) -> bool:
  """
  Report whether *model* is present in the local model store.

  Thin wrapper over :meth:`show` that converts the not-found
  :class:`ResponseError` into a plain boolean, so callers can branch
  with a simple ``if`` instead of exception-based control flow.

  Args:
    model: Name of the model to look up (e.g. ``"llama3.1:8b"``).

  Returns:
    ``True`` if the model exists locally, ``False`` otherwise.

  Example::

    if not ollama.exists("llama3.1:8b"):
      ollama.pull("llama3.1:8b")
  """
  try:
    self.show(model)
  except ResponseError:
    return False
  return True


async def exists(self, model: str) -> bool:
  """
  Report whether *model* is present in the local model store.

  Async counterpart of :meth:`Client.exists`.

  Args:
    model: Name of the model to look up (e.g. ``"llama3.1:8b"``).

  Returns:
    ``True`` if the model exists locally, ``False`` otherwise.

  Example::

    if not await client.exists("llama3.1:8b"):
      await client.pull("llama3.1:8b")
  """
  try:
    await self.show(model)
  except ResponseError:
    return False
  return True