From dda1a7678e15332e5a8c8b50458ab821ab9faa2e Mon Sep 17 00:00:00 2001 From: Spatika Ganesh Date: Fri, 10 Jan 2025 18:40:11 -0800 Subject: [PATCH 1/3] Update sdk CacheUsage and set default to auto --- workflowai/core/client/_types.py | 2 +- workflowai/core/domain/cache_usage.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/workflowai/core/client/_types.py b/workflowai/core/client/_types.py index 29322b5..c8701fc 100644 --- a/workflowai/core/client/_types.py +++ b/workflowai/core/client/_types.py @@ -176,7 +176,7 @@ async def run( defined in the task is used. Defaults to None. stream (bool, optional): whether to stream the output. If True, the function returns an async iterator of partial output objects. Defaults to False. - use_cache (CacheUsage, optional): how to use the cache. Defaults to "when_available". + use_cache (CacheUsage, optional): how to use the cache. Defaults to "auto". labels (Optional[set[str]], optional): a set of labels to attach to the run. Labels are indexed and searchable. Defaults to None. metadata (Optional[dict[str, Any]], optional): a dictionary of metadata to attach to the run. 
diff --git a/workflowai/core/domain/cache_usage.py b/workflowai/core/domain/cache_usage.py index 3542837..01f4a25 100644 --- a/workflowai/core/domain/cache_usage.py +++ b/workflowai/core/domain/cache_usage.py @@ -1,3 +1,3 @@ from typing import Literal -CacheUsage = Literal["only", "never", "when_available"] +CacheUsage = Literal["always", "never", "auto"] From 2dafc2b905a11c14f50239b39e71b2d37b1081ff Mon Sep 17 00:00:00 2001 From: Spatika Ganesh Date: Sat, 11 Jan 2025 10:52:06 -0800 Subject: [PATCH 2/3] Expose cache options in the sdk --- workflowai/core/client/_types.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/workflowai/core/client/_types.py b/workflowai/core/client/_types.py index c8701fc..0d6b7b5 100644 --- a/workflowai/core/client/_types.py +++ b/workflowai/core/client/_types.py @@ -177,6 +177,9 @@ async def run( stream (bool, optional): whether to stream the output. If True, the function returns an async iterator of partial output objects. Defaults to False. use_cache (CacheUsage, optional): how to use the cache. Defaults to "auto". + "auto" will use the cache if available, and the temperature is 0. + "always" will always use the cache if available. + "never" will never use the cache. labels (Optional[set[str]], optional): a set of labels to attach to the run. Labels are indexed and searchable. Defaults to None. metadata (Optional[dict[str, Any]], optional): a dictionary of metadata to attach to the run. 
From b2a6dbd02d47d9ab33f9c63e0b4a05a86efa3e92 Mon Sep 17 00:00:00 2001 From: Spatika Ganesh Date: Mon, 13 Jan 2025 10:58:02 -0800 Subject: [PATCH 3/3] Update comment to keep it consistent --- workflowai/core/client/_types.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/workflowai/core/client/_types.py b/workflowai/core/client/_types.py index 0d6b7b5..43db6d7 100644 --- a/workflowai/core/client/_types.py +++ b/workflowai/core/client/_types.py @@ -177,9 +177,11 @@ async def run( stream (bool, optional): whether to stream the output. If True, the function returns an async iterator of partial output objects. Defaults to False. use_cache (CacheUsage, optional): how to use the cache. Defaults to "auto". - "auto" will use the cache if available, and the temperature is 0. - "always" will always use the cache if available. - "never" will never use the cache. + "auto" (default): if a previous run exists with the same version and input, and if + the temperature is 0, the cached output is returned + "always": the cached output is returned when available, regardless + of the temperature value + "never": the cache is never used labels (Optional[set[str]], optional): a set of labels to attach to the run. Labels are indexed and searchable. Defaults to None. metadata (Optional[dict[str, Any]], optional): a dictionary of metadata to attach to the run.