4 changes: 4 additions & 0 deletions CHANGELOG.md
@@ -1,5 +1,9 @@
# Changelog

## 4.25
- [#303](https://github.com/cohere-ai/cohere-python/pull/303)
- Allow uploading of evaluation data

## 4.24
- [#296](https://github.com/cohere-ai/cohere-python/pull/296)
- Allow passing of delimiter for csv
4 changes: 4 additions & 0 deletions cohere/client.py
@@ -751,6 +751,7 @@ def create_dataset(
name: str,
data: BinaryIO,
dataset_type: str,
eval_data: Optional[BinaryIO] = None,
keep_fields: Union[str, List[str]] = None,
optional_fields: Union[str, List[str]] = None,
parse_info: Optional[ParseInfo] = None,
@@ -761,13 +762,16 @@ def create_dataset(
name (str): The name of your dataset
data (BinaryIO): The data to be uploaded and validated
dataset_type (str): The type of dataset you want to upload
eval_data (BinaryIO): (optional) Evaluation data to upload, if the dataset type supports it
keep_fields (Union[str, List[str]]): (optional) A list of fields you want to keep in the dataset that are required
optional_fields (Union[str, List[str]]): (optional) A list of fields you want to keep in the dataset that are optional
parse_info (ParseInfo): (optional) Information on how to parse the raw data
Returns:
Dataset: Dataset object.
"""
files = {"file": data}
if eval_data:
files["eval_file"] = eval_data
params = {
"name": name,
"type": dataset_type,
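For context on how the new parameter is meant to be used, here is a minimal sketch of passing evaluation data alongside the main dataset with the synchronous client. The API key, file names, and the dataset_type value are illustrative assumptions, not taken from this PR; only dataset types that support evaluation data make use of the extra file.

```python
import cohere

# Minimal sketch (assumed values): upload a dataset together with evaluation data.
co = cohere.Client("YOUR_API_KEY")  # placeholder API key

with open("train.jsonl", "rb") as data, open("eval.jsonl", "rb") as eval_data:
    dataset = co.create_dataset(
        name="my-dataset",            # illustrative name
        data=data,                    # main data file, sent as "file"
        dataset_type="embed-input",   # assumed type; use one your workflow supports
        eval_data=eval_data,          # sent as "eval_file" when provided
    )
```

When eval_data is omitted, the request body is unchanged from previous releases, so existing callers are unaffected.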
4 changes: 4 additions & 0 deletions cohere/client_async.py
@@ -489,6 +489,7 @@ async def create_dataset(
name: str,
data: BinaryIO,
dataset_type: str,
eval_data: Optional[BinaryIO] = None,
keep_fields: Union[str, List[str]] = None,
optional_fields: Union[str, List[str]] = None,
parse_info: Optional[ParseInfo] = None,
@@ -499,13 +500,16 @@ async def create_dataset(
name (str): The name of your dataset
data (BinaryIO): The data to be uploaded and validated
dataset_type (str): The type of dataset you want to upload
eval_data (BinaryIO): (optional) Evaluation data to upload, if the dataset type supports it
keep_fields (Union[str, List[str]]): (optional) A list of fields you want to keep in the dataset that are required
optional_fields (Union[str, List[str]]): (optional) A list of fields you want to keep in the dataset that are optional
parse_info (ParseInfo): (optional) Information on how to parse the raw data
Returns:
AsyncDataset: Dataset object.
"""
files = {"file": data}
if eval_data:
files["eval_file"] = eval_data
params = {
"name": name,
"type": dataset_type,
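The asynchronous client gains the same parameter; a corresponding sketch under the same assumptions (the close() call reflects typical AsyncClient teardown and is not part of this diff):

```python
import asyncio
import cohere

async def main():
    co = cohere.AsyncClient("YOUR_API_KEY")  # placeholder API key
    with open("train.jsonl", "rb") as data, open("eval.jsonl", "rb") as eval_data:
        dataset = await co.create_dataset(
            name="my-dataset",
            data=data,
            dataset_type="embed-input",  # assumed type
            eval_data=eval_data,         # attached as "eval_file" when provided
        )
    await co.close()  # assumed teardown: release the underlying HTTP session

asyncio.run(main())
```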
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -1,6 +1,6 @@
[tool.poetry]
name = "cohere"
version = "4.24"
version = "4.25"
description = ""
authors = ["Cohere"]
readme = "README.md"