Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 6 additions & 0 deletions tests/async/test_async_tokenize.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,9 @@ async def test_tokenize(async_client):
assert tokens.meta["api_version"]["version"]


# TODO(manoj): Fix the test expectation due to the base model change (MS-913)
@pytest.mark.asyncio
@pytest.mark.skip
async def test_model_param_tokenization(async_client):
# Use tuples to be hashable (for set equality)
medium_res = tuple((await async_client.tokenize("Hello world!", model="medium")).tokens)
Expand All @@ -43,7 +45,9 @@ async def test_invalid_text(async_client):
await async_client.tokenize("")


# TODO(manoj): Fix the test expectation due to the base model change (MS-913)
@pytest.mark.asyncio
@pytest.mark.skip
async def test_detokenize(async_client):
detokenized = await async_client.detokenize([10104, 12221, 974, 514, 34], model="base")
assert detokenized == Detokenization("detokenize me!")
Expand All @@ -58,7 +62,9 @@ async def test_detokenize(async_client):
assert detokenized == Detokenization("")


# TODO(manoj): Fix the test expectation due to the base model change (MS-913)
@pytest.mark.asyncio
@pytest.mark.skip
async def test_model_param_detokenization(async_client):
medium_detokenized = (await async_client.detokenize([10104, 12221, 974, 514, 34], model="medium")).text
medium_detokenized_batch = [
Expand Down
7 changes: 7 additions & 0 deletions tests/sync/test_detokenize.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
import unittest

import pytest
from utils import get_api_key

import cohere
Expand All @@ -8,6 +9,8 @@


class TestDetokenize(unittest.TestCase):
# TODO(manoj): Fix the test expectation due to the base model change (MS-913)
@pytest.mark.skip
def test_detokenize_success(self):
resp = co.detokenize([10104, 12221, 974, 514, 34], model="base")
text = resp.text
Expand All @@ -16,6 +19,8 @@ def test_detokenize_success(self):
self.assertTrue(resp.meta["api_version"])
self.assertTrue(resp.meta["api_version"]["version"])

# TODO(manoj): Fix the test expectation due to the base model change (MS-913)
@pytest.mark.skip
def test_detokenize_batched(self):
_batch_size = 3
texts = co.batch_detokenize([[10104, 12221, 974, 514, 34]] * _batch_size, model="base")
Expand All @@ -24,6 +29,8 @@ def test_detokenize_batched(self):
results.append(str(text))
self.assertEqual(results, ["detokenize me!"] * _batch_size)

# TODO(manoj): Fix the test expectation due to the base model change (MS-913)
@pytest.mark.skip
def test_empty_tokens(self):
text = co.detokenize([]).text
self.assertEqual(text, "")
2 changes: 2 additions & 0 deletions tests/sync/test_generate.py
Original file line number Diff line number Diff line change
Expand Up @@ -49,6 +49,8 @@ def test_return_likelihoods_generation(self):
self.assertIsNotNone(prediction.generations[0].likelihood)
self.assertEqual(prediction.return_likelihoods, "GENERATION")

# TODO(manoj): Fix the test expectation due to the base model change (MS-913)
@pytest.mark.skip
def test_return_likelihoods_all(self):
prediction = co.generate(model="medium", prompt="hi", max_tokens=1, return_likelihoods="ALL")
self.assertEqual(len(prediction.generations[0].token_likelihoods), 2)
Expand Down
3 changes: 3 additions & 0 deletions tests/sync/test_tokenize.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
import unittest

import pytest
from utils import get_api_key

import cohere
Expand All @@ -8,6 +9,8 @@


class TestTokenize(unittest.TestCase):
# TODO(manoj): Fix the test expectation due to the base model change (MS-913)
@pytest.mark.skip
def test_model_param_tokenization(self):
medium_res = tuple(co.tokenize("Hello world!", model="medium").tokens)
medium_res_batch = [tuple(x.tokens) for x in co.batch_tokenize(["Hello world!"] * 3, model="medium")]
Expand Down