diff --git a/src/art/preprocessing/tokenize.py b/src/art/preprocessing/tokenize.py
index a5aa54146..9a0bff38f 100644
--- a/src/art/preprocessing/tokenize.py
+++ b/src/art/preprocessing/tokenize.py
@@ -212,7 +212,7 @@ def tokenize_trajectory(
                 int(token_logprob.token.split(":")[1])
                 for token_logprob in token_logprobs
             )
-        except IndexError:
+        except (IndexError, ValueError):
             token_ids[start:end] = [
                 token_id if token_id is not None else tokenizer.eos_token_id
                 for token_id in tokenizer.convert_tokens_to_ids(
diff --git a/src/art/serverless/backend.py b/src/art/serverless/backend.py
index 560d16593..4d6a5d3be 100644
--- a/src/art/serverless/backend.py
+++ b/src/art/serverless/backend.py
@@ -57,7 +57,7 @@ async def register(
 
     def _model_inference_name(self, model: "TrainableModel") -> str:
         assert model.entity is not None, "Model entity is required"
-        return f"{model.entity}/{model.project}/{model.name}"
+        return f"wandb-artifact:///{model.entity}/{model.project}/{model.name}"
 
     async def _get_step(self, model: "Model") -> int:
         if model.trainable: