Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 5 additions & 0 deletions .changeset/flat-plums-bake.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
---
'ai': minor
---

feat (core): Add finishReason field to NoObjectGeneratedError
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@ It can arise due to the following reasons:
- `text`: The text that was generated by the model. This can be the raw text or the tool call text, depending on the object generation mode.
- `response`: Metadata about the language model response, including response id, timestamp, and model.
- `usage`: Request token usage.
- `finishReason`: The reason the model finished generating the response. For example, `'length'` means the model hit its maximum token limit, which can result in a JSON parsing error because the output was truncated.
- `cause`: The cause of the error (e.g. a JSON parsing error). You can use this for more detailed error handling.

## Checking for this Error
Expand All @@ -36,6 +37,7 @@ try {
console.log('Text:', error.text);
console.log('Response:', error.response);
console.log('Usage:', error.usage);
console.log('Finish Reason:', error.finishReason);
}
}
```
1 change: 1 addition & 0 deletions packages/ai/core/generate-object/generate-object.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -801,6 +801,7 @@ describe('output = "object"', () => {
promptTokens: 10,
totalTokens: 30,
},
finishReason: 'stop',
});
}

Expand Down
4 changes: 4 additions & 0 deletions packages/ai/core/generate-object/generate-object.ts
Original file line number Diff line number Diff line change
Expand Up @@ -556,6 +556,7 @@ export async function generateObject<SCHEMA, RESULT>({
'No object generated: the model did not return a response.',
response: responseData,
usage: calculateLanguageModelUsage(result.usage),
finishReason: result.finishReason,
});
}

Expand Down Expand Up @@ -681,6 +682,7 @@ export async function generateObject<SCHEMA, RESULT>({
message: 'No object generated: the tool was not called.',
response: responseData,
usage: calculateLanguageModelUsage(result.usage),
finishReason: result.finishReason,
});
}

Expand Down Expand Up @@ -751,6 +753,7 @@ export async function generateObject<SCHEMA, RESULT>({
text: result,
response,
usage: calculateLanguageModelUsage(usage),
finishReason: finishReason,
});
}

Expand All @@ -770,6 +773,7 @@ export async function generateObject<SCHEMA, RESULT>({
text: result,
response,
usage: calculateLanguageModelUsage(usage),
finishReason: finishReason,
});
}

Expand Down
8 changes: 7 additions & 1 deletion packages/ai/core/generate-object/output-strategy.ts
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,11 @@ import {
createAsyncIterableStream,
} from '../util/async-iterable-stream';
import { ObjectStreamPart } from './stream-object-result';
import { LanguageModelResponseMetadata, LanguageModelUsage } from '../types';
import {
FinishReason,
LanguageModelResponseMetadata,
LanguageModelUsage,
} from '../types';

export interface OutputStrategy<PARTIAL, RESULT, ELEMENT_STREAM> {
readonly type: 'object' | 'array' | 'enum' | 'no-schema';
Expand Down Expand Up @@ -64,6 +68,7 @@ const noSchemaOutputStrategy: OutputStrategy<JSONValue, JSONValue, never> = {
text: string;
response: LanguageModelResponseMetadata;
usage: LanguageModelUsage;
finishReason: FinishReason;
},
): ValidationResult<JSONValue> {
return value === undefined
Expand All @@ -74,6 +79,7 @@ const noSchemaOutputStrategy: OutputStrategy<JSONValue, JSONValue, never> = {
text: context.text,
response: context.response,
usage: context.usage,
finishReason: context.finishReason,
}),
}
: { success: true, value };
Expand Down
6 changes: 6 additions & 0 deletions packages/ai/core/generate-object/stream-object.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -1311,6 +1311,7 @@ describe('streamObject', () => {
modelId: 'model-1',
},
usage: { completionTokens: 10, promptTokens: 3, totalTokens: 13 },
finishReason: 'stop',
});
}
});
Expand Down Expand Up @@ -1354,6 +1355,7 @@ describe('streamObject', () => {
modelId: 'model-1',
},
usage: { completionTokens: 10, promptTokens: 3, totalTokens: 13 },
finishReason: 'stop',
});
}
});
Expand Down Expand Up @@ -1403,6 +1405,7 @@ describe('streamObject', () => {
modelId: 'model-1',
},
usage: { completionTokens: 10, promptTokens: 3, totalTokens: 13 },
finishReason: 'stop',
});
}
});
Expand Down Expand Up @@ -1446,6 +1449,7 @@ describe('streamObject', () => {
modelId: 'model-1',
},
usage: { completionTokens: 10, promptTokens: 3, totalTokens: 13 },
finishReason: 'stop',
});
}
});
Expand Down Expand Up @@ -1488,6 +1492,7 @@ describe('streamObject', () => {
modelId: 'model-1',
},
usage: { completionTokens: 10, promptTokens: 3, totalTokens: 13 },
finishReason: 'stop',
});
}
});
Expand Down Expand Up @@ -1530,6 +1535,7 @@ describe('streamObject', () => {
modelId: 'model-1',
},
usage: { completionTokens: 10, promptTokens: 3, totalTokens: 13 },
finishReason: 'stop',
});
}
});
Expand Down
1 change: 1 addition & 0 deletions packages/ai/core/generate-object/stream-object.ts
Original file line number Diff line number Diff line change
Expand Up @@ -898,6 +898,7 @@ class DefaultStreamObjectResult<PARTIAL, RESULT, ELEMENT_STREAM>
text: accumulatedText,
response,
usage,
finishReason: finishReason,
});
self.objectPromise.reject(error);
}
Expand Down
6 changes: 5 additions & 1 deletion packages/ai/core/generate-text/generate-text.ts
Original file line number Diff line number Diff line change
Expand Up @@ -576,7 +576,11 @@ A function that attempts to repair a tool call that failed to parse.

return output.parseOutput(
{ text },
{ response: currentModelResponse.response, usage },
{
response: currentModelResponse.response,
usage,
finishReason: currentModelResponse.finishReason,
},
);
},
toolCalls: currentToolCalls,
Expand Down
4 changes: 4 additions & 0 deletions packages/ai/core/generate-text/output.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@ import { fail } from 'assert';
import { z } from 'zod';
import { verifyNoObjectGeneratedError } from '../../errors/no-object-generated-error';
import { object } from './output';
import { FinishReason } from '../types';

const context = {
response: {
Expand All @@ -14,6 +15,7 @@ const context = {
completionTokens: 2,
totalTokens: 3,
},
finishReason: 'length' as FinishReason,
};

describe('Output.object', () => {
Expand All @@ -37,6 +39,7 @@ describe('Output.object', () => {
message: 'No object generated: could not parse the response.',
response: context.response,
usage: context.usage,
finishReason: context.finishReason,
});
}
});
Expand All @@ -50,6 +53,7 @@ describe('Output.object', () => {
message: 'No object generated: response did not match schema.',
response: context.response,
usage: context.usage,
finishReason: context.finishReason,
});
}
});
Expand Down
5 changes: 5 additions & 0 deletions packages/ai/core/generate-text/output.ts
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@ import { z } from 'zod';
import { NoObjectGeneratedError } from '../../errors';
import { injectJsonInstruction } from '../generate-object/inject-json-instruction';
import {
FinishReason,
LanguageModel,
LanguageModelV1CallOptions,
} from '../types/language-model';
Expand All @@ -33,6 +34,7 @@ export interface Output<OUTPUT, PARTIAL> {
context: {
response: LanguageModelResponseMetadata;
usage: LanguageModelUsage;
finishReason: FinishReason;
},
): OUTPUT;
}
Expand Down Expand Up @@ -108,6 +110,7 @@ export const object = <OUTPUT>({
context: {
response: LanguageModelResponseMetadata;
usage: LanguageModelUsage;
finishReason: FinishReason;
},
) {
const parseResult = safeParseJSON({ text });
Expand All @@ -119,6 +122,7 @@ export const object = <OUTPUT>({
text,
response: context.response,
usage: context.usage,
finishReason: context.finishReason,
});
}

Expand All @@ -134,6 +138,7 @@ export const object = <OUTPUT>({
text,
response: context.response,
usage: context.usage,
finishReason: context.finishReason,
});
}

Expand Down
13 changes: 13 additions & 0 deletions packages/ai/errors/no-object-generated-error.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
import { AISDKError } from '@ai-sdk/provider';
import { LanguageModelResponseMetadata } from '../core/types/language-model-response-metadata';
import { LanguageModelUsage } from '../core/types/usage';
import { FinishReason } from '../core';

const name = 'AI_NoObjectGeneratedError';
const marker = `vercel.ai.error.${name}`;
Expand Down Expand Up @@ -35,24 +36,32 @@ export class NoObjectGeneratedError extends AISDKError {
*/
readonly usage: LanguageModelUsage | undefined;

/**
Reason why the model finished generating a response.
*/
readonly finishReason: FinishReason | undefined;

constructor({
message = 'No object generated.',
cause,
text,
response,
usage,
finishReason,
}: {
message?: string;
cause?: Error;
text?: string;
response: LanguageModelResponseMetadata;
usage: LanguageModelUsage;
finishReason: FinishReason;
}) {
super({ name, message, cause });

this.text = text;
this.response = response;
this.usage = usage;
this.finishReason = finishReason;
}

static isInstance(error: unknown): error is NoObjectGeneratedError {
Expand All @@ -66,11 +75,15 @@ export function verifyNoObjectGeneratedError(
message: string;
response: LanguageModelResponseMetadata;
usage: LanguageModelUsage;
finishReason: FinishReason;
},
) {
expect(NoObjectGeneratedError.isInstance(error)).toBeTruthy();
const noObjectGeneratedError = error as NoObjectGeneratedError;
expect(noObjectGeneratedError.message).toStrictEqual(expected.message);
expect(noObjectGeneratedError.response).toStrictEqual(expected.response);
expect(noObjectGeneratedError.usage).toStrictEqual(expected.usage);
expect(noObjectGeneratedError.finishReason).toStrictEqual(
expected.finishReason,
);
}
Loading