UnhandledRejection: TypeError: cookies is not iterable
Closed this issue · 11 comments
Description
I'm new to the Vercel AI SDK, and I'm trying to implement some AI features in an existing NextJS app (app router, v14.0.4, using npm). I tried implementing all of the code examples from the documentation's examples page, and every time it keeps giving me this error:
unhandledRejection: TypeError: cookies is not iterable
at get [headers map sorted] (node:internal/deps/undici/undici:1844:34)
at node:internal/deps/undici/undici:1865:43
at Headers Iterator.next (node:internal/deps/undici/undici:1108:26)
at Object.sequence<sequence<ByteString>> (node:internal/deps/undici/undici:1395:42)
at webidl.converters.HeadersInit (node:internal/deps/undici/undici:1902:69)
at new Headers (node:internal/deps/undici/undici:1727:36)
at eval (webpack-internal:///(rsc)/./node_modules/next/dist/server/lib/patch-fetch.js:417:38)
at process.processTicksAndRejections (node:internal/process/task_queues:95:5)
I literally just copy-pasted the code from the examples docs into my existing NextJS app. None of the examples (streamText, generateObject, etc.) worked; each resulted in this same error. I even checked whether it was because of my location — I live in Hong Kong, where OpenAI is not accessible without a VPN — but the same error keeps appearing both with and without a VPN.
Is there any fix to this?
Here's a list of all the existing dependencies I have. Maybe the error stems from conflicts.
"dependencies": {
"@ai-sdk/openai": "^0.0.13",
"@hookform/resolvers": "^3.3.3",
"@million/lint": "^1.0.0-rc.84",
"@numairawan/video-duration": "^1.0.0",
"@radix-ui/react-accordion": "^1.1.2",
"@radix-ui/react-alert-dialog": "^1.0.5",
"@radix-ui/react-aspect-ratio": "^1.0.3",
"@radix-ui/react-avatar": "^1.0.4",
"@radix-ui/react-checkbox": "^1.0.4",
"@radix-ui/react-context-menu": "^2.1.5",
"@radix-ui/react-dialog": "^1.0.5",
"@radix-ui/react-dropdown-menu": "^2.0.6",
"@radix-ui/react-hover-card": "^1.0.7",
"@radix-ui/react-icons": "^1.3.0",
"@radix-ui/react-label": "^2.0.2",
"@radix-ui/react-popover": "^1.0.7",
"@radix-ui/react-progress": "^1.0.3",
"@radix-ui/react-scroll-area": "^1.0.5",
"@radix-ui/react-select": "^2.0.0",
"@radix-ui/react-separator": "^1.0.3",
"@radix-ui/react-slider": "^1.1.2",
"@radix-ui/react-slot": "^1.0.2",
"@radix-ui/react-switch": "^1.0.3",
"@radix-ui/react-tabs": "^1.0.4",
"@radix-ui/react-toast": "^1.1.5",
"@remotion/bundler": "^4.0.77",
"@remotion/cli": "^4.0.77",
"@remotion/gif": "4.0.77",
"@remotion/google-fonts": "^4.0.77",
"@remotion/install-whisper-cpp": "4.0.178",
"@remotion/lambda": "^4.0.77",
"@remotion/lottie": "^4.0.77",
"@remotion/player": "^4.0.77",
"@remotion/renderer": "^4.0.77",
"@remotion/shapes": "^4.0.77",
"@supabase/auth-helpers-nextjs": "^0.8.7",
"@supabase/auth-helpers-react": "^0.4.2",
"@supabase/supabase-js": "^2.39.8",
"@tanstack/react-table": "^8.11.7",
"@vercel/analytics": "^1.3.1",
"@vercel/speed-insights": "^1.0.12",
"@vidstack/react": "^1.11.30",
"@xzdarcy/react-timeline-editor": "^0.1.9",
"ai": "^3.1.12",
"and": "^0.0.3",
"axios": "^1.6.7",
"class-variance-authority": "^0.7.0",
"clsx": "^2.1.0",
"cmdk": "^0.2.1",
"framer-motion": "^11.0.3",
"fs": "^0.0.1-security",
"geist": "^1.2.2",
"get-video-duration": "^4.1.0",
"lodash": "^4.17.21",
"lottie-web": "^5.12.2",
"lucide-react": "^0.367.0",
"next": "14.0.4",
"next-themes": "^0.2.1",
"nextra": "^2.13.4",
"nextra-theme-docs": "^2.13.4",
"pexels": "^1.4.0",
"random-word-slugs": "^0.1.7",
"raw-body": "^2.5.2",
"re-resizable": "^6.9.17",
"react": "^18",
"react-beautiful-dnd": "^13.1.1",
"react-color": "^2.19.3",
"react-colorful": "^5.6.1",
"react-confetti": "^6.1.0",
"react-dom": "^18",
"react-draggable": "^4.4.6",
"react-hook-form": "^7.49.2",
"react-icons": "^4.12.0",
"react-player": "^2.14.1",
"react-resizable-panels": "^1.0.6",
"react-wavesurfer.js": "^0.0.8",
"react-wrap-balancer": "^1.1.0",
"remotion": "^4.0.77",
"resend": "^3.1.0",
"stripe": "^14.16.0",
"tailwind-merge": "^2.2.1",
"tailwindcss-animate": "^1.0.7",
"url-file-size": "^1.0.5-1",
"wavesurfer.js": "^6.6.4",
"zod": "^3.22.4"
},
"devDependencies": {
"@types/lodash": "^4.14.202",
"@types/node": "^20",
"@types/react": "^18",
"@types/react-beautiful-dnd": "^13.1.8",
"@types/react-color": "^3.0.11",
"@types/react-dom": "^18",
"autoprefixer": "^10.0.1",
"postcss": "^8",
"tailwindcss": "^3.3.0",
"typescript": "^5"
}
Code example
app/page.tsx
'use client';
import { useState } from 'react';
import { generate } from './actions';
import { readStreamableValue } from 'ai/rsc';
// Next.js route segment config: allow streaming responses for up to 30 seconds.
export const maxDuration = 30;
/**
 * Client page that invokes the `generate` server action and incrementally
 * renders the streamed object as it arrives.
 *
 * Each iteration of `readStreamableValue` yields a progressively more
 * complete partial object; we re-serialize its `notifications` field on
 * every update so the <pre> below always shows the latest state.
 */
export default function Home() {
  const [generation, setGeneration] = useState<string>('');

  const handleAsk = async () => {
    const { object } = await generate('Messages during finals week.');

    // Consume the streamable value until the server action calls done().
    for await (const partialObject of readStreamableValue(object)) {
      if (partialObject) {
        setGeneration(
          JSON.stringify(partialObject.notifications, null, 2),
        );
      }
    }
  };

  return (
    <div>
      <button onClick={handleAsk}>Ask</button>
      <pre>{generation}</pre>
    </div>
  );
}
app/actions.tsx
'use server';
import { streamObject } from 'ai';
import { openai } from '@ai-sdk/openai';
import { createStreamableValue } from 'ai/rsc';
import { z } from 'zod';
/**
 * Server action: streams a structured "notifications" object generated by
 * the model back to the client via an RSC streamable value.
 *
 * @param input - User prompt forwarded to the model.
 * @returns `{ object }` where `object` is a streamable value the client
 *          consumes with `readStreamableValue`.
 */
export async function generate(input: string) {
  'use server';

  const stream = createStreamableValue();

  // The generation runs in a detached async task so we can return the
  // streamable value immediately. Errors MUST be caught inside this
  // floating promise: without the try/catch, any rejection from
  // streamObject (network failure, blocked API endpoint, bad response
  // headers, ...) surfaces as an unhandledRejection — exactly the crash
  // reported in this issue — and the stream is never closed.
  (async () => {
    try {
      const { partialObjectStream } = await streamObject({
        model: openai('gpt-4-turbo'),
        system: 'You generate three notifications for a messages app.',
        prompt: input,
        schema: z.object({
          notifications: z.array(
            z.object({
              name: z.string().describe('Name of a fictional person.'),
              message: z.string().describe('Do not use emojis or links.'),
              minutesAgo: z.number(),
            }),
          ),
        }),
      });

      for await (const partialObject of partialObjectStream) {
        stream.update(partialObject);
      }

      stream.done();
    } catch (error) {
      // Propagate the failure to the client instead of leaving the promise
      // rejected and the streamable value open forever.
      stream.error(error);
    }
  })();

  return { object: stream.value };
}
Additional context
I remember facing a similar problem before with @steven-tey's Novel AI.
The fix was to downgrade the cmdk package.
I tried that here again, but I still get the same error.
The AI SDK does not use cookies anywhere. Can you search for `cookies` in your codebase and check which packages might be related to this issue?
@lgrammel thanks for the reply, I've found out one of my Supabase packages uses "set-cookie-parser": "^2.6.0".
Could that be a conflicting package? If so any workarounds so the Vercel AI SDK can work well with my current codebase?
The AI SDK does not interact with cookies in any way, if you e.g. search the AI SDK codebase you will not find anything regarding cookies.
Can you try updating next.js to the latest version? (v14)
I'm already on NextJS v14.0.4
Can you try 14.2.8?
Hey Lars, I've upgraded the NextJS app to v14.2.8 and the same error still pops up
I'm also using v3.1.12 of the ai package, and v0.0.13 of the ai-sdk/openai package. I'll try upgrading these two to the latest versions
@RexanWONG I hope this works. My sense is that this issue is unrelated to the AI SDK since we do not use cookies in any way.
I tried, and it didn't work :(. I agree, I think one of the other packages might be the culprit.
I also tried to see what happens if I try to use the AI from an API route, and it also returned a similar error. The difference was instead of it being an unhandledRejection, it was a TypeError this time.
Here's the error:
TypeError: cookies is not iterable
at get [headers map sorted] (node:internal/deps/undici/undici:1844:34)
at node:internal/deps/undici/undici:1865:43
at Headers Iterator.next (node:internal/deps/undici/undici:1108:26)
at Headers.forEach (node:internal/deps/undici/undici:1873:31)
at extractResponseHeaders (webpack-internal:///(rsc)/./node_modules/@ai-sdk/provider-utils/dist/index.mjs:81:20)
at postToApi (webpack-internal:///(rsc)/./node_modules/@ai-sdk/provider-utils/dist/index.mjs:366:29)
at process.processTicksAndRejections (node:internal/process/task_queues:95:5)
at async OpenAIChatLanguageModel.doStream (webpack-internal:///(rsc)/./node_modules/@ai-sdk/openai/dist/index.mjs:398:50)
at async fn (webpack-internal:///(rsc)/./node_modules/ai/dist/index.mjs:3603:23)
at async eval (webpack-internal:///(rsc)/./node_modules/ai/dist/index.mjs:332:22)
at async _retryWithExponentialBackoff (webpack-internal:///(rsc)/./node_modules/ai/dist/index.mjs:162:12)
at async startRoundtrip (webpack-internal:///(rsc)/./node_modules/ai/dist/index.mjs:3568:13)
at async fn (webpack-internal:///(rsc)/./node_modules/ai/dist/index.mjs:3641:11)
at async eval (webpack-internal:///(rsc)/./node_modules/ai/dist/index.mjs:332:22)
at async POST (webpack-internal:///(rsc)/./app/api/chat/route.ts:14:20)
at async /Users/rexanwong/Dropbox/Mac/Documents/web_projects/videofast/node_modules/next/dist/compiled/next-server/app-route.runtime.dev.js:6:55753
at async eO.execute (/Users/rexanwong/Dropbox/Mac/Documents/web_projects/videofast/node_modules/next/dist/compiled/next-server/app-route.runtime.dev.js:6:46523)
at async eO.handle (/Users/rexanwong/Dropbox/Mac/Documents/web_projects/videofast/node_modules/next/dist/compiled/next-server/app-route.runtime.dev.js:6:57007)
at async doRender (/Users/rexanwong/Dropbox/Mac/Documents/web_projects/videofast/node_modules/next/dist/server/base-server.js:1359:42)
at async cacheEntry.responseCache.get.routeKind (/Users/rexanwong/Dropbox/Mac/Documents/web_projects/videofast/node_modules/next/dist/server/base-server.js:1581:28)
at async DevServer.renderToResponseWithComponentsImpl (/Users/rexanwong/Dropbox/Mac/Documents/web_projects/videofast/node_modules/next/dist/server/base-server.js:1489:28)
at async DevServer.renderPageComponent (/Users/rexanwong/Dropbox/Mac/Documents/web_projects/videofast/node_modules/next/dist/server/base-server.js:1913:24)
at async DevServer.renderToResponseImpl (/Users/rexanwong/Dropbox/Mac/Documents/web_projects/videofast/node_modules/next/dist/server/base-server.js:1951:32)
at async DevServer.pipeImpl (/Users/rexanwong/Dropbox/Mac/Documents/web_projects/videofast/node_modules/next/dist/server/base-server.js:917:25)
at async NextNodeServer.handleCatchallRenderRequest (/Users/rexanwong/Dropbox/Mac/Documents/web_projects/videofast/node_modules/next/dist/server/next-server.js:272:17)
at async DevServer.handleRequestImpl (/Users/rexanwong/Dropbox/Mac/Documents/web_projects/videofast/node_modules/next/dist/server/base-server.js:813:17)
at async /Users/rexanwong/Dropbox/Mac/Documents/web_projects/videofast/node_modules/next/dist/server/dev/next-dev-server.js:339:20
at async Span.traceAsyncFn (/Users/rexanwong/Dropbox/Mac/Documents/web_projects/videofast/node_modules/next/dist/trace/trace.js:154:20)
at async DevServer.handleRequest (/Users/rexanwong/Dropbox/Mac/Documents/web_projects/videofast/node_modules/next/dist/server/dev/next-dev-server.js:336:24)
at async invokeRender (/Users/rexanwong/Dropbox/Mac/Documents/web_projects/videofast/node_modules/next/dist/server/lib/router-server.js:173:21)
at async handleRequest (/Users/rexanwong/Dropbox/Mac/Documents/web_projects/videofast/node_modules/next/dist/server/lib/router-server.js:350:24)
at async requestHandlerImpl (/Users/rexanwong/Dropbox/Mac/Documents/web_projects/videofast/node_modules/next/dist/server/lib/router-server.js:374:13)
at async Server.requestListener (/Users/rexanwong/Dropbox/Mac/Documents/web_projects/videofast/node_modules/next/dist/server/lib/start-server.js:141:13)
POST /api/chat 500 in 3831ms
And here's the route.ts:
import { openai } from '@ai-sdk/openai';
import { streamText, convertToCoreMessages } from 'ai';
// Next.js route segment config: allow streaming responses for up to 30 seconds.
export const maxDuration = 30;
/**
 * POST /api/chat — streams a chat completion back to the client.
 *
 * Expects a JSON body of shape `{ messages: [...] }` (the useChat wire
 * format); the messages are converted to the SDK's core message format
 * before being handed to the model.
 */
export async function POST(req: Request) {
  const body = await req.json();
  const coreMessages = convertToCoreMessages(body.messages);

  const result = await streamText({
    model: openai('gpt-4-turbo'),
    messages: coreMessages,
  });

  return result.toDataStreamResponse();
}
I'm wondering if anyone can recreate this bug? Create a new nextjs app on v14.2.8, install the dependencies that I shared, implement one of the examples from the docs?
@RexanWONG can you use this as a starting point instead? https://github.com/vercel/ai/tree/main/examples/next-openai (working next app with ai sdk)
Closing since this seems unrelated to the AI SDK.