Search → details (chained)
The hard way: search, then fan out one detail call per zpid.
The easy way: POST /v1/search/with-details. We do the chaining server-side.
// API base URL and the headers sent with every request.
// ZILLOW_API_KEY must be set in the environment.
const api = "https://api.zillapi.com";
const headers = {
  authorization: `Bearer ${process.env.ZILLOW_API_KEY}`,
  "content-type": "application/json",
};
// 1. Kick off the chained job.
// Fail fast on a non-2xx response — otherwise reading `start.data.job_id`
// below would throw a cryptic TypeError instead of a clear error.
const startRes = await fetch(`${api}/v1/search/with-details`, {
  method: "POST",
  headers,
  body: JSON.stringify({
    filters: { status: "for_sale", location: "Austin, TX", beds: { min: 3 } },
    extractionMethod: "PAGINATION",
    maxItems: 200,
  }),
});
if (!startRes.ok) {
  throw new Error(`search/with-details failed: HTTP ${startRes.status}`);
}
const start = await startRes.json();

const jobId = start.data.job_id;
// 2a. Recommended: register a webhook (see /webhooks-guide/).
// Skip the polling below.
//
// 2b. Or poll until the job reaches a terminal state.
//
// @param {string} id - Job id returned by the kick-off call.
// @param {number} [intervalMs=5000] - Delay between polls in milliseconds.
// @returns {Promise<object>} The job's `data` payload once terminal.
async function awaitJob(id, intervalMs = 5000) {
  const TERMINAL = ["succeeded", "failed", "timed_out", "aborted"];
  for (;;) {
    const res = await fetch(`${api}/v1/jobs/${id}`, { headers });
    if (!res.ok) {
      throw new Error(`job status check failed: HTTP ${res.status}`);
    }
    const body = await res.json();
    if (TERMINAL.includes(body.data.status)) return body.data;
    // `resolve` (not `r`) so we don't shadow the response variable above.
    await new Promise((resolve) => setTimeout(resolve, intervalMs));
  }
}

const finished = await awaitJob(jobId);
if (finished.status !== "succeeded") throw new Error(finished.error);
// 3. Fetch results — these are detail rows, not search rows.
//
// Yields one batch (array of detail rows) per page. `pageSize` defaults to
// the original hard-coded 500 and stays backward compatible.
async function* pages(pageSize = 500) {
  let offset = 0;
  for (;;) {
    // Build the query with URL/searchParams rather than string concatenation.
    const url = new URL(`/v1/jobs/${jobId}/results`, api);
    url.searchParams.set("limit", String(pageSize));
    url.searchParams.set("offset", String(offset));
    const res = await fetch(url, { headers });
    if (!res.ok) {
      throw new Error(`results fetch failed: HTTP ${res.status}`);
    }
    const body = await res.json();
    yield body.data;
    // Stop when the server says there is no more data — and also guard
    // against an empty page with `has_more: true`, which would never
    // advance `offset` and loop forever.
    if (!body.meta.has_more || body.data.length === 0) break;
    offset += body.data.length;
  }
}

for await (const batch of pages()) {
  for (const home of batch) {
    console.log(home.zpid, home.address, home.price, home.priceHistory?.length);
  }
}
Cost
Chained jobs are billed once per search row plus once per detail row. A 200-result search that enriches all 200 rows therefore costs 400 units. To save units, run detail extraction only on the zpids you actually need.