Overview
List endpoints in the InstaView API support pagination so that large datasets can be retrieved efficiently. Understanding how pagination works helps you fetch data without hitting rate limits or running into performance problems.
All list endpoints accept these query parameters:
page: Page number to retrieve (1-indexed)
limit: Number of items per page (max: 100)
Example Request
GET /candidates?page=2&limit=50
Paginated responses include metadata about the result set:
{
  "success": true,
  "data": {
    "items": [
      { "id": "candidate-1", "firstName": "John", "lastName": "Doe" },
      { "id": "candidate-2", "firstName": "Jane", "lastName": "Smith" }
    ],
    "pagination": {
      "total": 250,
      "page": 2,
      "limit": 50,
      "totalPages": 5,
      "hasNextPage": true,
      "hasPreviousPage": true
    }
  },
  "error": null,
  "timestamp": "2024-01-15T10:30:00Z"
}
total: Total number of items across all pages
hasNextPage: Whether there are more pages after this one
hasPreviousPage: Whether there are pages before this one
Iterate through pages sequentially:
async function getAllCandidates() {
  const allCandidates = [];
  let page = 1;
  let hasMore = true;

  while (hasMore) {
    const response = await fetch(
      `https://api.instaview.sk/candidates?page=${page}&limit=100`,
      {
        headers: { 'Authorization': `Bearer ${apiKey}` }
      }
    );
    const result = await response.json();

    allCandidates.push(...result.data.items);
    hasMore = result.data.pagination.hasNextPage;
    page++;
  }

  return allCandidates;
}
Add delays between requests to avoid rate limits:
async function paginateWithRateLimit(endpoint, delayMs = 200) {
  const results = [];
  let page = 1;
  let hasMore = true;

  while (hasMore) {
    const response = await fetch(
      `${endpoint}?page=${page}&limit=100`,
      { headers: { 'Authorization': `Bearer ${apiKey}` } }
    );
    const result = await response.json();

    results.push(...result.data.items);
    hasMore = result.data.pagination.hasNextPage;
    page++;

    // Wait between requests
    if (hasMore) {
      await sleep(delayMs);
    }
  }

  return results;
}

function sleep(ms) {
  return new Promise(resolve => setTimeout(resolve, ms));
}

// Usage
const candidates = await paginateWithRateLimit(
  'https://api.instaview.sk/candidates',
  250 // 250ms delay
);
Fetch multiple pages concurrently (if you know the total):
async function paginateParallel(endpoint, limit = 100) {
  // First, get the total page count (this request also returns the first page of items)
  const firstPage = await fetch(
    `${endpoint}?page=1&limit=${limit}`,
    { headers: { 'Authorization': `Bearer ${apiKey}` } }
  );
  const firstResult = await firstPage.json();
  const totalPages = firstResult.data.pagination.totalPages;
  const allItems = [...firstResult.data.items];

  // Fetch the remaining pages in parallel (with a concurrency limit)
  const concurrency = 5; // Max 5 concurrent requests

  for (let page = 2; page <= totalPages; page += concurrency) {
    const batch = [];
    for (let i = 0; i < concurrency && (page + i) <= totalPages; i++) {
      batch.push(
        fetch(`${endpoint}?page=${page + i}&limit=${limit}`, {
          headers: { 'Authorization': `Bearer ${apiKey}` }
        })
      );
    }
    const responses = await Promise.all(batch);
    const results = await Promise.all(responses.map(r => r.json()));
    allItems.push(...results.flatMap(r => r.data.items));
  }

  return allItems;
}
Use generators for memory-efficient iteration:
async function* paginateCandidates(limit = 100) {
  let page = 1;
  let hasMore = true;

  while (hasMore) {
    const response = await fetch(
      `https://api.instaview.sk/candidates?page=${page}&limit=${limit}`,
      { headers: { 'Authorization': `Bearer ${apiKey}` } }
    );
    const result = await response.json();

    yield* result.data.items; // Yield items one by one
    hasMore = result.data.pagination.hasNextPage;
    page++;
  }
}

// Usage: Process items one at a time
for await (const candidate of paginateCandidates()) {
  await processCandidate(candidate);
  // Memory-efficient: only one page in memory at a time
}
Combine filters with pagination:
async function getCandidatesByJob(jobId, limit = 50) {
  const candidates = [];
  let page = 1;
  let hasMore = true;

  while (hasMore) {
    const response = await fetch(
      `https://api.instaview.sk/candidates?` +
        `jobId=${jobId}&page=${page}&limit=${limit}`,
      { headers: { 'Authorization': `Bearer ${apiKey}` } }
    );
    const result = await response.json();

    candidates.push(...result.data.items);
    hasMore = result.data.pagination.hasNextPage;
    page++;
  }

  return candidates;
}
Complex Filtering
// Multiple filters with pagination
async function searchCandidates(filters) {
  const params = new URLSearchParams({
    ...filters,
    page: 1,
    limit: 100
  });

  const response = await fetch(
    `https://api.instaview.sk/candidates?${params}`,
    { headers: { 'Authorization': `Bearer ${apiKey}` } }
  );

  return await response.json();
}

// Usage
const results = await searchCandidates({
  jobId: 'job-uuid',
  status: 'IN_PROCESS',
  search: 'engineer'
});
Optimizing Page Size
Choose appropriate page sizes:
Small Pages (10-20)
Use When:
Displaying in UI with client-side pagination
Network is slow or unreliable
Processing items individually
Pros:
Faster initial response
Less memory usage
Better for mobile clients
Cons:
More API calls required
Higher overhead for large datasets

Medium Pages (50-100)
Use When:
Batch processing operations
Syncing with external systems
General-purpose pagination
Pros:
Good balance of speed and efficiency
Reasonable memory usage
Fewer API calls
Cons:
May be slow for poor connections

Large Pages (100 max)
Use When:
Bulk export operations
One-time data migrations
Analytics processing
Pros:
Minimal API calls
Fastest total time for large datasets
Cons:
Higher memory usage
Longer individual response times
May hit rate limits faster
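If you want a single place to encode this guidance, a small helper can map a use case to a page size. This is a minimal sketch; the suggestedLimit function and its use-case labels are illustrative, not part of the API:

// Illustrative helper: map a use case to a page size based on the guidance above
function suggestedLimit(useCase) {
  switch (useCase) {
    case 'ui-list':     // client-side UI pagination, mobile, slow networks
      return 20;
    case 'sync':        // batch processing, syncing with external systems
      return 50;
    case 'bulk-export': // exports, migrations, analytics
      return 100;       // API maximum
    default:
      return 50;        // reasonable general-purpose default
  }
}

// Usage
const limit = suggestedLimit('bulk-export'); // 100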
Dynamic Page Sizing
Adjust page size based on response time:
class AdaptivePaginator {
  constructor(initialLimit = 100) {
    this.limit = initialLimit;
    this.targetResponseTime = 2000; // 2 seconds
  }

  async fetchPage(endpoint, page) {
    const startTime = Date.now();

    const response = await fetch(
      `${endpoint}?page=${page}&limit=${this.limit}`,
      { headers: { 'Authorization': `Bearer ${apiKey}` } }
    );

    const responseTime = Date.now() - startTime;
    this.adjustLimit(responseTime);

    return await response.json();
  }

  adjustLimit(responseTime) {
    if (responseTime > this.targetResponseTime && this.limit > 20) {
      this.limit = Math.max(20, Math.floor(this.limit * 0.8));
      console.log(`Reduced page size to ${this.limit}`);
    } else if (responseTime < this.targetResponseTime / 2 && this.limit < 100) {
      this.limit = Math.min(100, Math.floor(this.limit * 1.2));
      console.log(`Increased page size to ${this.limit}`);
    }
  }
}

// Usage
const paginator = new AdaptivePaginator();
let page = 1;
let hasMore = true;

while (hasMore) {
  const result = await paginator.fetchPage(endpoint, page);
  processData(result.data.items);
  hasMore = result.data.pagination.hasNextPage;
  page++;
}
Caching Paginated Results
Cache pages to reduce API calls:
class PaginationCache {
  constructor(ttl = 300000) { // 5 minutes
    this.cache = new Map();
    this.ttl = ttl;
  }

  getKey(endpoint, page, limit) {
    return `${endpoint}:${page}:${limit}`;
  }

  get(endpoint, page, limit) {
    const key = this.getKey(endpoint, page, limit);
    const cached = this.cache.get(key);

    if (cached && Date.now() - cached.timestamp < this.ttl) {
      console.log(`Cache hit: page ${page}`);
      return cached.data;
    }

    return null;
  }

  set(endpoint, page, limit, data) {
    const key = this.getKey(endpoint, page, limit);
    this.cache.set(key, {
      data,
      timestamp: Date.now()
    });
  }
}

const cache = new PaginationCache();

async function fetchPageWithCache(endpoint, page, limit) {
  // Check cache first
  const cached = cache.get(endpoint, page, limit);
  if (cached) return cached;

  // Fetch from API
  const response = await fetch(
    `${endpoint}?page=${page}&limit=${limit}`,
    { headers: { 'Authorization': `Bearer ${apiKey}` } }
  );
  const result = await response.json();

  // Cache the result
  cache.set(endpoint, page, limit, result);

  return result;
}
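For example, fetching the same page twice within the TTL only makes one API call; the second read is served from the cache (a minimal usage sketch of the helpers above):

// First call hits the API and populates the cache
const page1 = await fetchPageWithCache('https://api.instaview.sk/candidates', 1, 100);

// Second call within the 5-minute TTL logs "Cache hit: page 1" and skips the API
const page1Again = await fetchPageWithCache('https://api.instaview.sk/candidates', 1, 100);

Cached pages can go stale when candidates are created or updated, so keep the TTL short for data that changes frequently.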
Progress Tracking
Show progress for long-running pagination:
async function paginateWithProgress(endpoint, onProgress) {
  // Get total count
  const firstPage = await fetch(
    `${endpoint}?page=1&limit=100`,
    { headers: { 'Authorization': `Bearer ${apiKey}` } }
  );
  const firstResult = await firstPage.json();

  const total = firstResult.data.pagination.total;
  const totalPages = firstResult.data.pagination.totalPages;
  const allItems = [...firstResult.data.items];

  onProgress(allItems.length, total, 1, totalPages);

  // Fetch remaining pages
  for (let page = 2; page <= totalPages; page++) {
    const response = await fetch(
      `${endpoint}?page=${page}&limit=100`,
      { headers: { 'Authorization': `Bearer ${apiKey}` } }
    );
    const result = await response.json();

    allItems.push(...result.data.items);
    onProgress(allItems.length, total, page, totalPages);
  }

  return allItems;
}

// Usage
const candidates = await paginateWithProgress(
  'https://api.instaview.sk/candidates',
  (current, total, page, totalPages) => {
    const percent = Math.round((current / total) * 100);
    console.log(`Progress: ${percent}% (Page ${page}/${totalPages})`);
  }
);
Best Practices
Use Maximum Page Size for Bulk Operations
// ✅ Efficient: Fewer API calls
?page=1&limit=100

// ❌ Inefficient: Too many calls for 1000 items
?page=1&limit=10 // 100 API calls needed
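The number of requests is simply the total item count divided by the page size, rounded up; a quick sketch (the callsNeeded helper is illustrative):

// Requests needed to fetch `total` items at a given page size
const callsNeeded = (total, limit) => Math.ceil(total / limit);

callsNeeded(1000, 100); // 10 requests
callsNeeded(1000, 10);  // 100 requests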
// Handle empty result sets gracefully
async function fetchItems(endpoint) {
  const result = await fetch(endpoint);
  const data = await result.json();

  if (!data.data.items || data.data.items.length === 0) {
    console.log('No results found');
    return [];
  }

  return data.data.items;
}
// Validate before making the request
function validatePagination(page, limit) {
  if (page < 1) {
    throw new Error('Page must be >= 1');
  }
  if (limit < 1 || limit > 100) {
    throw new Error('Limit must be between 1 and 100');
  }
}
async function fetchPageWithRetry(endpoint, page, limit, retries = 3) {
  for (let i = 0; i < retries; i++) {
    try {
      return await fetch(
        `${endpoint}?page=${page}&limit=${limit}`,
        { headers: { 'Authorization': `Bearer ${apiKey}` } }
      );
    } catch (error) {
      if (i === retries - 1) throw error;
      await sleep(1000 * Math.pow(2, i)); // Exponential backoff
    }
  }
}
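A quick usage sketch, parsing the standard response envelope shown earlier:

// Fetch page 3 with up to 3 attempts, then read the pagination metadata
const response = await fetchPageWithRetry('https://api.instaview.sk/candidates', 3, 100);
const result = await response.json();
console.log(result.data.pagination.totalPages);

Note that fetch only rejects on network failures; if you also want to retry HTTP-level errors such as 429 responses, check response.ok inside the try block and throw when it is false.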
Consider Memory Constraints
// ❌ Bad: Load all 10,000 items into memory
const allCandidates = await getAllCandidates();

// ✅ Good: Process page by page
for await (const candidate of paginateCandidates()) {
  await processCandidate(candidate);
  // Only one page in memory at a time
}
Troubleshooting
// Request: ?page=100 (but only 5 pages exist)
{
  "success": true,
  "data": {
    "items": [],
    "pagination": {
      "total": 250,
      "page": 100,
      "limit": 50,
      "totalPages": 5,
      "hasNextPage": false,
      "hasPreviousPage": true
    }
  }
}
Requesting a page beyond the last one returns an empty items array, not an error.
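If you want to detect this case explicitly in client code, compare the requested page against totalPages. A minimal sketch based on the response shape above; the isPastLastPage helper is illustrative, not part of the API:

// Detect a request that ran past the last page of results
function isPastLastPage(result) {
  const { page, totalPages, total } = result.data.pagination;
  return total > 0 && page > totalPages && result.data.items.length === 0;
}

// Usage: stop a pagination loop instead of processing an empty page
if (isPastLastPage(result)) {
  console.warn('Requested page is beyond the last available page');
}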
// Request: ?limit=500 (exceeds maximum)
{
  "success": false,
  "error": {
    "code": "VALIDATION_ERROR",
    "message": "Limit must be between 1 and 100",
    "details": {
      "field": "limit",
      "provided": 500,
      "max": 100
    }
  }
}
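A simple way to avoid this error is to clamp the limit on the client before building the query string. A minimal sketch; the clampLimit helper is illustrative, not part of the API:

// Keep the limit inside the allowed 1-100 range
function clampLimit(limit) {
  return Math.min(100, Math.max(1, limit));
}

const limit = clampLimit(500); // 100
const response = await fetch(
  `https://api.instaview.sk/candidates?page=1&limit=${limit}`,
  { headers: { 'Authorization': `Bearer ${apiKey}` } }
);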
Next Steps