The Lunch Money API uses offset-based pagination for the GET /transactions endpoint to efficiently retrieve large sets of transaction data. This guide explains how pagination works and how to implement it in your application.
When retrieving transactions using GET /transactions, the API supports pagination through the limit and offset query parameters. By default, the endpoint returns up to 1000 transactions per request. If more transactions match your filter criteria, the response includes a has_more property set to true, indicating that additional pages are available.
limit (optional): The maximum number of transactions to return in a single request
Default: 1000. offset (optional): The number of transactions to skip before returning results.
Default: 0 (start from the beginning).

has_more: A boolean property in the response indicating whether more transactions are available:
- true: More transactions match your filter criteria and are available on subsequent pages
- false: All matching transactions have been returned

To paginate: make an initial request (with an optional limit; if no limit is specified, it defaults to 1000), then check the has_more property in the response. While has_more is true, make another request with offset set to your previous offset + limit. Stop when has_more is false. Here's how to fetch transactions page by page:
/**
 * Fetch every transaction from the Lunch Money API, following pagination
 * until `has_more` is false.
 *
 * @param {string} accessToken - Lunch Money API bearer token.
 * @param {number} [limit=1000] - Page size (the API default is 1000).
 * @returns {Promise<Array>} All transactions, in API order.
 * @throws {Error} If the API responds with a non-2xx status.
 */
async function getAllTransactions(accessToken, limit = 1000) {
  const allTransactions = [];
  let offset = 0;
  let hasMore = true;

  while (hasMore) {
    // Build the URL with searchParams instead of string concatenation.
    const url = new URL('https://api.lunchmoney.dev/v2/transactions');
    url.searchParams.set('limit', String(limit));
    url.searchParams.set('offset', String(offset));

    const response = await fetch(url, {
      headers: {
        'Authorization': `Bearer ${accessToken}`
      }
    });
    // Fail fast on HTTP errors instead of parsing an error body as a page.
    if (!response.ok) {
      throw new Error(`Lunch Money API error: HTTP ${response.status}`);
    }

    const data = await response.json();
    allTransactions.push(...data.transactions);
    hasMore = data.has_more;
    offset += limit; // next page starts where this one ended

    console.log(`Fetched ${data.transactions.length} transactions (total: ${allTransactions.length})`);
  }

  return allTransactions;
}
// Usage (top-level await requires an ES module or other async context)
const transactions = await getAllTransactions('YOUR_ACCESS_TOKEN'); // substitute your real API token
console.log(`Total transactions: ${transactions.length}`);
import requests
def get_all_transactions(access_token, limit=1000):
    """Fetch every transaction from the Lunch Money API, following pagination.

    Args:
        access_token: Lunch Money API bearer token.
        limit: Page size (the API default is 1000).

    Returns:
        A list of all transaction dicts, in API order.

    Raises:
        requests.HTTPError: If the API responds with a non-2xx status.
    """
    all_transactions = []
    offset = 0
    has_more = True
    headers = {
        'Authorization': f'Bearer {access_token}'
    }
    while has_more:
        # Let requests build and encode the query string.
        response = requests.get(
            'https://api.lunchmoney.dev/v2/transactions',
            headers=headers,
            params={'limit': limit, 'offset': offset},
            timeout=30,  # avoid hanging indefinitely on a stalled connection
        )
        response.raise_for_status()  # surface HTTP errors instead of parsing an error body
        data = response.json()
        all_transactions.extend(data['transactions'])
        has_more = data['has_more']
        offset += limit  # next page starts where this one ended
        print(f"Fetched {len(data['transactions'])} transactions (total: {len(all_transactions)})")
    return all_transactions
# Usage (substitute your real API token)
transactions = get_all_transactions('YOUR_ACCESS_TOKEN')
print(f"Total transactions: {len(transactions)}")
#!/bin/bash
# Fetch all Lunch Money transactions page by page, merging every page into
# a single JSON array with jq.
set -euo pipefail

ACCESS_TOKEN="YOUR_ACCESS_TOKEN"
LIMIT=1000
OFFSET=0
HAS_MORE=true
ALL_TRANSACTIONS="[]"

while [ "$HAS_MORE" = "true" ]; do
  echo "Fetching transactions with offset=$OFFSET, limit=$LIMIT..."
  # -f makes curl fail on HTTP errors instead of piping an error body to jq.
  RESPONSE=$(curl -sf -X GET \
    "https://api.lunchmoney.dev/v2/transactions?limit=$LIMIT&offset=$OFFSET" \
    -H "Authorization: Bearer $ACCESS_TOKEN")
  # Quote "$RESPONSE" so the shell does not word-split or glob-expand the JSON.
  TRANSACTIONS=$(echo "$RESPONSE" | jq '.transactions')
  HAS_MORE=$(echo "$RESPONSE" | jq -r '.has_more')
  # Merge this page into the accumulated array.
  ALL_TRANSACTIONS=$(jq -n --argjson a "$ALL_TRANSACTIONS" --argjson b "$TRANSACTIONS" '$a + $b')
  COUNT=$(echo "$TRANSACTIONS" | jq 'length')
  echo "Fetched $COUNT transactions"
  OFFSET=$((OFFSET + LIMIT))
done

echo "All transactions fetched: $(echo "$ALL_TRANSACTIONS" | jq 'length') total"
Pagination works seamlessly with date range filters:
/**
 * Fetch every transaction in a date range, following pagination until
 * `has_more` is false.
 *
 * @param {string} accessToken - Lunch Money API bearer token.
 * @param {string} startDate - Range start (YYYY-MM-DD).
 * @param {string} endDate - Range end (YYYY-MM-DD).
 * @param {number} [limit=1000] - Page size (the API default is 1000).
 * @returns {Promise<Array>} All matching transactions.
 * @throws {Error} If the API responds with a non-2xx status.
 */
async function getTransactionsByDateRange(accessToken, startDate, endDate, limit = 1000) {
  const allTransactions = [];
  let offset = 0;
  let hasMore = true;

  while (hasMore) {
    const url = new URL('https://api.lunchmoney.dev/v2/transactions');
    url.searchParams.set('start_date', startDate);
    url.searchParams.set('end_date', endDate);
    url.searchParams.set('limit', limit.toString());
    url.searchParams.set('offset', offset.toString());

    const response = await fetch(url, {
      headers: {
        'Authorization': `Bearer ${accessToken}`
      }
    });
    // Fail fast on HTTP errors instead of parsing an error body as a page.
    if (!response.ok) {
      throw new Error(`Lunch Money API error: HTTP ${response.status}`);
    }

    const data = await response.json();
    allTransactions.push(...data.transactions);
    hasMore = data.has_more;
    offset += limit;
  }

  return allTransactions;
}
// Usage: Get all transactions for January 2024 (dates in YYYY-MM-DD format)
const janTransactions = await getTransactionsByDateRange(
'YOUR_ACCESS_TOKEN',
'2024-01-01',
'2024-01-31'
);
import requests
from urllib.parse import urlencode
def get_transactions_by_date_range(access_token, start_date, end_date):
    """Fetch every transaction between start_date and end_date, following
    pagination until has_more is False.

    Args:
        access_token: Lunch Money API bearer token.
        start_date: Range start (YYYY-MM-DD).
        end_date: Range end (YYYY-MM-DD).

    Returns:
        A list of all matching transaction dicts.

    Raises:
        requests.HTTPError: If the API responds with a non-2xx status.
    """
    all_transactions = []
    offset = 0
    has_more = True
    limit = 1000
    headers = {'Authorization': f'Bearer {access_token}'}
    while has_more:
        params = {
            'start_date': start_date,
            'end_date': end_date,
            'limit': limit,
            'offset': offset,
        }
        # requests encodes params itself, so manual urlencode() is unnecessary.
        response = requests.get(
            'https://api.lunchmoney.dev/v2/transactions',
            headers=headers,
            params=params,
            timeout=30,  # avoid hanging indefinitely on a stalled connection
        )
        response.raise_for_status()  # surface HTTP errors instead of parsing an error body
        data = response.json()
        all_transactions.extend(data['transactions'])
        has_more = data['has_more']
        offset += limit
    return all_transactions
# Usage: Get all transactions for January 2024 (dates in YYYY-MM-DD format)
jan_transactions = get_transactions_by_date_range(
'YOUR_ACCESS_TOKEN',
'2024-01-01',
'2024-01-31'
)
#!/bin/bash
# Fetch all transactions within a date range, one page at a time.
set -euo pipefail

ACCESS_TOKEN="YOUR_ACCESS_TOKEN"
START_DATE="2024-01-01"
END_DATE="2024-01-31"
LIMIT=1000
OFFSET=0
HAS_MORE=true

while [ "$HAS_MORE" = "true" ]; do
  # -f makes curl fail on HTTP errors instead of piping an error body to jq.
  RESPONSE=$(curl -sf -X GET \
    "https://api.lunchmoney.dev/v2/transactions?start_date=$START_DATE&end_date=$END_DATE&limit=$LIMIT&offset=$OFFSET" \
    -H "Authorization: Bearer $ACCESS_TOKEN")
  # Quote "$RESPONSE" so the shell does not word-split or glob-expand the JSON.
  HAS_MORE=$(echo "$RESPONSE" | jq -r '.has_more')
  COUNT=$(echo "$RESPONSE" | jq -r '.transactions | length')
  echo "Fetched $COUNT transactions (offset=$OFFSET)"
  OFFSET=$((OFFSET + LIMIT))
done
Checking the has_more Flag: Always check the has_more property to determine if you need to fetch more pages:
/**
 * Fetch a single page of transactions.
 *
 * @param {string} accessToken - Lunch Money API bearer token.
 * @param {number} [limit=1000] - Page size (the API default is 1000).
 * @param {number} [offset=0] - Number of transactions to skip.
 * @returns {Promise<{transactions: Array, hasMore: boolean, nextOffset: ?number}>}
 *   The page contents; `nextOffset` is the offset for the next page, or
 *   null when this is the last page.
 * @throws {Error} If the API responds with a non-2xx status.
 */
async function fetchTransactionsPage(accessToken, limit = 1000, offset = 0) {
  const url = new URL('https://api.lunchmoney.dev/v2/transactions');
  url.searchParams.set('limit', String(limit));
  url.searchParams.set('offset', String(offset));

  const response = await fetch(url, {
    headers: {
      'Authorization': `Bearer ${accessToken}`
    }
  });
  // Fail fast on HTTP errors instead of parsing an error body as a page.
  if (!response.ok) {
    throw new Error(`Lunch Money API error: HTTP ${response.status}`);
  }

  const data = await response.json();
  return {
    transactions: data.transactions,
    hasMore: data.has_more,
    nextOffset: data.has_more ? offset + limit : null
  };
}
// Usage: fetch the first page, then use nextOffset to request the next one
const page1 = await fetchTransactionsPage('YOUR_ACCESS_TOKEN', 1000, 0);
console.log(`Page 1: ${page1.transactions.length} transactions`);
console.log(`Has more: ${page1.hasMore}`);
if (page1.hasMore) {
const page2 = await fetchTransactionsPage('YOUR_ACCESS_TOKEN', 1000, page1.nextOffset);
console.log(`Page 2: ${page2.transactions.length} transactions`);
}
import requests
def fetch_transactions_page(access_token, limit=1000, offset=0):
    """Fetch a single page of transactions.

    Args:
        access_token: Lunch Money API bearer token.
        limit: Page size (the API default is 1000).
        offset: Number of transactions to skip.

    Returns:
        dict with 'transactions' (list), 'has_more' (bool), and
        'next_offset' (offset for the next page, or None on the last page).

    Raises:
        requests.HTTPError: If the API responds with a non-2xx status.
    """
    headers = {'Authorization': f'Bearer {access_token}'}
    response = requests.get(
        'https://api.lunchmoney.dev/v2/transactions',
        headers=headers,
        params={'limit': limit, 'offset': offset},
        timeout=30,  # avoid hanging indefinitely on a stalled connection
    )
    response.raise_for_status()  # surface HTTP errors instead of parsing an error body
    data = response.json()
    return {
        'transactions': data['transactions'],
        'has_more': data['has_more'],
        'next_offset': offset + limit if data['has_more'] else None,
    }
# Usage: fetch the first page, then use next_offset to request the next one
page1 = fetch_transactions_page('YOUR_ACCESS_TOKEN', 1000, 0)
print(f"Page 1: {len(page1['transactions'])} transactions")
print(f"Has more: {page1['has_more']}")
if page1['has_more']:
    page2 = fetch_transactions_page('YOUR_ACCESS_TOKEN', 1000, page1['next_offset'])
    print(f"Page 2: {len(page2['transactions'])} transactions")
# Fetch the first page and, if more results exist, the second page.
ACCESS_TOKEN="YOUR_ACCESS_TOKEN"
LIMIT=1000
OFFSET=0

# -f makes curl fail on HTTP errors instead of piping an error body to jq.
RESPONSE=$(curl -sf -X GET \
  "https://api.lunchmoney.dev/v2/transactions?limit=$LIMIT&offset=$OFFSET" \
  -H "Authorization: Bearer $ACCESS_TOKEN")
# Quote "$RESPONSE" so the shell does not word-split or glob-expand the JSON.
HAS_MORE=$(echo "$RESPONSE" | jq -r '.has_more')
COUNT=$(echo "$RESPONSE" | jq -r '.transactions | length')
echo "Page 1: $COUNT transactions"
echo "Has more: $HAS_MORE"

if [ "$HAS_MORE" = "true" ]; then
  OFFSET=$((OFFSET + LIMIT))
  RESPONSE=$(curl -sf -X GET \
    "https://api.lunchmoney.dev/v2/transactions?limit=$LIMIT&offset=$OFFSET" \
    -H "Authorization: Bearer $ACCESS_TOKEN")
  COUNT=$(echo "$RESPONSE" | jq -r '.transactions | length')
  echo "Page 2: $COUNT transactions"
fi
Choose a limit value that balances performance and number of requests:
// Good: Use the default limit for most cases
const fullPageResponse = await fetch('/v2/transactions?limit=1000');

// Also good: Use a smaller limit for incremental processing
// (distinct names — redeclaring the same `const response` twice in one
// scope is a SyntaxError)
const smallPageResponse = await fetch('/v2/transactions?limit=500');
Always Check has_more: Don't assume you've received all results. Always check the has_more property:
// ❌ Bad: Assumes one page is enough
const data = await fetch('/v2/transactions').then(r => r.json());
const transactions = data.transactions; // Might be incomplete!

// ✅ Good: Checks for more pages
const allTransactions = [];
let offset = 0;
let hasMore = true;
while (hasMore) {
  const response = await fetch(`/v2/transactions?offset=${offset}`);
  const page = await response.json(); // await the response, then parse it
  allTransactions.push(...page.transactions);
  hasMore = page.has_more;
  offset += 1000; // must match the limit in use (default 1000)
}
If you know the date range you need, use start_date and end_date parameters to reduce the result set size:
// ✅ Good: Uses date range to limit results (filtering happens server-side)
const response = await fetch(
'/v2/transactions?start_date=2024-01-01&end_date=2024-01-31'
);
// ❌ Less efficient: Fetches all transactions then filters client-side
const allTransactions = await fetchAllTransactions();
const janTransactions = allTransactions.filter(t =>
t.date >= '2024-01-01' && t.date <= '2024-01-31'
);
// If no transactions match your filters, the `transactions` array will be
// empty and `has_more` will be false. `has_more` will always be false on the
// last page of results.

/**
 * Fetch a single page of transactions with basic error handling.
 *
 * @param {string} accessToken - Lunch Money API bearer token.
 * @param {number} [limit=1000] - Page size (the API default is 1000).
 * @param {number} [offset=0] - Number of transactions to skip.
 * @returns {Promise<Object>} The parsed response body ({transactions, has_more, ...}).
 * @throws {Error} If the API responds with a non-2xx status.
 */
async function safeFetchTransactions(accessToken, limit = 1000, offset = 0) {
  const response = await fetch(
    `https://api.lunchmoney.dev/v2/transactions?limit=${limit}&offset=${offset}`,
    {
      headers: { 'Authorization': `Bearer ${accessToken}` }
    }
  );
  if (!response.ok) {
    throw new Error(`API error: ${response.status}`);
  }
  const data = await response.json();
  // Log empty results, but keep the return type consistent: always return the
  // parsed response object. (The original returned a bare [] here, so callers
  // could receive two different shapes.)
  if (data.transactions.length === 0 && !data.has_more) {
    console.log('No transactions found');
  }
  return data;
}
When paginating through many pages, be mindful of rate limits. Consider adding delays between requests:
/**
 * Fetch all transactions while pausing briefly between pages to stay under
 * the API rate limit.
 *
 * Relies on fetchTransactionsPage(), which returns
 * { transactions, hasMore, nextOffset } (camelCase keys).
 *
 * @param {string} accessToken - Lunch Money API bearer token.
 * @returns {Promise<Array>} All transactions.
 */
async function getAllTransactionsWithRateLimit(accessToken) {
  const allTransactions = [];
  let offset = 0;
  let hasMore = true;
  const limit = 1000;

  while (hasMore) {
    const data = await fetchTransactionsPage(accessToken, limit, offset);
    allTransactions.push(...data.transactions);
    // BUG FIX: fetchTransactionsPage returns `hasMore`, not `has_more`;
    // reading the snake_case key made the loop stop after the first page.
    hasMore = data.hasMore;
    offset += limit;

    // Small delay to avoid hitting rate limits
    if (hasMore) {
      await new Promise(resolve => setTimeout(resolve, 100));
    }
  }

  return allTransactions;
}
Issue: Getting duplicate transactions across pages
Solution: Ensure you're incrementing offset by the same value as your limit. If you use limit=1000, increment offset by 1000 for each subsequent page.
// ❌ Wrong: Offset not incremented correctly
let offset = 0;
const page1 = await fetch(`/v2/transactions?limit=1000&offset=${offset}`);
offset += 500; // Wrong! Should be 1000 — pages would overlap, producing duplicates
const page2 = await fetch(`/v2/transactions?limit=1000&offset=${offset}`);
// ✅ Correct: Offset matches limit
let offset = 0;
const page1 = await fetch(`/v2/transactions?limit=1000&offset=${offset}`);
offset += 1000; // Correct! Each page starts where the previous one ended
const page2 = await fetch(`/v2/transactions?limit=1000&offset=${offset}`);
Issue: has_more is always true in a loop
Solution: Make sure you're updating both hasMore and offset in your loop condition:
// ❌ Wrong: Infinite loop
let offset = 0;
while (true) { // Never updates hasMore!
const data = await fetch(`/v2/transactions?offset=${offset}`);
// Missing: hasMore = data.has_more
offset += 1000;
}
// ✅ Correct: Proper loop condition
let offset = 0;
let hasMore = true;
while (hasMore) {
const data = await fetch(`/v2/transactions?offset=${offset}`);
hasMore = data.has_more; // Update the condition
offset += 1000;
}
Issue: Getting fewer transactions than expected
Solution: Remember that filters (date ranges, account IDs, etc.) are applied before pagination. If you're filtering, the total number of transactions will be smaller:
// If you have 5000 total transactions but filter to a date range with only 200 transactions,
// you'll get 200 transactions total, not 5000
const data = await fetch('/v2/transactions?start_date=2024-01-01&end_date=2024-01-31')
  .then(r => r.json()); // parse the body — fetch alone returns a Response, not the data
// data.transactions.length might be 200, and has_more will be false
If you're experiencing pagination issues that can't be resolved through the techniques described above:
Verify that offset is being incremented correctly between requests, and inspect the has_more property on each response to confirm the API is reporting remaining pages as you expect.