Understanding API Rate Limits

The Entrupy Android SDK and API have rate limits to ensure fair usage and system stability. Understanding these limits is crucial for building robust applications.

SDK Rate Limits

Authorization Requests

  • Limit: 10 requests per minute per user
  • Scope: Per unique user ID
  • Reset: Every minute

Capture Flow Sessions

  • Limit: 5 concurrent capture sessions per user
  • Scope: Per unique user ID
  • Reset: When capture session ends (success, failure, or timeout)

Configuration Fetching

  • Limit: 1 request per 5 minutes per app
  • Scope: Per application (API key)
  • Reset: Every 5 minutes
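
To stay under these SDK limits, it helps to gate calls on the client before they ever reach the SDK. The following is a minimal sketch under our own naming (SdkLimitGuard is not part of the Entrupy SDK); it caps concurrent capture sessions at 5 and configuration fetches at one per 5 minutes:

// Sketch only: a local guard mirroring the SDK limits above.
// The class and method names are ours, not part of the Entrupy SDK.
import java.util.concurrent.Semaphore

class SdkLimitGuard {

    // At most 5 concurrent capture sessions per user
    private val captureSlots = Semaphore(5)

    // Configuration fetching is limited to 1 request per 5 minutes per app
    private var lastConfigFetchAt = 0L
    private val configFetchIntervalMs = 5 * 60 * 1000L

    // Returns true if a capture session may start; call endCaptureSession() when it finishes
    fun tryStartCaptureSession(): Boolean = captureSlots.tryAcquire()

    fun endCaptureSession() = captureSlots.release()

    // Returns true at most once per 5-minute window
    @Synchronized
    fun canFetchConfiguration(): Boolean {
        val now = System.currentTimeMillis()
        return if (now - lastConfigFetchAt >= configFetchIntervalMs) {
            lastConfigFetchAt = now
            true
        } else {
            false
        }
    }
}

Release the capture slot in every terminal callback (success, failure, or timeout), matching how the SDK resets the concurrent-session count.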

API Rate Limits

User Authorization Endpoint

  • Limit: 100 requests per minute per API key
  • Scope: Per Entrupy API key
  • Reset: Every minute

Webhook Delivery

  • Limit: 1000 webhook deliveries per hour per endpoint
  • Scope: Per webhook URL
  • Reset: Every hour

Item Status Queries

  • Limit: 1000 requests per minute per API key
  • Scope: Per Entrupy API key
  • Reset: Every minute
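
If your backend throttles itself proactively (see the handlers later on this page), keeping the documented limits in one place avoids magic numbers. A small sketch; the object name is ours, not part of the Entrupy API:

// Documented Entrupy API limits, collected for reuse by client-side throttles
object EntrupyApiLimits {
    const val AUTHORIZATION_REQUESTS_PER_MINUTE = 100 // per API key
    const val WEBHOOK_DELIVERIES_PER_HOUR = 1000      // per webhook URL
    const val STATUS_QUERIES_PER_MINUTE = 1000        // per API key
}

For example, the RateLimitHandler shown under "Handling Rate Limits" could be called with EntrupyApiLimits.STATUS_QUERIES_PER_MINUTE as its limit argument.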

Rate Limit Headers

When you exceed a rate limit, the API responds with HTTP 429 Too Many Requests and includes these headers:

HTTP/1.1 429 Too Many Requests
X-RateLimit-Limit: 100
X-RateLimit-Remaining: 0
X-RateLimit-Reset: 1640995200
Retry-After: 60

Header Explanation

  • X-RateLimit-Limit: Maximum requests allowed in the time window
  • X-RateLimit-Remaining: Number of requests remaining in the current window
  • X-RateLimit-Reset: Unix timestamp when the rate limit resets
  • Retry-After: Recommended wait time in seconds before retrying
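
In practice, you read these headers from the HTTP response and use Retry-After (or the reset timestamp) to decide how long to pause. The sketch below assumes OkHttp as the HTTP client; any client that exposes response headers works the same way:

// Sketch: parsing the rate limit headers described above (OkHttp assumed)
import okhttp3.Response

data class RateLimitInfo(
    val limit: Int?,              // X-RateLimit-Limit
    val remaining: Int?,          // X-RateLimit-Remaining
    val resetEpochSeconds: Long?, // X-RateLimit-Reset
    val retryAfterSeconds: Long?  // Retry-After
)

fun parseRateLimitHeaders(response: Response): RateLimitInfo = RateLimitInfo(
    limit = response.header("X-RateLimit-Limit")?.toIntOrNull(),
    remaining = response.header("X-RateLimit-Remaining")?.toIntOrNull(),
    resetEpochSeconds = response.header("X-RateLimit-Reset")?.toLongOrNull(),
    retryAfterSeconds = response.header("Retry-After")?.toLongOrNull()
)

// Prefer Retry-After when present; otherwise wait until the reset timestamp
fun waitMillis(info: RateLimitInfo): Long {
    info.retryAfterSeconds?.let { return it * 1000L }
    info.resetEpochSeconds?.let { return maxOf(0L, it * 1000L - System.currentTimeMillis()) }
    return 0L
}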

Handling Rate Limits

1. SDK-Level Handling

The Android SDK automatically handles rate limits for most operations:

// The SDK will automatically retry with exponential backoff
EntrupySdk.getInstance().startCapture(
    context = this,
    itemMetadata = metadata,
    captureCallback = object : EntrupyCaptureCallback {
        override fun onCaptureError(
            errorCode: EntrupyErrorCode,
            message: String,
            item: Map<String, Any>
        ) {
            when (errorCode) {
                EntrupyErrorCode.RATE_LIMIT_EXCEEDED -> {
                    // SDK will automatically retry
                    Log.w("Entrupy", "Rate limit hit, retrying automatically")
                }
                else -> {
                    // Handle other errors
                    handleOtherErrors(errorCode, message)
                }
            }
        }
    }
)

2. Backend-Level Handling

Implement rate limit handling in your backend:

// Example backend rate limit handling (simple in-memory sliding window;
// not thread-safe and not shared across server instances)
class RateLimitHandler {

    private val requestCounts = mutableMapOf<String, MutableList<Long>>()

    fun checkRateLimit(identifier: String, limit: Int, windowSeconds: Int): Boolean {
        val now = System.currentTimeMillis()
        val windowStart = now - (windowSeconds * 1000L)

        // Drop requests that have fallen out of the window
        requestCounts[identifier]?.removeAll { it < windowStart }

        val requests = requestCounts.getOrPut(identifier) { mutableListOf() }

        return if (requests.size < limit) {
            requests.add(now)
            true
        } else {
            false
        }
    }

    fun getRetryAfter(identifier: String, windowSeconds: Int): Long {
        val requests = requestCounts[identifier] ?: return 0L
        if (requests.isEmpty()) return 0L

        // Wait until the oldest request in the window expires
        val oldestRequest = requests.minOrNull() ?: return 0L
        return maxOf(0L, (oldestRequest + (windowSeconds * 1000L)) - System.currentTimeMillis())
    }
}

3. Exponential Backoff Implementation

import kotlinx.coroutines.delay
import kotlin.math.pow

class RetryHandler {

    private val maxRetries = 3
    private val baseDelay = 1000L // 1 second

    suspend fun <T> retryWithBackoff(
        operation: suspend () -> T,
        maxRetries: Int = this.maxRetries
    ): T {
        var lastException: Exception? = null

        for (attempt in 0..maxRetries) {
            try {
                return operation()
            } catch (e: Exception) {
                lastException = e

                if (attempt < maxRetries && isRetryableError(e)) {
                    // Exponential backoff: 1s, 2s, 4s, ...
                    val delayMs = baseDelay * 2.0.pow(attempt).toLong()
                    delay(delayMs)
                } else {
                    break
                }
            }
        }

        throw lastException ?: IllegalStateException("Retry failed")
    }

    private fun isRetryableError(exception: Exception): Boolean {
        val message = exception.message?.lowercase() ?: return false
        return "429" in message ||
            "rate limit" in message ||
            "too many requests" in message
    }
}

Best Practices

1. Implement Caching

import android.util.LruCache

class EntrupyCache {

    private val cache = LruCache<String, CacheEntry>(100)

    fun get(key: String): Any? {
        val entry = cache.get(key)
        return if (entry != null && !entry.isExpired()) {
            entry.data
        } else {
            // Evict expired (or missing) entries
            cache.remove(key)
            null
        }
    }

    fun put(key: String, data: Any, ttlSeconds: Long) {
        val entry = CacheEntry(data, System.currentTimeMillis() + (ttlSeconds * 1000L))
        cache.put(key, entry)
    }

    private data class CacheEntry(
        val data: Any,
        val expiryTime: Long
    ) {
        fun isExpired(): Boolean = System.currentTimeMillis() > expiryTime
    }
}

2. Batch Operations

// Batch multiple status checks to reduce request volume
import android.os.Handler
import android.os.Looper

class BatchStatusChecker {

    private val pendingRequests = mutableListOf<String>()
    private val batchSize = 10
    private val batchDelay = 1000L // 1 second
    private val handler = Handler(Looper.getMainLooper())
    private var flushScheduled = false

    fun addRequest(customerItemId: String) {
        pendingRequests.add(customerItemId)

        if (pendingRequests.size >= batchSize) {
            processBatch()
        } else if (!flushScheduled) {
            // Schedule a single delayed flush for the partial batch
            flushScheduled = true
            handler.postDelayed({
                flushScheduled = false
                if (pendingRequests.isNotEmpty()) {
                    processBatch()
                }
            }, batchDelay)
        }
    }

    private fun processBatch() {
        val batch = pendingRequests.take(batchSize)
        pendingRequests.removeAll(batch.toSet())

        // Process batch request
        processBatchRequest(batch)
    }

    private fun processBatchRequest(batch: List<String>) {
        // Placeholder: issue a single status request covering the whole batch
    }
}

3. Monitor Rate Limit Usage

class RateLimitMonitor {

    private val usageTracker = mutableMapOf<String, UsageStats>()

    fun trackRequest(endpoint: String) {
        val stats = usageTracker.getOrPut(endpoint) { UsageStats() }
        stats.incrementRequest()
    }

    fun getUsageStats(endpoint: String): UsageStats? {
        return usageTracker[endpoint]
    }

    fun shouldThrottle(endpoint: String): Boolean {
        val stats = usageTracker[endpoint] ?: return false
        return stats.getRequestsInLastMinute() > 80 // 80% of the 100/minute limit
    }

    data class UsageStats(
        private val requests: MutableList<Long> = mutableListOf()
    ) {
        fun incrementRequest() {
            val now = System.currentTimeMillis()
            requests.add(now)

            // Clean old requests (older than 1 minute)
            requests.removeAll { it < now - 60_000 }
        }

        fun getRequestsInLastMinute(): Int = requests.size
    }
}

Testing Rate Limits

1. Load Testing

// Test rate limit behavior (kotlin.test assertions: the message is the second argument)
import org.junit.Test
import kotlin.test.assertFalse
import kotlin.test.assertTrue

class RateLimitTest {

    @Test
    fun testRateLimitHandling() {
        val rateLimitHandler = RateLimitHandler()

        // Simulate 15 rapid requests against a limit of 10 per 60 seconds
        repeat(15) { requestId ->
            val allowed = rateLimitHandler.checkRateLimit("test_user", 10, 60)

            if (requestId < 10) {
                assertTrue(allowed, "Request $requestId should be allowed")
            } else {
                assertFalse(allowed, "Request $requestId should be blocked")
            }
        }
    }
}

2. Integration Testing

import androidx.test.ext.junit.runners.AndroidJUnit4
import org.junit.Test
import org.junit.runner.RunWith

@RunWith(AndroidJUnit4::class)
class RateLimitIntegrationTest {

    @Test
    fun testSDKRateLimitRecovery() {
        // Simulate rate limit scenario
        // Verify SDK handles it gracefully
        // Check that operations resume after rate limit period
    }
}

Monitoring and Alerting

1. Rate Limit Metrics

// "Analytics" below stands in for whatever analytics/monitoring client your app uses
class RateLimitMetrics {

    fun recordRateLimitHit(endpoint: String) {
        // Send metric to monitoring service
        Analytics.track("rate_limit_hit", mapOf(
            "endpoint" to endpoint,
            "timestamp" to System.currentTimeMillis()
        ))
    }

    fun recordRetryAttempt(endpoint: String, attempt: Int) {
        Analytics.track("rate_limit_retry", mapOf(
            "endpoint" to endpoint,
            "attempt" to attempt,
            "timestamp" to System.currentTimeMillis()
        ))
    }
}

2. Alerting

class RateLimitAlerting(
    // Reuse the monitor that is already tracking requests; a fresh
    // instance would have no usage data to evaluate
    private val monitor: RateLimitMonitor
) {

    fun checkRateLimitHealth() {
        listOf("authorization", "capture", "status").forEach { endpoint ->
            if (monitor.shouldThrottle(endpoint)) {
                sendAlert("Rate limit threshold reached for $endpoint")
            }
        }
    }

    private fun sendAlert(message: String) {
        // Send alert to monitoring system
        Log.w("RateLimit", message)
    }
}

Understanding and properly handling rate limits ensures your Android application provides a smooth user experience while respecting Entrupy's system constraints.