# Generic Limiter

This guide explains the {ruby Async::Limiter::Generic} class, which provides unlimited concurrency by default and serves as the base implementation for all other limiters. It's ideal when you need timing constraints without concurrency limits, or when building custom limiter implementations.

## Usage

In the simplest case, there are no limits on concurrent execution:

```ruby
require "async"
require "async/limiter"

Async do
	limiter = Async::Limiter::Generic.new
	
	# All 100 tasks run concurrently:
	100.times do |i|
		limiter.async do |task|
			puts "Task #{i} running"
			task.sleep 1
		end
	end
end
```

All tasks start immediately and run concurrently, limited only by system resources.

### Async Execution

The primary way to use the Generic limiter is through the `async` method:

```ruby
require "async"
require "async/limiter"

Async do
	limiter = Async::Limiter::Generic.new
	
	# Create async tasks through the limiter:
	tasks = 5.times.map do |i|
		limiter.async do |task|
			puts "Task #{i} started at #{Time.now}"
			task.sleep 1
			puts "Task #{i} completed at #{Time.now}"
			"result_#{i}"
		end
	end
	
	# Wait for all tasks to complete:
	results = tasks.map(&:wait)
	puts "All results: #{results}"
end
```

### Sync Execution

For synchronous execution within an async context:

```ruby
Async do
	limiter = Async::Limiter::Generic.new
	
	# Execute synchronously within the limiter:
	result = limiter.sync do |task|
		puts "Executing in task: #{task}"
		"sync result"
	end
	
	puts result # => "sync result"
end
```

## Timing Coordination

Generic limiters excel when combined with timing strategies for pure rate limiting:

### Rate Limiting Without Concurrency Limits

```ruby
Async do
	# Allow unlimited concurrency, but rate limit to 10 operations per second (burst capacity of 50):
	timing = Async::Limiter::Timing::LeakyBucket.new(10.0, 50.0)
	limiter = Async::Limiter::Generic.new(timing: timing)
	
	# All tasks start immediately, but the timing strategy controls the rate:
	100.times do |i|
		limiter.async do |task|
			puts "Task #{i} executing at #{Time.now}"
			# The timing strategy ensures rate limiting.
		end
	end
end
```
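
To see the timing strategy in action, you can time a batch of rate-limited operations. The sketch below is illustrative only; it assumes the leaky bucket admits work immediately while it has spare capacity (50 units here) and thereafter drains at the configured rate of 10 per second, so 100 operations should take roughly 5 seconds in total:

```ruby
require "async"
require "async/limiter"

Async do
	timing = Async::Limiter::Timing::LeakyBucket.new(10.0, 50.0)
	limiter = Async::Limiter::Generic.new(timing: timing)
	
	start = Time.now
	
	tasks = 100.times.map do
		limiter.async do
			# Real work would go here.
		end
	end
	
	# Wait for every operation to be admitted and completed:
	tasks.each(&:wait)
	
	puts "Completed 100 operations in #{Time.now - start} seconds"
end
```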

### Burst Handling

```ruby
Async do
	# Allow bursts of up to 5 operations within each 1-second window:
	timing = Async::Limiter::Timing::SlidingWindow.new(
		1.0, # 1-second window.
		Async::Limiter::Timing::BurstStrategy::Greedy, # Allow bursting.
		5 # 5 operations per second.
	)
	
	limiter = Async::Limiter::Generic.new(timing: timing)
	
	# The first 5 operations execute immediately (burst).
	# Subsequent operations are rate limited to 5 per second:
	50.times do |i|
		limiter.async do |task|
			puts "Operation #{i} at #{Time.now}"
		end
	end
end
```
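
To observe the burst pattern, you can record when each operation is admitted and group the timestamps by second. This is a sketch under the assumption that the greedy strategy admits up to the window limit (5 here) at the start of each window:

```ruby
require "async"
require "async/limiter"

Async do
	timing = Async::Limiter::Timing::SlidingWindow.new(
		1.0,
		Async::Limiter::Timing::BurstStrategy::Greedy,
		5
	)
	limiter = Async::Limiter::Generic.new(timing: timing)
	
	start = Time.now
	timestamps = []
	
	tasks = 20.times.map do
		limiter.async do
			timestamps << (Time.now - start)
		end
	end
	tasks.each(&:wait)
	
	# Tally how many operations landed in each one-second interval;
	# each bucket should contain roughly 5 operations.
	puts timestamps.group_by(&:floor).transform_values(&:count)
end
```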

## Advanced Usage Patterns

### Cost-Based Operations

When using timing strategies, you can specify different costs for operations:

```ruby
# Create limiter with timing strategy that supports costs:
timing = Async::Limiter::Timing::LeakyBucket.new(10.0, 50.0) # 10/sec rate, 50 capacity.
limiter = Async::Limiter::Generic.new(timing: timing)

Async do
	# Light operations:
	limiter.acquire(cost: 0.5) do |resource|
		puts "Light operation using #{resource}"
	end
	
	# Standard operations (default cost: 1.0):
	limiter.acquire do |resource|
		puts "Standard operation using #{resource}"
	end
	
	# Heavy operations:
	limiter.acquire(cost: 5.0) do |resource|
		puts "Heavy operation using #{resource}"
	end
	
	# Operations that exceed timing capacity will fail:
	begin
		limiter.acquire(cost: 100.0) # Exceeds capacity of 50.0.
	rescue ArgumentError => error
		Console.error(self, error)
	end
end
```
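
As a rough rule of thumb (assuming the bucket drains `rate` units per second and each acquisition consumes its `cost`), the sustained throughput once the initial capacity is used up is approximately `rate / average_cost` operations per second. A minimal illustration:

```ruby
rate = 10.0        # Units drained per second (first argument to LeakyBucket).
average_cost = 2.5 # Average cost per operation in your workload.

# Approximate long-run throughput after the burst capacity is exhausted:
puts rate / average_cost # => 4.0 operations per second
```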

Note that by default, lower-cost operations can be admitted before higher-cost operations. In other words, a steady stream of low-cost operations can starve out higher-cost operations unless you use {ruby Async::Limiter::Timing::Ordered} to force FIFO acquires:

```ruby
# Default behavior (potential starvation):
timing = Async::Limiter::Timing::LeakyBucket.new(2.0, 10.0)

# FIFO ordering (prevents starvation):
timing = Async::Limiter::Timing::Ordered.new(
	Async::Limiter::Timing::LeakyBucket.new(2.0, 10.0)
)
```
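
For example, once the bucket is full, a waiting high-cost acquire is served before any low-cost acquires submitted after it. The following is a minimal sketch using only the interface shown above; it assumes the bucket admits an acquire only when it has `cost` units of spare capacity:

```ruby
require "async"
require "async/limiter"

Async do |parent|
	timing = Async::Limiter::Timing::Ordered.new(
		Async::Limiter::Timing::LeakyBucket.new(2.0, 10.0)
	)
	limiter = Async::Limiter::Generic.new(timing: timing)
	
	# Fill the bucket so that subsequent acquires have to wait:
	limiter.acquire(cost: 10.0) do
		puts "Initial burst at #{Time.now}"
	end
	
	# With Ordered timing, this heavy acquire (queued first)...
	parent.async do
		limiter.acquire(cost: 8.0) do
			puts "Heavy operation at #{Time.now}"
		end
	end
	
	# ...is served before these cheaper acquires queued after it:
	5.times do |i|
		parent.async do
			limiter.acquire(cost: 1.0) do
				puts "Light operation #{i} at #{Time.now}"
			end
		end
	end
end
```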