1
- using Polly ;
2
- using Polly . Bulkhead ;
3
- using System ;
1
+ using System ;
4
2
using System . Collections . Concurrent ;
5
3
using System . Collections . Generic ;
6
4
using System . Linq ;
@@ -37,44 +35,50 @@ public override async Task ExecuteAsync()
37
35
}
38
36
#endregion
39
37
40
- private readonly AsyncBulkheadPolicy _bulkheadPolicy ;
41
- private readonly PriorityQueue < Request , TPriority > _requestQueue ;
38
+ private readonly PriorityQueue < Request , TPriority > _requestQueue = new ( ) ;
39
+ private readonly ConcurrentQueue < Task > _runnerTasks = new ( ) ;
40
+ private volatile int _parallelRunCount = 0 ;
42
41
43
- public PriorityExecutionQueue ( int parallelRunCount )
42
+ public PriorityExecutionQueue ( int maxParallelRunCount )
44
43
{
45
- _bulkheadPolicy = Policy . BulkheadAsync ( parallelRunCount , int . MaxValue ) ;
46
- _requestQueue = new PriorityQueue < Request , TPriority > ( ) ;
44
+ if ( maxParallelRunCount < 1 )
45
+ {
46
+ throw new ArgumentOutOfRangeException ( nameof ( maxParallelRunCount ) ) ;
47
+ }
48
+ MaxParallelRunCount = maxParallelRunCount ;
47
49
}
48
50
49
/// <summary>Maximum number of requests allowed to run concurrently.</summary>
public int MaxParallelRunCount { get; }
50
52
51
53
/// <summary>
/// Runs <paramref name="actionAsync"/> as soon as capacity and priority allow,
/// and returns its result.
/// </summary>
/// <param name="priority">Priority used to order this request against other queued requests.</param>
/// <param name="actionAsync">The asynchronous action to execute.</param>
/// <returns>The value produced by <paramref name="actionAsync"/>.</returns>
public async Task<T> RequestRunAsync<T>(TPriority priority, Func<Task<T>> actionAsync)
{
    // Optimistic path: if there is capacity, run inline without queuing.
    if (TryOptimistic())
    {
        try
        {
            return await actionAsync();
        }
        finally
        {
            // Release the slot even when the action throws; the original code
            // skipped the decrement on exception, permanently leaking one unit
            // of parallelism per faulting optimistic run.
            Interlocked.Decrement(ref _parallelRunCount);
            TryDequeueRequest();
        }
    }
    else
    {   // Optimistic try out failed: get in queue
        var request = new Request<T>(actionAsync);

        lock (_requestQueue)
        {   // Add our item in the queue
            _requestQueue.Enqueue(request, priority);
        }
        // Kick a runner in case capacity freed up since the optimistic check.
        TryDequeueRequest();

        // Wait for our own turn
        var result = await request.Source.Task;

        // Surface any exceptions from completed runner tasks.
        await ObserveRunnerTasksAsync();

        return result;
    }
}
79
83
80
84
public async Task RequestRunAsync ( TPriority priority , Func < Task > actionAsync )
@@ -86,5 +90,71 @@ await RequestRunAsync(priority, async () =>
86
90
return 0 ;
87
91
} ) ;
88
92
}
93
+
94
/// <summary>
/// Attempts to reserve one run slot via compare-and-swap.
/// Returns true when a slot was reserved, false when capacity is exhausted.
/// </summary>
private bool TryOptimistic()
{
    // Classic CAS loop. The original retried via self-recursion; C# does not
    // guarantee tail-call elimination, so heavy contention could grow the
    // stack. A loop has identical semantics with bounded stack usage.
    while (true)
    {
        var currentSnapshot = _parallelRunCount;

        if (currentSnapshot >= MaxParallelRunCount)
        {   // We've reached capacity
            return false;
        }

        if (Interlocked.CompareExchange(
            ref _parallelRunCount,
            currentSnapshot + 1,
            currentSnapshot) == currentSnapshot)
        {
            return true;
        }
        // Somebody else modified in the meantime: retry with a fresh snapshot.
    }
}
117
+
118
/// <summary>
/// If a run slot can be reserved and a request is pending, starts a runner
/// task for the highest-priority request; otherwise releases the reservation.
/// </summary>
private void TryDequeueRequest()
{
    if (TryOptimistic())
    {
        lock (_requestQueue)
        {
            if (_requestQueue.TryDequeue(out var request, out _))
            {
                var runningTask = Task.Run(async () =>
                {
                    try
                    {
                        await request.ExecuteAsync();
                    }
                    finally
                    {
                        // Free the slot and chain to the next request even if
                        // ExecuteAsync throws (presumably it routes failures to
                        // the request's completion source — TODO confirm — but
                        // without this guard a throw would leak the slot).
                        Interlocked.Decrement(ref _parallelRunCount);
                        TryDequeueRequest();
                    }
                });

                // Keep the runner so its exception (if any) can be observed later.
                _runnerTasks.Enqueue(runningTask);
            }
            else
            {   // Revert increment since there won't be any run
                Interlocked.Decrement(ref _parallelRunCount);
            }
        }
    }
}
142
+
143
/// <summary>
/// Awaits runner tasks that have already finished so their exceptions (if any)
/// are observed; stops scanning at the first task still in flight.
/// </summary>
private async Task ObserveRunnerTasksAsync()
{
    while (_runnerTasks.TryDequeue(out var runnerTask))
    {
        if (!runnerTask.IsCompleted)
        {
            // Still running: put it back for a later sweep and stop here.
            _runnerTasks.Enqueue(runnerTask);

            return;
        }

        // Completed (possibly faulted): awaiting rethrows any stored exception.
        await runnerTask;
    }
}
89
159
}
90
160
}
0 commit comments