Request/Response pattern with TPL Dataflow
我们有一个问题,在使用TPL数据流库时,我们需要一个请求/响应模式。我们的问题是,我们有一个调用依赖服务的.NET核心API。从属服务限制并发请求。我们的API不会限制并发请求;因此,我们一次可以收到数千个请求。在这种情况下,从属服务在达到其限制后将拒绝请求。因此,我们实现了
具体到下面的代码,我们的问题在于:发出请求后,如何保证每个调用方取回的正是它自己那次请求所对应的响应。
因此,我们的问题是:是否可以使用数据流块来关联请求/响应?最终目标是向我们的API发出请求,将其发出给相关服务,然后返回给客户端。下面是用于我们的数据流实现的代码。
public class HttpClientWrapper : IHttpClientManager
{
    private readonly IConfiguration _configuration;
    private readonly ITokenService _tokenService;
    private readonly object _clientGate = new object();
    private HttpClient _client;

    // Each queued request carries its own TaskCompletionSource so every caller
    // gets back exactly the response to the request it submitted.
    // BUG FIX: the original BufferBlock -> TransformBlock + ReceiveAsync() pair
    // raced under concurrency — outputs leave a TransformBlock in completion
    // order, so ReceiveAsync() could hand caller A the response to caller B's
    // request. Routing the reply through a per-request completion source
    // removes the race while keeping the 10-wide throttle.
    private readonly ActionBlock<KeyValuePair<string, TaskCompletionSource<JObject>>> _workQueue;

    public HttpClientWrapper(IConfiguration configuration, ITokenService tokenService)
    {
        _configuration = configuration;
        _tokenService = tokenService;

        // MaxDegreeOfParallelism caps concurrent calls to the rate-limited
        // dependent service at 10, same as the original TransformBlock options.
        var executionDataFlowBlockOptions = new ExecutionDataflowBlockOptions
        {
            MaxDegreeOfParallelism = 10
        };

        _workQueue = new ActionBlock<KeyValuePair<string, TaskCompletionSource<JObject>>>(
            async item =>
            {
                var response = await ProcessRequest(item.Key);
                item.Value.TrySetResult(response);
            },
            executionDataFlowBlockOptions);
    }

    /// <summary>Creates the shared HttpClient with the app's default headers.</summary>
    public void Connect()
    {
        _client = new HttpClient();
        _client.DefaultRequestHeaders.Add("x-ms-client-application-name", "ourappname");
    }

    /// <summary>
    /// Queues <paramref name="request"/> through the throttled block and
    /// asynchronously returns the response correlated to this request.
    /// </summary>
    public async Task<JObject> GetContent(string request)
    {
        // RunContinuationsAsynchronously keeps the caller's continuation off
        // the dataflow worker thread when the result is set.
        var completion = new TaskCompletionSource<JObject>(TaskCreationOptions.RunContinuationsAsynchronously);
        await _workQueue.SendAsync(new KeyValuePair<string, TaskCompletionSource<JObject>>(request, completion));
        return await completion.Task;
    }

    private async Task<JObject> ProcessRequest(string request)
    {
        // Lazily create the shared client; the lock prevents two concurrent
        // first requests from each building (and leaking) an HttpClient.
        if (_client == null)
        {
            lock (_clientGate)
            {
                if (_client == null)
                {
                    Connect();
                }
            }
        }

        try
        {
            var accessToken = await _tokenService.GetTokenAsync(_configuration);

            var httpRequestMessage = new HttpRequestMessage(
                HttpMethod.Post,
                new Uri($"https://{_configuration.Uri}"));

            // add the headers
            httpRequestMessage.Headers.Add("Authorization", $"Bearer {accessToken}");

            // add the request body
            httpRequestMessage.Content = new StringContent(request, Encoding.UTF8, "application/json");

            var postRequest = await _client.SendAsync(httpRequestMessage);
            var response = await postRequest.Content.ReadAsStringAsync();

            return JsonConvert.DeserializeObject<JObject>(response);
        }
        catch (Exception ex)
        {
            // log error
            // Deliberate best-effort: callers receive an empty JObject instead
            // of an exception (preserves the original contract).
            return new JObject();
        }
    }
}
您要做的是为每个传入项目添加一个ID,以便您可以将数据输入与结果输出相关联。这是如何执行此操作的示例:
namespace ConcurrentFlows.DataflowJobs
{
    using System;
    using System.Collections.Concurrent;
    using System.Collections.Generic;
    using System.Threading.Tasks;
    using System.Threading.Tasks.Dataflow;

    /// <summary>
    /// A generic interface defining that:
    /// for a specified input type => an awaitable result is produced.
    /// </summary>
    /// <typeparam name="TInput">The type of data to process.</typeparam>
    /// <typeparam name="TOutput">The type of data the consumer expects back.</typeparam>
    public interface IJobManager<TInput, TOutput>
    {
        Task<TOutput> SubmitRequest(TInput data);
    }

    /// <summary>
    /// A TPL-Dataflow based job manager. Each submitted item is tagged with a
    /// Guid so the result coming out of the shared handler block can be routed
    /// back to the exact caller that submitted it.
    /// </summary>
    /// <typeparam name="TInput">The type of data to process.</typeparam>
    /// <typeparam name="TOutput">The type of data the consumer expects back.</typeparam>
    public class DataflowJobManager<TInput, TOutput> : IJobManager<TInput, TOutput>
    {
        /// <summary>
        /// It is anticipated that jobHandler is an injected
        /// singleton instance of a Dataflow based 'calculator', though this implementation
        /// does not depend on it being a singleton.
        /// </summary>
        /// <param name="jobHandler">A singleton Dataflow block through which all jobs are processed.</param>
        public DataflowJobManager(IPropagatorBlock<KeyValuePair<Guid, TInput>, KeyValuePair<Guid, TOutput>> jobHandler)
        {
            if (jobHandler == null)
            {
                // ArgumentNullException derives from ArgumentException, so
                // existing catch clauses still match.
                throw new ArgumentNullException(nameof(jobHandler));
            }

            // BUG FIX: the original read "this.JobHandler = JobHandler;" which
            // assigned the still-null get-only property to itself, so the
            // injected handler was never stored and LinkTo/SendAsync would NRE.
            this.JobHandler = jobHandler;

            // Link the handler to the static reply handler exactly once, even
            // when several manager instances are constructed concurrently.
            lock (LinkGate)
            {
                if (!alreadyLinked)
                {
                    JobHandler.LinkTo(ResultHandler, new DataflowLinkOptions() { PropagateCompletion = true });
                    alreadyLinked = true;
                }
            }
        }

        // Guards the one-time link below; the original unsynchronized flag
        // could double-link under a racy startup.
        private static readonly object LinkGate = new object();
        private static bool alreadyLinked = false;

        /// <summary>
        /// Submits the request to the JobHandler and asynchronously awaits the result.
        /// </summary>
        /// <param name="data">The input data to be processed.</param>
        /// <returns>The output produced by the handler for this specific input.</returns>
        public async Task<TOutput> SubmitRequest(TInput data)
        {
            var taggedData = TagInputData(data);
            var job = CreateJob(taggedData);
            Jobs.TryAdd(job.Key, job.Value);
            await JobHandler.SendAsync(taggedData);
            return await job.Value.Task;
        }

        // Pending jobs keyed by tag; static because ResultHandler is static
        // (one reply pipeline per closed generic type).
        private static ConcurrentDictionary<Guid, TaskCompletionSource<TOutput>> Jobs { get; }
            = new ConcurrentDictionary<Guid, TaskCompletionSource<TOutput>>();

        private static ExecutionDataflowBlockOptions Options { get; } = GetResultHandlerOptions();

        private static ITargetBlock<KeyValuePair<Guid, TOutput>> ResultHandler { get; }
            = CreateReplyHandler(Options);

        private IPropagatorBlock<KeyValuePair<Guid, TInput>, KeyValuePair<Guid, TOutput>> JobHandler { get; }

        // Tags the input with a fresh Guid used to correlate the reply.
        private KeyValuePair<Guid, TInput> TagInputData(TInput data)
        {
            var id = Guid.NewGuid();
            return new KeyValuePair<Guid, TInput>(id, data);
        }

        // Creates the completion source the caller will await for this tag.
        private KeyValuePair<Guid, TaskCompletionSource<TOutput>> CreateJob(KeyValuePair<Guid, TInput> taggedData)
        {
            var id = taggedData.Key;
            // RunContinuationsAsynchronously keeps awaiting callers off the
            // reply-handler's dataflow thread when SetResult fires.
            var jobCompletionSource = new TaskCompletionSource<TOutput>(TaskCreationOptions.RunContinuationsAsynchronously);
            return new KeyValuePair<Guid, TaskCompletionSource<TOutput>>(id, jobCompletionSource);
        }

        private static ExecutionDataflowBlockOptions GetResultHandlerOptions()
        {
            return new ExecutionDataflowBlockOptions()
            {
                MaxDegreeOfParallelism = Environment.ProcessorCount,
                BoundedCapacity = 1000
            };
        }

        private static ITargetBlock<KeyValuePair<Guid, TOutput>> CreateReplyHandler(ExecutionDataflowBlockOptions options)
        {
            return new ActionBlock<KeyValuePair<Guid, TOutput>>((result) =>
            {
                ReceiveOutput(result);
            }, options);
        }

        // Routes a tagged result back to the caller awaiting that tag.
        private static void ReceiveOutput(KeyValuePair<Guid, TOutput> result)
        {
            var jobId = result.Key;
            TaskCompletionSource<TOutput> jobCompletionSource;
            if (!Jobs.TryRemove(jobId, out jobCompletionSource))
            {
                throw new InvalidOperationException($"The jobId: {jobId} was not found.");
            }
            var resultValue = result.Value;
            jobCompletionSource.SetResult(resultValue);
        }
    }
}
另请参阅此答案以供参考。
对TPL数据流库来说,单纯的并发限流并不是一个特别典型的用例。不过,可以像下面这样,用一个内部的 ActionBlock 来实现限流组件:
/// <summary>
/// Throttles the execution of asynchronous functions: at most
/// <c>concurrencyLevel</c> functions run at any one time, each occupying its
/// slot for at least <c>minDurationMilliseconds</c>.
/// Errors and results are propagated to the caller through the task returned
/// by <see cref="Run"/>; the internal worker deliberately swallows them.
/// </summary>
public class ThrottledExecution< T >
{
    // Worker block that actually starts the queued cold tasks.
    private readonly ActionBlock<Task<Task< T >>> _actionBlock;
    private readonly CancellationToken _cancellationToken;

    public ThrottledExecution(int concurrencyLevel, int minDurationMilliseconds = 0,
        CancellationToken cancellationToken = default)
    {
        if (minDurationMilliseconds < 0) throw new ArgumentOutOfRangeException();
        _actionBlock = new ActionBlock<Task<Task< T >>>(async task =>
        {
            try
            {
                // Start the minimum-duration timer BEFORE invoking the
                // function, so the slot is held for max(work, minDuration).
                var delay = Task.Delay(minDurationMilliseconds, cancellationToken);
                // The task arrives "cold" (not started); running it here means
                // the function is invoked only when a throttle slot is free.
                task.RunSynchronously();
                await task.Unwrap().ConfigureAwait(false);
                await delay.ConfigureAwait(false);
            }
            catch { } // Ignore exceptions (errors are propagated through the task)
        }, new ExecutionDataflowBlockOptions()
        {
            // The block's parallelism IS the throttle.
            MaxDegreeOfParallelism = concurrencyLevel,
            CancellationToken = cancellationToken,
        });
        _cancellationToken = cancellationToken;
    }

    /// <summary>
    /// Queues <paramref name="function"/> for throttled execution and returns
    /// a task that completes with its result (or error).
    /// </summary>
    public Task< T > Run(Func<Task< T >> function)
    {
        // Create a cold task (the function will be invoked later)
        var task = new Task<Task< T >>(function, _cancellationToken);
        var accepted = _actionBlock.Post(task);
        _cancellationToken.ThrowIfCancellationRequested();
        if (!accepted) throw new InvalidOperationException(
            "The component has been marked as complete.");
        // Unwrap so the caller awaits the function's inner task directly.
        return task.Unwrap();
    }

    // Completion surface mirrors the underlying block.
    public void Complete() => _actionBlock.Complete();
    public Task Completion => _actionBlock.Completion;
}
用法示例:
// Shared throttler: at most 10 requests are in flight at any one time.
private ThrottledExecution<JObject> throttledExecution = new ThrottledExecution<JObject>(concurrencyLevel: 10);

// Forwards the request through the throttler and hands the caller the task
// for its own request's response.
public Task<JObject> GetContent(string request)
    => throttledExecution.Run(() => ProcessRequest(request));
我感谢JSteward提供的答案。他的方法是完全可以接受的。但是,我最终通过使用SemaphoreSlim来完成此操作。 SemaphoreSlim提供了两件事,这使它成为一个强大的解决方案。首先,它提供了一个构造函数重载,您可以在其中发送计数。此计数是指能够通过信号量等待机制的并发项数。等待机制由称为WaitAsync的方法提供。使用以下方法,其中Worker类作为Singleton,并发请求传入,一次执行HTTP请求的次数限制为10,并且所有响应均返回到正确的请求。因此,实现可能如下所示:
public class Worker : IWorker
{
    private readonly IHttpClientManager _httpClient;
    private readonly ITokenService _tokenService;
    private readonly SemaphoreSlim _semaphore;

    public Worker(IHttpClientManager httpClient, ITokenService tokenService)
    {
        _httpClient = httpClient;
        _tokenService = tokenService;
        // we want to limit the number of items here
        _semaphore = new SemaphoreSlim(10);
    }

    /// <summary>
    /// Sends the request to the dependent service, allowing at most 10
    /// concurrent HTTP calls; each caller gets back the response to its own
    /// request because the whole round-trip happens on its own call stack.
    /// </summary>
    /// <param name="request">JSON request body.</param>
    /// <param name="route">Target route on the dependent service.</param>
    public async Task<JObject> ProcessRequestAsync(string request, string route)
    {
        try
        {
            // NOTE(review): _timeSeriesConfiguration is referenced but never
            // declared or injected in this snippet — confirm where it is
            // supposed to come from.
            var accessToken = await _tokenService.GetTokenAsync(
                _timeSeriesConfiguration.TenantId,
                _timeSeriesConfiguration.ClientId,
                _timeSeriesConfiguration.ClientSecret);

            // Dispose the CTS (the original leaked its timer).
            using (var cancellationTokenSource = new CancellationTokenSource())
            {
                cancellationTokenSource.CancelAfter(30000);

                // BUG FIX: acquire the slot OUTSIDE the try/finally that
                // releases it. The original's finally ran even when WaitAsync
                // threw (or was never reached because GetTokenAsync threw),
                // calling Release() on a slot that was never acquired and
                // silently widening the throttle.
                await _semaphore.WaitAsync(cancellationTokenSource.Token);
                try
                {
                    var httpResponseMessage = await _httpClient.SendAsync(new HttpClientRequest
                    {
                        Method = HttpMethod.Post,
                        Uri = $"https://someuri/someroute",
                        Token = accessToken,
                        Content = request
                    });

                    var response = await httpResponseMessage.Content.ReadAsStringAsync();

                    // BUG FIX: the original returned the raw string from a
                    // Task<JObject> method (did not compile); parse it.
                    return JObject.Parse(response);
                }
                finally
                {
                    // Runs only after a successful WaitAsync.
                    _semaphore.Release();
                }
            }
        }
        catch (Exception ex)
        {
            // do some logging
            throw;
        }
    }
}