
C# Nested await in Parallel.ForEach


In a metro app, I need to execute a number of WCF calls. There is a significant number of calls to be made, so I need to do them in a parallel loop. The problem is that the parallel loop exits before the WCF calls are all complete.

How would you refactor this to work as expected?

var ids = new List<string>() { "1", "2", "3", "4", "5", "6", "7", "8", "9", "10" };
var customers = new  System.Collections.Concurrent.BlockingCollection<Customer>();

Parallel.ForEach(ids, async i =>
{
    ICustomerRepo repo = new CustomerRepo();
    var cust = await repo.GetCustomer(i);
    customers.Add(cust);
});

foreach ( var customer in customers )
{
    Console.WriteLine(customer.ID);
}

Console.ReadKey();

The whole idea behind Parallel.ForEach() is that you have a set of threads and each thread processes part of the collection. As you noticed, this doesn't work with async-await, where you want to release the thread for the duration of the async call.

You could "fix" that by blocking the ForEach() threads, but that defeats the whole point of async-await.
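
To make the failure mode concrete, here is a minimal illustration (not from the question; Task.Delay stands in for the WCF call). The async lambda matches the Action<T> overload, so it compiles to an async void delegate, and Parallel.ForEach only waits for the synchronous part of each iteration up to the first await:

var results = new System.Collections.Concurrent.ConcurrentBag<int>();

Parallel.ForEach(Enumerable.Range(0, 10), async i =>
{
    await Task.Delay(1000); // stand-in for the WCF call
    results.Add(i);         // runs after ForEach has already returned
});

Console.WriteLine(results.Count); // almost always prints 0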

What you could do is use TPL Dataflow instead of Parallel.ForEach(); Dataflow supports asynchronous Tasks well.

Specifically, your code could be written using a TransformBlock that transforms each id into a Customer using an async lambda. This block can be configured for parallel execution. You would link that block to an ActionBlock that writes each Customer to the console. After you set up the block network, you can Post() each id to the TransformBlock.

In code:

var ids = new List<string> { "1", "2", "3", "4", "5", "6", "7", "8", "9", "10" };

var getCustomerBlock = new TransformBlock<string, Customer>(
    async i =>
    {
        ICustomerRepo repo = new CustomerRepo();
        return await repo.GetCustomer(i);
    }, new ExecutionDataflowBlockOptions
    {
        MaxDegreeOfParallelism = DataflowBlockOptions.Unbounded
    });
var writeCustomerBlock = new ActionBlock<Customer>(c => Console.WriteLine(c.ID));
getCustomerBlock.LinkTo(
    writeCustomerBlock, new DataflowLinkOptions
    {
        PropagateCompletion = true
    });

foreach (var id in ids)
    getCustomerBlock.Post(id);

getCustomerBlock.Complete();
writeCustomerBlock.Completion.Wait();
Although you will probably want to limit the parallelism of the TransformBlock to some small constant. Also, you can limit the capacity of the TransformBlock and add items to it asynchronously using SendAsync(), for example if the collection is too big; a sketch of that variant follows below.

As an added benefit over your code (if it worked), the writing will start as soon as a single item is finished, instead of waiting until all of the processing is finished.
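
A sketch of that bounded variant (illustration only, not svick's original code; it reuses the question's CustomerRepo and assumes the System.Threading.Tasks.Dataflow namespace and an async calling context):

var getCustomerBlock = new TransformBlock<string, Customer>(
    async i =>
    {
        ICustomerRepo repo = new CustomerRepo();
        return await repo.GetCustomer(i);
    }, new ExecutionDataflowBlockOptions
    {
        MaxDegreeOfParallelism = 4, // a small constant instead of Unbounded
        BoundedCapacity = 100       // the block buffers at most 100 pending ids
    });

foreach (var id in ids)
    await getCustomerBlock.SendAsync(id); // awaits while the block is at capacity

getCustomerBlock.Complete();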

svick's answer is, as usual, excellent.

However, I find Dataflow more useful when you actually have large amounts of data to transfer, or when you need an async-compatible queue.

In your case, a simpler solution is to just use async-style parallelism:

var ids = new List<string>() { "1", "2", "3", "4", "5", "6", "7", "8", "9", "10" };

var customerTasks = ids.Select(i =>
  {
    ICustomerRepo repo = new CustomerRepo();
    return repo.GetCustomer(i);
  });
var customers = await Task.WhenAll(customerTasks);

foreach (var customer in customers)
{
  Console.WriteLine(customer.ID);
}

Console.ReadKey();

Using Dataflow as svick suggested may be overkill, and Stephen's answer doesn't provide a means of controlling the concurrency of the operation. However, that can be achieved rather simply:

public static async Task RunWithMaxDegreeOfConcurrency<T>(
     int maxDegreeOfConcurrency, IEnumerable<T> collection, Func<T, Task> taskFactory)
{
    var activeTasks = new List<Task>(maxDegreeOfConcurrency);
    foreach (var task in collection.Select(taskFactory))
    {
        activeTasks.Add(task);
        if (activeTasks.Count == maxDegreeOfConcurrency)
        {
            await Task.WhenAny(activeTasks.ToArray());
            //observe exceptions here
            activeTasks.RemoveAll(t => t.IsCompleted); 
        }
    }
    await Task.WhenAll(activeTasks.ToArray()).ContinueWith(t => 
    {
        //observe exceptions in a manner consistent with the above   
    });
}
EDIT: A fellow coworker, who is an SO user and TPL wiz, pointed me to one. As usual, his implementation is both elegant and efficient:

public static Task ForEachAsync<T>(
      this IEnumerable<T> source, int dop, Func<T, Task> body) 
{ 
    return Task.WhenAll( 
        from partition in Partitioner.Create(source).GetPartitions(dop) 
        select Task.Run(async delegate { 
            using (partition) 
                while (partition.MoveNext()) 
                    await body(partition.Current).ContinueWith(t => 
                          {
                              //observe exceptions
                          });
                      
        })); 
}
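
A usage sketch for this extension with the question's types (illustrative, not part of the original answer): at most four GetCustomer calls run at a time.

await ids.ForEachAsync(4, async id =>
{
    ICustomerRepo repo = new CustomerRepo();
    var cust = await repo.GetCustomer(id);
    customers.Add(cust);
});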

You can wrap Parallel.ForEach into a Task.Run() and, instead of the await keyword, use [yourasyncmethod].Result

(you need to do the Task.Run thing so that you don't block the UI thread)

Something like this:

var yourForeachTask = Task.Run(() =>
        {
            Parallel.ForEach(ids, i =>
            {
                ICustomerRepo repo = new CustomerRepo();
                var cust = repo.GetCustomer(i).Result;
                customers.Add(cust);
            });
        });
await yourForeachTask;

This should be pretty efficient, and easier than getting the whole TPL Dataflow working:

var customers = await ids.SelectAsync(async i =>
{
    ICustomerRepo repo = new CustomerRepo();
    return await repo.GetCustomer(i);
});

...

public static async Task<IList<TResult>> SelectAsync<TSource, TResult>(this IEnumerable<TSource> source, Func<TSource, Task<TResult>> selector, int maxDegreesOfParallelism = 4)
{
    var results = new List<TResult>();

    var activeTasks = new HashSet<Task<TResult>>();
    foreach (var item in source)
    {
        activeTasks.Add(selector(item));
        if (activeTasks.Count >= maxDegreesOfParallelism)
        {
            var completed = await Task.WhenAny(activeTasks);
            activeTasks.Remove(completed);
            results.Add(completed.Result);
        }
    }

    results.AddRange(await Task.WhenAll(activeTasks));
    return results;
}

I'm a little late to the party, but you may want to consider using GetAwaiter().GetResult() to run your async code in a sync context, as shown below:
 Parallel.ForEach(ids, i =>
{
    ICustomerRepo repo = new CustomerRepo();
    // Run this in thread which Parallel library occupied.
    var cust = repo.GetCustomer(i).GetAwaiter().GetResult();
    customers.Add(cust);
});

You can save effort with the new AsyncEnumerator NuGet package, which didn't exist 4 years ago when the question was originally posted. It allows you to control the degree of parallelism:

using System.Collections.Async;
...

await ids.ParallelForEachAsync(async i =>
{
    ICustomerRepo repo = new CustomerRepo();
    var cust = await repo.GetCustomer(i);
    customers.Add(cust);
},
maxDegreeOfParallelism: 10);
Disclaimer: I'm the author of the AsyncEnumerator library, which is open source and licensed under MIT.
double[] result2 = await Enumerable.Range(0, 1000000)
    .Select(async i => await CalculateAsync(i).ConfigureAwait(false))
    .WhenAll()
    .ConfigureAwait(false);
public static class CollectionExtensions
{
    /// <summary>
    /// Splits collection into number of collections of nearly equal size.
    /// </summary>
    public static IEnumerable<List<T>> Split<T>(this IEnumerable<T> src, int slicesCount)
    {
        if (slicesCount <= 0) throw new ArgumentOutOfRangeException(nameof(slicesCount));

        List<T> source = src.ToList();
        var sourceIndex = 0;
        for (var targetIndex = 0; targetIndex < slicesCount; targetIndex++)
        {
            var list = new List<T>();
            int itemsLeft = source.Count - targetIndex;
            while (slicesCount * list.Count < itemsLeft)
            {
                list.Add(source[sourceIndex++]);
            }

            yield return list;
        }
    }

    /// <summary>
    /// Takes collection of collections, projects those in parallel and merges results.
    /// </summary>
    public static async Task<IEnumerable<TResult>> SelectManyAsync<T, TResult>(
        this IEnumerable<IEnumerable<T>> source,
        Func<T, Task<TResult>> func)
    {
        List<TResult>[] slices = await source
            .Select(async slice => await slice.SelectListAsync(func).ConfigureAwait(false))
            .WhenAll()
            .ConfigureAwait(false);
        return slices.SelectMany(s => s);
    }

    /// <summary>Runs selector and awaits results.</summary>
    public static async Task<List<TResult>> SelectListAsync<TSource, TResult>(this IEnumerable<TSource> source, Func<TSource, Task<TResult>> selector)
    {
        List<TResult> result = new List<TResult>();
        foreach (TSource source1 in source)
        {
            TResult result1 = await selector(source1).ConfigureAwait(false);
            result.Add(result1);
        }
        return result;
    }

    /// <summary>Wraps tasks with Task.WhenAll.</summary>
    public static Task<TResult[]> WhenAll<TResult>(this IEnumerable<Task<TResult>> source)
    {
        return Task.WhenAll<TResult>(source);
    }
}
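
A hypothetical usage sketch for these extensions with the question's types (illustrative, not part of the original answer): split the ids into four slices and project the slices concurrently, processing one item at a time within each slice.

var customers = await ids
    .Split(slicesCount: 4)
    .SelectManyAsync(i =>
    {
        ICustomerRepo repo = new CustomerRepo();
        return repo.GetCustomer(i);
    });
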
    /// <summary>
    /// Concurrently executes async actions for each item of <see cref="IEnumerable{T}"/>.
    /// </summary>
    /// <typeparam name="T">Type of IEnumerable</typeparam>
    /// <param name="enumerable">instance of <see cref="IEnumerable{T}"/></param>
    /// <param name="action">an async <see cref="Action"/> to execute</param>
    /// <param name="maxDegreeOfParallelism">Optional, an integer that represents the maximum degree of parallelism,
    /// must be greater than 0</param>
    /// <returns>A Task representing an async operation</returns>
    /// <exception cref="ArgumentOutOfRangeException">If maxDegreeOfParallelism is less than 1</exception>
    public static async Task ForEachAsyncConcurrent<T>(
        this IEnumerable<T> enumerable,
        Func<T, Task> action,
        int? maxDegreeOfParallelism = null)
    {
        if (maxDegreeOfParallelism.HasValue)
        {
            using (var semaphoreSlim = new SemaphoreSlim(
                maxDegreeOfParallelism.Value, maxDegreeOfParallelism.Value))
            {
                var tasksWithThrottler = new List<Task>();

                foreach (var item in enumerable)
                {
                    // Increment the number of currently running tasks and wait if they are more than limit.
                    await semaphoreSlim.WaitAsync();

                    tasksWithThrottler.Add(Task.Run(async () =>
                    {
                        await action(item).ContinueWith(res =>
                        {
                            // action is completed, so decrement the number of currently running tasks
                            semaphoreSlim.Release();
                        });
                    }));
                }

                // Wait for all tasks to complete.
                await Task.WhenAll(tasksWithThrottler.ToArray());
            }
        }
        else
        {
            await Task.WhenAll(enumerable.Select(item => action(item)));
        }
    }
await enumerable.ForEachAsyncConcurrent(
    async item =>
    {
        await SomeAsyncMethod(item);
    },
    5);
public static Task ForEachAsync<T>(this IEnumerable<T> source,
    Func<T, Task> action, int dop)
{
    // Arguments validation omitted
    var block = new ActionBlock<T>(action,
        new ExecutionDataflowBlockOptions() { MaxDegreeOfParallelism = dop });
    try
    {
        foreach (var item in source) block.Post(item);
        block.Complete();
    }
    catch (Exception ex) { ((IDataflowBlock)block).Fault(ex); }
    return block.Completion;
}
public static async Task ForEachAsync<T>(this IEnumerable<T> source,
    Func<T, Task> action, int dop)
{
    // Arguments validation omitted
    var block = new ActionBlock<T>(action, new ExecutionDataflowBlockOptions()
    { MaxDegreeOfParallelism = dop, BoundedCapacity = dop });
    try
    {
        foreach (var item in source)
            if (!await block.SendAsync(item).ConfigureAwait(false)) break;
        block.Complete();
    }
    catch (Exception ex) { ((IDataflowBlock)block).Fault(ex); }
    try { await block.Completion.ConfigureAwait(false); }
    catch { block.Completion.Wait(); } // Propagate AggregateException
}
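
A usage sketch for either overload above (they share a signature, so only one would be defined at a time), again with the question's types (illustrative, not part of the original answer):

await ids.ForEachAsync(async id =>
{
    ICustomerRepo repo = new CustomerRepo();
    var cust = await repo.GetCustomer(id);
    customers.Add(cust);
}, dop: 10);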
int totalThreads = 0; int maxThreads = 3;

foreach (var item in YouList)
{
    // Wait (by polling) until fewer than maxThreads tasks are in flight.
    while (totalThreads >= maxThreads) await Task.Delay(500);
    Interlocked.Increment(ref totalThreads);

    // Start the task without awaiting it; decrement the counter when it completes.
    MyAsyncTask(item).ContinueWith((res) => Interlocked.Decrement(ref totalThreads));
}
async static Task MyAsyncTask(string item)
{
    await Task.Delay(2500);
    Console.WriteLine(item);
}