My confusion about LimitedConcurrencyLevelTaskScheduler

1. Introduction to LimitedConcurrencyLevelTaskScheduler

Anyone who has used this TaskScheduler probably already knows it: it is an open-source task scheduler published by Microsoft. Its code is short and easy to follow, but what I have not figured out is how it actually enforces the concurrency limit. The code is pasted below first so you can get familiar with it.
public class LimitedConcurrencyLevelTaskScheduler : TaskScheduler
{
    /// <summary>Whether the current thread is processing work items.</summary> 
    [ThreadStatic]
    private static bool _currentThreadIsProcessingItems;
    /// <summary>The list of tasks to be executed.</summary> 
    private readonly LinkedList<Task> _tasks = new LinkedList<Task>(); // protected by lock(_tasks)
    /// <summary>The maximum concurrency level allowed by this scheduler.</summary> 
    private readonly int _maxDegreeOfParallelism;
    /// <summary>Whether the scheduler is currently processing work items.</summary> 
    private int _delegatesQueuedOrRunning = 0; // protected by lock(_tasks) 

    /// <summary> 
    /// Initializes an instance of the LimitedConcurrencyLevelTaskScheduler class with the 
    /// specified degree of parallelism. 
    /// </summary> 
    /// <param name="maxDegreeOfParallelism">The maximum degree of parallelism provided by this scheduler.</param> 
    public LimitedConcurrencyLevelTaskScheduler(int maxDegreeOfParallelism)
    {
        if (maxDegreeOfParallelism < 1) throw new ArgumentOutOfRangeException("maxDegreeOfParallelism");
        _maxDegreeOfParallelism = maxDegreeOfParallelism;
    }

    /// <summary>
    /// The number of currently executing tasks (not updated anywhere in this sample).
    /// </summary>
    public int CurrentCount { get; set; }

    /// <summary>Queues a task to the scheduler.</summary> 
    /// <param name="task">The task to be queued.</param> 
    protected sealed override void QueueTask(Task task)
    {
        // Add the task to the list of tasks to be processed. If there aren't enough 
        // delegates currently queued or running to process tasks, schedule another. 
        lock (_tasks)
        {
            Console.WriteLine("Task Count : {0} ", _tasks.Count);
            _tasks.AddLast(task);
            if (_delegatesQueuedOrRunning < _maxDegreeOfParallelism)
            {
                ++_delegatesQueuedOrRunning;
                NotifyThreadPoolOfPendingWork();
            }
        }
    }
    // These two fields are not used anywhere in this class.
    int executingCount = 0;
    private static object executeLock = new object();
    /// <summary> 
    /// Informs the ThreadPool that there's work to be executed for this scheduler. 
    /// </summary> 
    private void NotifyThreadPoolOfPendingWork()
    {
        ThreadPool.UnsafeQueueUserWorkItem(_ =>
        {
            // Note that the current thread is now processing work items. 
            // This is necessary to enable inlining of tasks into this thread. 
            _currentThreadIsProcessingItems = true;
            try
            {
                // Process all available items in the queue. 
                while (true)
                {
                    Task item;
                    lock (_tasks)
                    {
                        // When there are no more items to be processed, 
                        // note that we're done processing, and get out. 
                        if (_tasks.Count == 0)
                        {
                            --_delegatesQueuedOrRunning;
                            break;
                        }
                        
                        // Get the next item from the queue 
                        item = _tasks.First.Value;
                        _tasks.RemoveFirst();
                    }

                    // Execute the task we pulled out of the queue 
                    base.TryExecuteTask(item);
                }
            }
            // We're done processing items on the current thread 
            finally { _currentThreadIsProcessingItems = false; }
        }, null);
    }

    /// <summary>Attempts to execute the specified task on the current thread.</summary> 
    /// <param name="task">The task to be executed.</param> 
    /// <param name="taskWasPreviouslyQueued"></param> 
    /// <returns>Whether the task could be executed on the current thread.</returns> 
    protected sealed override bool TryExecuteTaskInline(Task task, bool taskWasPreviouslyQueued)
    {

        // If this thread isn't already processing a task, we don't support inlining 
        if (!_currentThreadIsProcessingItems) return false;

        // If the task was previously queued, remove it from the queue 
        if (taskWasPreviouslyQueued) TryDequeue(task);

        // Try to run the task. 
        return base.TryExecuteTask(task);
    }

    /// <summary>Attempts to remove a previously scheduled task from the scheduler.</summary> 
    /// <param name="task">The task to be removed.</param> 
    /// <returns>Whether the task could be found and removed.</returns> 
    protected sealed override bool TryDequeue(Task task)
    {
        lock (_tasks) return _tasks.Remove(task);
    }

    /// <summary>Gets the maximum concurrency level supported by this scheduler.</summary> 
    public sealed override int MaximumConcurrencyLevel { get { return _maxDegreeOfParallelism; } }

    /// <summary>Gets an enumerable of the tasks currently scheduled on this scheduler.</summary> 
    /// <returns>An enumerable of the tasks currently scheduled.</returns> 
    protected sealed override IEnumerable<Task> GetScheduledTasks()
    {
        bool lockTaken = false;
        try
        {
            Monitor.TryEnter(_tasks, ref lockTaken);
            if (lockTaken) return _tasks.ToArray();
            else throw new NotSupportedException();
        }
        finally
        {
            if (lockTaken) Monitor.Exit(_tasks);
        }
    }
}

2. Simple usage

Here is the calling code:

static void Main(string[] args)
{
    TaskFactory fac = new TaskFactory(new LimitedConcurrencyLevelTaskScheduler(5));

    //TaskFactory fac = new TaskFactory();
    for (int i = 0; i < 1000; i++)
    {
        fac.StartNew(s => {
            Thread.Sleep(1000);
            Console.WriteLine("Current Index {0}, ThreadId {1}", s, Thread.CurrentThread.ManagedThreadId);
        }, i);
    }

    Console.ReadKey();
}
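
As a side experiment, if you want to check how many task bodies actually run at the same time, the lambda can be instrumented to track the peak concurrency with Interlocked. This is just a sketch I put together for this post: the ConcurrencyProbe name and the _running/_peak counters are mine, and the LimitedConcurrencyLevelTaskScheduler class above is assumed to be in the same project.

using System;
using System.Threading;
using System.Threading.Tasks;

class ConcurrencyProbe
{
    static int _running = 0;   // task bodies currently executing
    static int _peak = 0;      // highest value of _running seen so far

    static void Main()
    {
        var fac = new TaskFactory(new LimitedConcurrencyLevelTaskScheduler(5));
        var tasks = new Task[100];

        for (int i = 0; i < tasks.Length; i++)
        {
            tasks[i] = fac.StartNew(() =>
            {
                int now = Interlocked.Increment(ref _running);

                // Record a new maximum if we just exceeded the old one.
                int peak;
                while (now > (peak = Volatile.Read(ref _peak)))
                    Interlocked.CompareExchange(ref _peak, now, peak);

                Thread.Sleep(100);                  // simulate work
                Interlocked.Decrement(ref _running);
            });
        }

        Task.WaitAll(tasks);
        Console.WriteLine("Peak concurrency observed: {0}", _peak);
    }
}

With maxDegreeOfParallelism set to 5, the printed peak should never exceed 5 if the limit really works, and that is exactly the behavior I am trying to trace back to the code below.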

The call itself is simple. Stepping through with the debugger shows the call order: after creating a TaskFactory with a LimitedConcurrencyLevelTaskScheduler, calling that factory's TaskFactory.StartNew method ends up in the scheduler's QueueTask method.
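
If you do not want to step through with the debugger, a throwaway scheduler that only logs its entry points shows the same call order. This is just a sketch with a made-up LoggingScheduler name; it runs every task inline on the calling thread and does not limit anything.

using System;
using System.Collections.Generic;
using System.Threading.Tasks;

// Minimal TaskScheduler that just logs when the TPL hands it work.
class LoggingScheduler : TaskScheduler
{
    protected override void QueueTask(Task task)
    {
        Console.WriteLine("QueueTask called for task {0}", task.Id);
        // Run it right here to keep the sketch short.
        TryExecuteTask(task);
    }

    protected override bool TryExecuteTaskInline(Task task, bool taskWasPreviouslyQueued)
    {
        Console.WriteLine("TryExecuteTaskInline called for task {0}", task.Id);
        return TryExecuteTask(task);
    }

    protected override IEnumerable<Task> GetScheduledTasks()
    {
        return new Task[0];
    }
}

Calling new TaskFactory(new LoggingScheduler()).StartNew(() => { }) prints the QueueTask line before the delegate body runs. For reference, here is the QueueTask method again: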

/// <summary>Queues a task to the scheduler.</summary> 
/// <param name="task">The task to be queued.</param> 
protected sealed override void QueueTask(Task task)
{
    // Add the task to the list of tasks to be processed. If there aren't enough 
    // delegates currently queued or running to process tasks, schedule another. 
    lock (_tasks)
    {
        Console.WriteLine("Task Count : {0} ", _tasks.Count);
        _tasks.AddLast(task);
        if (_delegatesQueuedOrRunning < _maxDegreeOfParallelism)
        {
            ++_delegatesQueuedOrRunning;
            NotifyThreadPoolOfPendingWork();
        }
    }
}

The code is straightforward: it adds the newly created Task to the task queue, then compares the number of worker delegates currently queued or running against the configured maximum degree of parallelism; only if the count is still below that value does it queue another worker to the thread pool. My question is mainly about that worker, the NotifyThreadPoolOfPendingWork method:

private void NotifyThreadPoolOfPendingWork()
{
    ThreadPool.UnsafeQueueUserWorkItem(_ =>
    {
        // Note that the current thread is now processing work items. 
        // This is necessary to enable inlining of tasks into this thread. 
        _currentThreadIsProcessingItems = true;
        try
        {
            // Process all available items in the queue. 
            while (true)
            {
                Task item;
                lock (_tasks)
                {
                    // When there are no more items to be processed, 
                    // note that we're done processing, and get out. 
                    if (_tasks.Count == 0)
                    {
                        --_delegatesQueuedOrRunning;
                        break;
                    }

                    // Get the next item from the queue 
                    item = _tasks.First.Value;
                    _tasks.RemoveFirst();
                }
                // Execute the task we pulled out of the queue 
                base.TryExecuteTask(item);
            }
        }
        // We're done processing items on the current thread 
        finally { _currentThreadIsProcessingItems = false; }
    }, null);
}

What I read from this code is that the work item runs an endless loop, repeatedly taking a Task out of _tasks and executing it until _tasks is empty, and only then exits. I do not see where the concurrency limit comes in here; the only check is the simple one back in QueueTask, and that does not seem to accomplish much, because once NotifyThreadPoolOfPendingWork has been kicked off it just keeps running until every queued Task has finished. So how does it actually limit the degree of concurrency?
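
To make the counting easier to reason about, here is the same pattern stripped down to a plain queue with a bounded number of draining loops. This is my own simplification with made-up names (BoundedWorkQueue, _workers, DrainLoop), leaving out all the TPL plumbing.

using System;
using System.Collections.Generic;
using System.Threading;

// Stripped-down version of the pattern: at most _maxWorkers draining
// loops are ever queued to the thread pool, and each loop runs one
// item at a time.
class BoundedWorkQueue
{
    private readonly Queue<Action> _items = new Queue<Action>();
    private readonly int _maxWorkers;
    private int _workers;   // plays the role of _delegatesQueuedOrRunning

    public BoundedWorkQueue(int maxWorkers)
    {
        _maxWorkers = maxWorkers;
    }

    public void Enqueue(Action item)
    {
        lock (_items)
        {
            _items.Enqueue(item);
            if (_workers < _maxWorkers)   // only start another loop while under the cap
            {
                _workers++;
                ThreadPool.UnsafeQueueUserWorkItem(_ => DrainLoop(), null);
            }
        }
    }

    private void DrainLoop()
    {
        while (true)
        {
            Action item;
            lock (_items)
            {
                if (_items.Count == 0)
                {
                    _workers--;           // this loop exits and frees up a slot
                    return;
                }
                item = _items.Dequeue();
            }
            item();                       // one item at a time per loop
        }
    }
}

Whether this bounded-number-of-loops reading is really what caps the original scheduler at _maxDegreeOfParallelism concurrent tasks is exactly what I am unsure about.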

I have been puzzled by this for a while; maybe I am misunderstanding something. If anyone who knows can clear this up, I would really appreciate it.

This is a bit frustrating: I am not very good with Markdown and the formatting here has some display problems. If it is hard to read, the same post is at https://www.zybuluo.com/kevinsforever/note/115066; I wrote it with that editor, but it does not render correctly when copied over. Thanks.
