// Standard concurrency-control implementation.
/// <summary>
/// Queues an incoming request for processing by the worker thread pool.
/// If the pool has been stopped or the queue is full, the request is
/// completed immediately with BadTooManyOperations. Otherwise the request
/// is enqueued and either an idle worker is pulsed awake or, if all workers
/// are busy and the pool is below its maximum size, a new worker thread is
/// started.
/// </summary>
/// <param name="request">The incoming request to schedule.</param>
public void ScheduleIncomingRequest(IEndpointIncomingRequest request)
{
    // queue the request.
    lock (m_lock) // i.e. Monitor.Enter(m_lock)
    {
        // check able to schedule requests.
        if (m_stopped || m_queue.Count >= m_maxRequestCount)
        {
            request.OperationCompleted(null, StatusCodes.BadTooManyOperations);
            return;
        }

        m_queue.Enqueue(request);

        // wake up an idle thread to handle the request if there is one
        if (m_activeThreadCount < m_totalThreadCount)
        {
            Monitor.Pulse(m_lock);
        }
        // start a new thread to handle the request if none are idle and the pool is not full.
        else if (m_totalThreadCount < m_maxThreadCount)
        {
            Thread thread = new Thread(OnProcessRequestQueue);
            thread.IsBackground = true;
            thread.Start(null);
            m_totalThreadCount++;
            m_activeThreadCount++; // new threads start in an active state

            // BUGFIX: log the id of the newly created worker thread
            // (thread.ManagedThreadId) rather than the scheduling thread
            // (Thread.CurrentThread.ManagedThreadId), and add the missing
            // ": " separator before the active thread count.
            Utils.Trace(
                "Thread created: " + thread.ManagedThreadId +
                ". Current thread count: " + m_totalThreadCount +
                ". Active thread count: " + m_activeThreadCount);
        }
    }
}
/// <summary>
/// Processes the requests in the request queue.
/// </summary>
/// <remarks>
/// Worker-thread entry point. Runs until the queue stays empty for 30
/// seconds (and the pool is above its minimum size) or the pool is stopped.
/// The lock is released around the call to ProcessRequest so other workers
/// can dequeue concurrently, and re-acquired in the finally block so the
/// enclosing lock's implicit Monitor.Exit stays balanced.
/// </remarks>
private void OnProcessRequestQueue(object state)
{
    lock (m_lock) // i.e. Monitor.Enter(m_lock)
    {
        while (true)
        {
            // check if the queue is empty.
            while (m_queue.Count == 0)
            {
                // mark this thread idle before waiting so the scheduler
                // knows a Pulse will be consumed.
                m_activeThreadCount--;

                // wait for a request. end the thread if no activity.
                // Monitor.Wait returns false on the 30s timeout; the thread
                // only exits on timeout when the pool would stay at or above
                // its minimum size (or when the pool has been stopped).
                if (m_stopped || (!Monitor.Wait(m_lock, 30000) && m_totalThreadCount > m_minThreadCount))
                {
                    m_totalThreadCount--;

                    // NOTE(review): message is missing a ": " separator
                    // before the active thread count — cosmetic only.
                    Utils.Trace(
                        "Thread ended: " + Thread.CurrentThread.ManagedThreadId +
                        ". Current thread count: " + m_totalThreadCount +
                        ". Active thread count" + m_activeThreadCount);

                    // returning exits the lock block and releases m_lock.
                    return;
                }

                // woken with work (or spurious wake-up): become active again
                // and re-check the queue in the inner while condition.
                m_activeThreadCount++;
            }

            IEndpointIncomingRequest request = m_queue.Dequeue();

            // release the lock while processing so other workers can
            // dequeue; the matching Enter in finally keeps the enclosing
            // lock statement balanced.
            Monitor.Exit(m_lock);

            try
            {
                // process the request.
                m_server.ProcessRequest(request, state);
            }
            catch (Exception e)
            {
                Utils.Trace(e, "Unexpected error processing incoming request.");
            }
            finally
            {
                // re-acquire before looping back to inspect the queue.
                Monitor.Enter(m_lock);
            }
        }
    }
}