Does the .NET framework provide an ability to implement access to a shared resource in a manner such that some Writers trying to access that resource will have priority over others?
My problem has the following constraints:
1. Only 1 concurrent write request to the resource can be granted 2. There are many Writers waiting for access to this resource, but some Writers have precedence over others (starvation of low-priority Writers is okay). 3. Thread affinity is a NON-requirement. One thread can set the lock, but another may reset it. 4. All Writer threads are from the same process. In short, I need a primitive that exposes its wait-queue and allows modify access to it. If there isn't any such thing, any tips on how I can proceed on building one for myself, using the already available classes, such as Semaphore?
Here is some quick'n'dirty code I could come up with. I will refine this, but as a POC this works...
/// <summary>
/// An exclusive lock whose waiters are granted access in priority order
/// (lower <c>position</c> = higher priority; ties are served FIFO).
/// Not thread-affine: any thread may call <see cref="Release"/> for a lock
/// acquired by another thread. Low-priority starvation is possible by design.
/// </summary>
public class PrioritisedLock
{
    // One queued acquisition request. Each waiter blocks on its own event;
    // Release() wakes exactly one waiter by setting that event. Using one
    // event per waiter (instead of shared CountdownEvents being counted up
    // and down) makes a double grant impossible: only Release() can set an
    // event, and it sets exactly one.
    private sealed class Waiter
    {
        public int Priority;   // lower value = granted sooner
        public long Sequence;  // arrival stamp; FIFO tie-breaker among equal priorities
        public readonly ManualResetEventSlim Signal = new ManualResetEventSlim(false);
    }

    private readonly object gate = new object();                  // guards all fields below (in-process, so no kernel Semaphore needed)
    private readonly List<Waiter> waitQueue = new List<Waiter>(); // kept sorted by (Priority, Sequence)
    private bool held;                                            // true while some caller owns the lock
    private long nextSequence;                                    // monotonically increasing arrival counter

    /// <summary>
    /// Acquires the lock, blocking until it is granted.
    /// </summary>
    /// <param name="position">Priority of this request; 0 (the default) is served first.
    /// Requests with equal priority are served in arrival order.</param>
    /// <returns>Always <c>true</c>, once the lock has been granted.</returns>
    public bool WaitOne(int position = 0)
    {
        Waiter me;
        lock (gate)
        {
            // Fast path. Invariant: waiters only enqueue while the lock is
            // held, and Release() hands ownership directly to the head of the
            // queue, so !held implies the queue is empty.
            if (!held)
            {
                held = true;
                return true;
            }
            me = new Waiter { Priority = position, Sequence = nextSequence++ };
            // Stable insertion keeping the queue sorted by (Priority, Sequence).
            int i = waitQueue.Count;
            while (i > 0 && IsAfter(waitQueue[i - 1], me))
                i--;
            waitQueue.Insert(i, me);
        }
        // Block outside the gate so Release() can run. Ownership is
        // transferred to us by Release() before our event is set, so no
        // re-check of 'held' is needed here.
        me.Signal.Wait();
        me.Signal.Dispose();
        return true;
    }

    /// <summary>
    /// Releases the lock and grants it to the highest-priority waiter, if any.
    /// May be called from a different thread than the one that acquired it.
    /// </summary>
    /// <returns>The number of requests still queued at the moment of release,
    /// including the one being granted now; 0 if nobody was waiting.</returns>
    public int Release()
    {
        lock (gate)
        {
            if (waitQueue.Count == 0)
            {
                held = false; // nobody waiting: the lock becomes free
                return 0;
            }
            int waitingCount = waitQueue.Count;
            Waiter next = waitQueue[0];
            waitQueue.RemoveAt(0);
            // 'held' stays true: ownership passes directly to 'next', so no
            // newly-arriving request can sneak in between release and grant.
            next.Signal.Set();
            return waitingCount;
        }
    }

    // True when 'a' must be served after 'b': worse (higher) priority value,
    // or same priority but later arrival.
    private static bool IsAfter(Waiter a, Waiter b)
    {
        return a.Priority > b.Priority
            || (a.Priority == b.Priority && a.Sequence > b.Sequence);
    }
}
}
Use a priority queue to keep the list of pending requests. See here: Priority queue in .Net. Use standard Monitor functionality (Wait/Pulse) to decide what to signal and when, as proposed by kenny.
Featured comments