Home > OS >  A question about internal_add_timer
A question about internal_add_timer

Time:03-31

Note:
2.4 kernel


 
The static inline void internal_add_timer (struct timer_list * timer)
{
/*
* must be cli - Ed the when calling this
*/
Unsigned long expires=timer - & gt; Expires;
Unsigned long independence idx=expires - timer_jiffies;
Struct list_head * vec.
If (independence idx & lt; TVR_SIZE) {
Int I=expires & amp; TVR_MASK;
Vec=tv1. Vec + I;
} else if (independence idx & lt; 1 & lt; <(TVR_BITS + TVN_BITS)) {
Int I=(expires & gt;> TVR_BITS) & amp; TVN_MASK;
Vec=tv2. Vec + I;
} else if (independence idx & lt; 1 & lt; <(TVR_BITS + 2 * TVN_BITS)) {
Int I=(expires & gt;> (TVR_BITS + TVN_BITS)) & amp; TVN_MASK;
Vec=tv3. Vec + I;
} else if (independence idx & lt; 1 & lt; <(TVR_BITS + 3 * TVN_BITS)) {
Int I=(expires & gt;> (TVR_BITS + 2 * TVN_BITS)) & amp; TVN_MASK;
Vec=tv4. Vec + I;
} else if ((signed long) independence idx & lt; 0 {
/* can happen if you add a timer with expires==jiffies,
* or you set a timer to go off in the past
*/
Vec=tv1. Vec + tv1. Index;
} else if (independence idx & lt; {
=0 xfffffffful)Int I=(expires & gt;> (TVR_BITS + 3 * TVN_BITS)) & amp; TVN_MASK;
Vec=network tv5 took. Vec + I;
} else {
/* Can only get here on architectures with 64 - bit jiffies */
INIT_LIST_HEAD (& amp; The timer - & gt; The list);
return;
}
/*
* Timers are FIFO!
*/
List_add (& amp; The timer - & gt; The list, vec - & gt; Prev);
}



Each tier's vec (hash table) has an `index` field that marks the next bucket to be processed. Why isn't that index used as the base when inserting? That is, why not write the insertion like this:
 
The static inline void internal_add_timer (struct timer_list * timer)
{
/*
* must be cli - Ed the when calling this
*/
Unsigned long expires=timer - & gt; Expires;
Unsigned long independence idx=expires - timer_jiffies;
Struct list_head * vec.
If (independence idx & lt; TVR_SIZE) {
Int I=expires & amp; TVR_MASK;
Vec=tv1. Vec + tv1. Index + I;
} else if (independence idx & lt; 1 & lt; <(TVR_BITS + TVN_BITS)) {
Int I=(expires & gt;> TVR_BITS) & amp; TVN_MASK;
Vec=tv2. Vec + tv2. Index + I;
} else if (independence idx & lt; 1 & lt; <(TVR_BITS + 2 * TVN_BITS)) {
Int I=(expires & gt;> (TVR_BITS + TVN_BITS)) & amp; TVN_MASK;
Vec=tv3. Vec + tv3. Index + I;
} else if (independence idx & lt; 1 & lt; <(TVR_BITS + 3 * TVN_BITS)) {
Int I=(expires & gt;> (TVR_BITS + 2 * TVN_BITS)) & amp; TVN_MASK;
Vec=tv4. Vec + tv4. Index + I;
} else if ((signed long) independence idx & lt; 0 {
/* can happen if you add a timer with expires==jiffies,
* or you set a timer to go off in the past
*/
Vec=tv1. Vec + tv1. Index;
} else if (independence idx & lt; {
=0 xfffffffful)Int I=(expires & gt;> (TVR_BITS + 3 * TVN_BITS)) & amp; TVN_MASK;
Vec=network tv5 took. Vec + network tv5 took. Index + I;
} else {
/* Can only get here on architectures with 64 - bit jiffies */
INIT_LIST_HEAD (& amp; The timer - & gt; The list);
return;
}
/*
* Timers are FIFO!
*/
List_add (& amp; The timer - & gt; The list, vec - & gt; Prev);
}


That way, a timer that should fire at the very next jiffy would be inserted at
 tv1.vec + tv1.index + i;


For reference, here is the function that runs the timers:
 
The static inline void run_timer_list (void)
{
Spin_lock_irq (& amp; Timerlist_lock);
While ((long) (jiffies - timer_jiffies) & gt;=0) {
Struct list_head head * * curr;
if (! Tv1. Index) {
Int n=1;
Do {
Cascade_timers (tvecs [n]);
} while (tvecs [n] - & gt; The index==1 & amp; & + + n & lt; NOOF_TVECS);
}
Repeat:
The head=tv1. Vec + tv1. Index;
Curr=head - & gt; next;
If (curr!={the head)
Struct timer_list * timer;
Void * (fn) (unsigned long);
Unsigned long data;

The timer=list_entry (curr, struct timer_list, a list).
Fn=timer - & gt; The function;
Data=https://bbs.csdn.net/topics/timer-> data;

Detach_timer (timer);
The timer - & gt; List. The next=timer - & gt; List. Prev=NULL;
Timer_enter (timer);
Spin_unlock_irq (& amp; Timerlist_lock);
Fn (data);
Spin_lock_irq (& amp; Timerlist_lock);
Timer_exit ();
Goto repeat;
}
+ + timer_jiffies;
Tv1. Index=(tv1. Index + 1) & amp; TVR_MASK;
}
Spin_unlock_irq (& amp; Timerlist_lock);
}


  • Related