kernel/idle: fix rt_thread_idle_excute at high optimization levels

rt_list_isempty has the prototype "int rt_list_isempty(const rt_list_t *l)", so the compiler has good reason to assume that the rt_thread_defunct list does not change within rt_thread_idle_excute and may optimize the "while" loop into an "if".

So add the volatile qualifier when testing against the rt_thread_defunct list.
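
For illustration, a minimal self-contained sketch of the hazard (not part of the commit; node, list_t, defunct_list, list_isempty and has_defunct are simplified stand-in names, not the RT-Thread API):

#include <stdio.h>

typedef struct node { struct node *next, *prev; } list_t;

/* circular doubly linked list, initially empty (head points to itself) */
static list_t defunct_list = { &defunct_list, &defunct_list };

/* mirrors rt_list_isempty(): const parameter and no visible side effects,
 * so once inlined the compiler may keep the result in a register across
 * loop iterations when it cannot see any writer of the list */
static inline int list_isempty(const list_t *l)
{
    return l->next == l;
}

/* mirrors the new _has_defunct_thread(): the volatile-qualified access
 * forces the list head to be re-loaded from memory on every call */
static inline int has_defunct(void)
{
    const volatile list_t *l = (const volatile list_t *)&defunct_list;

    return l->next != l;
}

int main(void)
{
    /* If an interrupt or another thread were the only writer of
     * defunct_list, a loop written with list_isempty() could be reduced
     * to a single test at high optimization levels; the volatile version
     * is re-evaluated on every iteration. */
    while (has_defunct())
    {
        /* unlink the first node (stand-in for the real per-thread cleanup) */
        list_t *n = defunct_list.next;
        defunct_list.next = n->next;
        n->next->prev = &defunct_list;
    }

    printf("defunct list empty: %d\n", list_isempty(&defunct_list));

    return 0;
}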
Grissiom 2014-09-02 17:21:31 +08:00
parent be9aa36abf
commit d4133990b0
1 changed file with 16 additions and 2 deletions

@@ -67,6 +67,20 @@ void rt_thread_idle_sethook(void (*hook)(void))
 /*@}*/
 #endif
 
+/* Return whether there is a defunct thread to be deleted. */
+rt_inline int _has_defunct_thread(void)
+{
+    /* rt_list_isempty has the prototype "int rt_list_isempty(const rt_list_t *l)",
+     * so the compiler may assume that the rt_thread_defunct list does not
+     * change within rt_thread_idle_excute and optimize the "while" loop
+     * into an "if".
+     *
+     * Hence the volatile qualifier here. */
+    const volatile rt_list_t *l = (const volatile rt_list_t *)&rt_thread_defunct;
+
+    return l->next != l;
+}
+
 /**
  * @ingroup Thread
  *
@@ -76,7 +90,7 @@ void rt_thread_idle_excute(void)
 {
     /* Loop until there is no dead thread. So one call to rt_thread_idle_excute
      * will do all the cleanups. */
-    while (!rt_list_isempty(&rt_thread_defunct))
+    while (_has_defunct_thread())
     {
         rt_base_t lock;
         rt_thread_t thread;
@@ -89,7 +103,7 @@ void rt_thread_idle_excute(void)
 
         lock = rt_hw_interrupt_disable();
         /* re-check whether list is empty */
-        if (!rt_list_isempty(&rt_thread_defunct))
+        if (_has_defunct_thread())
         {
             /* get defunct thread */
             thread = rt_list_entry(rt_thread_defunct.next,