@@ -87,7 +87,7 @@ _tx_thread_context_restore:
8787
8888 /* Recover floating point registers. */
8989#if defined(__riscv_flen)
90- LOAD t1, TX_STACK_OFFSET_MSTATUS(t0 ) // Pickup thread's floating point state */
90+ LOAD t1, TX_STACK_OFFSET_MSTATUS(sp ) // Pickup thread's floating point state */
9191 /* Check if floating point is enabled */
9292 srli t1, t1, 13
9393 andi t1, t1, 0x3
@@ -151,12 +151,8 @@ _tx_thread_skip_fp_restore:
151151
152152 LOAD t0, TX_STACK_OFFSET_MEPC(sp) // Recover mepc
153153 csrw mepc, t0 // Setup mepc
154- li t0, 0x1880 // Prepare MPIP
155- #if defined(__riscv_flen) && ((__riscv_flen == 32 ) || (__riscv_flen == 64 ))
156- li t1, 1 <<13
157- or t0, t1, t0
158- #endif
159- csrw mstatus, t0 // Enable MPIP
154+ LOAD t0, TX_STACK_OFFSET_MSTATUS(sp) // Recover mstatus
155+ csrw mstatus, t0
160156
161157 LOAD x1, TX_STACK_OFFSET_X1(sp) // Recover RA
162158 LOAD x5, TX_STACK_OFFSET_X5(sp) // Recover t0
@@ -190,13 +186,16 @@ _tx_thread_not_nested_restore:
190186 || (_tx_thread_preempt_disable))
191187 { */
192188
193- LOAD t1, _tx_thread_current_ptr // Pickup current thread pointer
189+ la t0, _tx_thread_current_ptr
190+ LOAD t1, 0 (t0) // Pickup current thread pointer
194191 beqz t1, _tx_thread_idle_system_restore // If NULL, idle system restore
195192
196- LOAD t2, _tx_thread_preempt_disable // Pickup preempt disable flag
193+ la t0, _tx_thread_preempt_disable
194+ LOAD t2, 0 (t0) // Pickup preempt disable flag
197195 bgtz t2, _tx_thread_no_preempt_restore // If set, restore interrupted thread
198196
199- LOAD t2, _tx_thread_execute_ptr // Pickup thread execute pointer
197+ la t0, _tx_thread_execute_ptr
198+ LOAD t2, 0 (t0) // Pickup thread execute pointer
200199 bne t1, t2, _tx_thread_preempt_restore // If higher-priority thread is ready, preempt
201200
202201
@@ -210,7 +209,7 @@ _tx_thread_no_preempt_restore:
210209
211210 /* Recover floating point registers. */
212211#if defined(__riscv_flen)
213- LOAD t1, TX_STACK_OFFSET_MSTATUS(t0 ) // Pickup thread's floating point state */
212+ LOAD t1, TX_STACK_OFFSET_MSTATUS(sp ) // Pickup thread's floating point state */
214213 /* Check if floating point is enabled */
215214 srli t1, t1, 13
216215 andi t1, t1, 0x3
@@ -273,12 +272,8 @@ _tx_thread_no_preempt_skip_fp_restore:
273272
274273 LOAD t0, TX_STACK_OFFSET_MEPC(sp) // Recover mepc
275274 csrw mepc, t0 // Setup mepc
276- li t0, 0x1880 // Prepare MPIP
277- #if defined(__riscv_flen)
278- li t1, 1 <<13
279- or t0, t1, t0
280- #endif
281- csrw mstatus, t0 // Enable MPIP
275+ LOAD t0, TX_STACK_OFFSET_MSTATUS(sp) // Recover mstatus
276+ csrw mstatus, t0
282277
283278 LOAD x1, TX_STACK_OFFSET_X1(sp) // Recover RA
284279 LOAD x5, TX_STACK_OFFSET_X5(sp) // Recover t0
@@ -312,62 +307,61 @@ _tx_thread_preempt_restore:
312307 /* Instead of directly activating the thread again, ensure we save the
313308 entire stack frame by saving the remaining registers. */
314309
315- LOAD t0 , TX_THREAD_STACK_PTR(t1) // Pickup thread's stack pointer
310+ LOAD sp , TX_THREAD_STACK_PTR(t1)
316311 ori t3, x0, 1 // Build interrupt stack type
317- STORE t3, 0 (t0) // Store stack type
318-
312+ STORE t3, 0 (sp) // Store stack type
319313 /* Store floating point preserved registers. */
320314#if defined(__riscv_flen)
321- LOAD t2, TX_STACK_OFFSET_MSTATUS(t0 ) // Pickup thread's floating point state */
315+ LOAD t2, TX_STACK_OFFSET_MSTATUS(sp ) // Pickup thread's floating point state */
322316 /* Check if floating point is enabled */
323317 srli t2, t2, 13
324318 andi t2, t2, 0x3
325319 beqz t2, _tx_thread_preempt_skip_fp_restore // Skip floating point restore FS is Off
326320#if __riscv_flen == 32
327- fsw f8, TX_STACK_OFFSET_F8(t0 ) // Store fs0
328- fsw f9, TX_STACK_OFFSET_F9(t0 ) // Store fs1
329- fsw f18, TX_STACK_OFFSET_F18(t0 ) // Store fs2
330- fsw f19, TX_STACK_OFFSET_F19(t0 ) // Store fs3
331- fsw f20, TX_STACK_OFFSET_F20(t0 ) // Store fs4
332- fsw f21, TX_STACK_OFFSET_F21(t0 ) // Store fs5
333- fsw f22, TX_STACK_OFFSET_F22(t0 ) // Store fs6
334- fsw f23, TX_STACK_OFFSET_F23(t0 ) // Store fs7
335- fsw f24, TX_STACK_OFFSET_F24(t0 ) // Store fs8
336- fsw f25, TX_STACK_OFFSET_F25(t0 ) // Store fs9
337- fsw f26, TX_STACK_OFFSET_F26(t0 ) // Store fs10
338- fsw f27, TX_STACK_OFFSET_F27(t0 ) // Store fs11
321+ fsw f8, TX_STACK_OFFSET_F8(sp ) // Store fs0
322+ fsw f9, TX_STACK_OFFSET_F9(sp ) // Store fs1
323+ fsw f18, TX_STACK_OFFSET_F18(sp ) // Store fs2
324+ fsw f19, TX_STACK_OFFSET_F19(sp ) // Store fs3
325+ fsw f20, TX_STACK_OFFSET_F20(sp ) // Store fs4
326+ fsw f21, TX_STACK_OFFSET_F21(sp ) // Store fs5
327+ fsw f22, TX_STACK_OFFSET_F22(sp ) // Store fs6
328+ fsw f23, TX_STACK_OFFSET_F23(sp ) // Store fs7
329+ fsw f24, TX_STACK_OFFSET_F24(sp ) // Store fs8
330+ fsw f25, TX_STACK_OFFSET_F25(sp ) // Store fs9
331+ fsw f26, TX_STACK_OFFSET_F26(sp ) // Store fs10
332+ fsw f27, TX_STACK_OFFSET_F27(sp ) // Store fs11
339333#elif __riscv_flen == 64
340- fsd f8, TX_STACK_OFFSET_F8(t0 ) // Store fs0
341- fsd f9, TX_STACK_OFFSET_F9(t0 ) // Store fs1
342- fsd f18, TX_STACK_OFFSET_F18(t0 ) // Store fs2
343- fsd f19, TX_STACK_OFFSET_F19(t0 ) // Store fs3
344- fsd f20, TX_STACK_OFFSET_F20(t0 ) // Store fs4
345- fsd f21, TX_STACK_OFFSET_F21(t0 ) // Store fs5
346- fsd f22, TX_STACK_OFFSET_F22(t0 ) // Store fs6
347- fsd f23, TX_STACK_OFFSET_F23(t0 ) // Store fs7
348- fsd f24, TX_STACK_OFFSET_F24(t0 ) // Store fs8
349- fsd f25, TX_STACK_OFFSET_F25(t0 ) // Store fs9
350- fsd f26, TX_STACK_OFFSET_F26(t0 ) // Store fs10
351- fsd f27, TX_STACK_OFFSET_F27(t0 ) // Store fs11
334+ fsd f8, TX_STACK_OFFSET_F8(sp ) // Store fs0
335+ fsd f9, TX_STACK_OFFSET_F9(sp ) // Store fs1
336+ fsd f18, TX_STACK_OFFSET_F18(sp ) // Store fs2
337+ fsd f19, TX_STACK_OFFSET_F19(sp ) // Store fs3
338+ fsd f20, TX_STACK_OFFSET_F20(sp ) // Store fs4
339+ fsd f21, TX_STACK_OFFSET_F21(sp ) // Store fs5
340+ fsd f22, TX_STACK_OFFSET_F22(sp ) // Store fs6
341+ fsd f23, TX_STACK_OFFSET_F23(sp ) // Store fs7
342+ fsd f24, TX_STACK_OFFSET_F24(sp ) // Store fs8
343+ fsd f25, TX_STACK_OFFSET_F25(sp ) // Store fs9
344+ fsd f26, TX_STACK_OFFSET_F26(sp ) // Store fs10
345+ fsd f27, TX_STACK_OFFSET_F27(sp ) // Store fs11
352346#endif
353347_tx_thread_preempt_skip_fp_restore:
354348#endif
355349
356350 /* Store standard preserved registers. */
357351
358- STORE x9, TX_STACK_OFFSET_X9(t0 ) // Store s1
359- STORE x18, TX_STACK_OFFSET_X18(t0 ) // Store s2
360- STORE x19, TX_STACK_OFFSET_X19(t0 ) // Store s3
361- STORE x20, TX_STACK_OFFSET_X20(t0 ) // Store s4
362- STORE x21, TX_STACK_OFFSET_X21(t0 ) // Store s5
363- STORE x22, TX_STACK_OFFSET_X22(t0 ) // Store s6
364- STORE x23, TX_STACK_OFFSET_X23(t0 ) // Store s7
365- STORE x24, TX_STACK_OFFSET_X24(t0 ) // Store s8
366- STORE x25, TX_STACK_OFFSET_X25(t0 ) // Store s9
367- STORE x26, TX_STACK_OFFSET_X26(t0 ) // Store s10
368- STORE x27, TX_STACK_OFFSET_X27(t0 ) // Store s11
352+ STORE x9, TX_STACK_OFFSET_X9(sp ) // Store s1
353+ STORE x18, TX_STACK_OFFSET_X18(sp ) // Store s2
354+ STORE x19, TX_STACK_OFFSET_X19(sp ) // Store s3
355+ STORE x20, TX_STACK_OFFSET_X20(sp ) // Store s4
356+ STORE x21, TX_STACK_OFFSET_X21(sp ) // Store s5
357+ STORE x22, TX_STACK_OFFSET_X22(sp ) // Store s6
358+ STORE x23, TX_STACK_OFFSET_X23(sp ) // Store s7
359+ STORE x24, TX_STACK_OFFSET_X24(sp ) // Store s8
360+ STORE x25, TX_STACK_OFFSET_X25(sp ) // Store s9
361+ STORE x26, TX_STACK_OFFSET_X26(sp ) // Store s10
362+ STORE x27, TX_STACK_OFFSET_X27(sp ) // Store s11
369363 // Note: s0 is already stored!
370-
364+ STORE sp, TX_THREAD_STACK_PTR(t1)
371365 /* Save the remaining time-slice and disable it. */
372366 /* if (_tx_timer_time_slice)
373367 { */
@@ -391,7 +385,8 @@ _tx_thread_dont_save_ts:
391385 /* Return to the scheduler. */
392386 /* _tx_thread_schedule(); */
393387
394- STORE x0, _tx_thread_current_ptr, t0 // Clear current thread pointer*/
388+ la t0, _tx_thread_current_ptr
389+ STORE x0, 0 (t0) // Clear current thread pointer*/
395390 /* } */
396391
397392_tx_thread_idle_system_restore: