diff --git a/CHANGELOG.md b/CHANGELOG.md index c6f043dea..285f1e8d9 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,4 +1,9 @@ # CHANGELOG +## [3.8.0] - 2023-10-26 +- Updated trap handler to handle delegated exceptions in S-mode for both bare and virtual modes. +- Added Hypervisor mode support in Trap handler +- Updated the save area within the trap handler file. +- Improved CSR Rename macro for code clarity. ## [3.7.5] - 2023-10-11 Add missing check ISA fields in recently modified div and amo tests diff --git a/riscv-test-suite/env/arch_test.h b/riscv-test-suite/env/arch_test.h index 3aaf6ae5c..40c135ccf 100644 --- a/riscv-test-suite/env/arch_test.h +++ b/riscv-test-suite/env/arch_test.h @@ -158,7 +158,7 @@ #endif //========================================================================================== -// By default, it is defined as nop for the implementation that does not support Zifencei +// By default, ZIFENCE is defined as nop for the implementation that does not support Zifencei // Implementations that support Zifencei may use the fence.i instruction. // This only gets executed if xTVEC is not writable to point to the trap trampoline, // and if it isn't writable, the model better have the zifencei extension implemented. 
@@ -166,10 +166,10 @@ #ifndef RVMODEL_FENCEI #ifndef ZIFENCE - #define RVMODEL_FENCEI nop + #define RVMODEL_FENCEI nop // make sure ifetches get new code #else #define RVMODEL_FENCEI fence.i - #endif + #endif #endif #ifndef UNROLLSZ @@ -185,7 +185,10 @@ 1<=-2048)) ;\ addi dst, src, imm ;\ .else ;\ LI( dst, imm) ;\ @@ -510,10 +516,22 @@ .endm /******************************************************************************/ -/**** this is a helper macro that creates CSR aliases so code that ****/ -/**** accesses CSRs when V=1 in different modes can share the code ****/ +/**** These macros enable parameterization of trap handlers for each mode ****/ /******************************************************************************/ + .macro _XCSR_RENAME_V + .set CSR_XSTATUS, CSR_VSSTATUS /****FIXME? is the right substitution? ****/ + .set CSR_XEDELEG, CSR_HEDELEG /****FIXME? is the right substitution? ****/ + .set CSR_XIE, CSR_HIE + .set CSR_XIP, CSR_HIP + .set CSR_XCAUSE, CSR_VSCAUSE + .set CSR_XEPC, CSR_VSEPC + .set CSR_XSATP, CSR_VSATP + .set CSR_XSCRATCH,CSR_VSSCRATCH + .set CSR_XTVAL, CSR_VSTVAL + .set CSR_XTVEC, CSR_VSTVEC +.endm + .macro _XCSR_RENAME_S .set CSR_XSTATUS, CSR_SSTATUS .set CSR_XEDELEG, CSR_SEDELEG @@ -527,48 +545,54 @@ .set CSR_XTVEC, CSR_STVEC .endm -.macro XCSR_RENAME __MODE__ // enable CSR names to be parameterized, V,S merged - .ifc \__MODE__\, S - _XCSR_RENAME_S - .else - .ifc \__MODE__, V - _XCSR_RENAME_S - .else - .set CSR_XSTATUS, CSR_MSTATUS - .set CSR_XEDELEG, CSR_MEDELEG - .set CSR_XIE, CSR_MIE - .set CSR_XIP, CSR_MIP - .set CSR_XCAUSE, CSR_MCAUSE - .set CSR_XEPC, CSR_MEPC - .set CSR_XSATP, CSR_SATP - .set CSR_XSCRATCH,CSR_MSCRATCH - .set CSR_XTVAL, CSR_MTVAL - .set CSR_XTVEC, CSR_MTVEC - .endif - .endif +.macro _XCSR_RENAME_M + .set CSR_XSTATUS, CSR_MSTATUS + .set CSR_XEDELEG, CSR_MEDELEG + .set CSR_XIE, CSR_MIE + .set CSR_XIP, CSR_MIP + .set CSR_XCAUSE, CSR_MCAUSE + .set CSR_XEPC, CSR_MEPC + .set CSR_XSATP, CSR_SATP + .set 
CSR_XSCRATCH,CSR_MSCRATCH + .set CSR_XTVAL, CSR_MTVAL + .set CSR_XTVEC, CSR_MTVEC +.endm + +/******************************************************************************/ +/**** this is a helper macro that creates CSR aliases so code that ****/ +/**** accesses CSRs when V=1 in different modes can share the code ****/ +/******************************************************************************/ + + .macro XCSR_RENAME __MODE__ // enable CSR names to be parameterized, V,S merged + .ifc \__MODE__, M + _XCSR_RENAME_M + .endif + .ifc \__MODE__, S + _XCSR_RENAME_S + .endif + .ifc \__MODE__, V + _XCSR_RENAME_S + .endif .endm /******************************************************************************/ /**** this is a helper macro that creates CSR aliases so code that ****/ /**** accesses CSRs when V=1 in different modes can share the code ****/ /**** this verasion treats Vmodes separately as opposed to XCSR_RENAME ****/ +/**** this is used when the using it is run from Mmode ****/ /******************************************************************************/ + .macro XCSR_VRENAME __MODE__ // enable CSR names to be parameterized, V,S separate - .ifc \__MODE__, V - .set CSR_XSTATUS, CSR_HSTATUS - .set CSR_XEDELEG, CSR_HEDELEG - .set CSR_XIE, CSR_HIE - .set CSR_XIP, CSR_HIP - .set CSR_XCAUSE, CSR_VSCAUSE - .set CSR_XEPC, CSR_VSEPC - .set CSR_XSATP, CSR_VSATP - .set CSR_XSCRATCH,CSR_VSSCRATCH - .set CSR_XTVAL, CSR_VSTVAL - .set CSR_XTVEC, CSR_VSTVEC - .else - XCSR_RENAME \__MODE__\() - .endif -.endm + .ifc \__MODE__, M + _XCSR_RENAME_M + .endif + .ifc \__MODE__, S + _XCSR_RENAME_S + .endif + .ifc \__MODE__, V + _XCSR_RENAME_V + .endif + .endm //////////////////////////////////////////////////////////////////////////////////////// //**** This is a helper macro that saves GPRs. 
Normally used only inside CODE_END ****// @@ -578,108 +602,108 @@ //////////////////////////////////////////////////////////////////////////////////////// #define RVTEST_SAVE_GPRSM(_BR, _LBL, ...) ;\ - .option push ;\ - .option norvc ;\ + .option push ;\ + .option norvc ;\ .set __SV_MASK__, -1 /* default to save all */ ;\ .if NARG(__VA_ARGS__) == 1 ;\ .set __SV_MASK__, _ARG1(__VA_OPT__(__VA_ARGS__,0)) ;\ .endif ;\ - .set offset, 0 ;\ - LA(_BR, _LBL) ;\ - .if (__SV_MASK__ & (0x2)) == 0x2 ;\ - RVTEST_SIGUPD(_BR, x1) ;\ - .endif ;\ - .if (__SV_MASK__ & (0x4)) == 0x4 ;\ - RVTEST_SIGUPD(_BR, x2) ;\ - .endif ;\ - .if (__SV_MASK__ & (0x8)) == 0x8 ;\ - RVTEST_SIGUPD(_BR, x3) ;\ - .endif ;\ - .if (__SV_MASK__ & (0x10)) == 0x10 ;\ - RVTEST_SIGUPD(_BR, x4) ;\ + .set offset, 0 ;\ + LA(_BR, _LBL) ;\ + .if (__SV_MASK__ & (0x2)) == 0x2 ;\ + RVTEST_SIGUPD(_BR, x1) ;\ + .endif ;\ + .if (__SV_MASK__ & (0x4)) == 0x4 ;\ + RVTEST_SIGUPD(_BR, x2) ;\ + .endif ;\ + .if (__SV_MASK__ & (0x8)) == 0x8 ;\ + RVTEST_SIGUPD(_BR, x3) ;\ + .endif ;\ + .if (__SV_MASK__ & (0x10)) == 0x10 ;\ + RVTEST_SIGUPD(_BR, x4) ;\ + .endif ;\ + .if (__SV_MASK__ & (0x20)) == 0x20 ;\ + RVTEST_SIGUPD(_BR, x5) ;\ + .endif ;\ + .if (__SV_MASK__ & (0x40)) == 0x40 ;\ + RVTEST_SIGUPD(_BR, x6) ;\ + .endif ;\ + .if (__SV_MASK__ & (0x80)) == 0x80 ;\ + RVTEST_SIGUPD(_BR, x7) ;\ + .endif ;\ + .if (__SV_MASK__ & (0x100)) == 0x100 ;\ + RVTEST_SIGUPD(_BR, x8) ;\ + .endif ;\ + .if (__SV_MASK__ & (0x200)) == 0x200 ;\ + RVTEST_SIGUPD(_BR, x9) ;\ + .endif ;\ + .if (__SV_MASK__ & (0x400)) == 0x400 ;\ + RVTEST_SIGUPD(_BR, x10) ;\ + .endif ;\ + .if (__SV_MASK__ & (0x800)) == 0x800 ;\ + RVTEST_SIGUPD(_BR, x11) ;\ + .endif ;\ + .if (__SV_MASK__ & (0x1000)) == 0x1000 ;\ + RVTEST_SIGUPD(_BR, x12) ;\ + .endif ;\ + .if (__SV_MASK__ & (0x2000)) == 0x2000 ;\ + RVTEST_SIGUPD(_BR, x13) ;\ + .endif ;\ + .if (__SV_MASK__ & (0x4000)) == 0x4000 ;\ + RVTEST_SIGUPD(_BR, x14) ;\ + .endif ;\ + .if (__SV_MASK__ & (0x8000)) == 0x8000 ;\ + 
RVTEST_SIGUPD(_BR, x15) ;\ + .endif ;\ +#ifndef RVTEST_E ;\ + .if (__SV_MASK__ & (0x10000)) == 0x10000 ;\ + RVTEST_SIGUPD(_BR, x16) ;\ + .endif ;\ + .if (__SV_MASK__ & (0x20000)) == 0x20000 ;\ + RVTEST_SIGUPD(_BR, x17) ;\ + .endif ;\ + .if (__SV_MASK__ & (0x40000)) == 0x40000 ;\ + RVTEST_SIGUPD(_BR, x18) ;\ .endif ;\ - .if (__SV_MASK__ & (0x20)) == 0x20 ;\ - RVTEST_SIGUPD(_BR, x5) ;\ - .endif ;\ - .if (__SV_MASK__ & (0x40)) == 0x40 ;\ - RVTEST_SIGUPD(_BR, x6) ;\ - .endif ;\ - .if (__SV_MASK__ & (0x80)) == 0x80 ;\ - RVTEST_SIGUPD(_BR, x7) ;\ - .endif ;\ - .if (__SV_MASK__ & (0x100)) == 0x100 ;\ - RVTEST_SIGUPD(_BR, x8) ;\ - .endif ;\ - .if (__SV_MASK__ & (0x200)) == 0x200 ;\ - RVTEST_SIGUPD(_BR, x9) ;\ - .endif ;\ - .if (__SV_MASK__ & (0x400)) == 0x400 ;\ - RVTEST_SIGUPD(_BR, x10) ;\ - .endif ;\ - .if (__SV_MASK__ & (0x800)) == 0x800 ;\ - RVTEST_SIGUPD(_BR, x11) ;\ - .endif ;\ - .if (__SV_MASK__ & (0x1000)) == 0x1000 ;\ - RVTEST_SIGUPD(_BR, x12) ;\ - .endif ;\ - .if (__SV_MASK__ & (0x2000)) == 0x2000 ;\ - RVTEST_SIGUPD(_BR, x13) ;\ - .endif ;\ - .if (__SV_MASK__ & (0x4000)) == 0x4000 ;\ - RVTEST_SIGUPD(_BR, x14) ;\ - .endif ;\ - .if (__SV_MASK__ & (0x8000)) == 0x8000 ;\ - RVTEST_SIGUPD(_BR, x15) ;\ + .if (__SV_MASK__ & (0x80000)) == 0x80000 ;\ + RVTEST_SIGUPD(_BR, x19) ;\ + .endif ;\ + .if (__SV_MASK__ & (0x100000)) == 0x100000 ;\ + RVTEST_SIGUPD(_BR, x20) ;\ + .endif ;\ + .if (__SV_MASK__ & (0x200000)) == 0x200000 ;\ + RVTEST_SIGUPD(_BR, x21) ;\ + .endif ;\ + .if (__SV_MASK__ & (0x400000)) == 0x400000 ;\ + RVTEST_SIGUPD(_BR, x22) ;\ + .endif ;\ + .if (__SV_MASK__ & (0x800000)) == 0x800000 ;\ + RVTEST_SIGUPD(_BR, x23) ;\ + .endif ;\ + .if (__SV_MASK__ & (0x1000000)) == 0x1000000 ;\ + RVTEST_SIGUPD(_BR, x24) ;\ + .endif ;\ + .if (__SV_MASK__ & (0x2000000)) == 0x2000000 ;\ + RVTEST_SIGUPD(_BR, x25) ;\ + .endif ;\ + .if (__SV_MASK__ & (0x4000000)) == 0x4000000 ;\ + RVTEST_SIGUPD(_BR, x26) ;\ + .endif ;\ + .if (__SV_MASK__ & (0x8000000)) == 0x8000000 ;\ + 
RVTEST_SIGUPD(_BR, x27) ;\ + .endif ;\ + .if (__SV_MASK__ & (0x10000000)) == 0x10000000 ;\ + RVTEST_SIGUPD(_BR, x28) ;\ + .endif ;\ + .if (__SV_MASK__ & (0x20000000)) == 0x20000000 ;\ + RVTEST_SIGUPD(_BR, x29) ;\ + .endif ;\ + .if (__SV_MASK__ & (0x40000000)) == 0x40000000 ;\ + RVTEST_SIGUPD(_BR, x30) ;\ + .endif ;\ + .if (__SV_MASK__ & (0x80000000)) == 0x80000000 ;\ + RVTEST_SIGUPD(_BR, x31) ;\ .endif ;\ -#ifndef RVTEST_E ;\ - .if (__SV_MASK__ & (0x10000)) == 0x10000 ;\ - RVTEST_SIGUPD(_BR, x16) ;\ - .endif ;\ - .if (__SV_MASK__ & (0x20000)) == 0x20000 ;\ - RVTEST_SIGUPD(_BR, x17) ;\ - .endif ;\ - .if (__SV_MASK__ & (0x40000)) == 0x40000 ;\ - RVTEST_SIGUPD(_BR, x18) ;\ - .endif ;\ - .if (__SV_MASK__ & (0x80000)) == 0x80000 ;\ - RVTEST_SIGUPD(_BR, x19) ;\ - .endif ;\ - .if (__SV_MASK__ & (0x100000)) == 0x100000 ;\ - RVTEST_SIGUPD(_BR, x20) ;\ - .endif ;\ - .if (__SV_MASK__ & (0x200000)) == 0x200000 ;\ - RVTEST_SIGUPD(_BR, x21) ;\ - .endif ;\ - .if (__SV_MASK__ & (0x400000)) == 0x400000 ;\ - RVTEST_SIGUPD(_BR, x22) ;\ - .endif ;\ - .if (__SV_MASK__ & (0x800000)) == 0x800000 ;\ - RVTEST_SIGUPD(_BR, x23) ;\ - .endif ;\ - .if (__SV_MASK__ & (0x1000000)) == 0x1000000 ;\ - RVTEST_SIGUPD(_BR, x24) ;\ - .endif ;\ - .if (__SV_MASK__ & (0x2000000)) == 0x2000000 ;\ - RVTEST_SIGUPD(_BR, x25) ;\ - .endif ;\ - .if (__SV_MASK__ & (0x4000000)) == 0x4000000 ;\ - RVTEST_SIGUPD(_BR, x26) ;\ - .endif ;\ - .if (__SV_MASK__ & (0x8000000)) == 0x8000000 ;\ - RVTEST_SIGUPD(_BR, x27) ;\ - .endif ;\ - .if (__SV_MASK__ & (0x10000000)) == 0x10000000 ;\ - RVTEST_SIGUPD(_BR, x28) ;\ - .endif ;\ - .if (__SV_MASK__ & (0x20000000)) == 0x20000000 ;\ - RVTEST_SIGUPD(_BR, x29) ;\ - .endif ;\ - .if (__SV_MASK__ & (0x40000000)) == 0x40000000 ;\ - RVTEST_SIGUPD(_BR, x30) ;\ - .endif ;\ - .if (__SV_MASK__ & (0x80000000)) == 0x80000000 ;\ - RVTEST_SIGUPD(_BR, x31) ;\ - .endif ;\ .option pop ;\ #endif @@ -735,10 +759,10 @@ RVMODEL_DATA_END /* model specific stuff */ 
/***********************************************************************************/ /**** At end of test, this code is entered. It sets a register x2 to 0 and by ****/ -/**** default executes an ecall. The handler checks if the cause of the trap ****/ +/**** default executes an ecall. The handler checks if the cause of the trap ****/ /**** was ecall, w/ x2=0, and divert a special rtn_fm_mmode handler. That code ****/ /**** determines the caller's mode, uses it to select it's CODE_BEGIN, and uses ****/ -/**** to calculate it offset from Mmode's CODE_BEGIN, adjusts MEPC by that amt ****/ +/**** to calculate it offset from Mmode's CODE_BEGIN, adjusts MEPC by that amt ****/ /**** to convert it to an Mmode address, restores saved regs, and branches to ****/ /**** the relocated addr+4, immediately following the ECALL, but now in Mmode ****/ /**** **NOTE**: this destroys T2 and clears x2 (AKA sp) ****/ @@ -945,35 +969,37 @@ init_\__MODE__\()scratch: //---------------------------------------------------------------------- init_\__MODE__\()edeleg: li T2, 0 // save and clear edeleg so we can exit to Mmode -.ifc \__MODE__, V +.if (\__MODE__\() == V) csrrw T2, CSR_VEDELEG, T2 // special case: VS EDELEG available from Vmode .else -.ifc \__MODE__, M - #ifdef rvtest_strap_routine + .if (\__MODE__\() == M) + #ifdef rvtest_strap_routine csrrw T2, CSR_XEDELEG, T2 // this handles M mode save, but only if Smode exists - #endif -.else + #endif + .else //FIXME: if N-extension or anything like it is implemented, uncomment the following // csrrw T2, CSR_XEDELEG, T2 // this handles S mode -.endif + .endif .endif SREG T2, xedeleg_sv_off(T1) // now do the save //---------------------------------------------------------------------- init_\__MODE__\()satp: -.ifnc \__MODE__, M // if S or VS mode **FIXME: fixed offset frm trapreg_sv? +.ifnc \__MODE__ , M // if S or VS mode **FIXME: fixed offset frm trapreg_sv? 
LA( T4, rvtest_\__MODE__\()root_pg_tbl) // rplc xsatp w/ identity-mapped pg table csrrw T4, CSR_XSATP, T4 SREG T4, xsatp_sv_off(T1) .endif //---------------------------------------------------------------------- init_\__MODE__\()tvec: - LA( T4, \__MODE__\()trampoline) //this is a code-relative pointer - ADDI( T3,T4,actual_tramp_sz) - SREG T3, tentry_addr(T1) // initialize to original common entry point +// LA( T4, \__MODE__\()trampoline) //this is a code-relative pointer +// ADDI( T3, T4, actual_tramp_sz) +// SREG T3, tentry_addr(T1) // initialize to original common entry point csrr T3, CSR_XTVEC - SREG T3, xtvec_sav_off(T1) // save orig mtvec+mode in tvec_save - andi T2, T3, WDBYTMSK // merge .mode & tramp ptr and store to both XTVEC, tvec_new - or T2, T4, T2 + SREG T3, xtvec_sav_off(T1) // save orig mtvec+mode in tvec_save + andi T2, T3, WDBYTMSK // extract mode bits + LREG T4, tentry_addr(T1) + addi T4, T4, -actual_tramp_sz// load trampoline addr (common entry pt) avoiding an LA() + or T2, T4, T2 // merge .mode & tramp ptr and store to both XTVEC, tvec_new SREG T2, xtvec_new_off(T1) csrw CSR_XTVEC, T2 // write xtvec with trap_trampoline+mode, so trap will go to the trampoline @@ -1015,7 +1041,7 @@ init_\__MODE__\()tramp: /**** copy trampoline at mtvec tgt; T4->T2->T1 T3=end o endcopy_\__MODE__\()tramp: // vector table not writeable, restore RVMODEL_FENCEI // By default it is defined as nop. 
See the definition above csrr T1, CSR_XSCRATCH // reload trapreg_sv from scratch - SREG T2, trampend_addr(T1) // save copy progress; used to restore orig tramp + SREG T2, trampend_off(T1) // save copy progress; used to restore orig tramp SREG T4, tentry_addr(T1) // this is common entry point address, end of orig trampoline beq T3,T2, rvtest_\__MODE__\()prolog_done //full loop, don't exit abort\__MODE__\()test: @@ -1032,8 +1058,8 @@ rvtest_\__MODE__\()prolog_done: .macro RVTEST_TRAP_HANDLER __MODE__ .option push -.option rvc // temporarily allow compress to allow c.nop alignment -.align MTVEC_ALIGN +.option rvc // temporarily allow compress to allow c.nop alignment +.align MTVEC_ALIGN // ensure that a trampoline is on a model defined or reasonable boundary .option pop /**********************************************************************/ @@ -1103,9 +1129,7 @@ common_\__MODE__\()handler: // enter with vector addr in T6 SREG T5, trap_sv_off+5*REGWIDTH(sp) // x30 save remaining regs, starting with T5 csrrw T5, CSR_XSCRATCH, sp // restore ptr to reg sv area, and get old sp SREG T5, trap_sv_off+7*REGWIDTH(sp) // save old sp -// auipc T5, 0 -// addi T5, T5, 3*WDBYTSZ // quick calculation of common Xentry: label (3ops past auipc) - LREG T5, tentry_addr(sp) // get the address of the common entry ppoint + LREG T5, tentry_addr(sp) // get the address of the common entry point jr T5 // needed if trampoline gets moved elsewhere, else it's effectively a noop common_\__MODE__\()entry: @@ -1120,7 +1144,7 @@ spcl_\__MODE__\()2mmode_test: addi T4, T5, -8 // is cause 8..11? Mmode should avoid ECALL 0 andi T4, T4, -4 // NOTE: cause 10 is RSVD. 
Sail will diverge, but buggy anyway bnez T4, \__MODE__\()trapsig_ptr_upd // no, not in special mode, just continue - LREG T2, trap_sv_off+7*REGWIDTH(sp) // get test x2 (which is sp, which has been saved in the trap_sv area + LREG T2, trap_sv_off+7*REGWIDTH(sp) // get test x2 (which is sp, which has been saved in the trap_sv area beqz T2, rtn2mmode // spcl code 0 in T2 means spcl ECALL goto_mmode, just rtn after ECALL //------pre-update trap_sig pointer so handlers can themselves trap----- \__MODE__\()trapsig_ptr_upd: // calculate entry size based on int vs. excpt, int type, and h mode @@ -1144,11 +1168,11 @@ rvtest_\__MODE__\()endtest: // target may be too far away, s jalr x0, T1 \__MODE__\()xcpt_sig_sv: -.ifc \__MODE__, M // exception case, don't adjust if hypervisor mode disabled +.ifc \__MODE__ , M // exception case, don't adjust if hypervisor mode disabled csrr T1, CSR_MISA slli T1, T1, XLEN-8 // shift H bit into msb bgez T1, \__MODE__\()trap_sig_sv // no hypervisor mode, keep std width - li T2, 6*REGWIDTH // Hmode implemented & Mmode trap, override prein to be 6*regsz + li T2, 6*REGWIDTH // Hmode implemented & Mmode trap, override preinc to be 6*regsz .endif \__MODE__\()trap_sig_sv: @@ -1159,19 +1183,24 @@ rvtest_\__MODE__\()endtest: // target may be too far away, s // ****FIXME - this breaks if the signature area cross a page boundary and the mapping isn't contiguous .set sv_area_off, (-0*sv_area_sz) // get trapsig ptr val offset for Mmode, (M) -.ifc \__MODE__, S +.ifc \__MODE__ , S .set sv_area_off, (-1*sv_area_sz) // get trapsig_ptr val up 1 save areas (M<-S) .else -.ifc \__MODE__, V + .ifc \__MODE__ , V .set sv_area_off, (-2*sv_area_sz) // get trapsig ptr val up 2 save areas, (M<-S<-V)) -.endif + .endif .endif //------this should be atomic------------------------------------- - LREG T1, sv_area_off+trapsig_ptr_off(sp) + LREG T1, trapsig_ptr_off+sv_area_off(sp) add T4, T1, T2 - SREG T4, sv_area_off+trapsig_ptr_off(sp) + SREG T4, 
trapsig_ptr_off+sv_area_off(sp) //------end atomic------------------------------------------------ +// convert mtrap_sigptr to curr_mode trap_sigptr + LREG T3, sig_bgn_off+sv_area_off(sp) // load Mmode sig begin addr + sub T1, T1, T3 // cvt to offset from sig begin + LREG T3, sig_bgn_off+ 0(sp) // load sig begin addr + add T1, T1, T3 // add offset from sig_begin to curr sig_begin addr LREG T3, xtvec_new_off(sp) // get pointer to actual tramp table //---------------------------------------------------------------- @@ -1209,52 +1238,118 @@ common_\__MODE__\()excpt_handler: //******************************************************************************** // calculate the delta between trap mode and handler mode sv areas & add to sp - // This code calculates this table: (X conditions not possible & are ignored ) - // +-----------+--------+--------+-----+ - // |Hndlr Mode | MPP==3 | xPV==0 | ==1 | - // +-----------+--------+--------+-----+ - // | M | 0 * | 1 * | 2 * | sv_area_sz (prev priv can be M, S, or V) - // | [H]S, U | X | 0 * | 1 * | sv_area_sz (prev priv S or V if this is S/U) - // | VS,VU | X | X | 0 * | sv_area_sz (prev priv is V if this is V) - // +-----------+-----------------+-----+ + // This code calculates this table: (H-ext is determined by Vtrap_routine variable + + // +-------+-------+-------+-------+---------+ + // | Hndlr | vMPP | M.GVA | H-ext | sv area | + // | Mode | =3 | | | delta | + // +-------+------+-------+-------+---------+ + // | M | 0 | 1 | 1 | 2 | + // | M | 0 | 0 | x | 1 | + // | M | 1 | 0 | x | 0 | + // | M | x | 1 | 0 | illegal | + // | M | 1 | 1 | x | illegal | + // +-------+------+-------+-------+---------+ + // | | | H.GVA | H-ext | sv area | + // +-------+------+-------+-------+---------+ + // | S/HS | 0* | 1 | 1 | 1 | + // | S/HS | 0* | 0 | 1 | 0 | + // | S/HS | 0* | * | 0 | 0 | + // +-------+------+-------+-------+---------+ + // | | | noGVA | H-ext | sv area | | + // +-------+------+-------+-------+---------+ + // | VS | 0* | 
- | 1* | 0 | + // +-------+------+-------+-------+---------+ +// where vMPP is + // +-------+-------+-------+-------+------+ + // | Hndlr | | | sved | | + // | Mode | MPRV | MPP=3 | MPP=3 | vMPP | + // +-------+-------+-------+-------+------+ + // | M | 0 | 1 | x | 1 | + // | M | 0 | 0 | x | 0 | + // | M | 1 | 1 | 0 | 1 | + // | M | 1 | 1 | 1 | 0 | + // | M | 1 | 0 | x |illegl| + // +-------+-------+-------+-------+------+ + // |S/HS/VS| 0* | 1* | x | 1 | + // +-------+-------+-------+-------+------+ + + // * means can't be read, but must or would have value indicated + // all other values are illegal // lvs result in T4 to be used during relocation, (so doesn't touch sp) // can use T3, T6 because relocation will overwrite them //******************************************************************************** - mv T4, sp /* default reloc offset = 0*sv_area_sz */ - - // handle row 3:if Vmode handler, this must be horiz trap, lv default value -.ifnc \__MODE__, V /* skip the rest if in VS handler */ - - // handle row1,MPP==11; if this is Mmode; can't happen in other modes - .ifc \__MODE__, M /* vert trap into Mmode, min off= *1 */ + + // create an index from these values: vMPP, x.GVA , H-ext + // where vMPP = m.PRV ? 
svedMPP : m.MPP & svedMPP + + .ifc \__MODE__ , M csrr T6, CSR_MSTATUS - slli T3, T6, XLEN-MPP_LSB-2 /* move mpp into msb */ - bltz T3, vmem_adj_\__MODE__\()epc /* mpp!=0, lv dflt value */ - // handle row1,MPP!=11; check MPV in either of 2 CSRs and positions - addi T4, T4, sv_area_sz /* upd offset to be at least sv_area_sz */ - #if (XLEN==32) /* now get Vbit into correct position */ - csrr T6, CSR_MSTATUSH /* get half of status with V-bit */ - #else - srli T6, T6, 32 /* reposition mstatus for RV64 */ - #endif - slli T6, T6, WDSZ-MPV_LSB-1 /* but V into MSB ****FIXME if RV128?****/ - bgez T6, vmem_adj_\__MODE__\()epc /* V==0, ~virtualized, use *0,1 off*/ - addi T4, T4, sv_area_sz /* upd offset to be 1,2*sv_area_sz */ - .else /* S-or-HSmode, now get spv */ - - // now handle row2, [H]Smode; assumes misa.H is set is hypervisor implemented - #ifdef rvtest_vtrap_routine /******FIXME: needs bkdoor to misa.H ****/ - csrr T6, CSR_HSTATUS /* HS: hstatus exists only if H-ext */ - slli T6, T6, WDSZ-MPV_LSB-1 /* but V into MSB ****FIXME if RV128?****/ - bgez T6, vmem_adj_\__MODE__\()epc /* V==0, ~virtualized, use *0,1 off*/ - addi T4, T4, sv_area_sz /* upd offset to be 1,2*sv_area_sz */ - #endif + LREG T4, mpp_sv_off(sp) /* saved MPP, overwritten if MPRV=1 */ +// extract MPRV into bit0. Note that only Mmode cares; all other modes can have garbage + slli T3, T6, XLEN-MPRV_LSB-1 /* put MPRV into sign bit & test */ + bge T3, x0, 1f + and T4, T4, T6 /* MPP=11 if MPRV=0, so AND w/ prevMPP */ +1: + // FIXME: add code here to end test if MPRV=1 & MPP<3 + // e.g. 
rt justify, extract, add mprv, end if <4
+// now convert 2 bit xMPP field into a single bit 2
+	srli	T4, T4, MPP_LSB		/* now cvt MPP (in its natural position)*/
+	andi	T4, T4, 3		/* to a single bit in bit2 iff ==3	*/
+	addi	T4, T4, 1
+	andi	T4, T4, 4
+// extract GVA into bit 1
+  #if (XLEN==32)
+	csrr	T3, CSR_MSTATUSH	/* get CSR with GVA bit, but only H-ext */
+	srli	T3, T3, GVA_LSB-1	/* reposition RV32 mstatush into bit1	*/
+  #else
+	srli	T3, T6, GVA_LSB-1+32	/* reposition RV32 mstatus  into bit1	*/
+  #endif
+	andi	T3, T3, 1<<1
+	or	T4, T4, T3		/* extract GVA in bit1, insert into msk */
+// put H-extension implemented into bit 0
+  #ifdef rvtest_vtrap_routine
+	ori	T4, T4, 1		/* set LSB if H-ext present		*/
+	//****FIXME: this doesn't work if misa.H is RW but set to zero ****/
+  #endif
+// chk for illegal combination
+	LI(	T6, 0x3B)		/*lgl msk(vMPP,GVA,H)= 011,00x,10x=0x3B */
+	srl	T6, T6, T4
+	andi	T6, T6, 1
+	beq	T6, x0, rvtest_\__MODE__\()endtest /* illegal combination */
+//determine sv offset multiplier
+	LI(	T6, sv_area_sz)
+	andi	T3, T4, 2
+	srli	T3, T3, 1		/* extract GVA & move to bit0		*/
+	srl	T6, T6, T3		/* mul by 2 if GVA, else mul by 1	*/
+	slli	T3, T4, XLEN-3
+	srai	T3, T3, XLEN-1		/* sg ext vMPP, use it to clr delta	*/
+	and	T6, T6, T3
+
+  .else		// do it again, but from VS or HS mode
+    .ifc \__MODE__ , S
+// vMPP cannot be 11 because it cannot be handled at a lower mode than trap mode
+// MPRV cannot be 1 because that only applies to Mmode
+// GVA can only exist if there is H-ext
+      #ifdef rvtest_vtrap_routine
+	LI(	T6, sv_area_sz)
+	csrr	T3, CSR_HSTATUS		/* get CSR with GVA bit, but only H-ext */
+	slli	T3, T3, XLEN-1-GVA_LSB	/* sign extend rt justified GVA bit	*/
+	srli	T3, T3, XLEN-1
+	and	T4, T3, T6		/* clr delta if GVA=0			*/
+      #else
+	li	T4,0			/* clr delta if no H-ext		*/
+      #endif
+    .else	// handler is in VS mode, vtrap_routine must be defined, offset must be 0
+	li	T4,0
+    .endif
+  .endif
+ 
//******************************************************************************** -vmem_adj_\__MODE__\()epc: /* T4 now points to sv area of trapping mode */ - csrr T2, CSR_XEPC +vmem_adj_\__MODE__\()epc: + add T4, T4, sp /* calc address of correct sv_area */ + csrr T2, CSR_XEPC /* T4 now pts to trapping sv_area mode */ LREG T3, vmem_bgn_off(T4) // see if epc is in the vmem area LREG T6, vmem_seg_siz(T4) @@ -1298,6 +1393,9 @@ adj_\__MODE__\()epc_rtn: // adj mepc so there is at least 4B of p /* This uses offset sp in T4 from epc relocation */ /******************************************************************************/ +/**** FIXME: if in Mmode and mode!=bare & MPRV=1, then T4 be altered to point to + the mode of the mstatus.mpp that is stored in Xtrampend_sv ****/ + csrr T2, CSR_XTVAL chk_\__MODE__\()tval: @@ -1343,7 +1441,7 @@ sv_\__MODE__\()tval: skp_\__MODE__\()tval: - .ifc \__MODE__, M + .ifc \__MODE__ , M .ifdef __H_EXT__ csrr T2, CSR_MTVAL2 // **** FIXME: does this need reloc also? 
Its a guest phys addr SREG T2, 4*REGWIDTH(T1) // store 5th sig value, only if mmode handler and VS mode exists @@ -1410,7 +1508,7 @@ spcl_\__MODE__\()handler: // case table branch to special handler add T3, T3, T2 // offset into the correct int/excpt dispatch table slli T2, T5, 3 // index into 8b aligned dispatch entry and jump through it add T3, T3, T2 - andi T3, T3, -8 // make sure this is dblwd aligned, correct if it is 4 too large + andi T3, T3, -8 // make sure this is dblwd aligned, correct if it is 4 too large LREG T3, 0(T3) spcl_\__MODE__\()dispatch_hndling: beqz T3, abort_tests // if address is 0, this is an error, exit test @@ -1429,7 +1527,7 @@ spcl_\__MODE__\()dispatch: /**** Note that the external interrupt routines are expected to ****/ /**** return with an interrupt ID in T3 ****/ - .align 3 //make sure this is a dblwd boundary + .align 3 //make sure this is a dblwd boundary clrint_\__MODE__\()tbl: //this code should only touch T2..T6 #ifdef rvtest_vtrap_routine // M/S/V/U .dword 0 // int cause 0 is reserved, error @@ -1537,7 +1635,7 @@ excpt_\__MODE__\()hndlr_tbl: // handler code should only touch T2..T6 SREG T3, 3*REGWIDTH(T1) // save 4rd sig value, (intID) j resto_\__MODE__\()rtn -//------------- VSode---------------- +//------------- VSmode---------------- \__MODE__\()clr_Vsw_int: // int 2 default to just return if not defined RVMODEL_CLR_VSW_INT j resto_\__MODE__\()rtn @@ -1551,7 +1649,7 @@ excpt_\__MODE__\()hndlr_tbl: // handler code should only touch T2..T6 SREG T3, 3*REGWIDTH(T1) // save 4rd sig value, (intID) j resto_\__MODE__\()rtn -.ifc \__MODE__, M +.ifc \__MODE__ , M /*************** Spcl handler for returning from GOTO_MMODE. 
********/ /*************** Only gets executed if GOTO_MMODE not called from Mmode ********/ @@ -1607,29 +1705,31 @@ excpt_\__MODE__\()hndlr_tbl: // handler code should only touch T2..T6 exit_\__MODE__\()cleanup: csrr T1, mscratch // pointer to save area - .ifc \__MODE__, S - addi T1, T1, 1*sv_area_sz - .else - .ifc \__MODE__, V - addi T1, T1, 2*sv_area_sz - .endif + .ifc \__MODE__ , S + addi T1, T1, 1*sv_area_sz + .else + .ifc \__MODE__ , V + addi T1, T1, 2*sv_area_sz .endif + .endif + resto_\__MODE__\()edeleg: LREG T2, xedeleg_sv_off(T1) // get saved xedeleg at offset -32 -.ifc \__MODE__, V +.ifc \__MODE__ , V csrw CSR_VEDELEG, T2 //special case: VS EDELEG available from Vmode .else -.ifc \__MODE__, M + .ifc \__MODE__ , M #ifdef rvtest_strap_routine csrw CSR_XEDELEG, T2 //this handles M mode restore, but only if Smode exists #endif -.else -//FIXME: if N-extension or anything like it is implemented, uncomment the following + .else +//FIXME: if Umode-int-extension or anything like it is implemented, uncomment the following // csrw CSR_XEDELEG, T2 //this handles S mode restore + .endif .endif -.endif -.ifnc \__MODE__, M + +.ifnc \__MODE__ , M resto_\__MODE__\()satp: LREG T2, xsatp_sv_off(T1) // restore saved xsatp csrw CSR_XSATP, T2 @@ -1646,7 +1746,7 @@ resto_\__MODE__\()xtvec: resto_\__MODE__\()tramp: // T2 now contains where to restore to addi T4, T1, tramp_sv_off // T4 now contains where to restore from - LREG T3, trampend_addr(T1) // T3 tracks how much to restore + LREG T3, trampend_off(T1) // T3 tracks how much to restore resto_\__MODE__\()loop: lw T6, 0(T4) // read saved tramp entry @@ -1688,43 +1788,45 @@ rvtest_\__MODE__\()end: .endr \__MODE__\()code_bgn_ptr: - .dword rvtest_code_begin// pointer to code bgn area using this mode's mapping trampsvend+0*8 + .dword rvtest_code_begin // ptr to code bgn area using this mode's mapping trampsvend+0*8 \__MODE__\()code_seg_sz: .dword rvtest_code_end-rvtest_code_begin // code seg size in any mode trampsvend+1*8 
\__MODE__\()data_bgn_ptr: - .dword rvtest_data_begin// pointer to data bgn area using this mode's mapping trampsvend+2*8 + .dword rvtest_data_begin // ptr to data bgn area using this mode's mapping trampsvend+2*8 \__MODE__\()data_seg_sz: .dword rvtest_data_end-rvtest_data_begin // code seg size in any mode trampsvend+3*8 \__MODE__\()sig_bgn_ptr: - .dword rvtest_sig_begin // pointer to sig bgn area using this mode's mapping trampsvend+4*8 + .dword rvtest_sig_begin // ptr to sig bgn area using this mode's mapping trampsvend+4*8 \__MODE__\()sig_seg_sz: .dword rvtest_sig_end-rvtest_sig_begin // code seg size in any mode trampsvend+5*8 \__MODE__\()vmem_bgn_ptr: - .dword rvtest_data_begin// pointer to vmem bgn area using this mode's mapping trampsvend+6*8 + .dword rvtest_code_begin // default to code bgn area w/ this mode's mapping trampsvend+6*8 \__MODE__\()vmem_seg_sz: - .dword rvtest_data_end-rvtest_data_begin // vmem seg size in any mode trampsvend+7*8 + .dword rvtest_code_end-rvtest_code_begin // vmem seg size in any mode trampsvend+7*8 +\__MODE__\()mpp_sv: + // save mpp=3<<1 during test for mprv spcl case,***only Smode vers \__MODE__\()trap_sig: - .dword mtrap_sigptr // pointer to next trapsig (only Mmode version used) trampsvend+8*8 + .dword mtrap_sigptr // ptr to next trapsig ***GLBL(only Mmode ver. used) trampsvend+8*8 \__MODE__\()satp_sv: .dword 0 // save area for incoming xsatp trampsvend+9*8 \__MODE__\()trampend_sv: - .dword 0 // save location of end of saved trampoline trampsvend+10*8 + .dword 0 // save loc of end of sved trampoline prolog/epilog trampsvend+10*8 \__MODE__\()tentry_sv: - .dword 0 // save location of end of orig. 
trampoline trampsvend+11*8 + .dword \__MODE__\()trampoline + actual_tramp_sz // save loc of comm entry pt trampsvend+11*8 \__MODE__\()edeleg_sv: - .dword 0 // save location for edeleg CSR trampsvend+12*8 + .dword 0 // save loc for edeleg CSR trampsvend+12*8: \__MODE__\()tvec_new: .dword 0 // points to in-use tvec, actual tramp table used trampsvend+13*8 \__MODE__\()tvec_save: .dword 0 // save area for incoming mtvec trampsvend+14*8 \__MODE__\()scratch_save: .dword 0 // save area for incoming mscratch trampsvend+15*8 + //****GLOBAL:***** onlyMMode version used +\__MODE__\()trapreg_sv: // hndler regsave area, T1..T6,sp+spare keep dbl algn trampsvend+16*8 + .fill 8, REGWIDTH, 0xdeadbeef -\__MODE__\()trapreg_sv: //****GLOBAL:***** - .fill 8, REGWIDTH, 0xdeadbeef // handler regsave area, T1..T6,sp +1 extra,trampsvend+16*8; keep dbl alignment - -\__MODE__\()sv_area_end: // used to calc size, which is used to avoid CSR read trampsvend+16+8 +\__MODE__\()sv_area_end: // used to calc size, which is used to avoid CSR read trampsvend+24/32+8 .option pop .endm // end of TRAP_SAVEAREA @@ -1775,7 +1877,7 @@ rvtest_\__MODE__\()end: .option norvc .global rvtest_code_end // define the label and make it available .global cleanup_epilogs // ****ALERT: tests must populate x1 with a point to the end of regular sig area - +/**** MPRV must be clear here !!! 
****/ rvtest_code_end: // RVMODEL_HALT should get here #ifdef rvtest_gpr_save // gpr_save area is instantiated at end of signature RVTEST_SAVE_GPRS x1 gpr_save @@ -1873,3 +1975,4 @@ rvtest_\__MODE__\()end: #endif rvtest_data_end: .endm + diff --git a/riscv-test-suite/env/test_macros.h b/riscv-test-suite/env/test_macros.h index 4d33b691c..20a31a710 100644 --- a/riscv-test-suite/env/test_macros.h +++ b/riscv-test-suite/env/test_macros.h @@ -15,6 +15,47 @@ 1: Superpage */ +#define LEVEL0 0x00 +#define LEVEL1 0x01 +#define LEVEL2 0x02 +#define LEVEL3 0x03 +#define LEVEL4 0x04 + +#define sv39 0x00 +#define sv48 0x01 +#define sv57 0x02 + +#define CODE code_bgn_off +#define DATA data_bgn_off +#define SIG sig_bgn_off +#define VMEM vmem_bgn_off + +#define ALL_MEM_PMP ;\ + li t2, -1 ;\ + csrw pmpaddr0, t2 ;\ + li t2, 0x0F ;\ + csrw pmpcfg0, t2 ;\ + sfence.vma ; + +#define SIGNATURE_AREA(TYPE,ARG1,ARG2, ...) ;\ + LI (t0, ARG1) ;\ + .if(TYPE == CODE) ;\ + LI (t1, ARG2) ;\ + sub t0, t0, t1 ;\ + csrr sp, mscratch ;\ + add t1,sp,t0 ;\ + csrw sscratch, t1 ;\ + .else ;\ + LA (t1, ARG2) ;\ + sub t0, t0, t1 ;\ + .endif ;\ + LREG t1, TYPE+0*sv_area_sz(sp) ;\ + add t2, t1, t0 ;\ + SREG t2, TYPE+1*sv_area_sz(sp) ;\ + .if NARG(__VA_ARGS__) == 1 ;\ + SREG t2, TYPE+2*sv_area_sz(sp) ;\ + .endif ; + //****NOTE: label `rvtest_Sroot_pg_tbl` must be declared after RVTEST_DATA_END // in the test aligned at 4kiB (use .align 12) @@ -34,6 +75,68 @@ add _TR1, _TR1, _TR0 ;\ SREG _PAR, 0(_TR1); +#define PTE_SETUP_RV64(_PAR, _PR, _TR0, _TR1, VA, level, mode) ;\ + srli _PAR, _PAR, 12 ;\ + slli _PAR, _PAR, 10 ;\ + or _PAR, _PAR, _PR ;\ + .if (mode == sv39) ;\ + .if (level == 2) ;\ + LA(_TR1, rvtest_Sroot_pg_tbl) ;\ + .set vpn, ((VA >> 30) & 0x1FF) << 3 ;\ + .endif ;\ + .if (level == 1) ;\ + LA(_TR1, rvtest_slvl1_pg_tbl) ;\ + .set vpn, ((VA >> 21) & 0x1FF) << 3 ;\ + .endif ;\ + .if (level == 0) ;\ + LA(_TR1, rvtest_slvl2_pg_tbl) ;\ + .set vpn, ((VA >> 12) & 0x1FF) << 3 ;\ + .endif ;\ + .endif ;\ + .if 
(mode == sv48) ;\ + .if (level == 3) ;\ + LA(_TR1, rvtest_Sroot_pg_tbl) ;\ + .set vpn, ((VA >> 39) & 0x1FF) << 3 ;\ + .endif ;\ + .if (level == 2) ;\ + LA(_TR1, rvtest_slvl1_pg_tbl) ;\ + .set vpn, ((VA >> 30) & 0x1FF) << 3 ;\ + .endif ;\ + .if (level == 1) ;\ + LA(_TR1, rvtest_slvl2_pg_tbl) ;\ + .set vpn, ((VA >> 21) & 0x1FF) << 3 ;\ + .endif ;\ + .if (level == 0) ;\ + LA(_TR1, rvtest_slvl3_pg_tbl) ;\ + .set vpn, ((VA >> 12) & 0x1FF) << 3 ;\ + .endif ;\ + .endif ;\ + .if (mode == sv57) ;\ + .if (level == 4) ;\ + LA(_TR1, rvtest_Sroot_pg_tbl) ;\ + .set vpn, ((VA >> 48) & 0x1FF) << 3 ;\ + .endif ;\ + .if (level == 3) ;\ + LA(_TR1, rvtest_slvl1_pg_tbl) ;\ + .set vpn, ((VA >> 39) & 0x1FF) << 3 ;\ + .endif ;\ + .if (level == 2) ;\ + LA(_TR1, rvtest_slvl2_pg_tbl) ;\ + .set vpn, ((VA >> 30) & 0x1FF) << 3 ;\ + .endif ;\ + .if (level == 1) ;\ + LA(_TR1, rvtest_slvl3_pg_tbl) ;\ + .set vpn, ((VA >> 21) & 0x1FF) << 3 ;\ + .endif ;\ + .if (level == 0) ;\ + LA(_TR1, rvtest_slvl3_pg_tbl) ;\ + .set vpn, ((VA >> 12) & 0x1FF) << 3 ;\ + .endif ;\ + .endif ;\ + LI(_TR0, vpn) ;\ + add _TR1, _TR1, _TR0 ;\ + SREG _PAR, 0(_TR1) ; + #define PTE_PERMUPD_RV32(_PR, _TR0, _TR1, VA, level) ;\ .if (level==1) ;\ LA(_TR1, rvtest_Sroot_pg_tbl) ;\ @@ -49,14 +152,46 @@ srli _TR0, _TR0, 10 ;\ slli _TR0, _TR0, 10 ;\ or _TR0, _TR0, _PR ;\ - SREG _TR0, 0(_TR1) ;\ + SREG _TR0, 0(_TR1) ; #define SATP_SETUP_SV32 ;\ LA(t6, rvtest_Sroot_pg_tbl) ;\ LI(t5, SATP32_MODE) ;\ srli t6, t6, 12 ;\ or t6, t6, t5 ;\ - csrw satp, t6 ;\ + csrw satp, t6 ; + +#define SATP_SETUP_RV64(MODE) ;\ + LA(t6, rvtest_Sroot_pg_tbl) ;\ + .if (MODE == sv39) ;\ + LI(t5, (SATP64_MODE) & (SATP_MODE_SV39 << 60)) ;\ + .endif ;\ + .if (MODE == sv48) ;\ + LI(t5, (SATP64_MODE) & (SATP_MODE_SV48 << 60)) ;\ + .endif ;\ + .if (MODE == sv57) ;\ + LI(t5, (SATP64_MODE) & (SATP_MODE_SV57 << 60)) ;\ + .endif ;\ + .if (MODE == sv64) ;\ + LI(t5, (SATP64_MODE) & (SATP_MODE_SV64 << 60)) ;\ + .endif ;\ + srli t6, t6, 12 ;\ + or t6, t6, t5 ;\ + csrw satp, t6 
; + +//Tests for atomic memory operation(AMO) instructions +#define TEST_AMO_OP(inst, destreg, origptr, reg2, origval, updval, sigptr, ...) ;\ + .if NARG(__VA_ARGS__) == 1 ;\ + .set offset,_ARG1(__VA_OPT__(__VA_ARGS__,0)) ;\ + .endif ;\ + LI(reg2, MASK_XLEN(origval)) ;\ + RVTEST_SIGUPD(sigptr, reg2) /*Write original AMO src */ ;\ + LI(reg2, MASK_XLEN(updval)) ;\ + addi origptr, sigptr, offset-REGWIDTH /* Calculate where orig AMO src is stored */ ;\ + inst destreg, reg2, (origptr) /*origval -> destreg; updated val -> (origptr) */ ;\ + RVTEST_SIGUPD(sigptr, destreg) /* write original AMO val */ + + #define NAN_BOXED(__val__,__width__,__max__) ;\ .if __width__ == 32 ;\ @@ -480,18 +615,6 @@ RVTEST_SIGUPD_F(swreg,destreg,flagreg) RVTEST_SIGUPD_FID(swreg,destreg,flagreg) ;\ RVMODEL_IO_ASSERT_GPR_EQ(testreg, destreg, correctval) -//Tests for atomic memory operation(AMO) instructions -#define TEST_AMO_OP(inst, destreg, origptr, reg2, origval, updval, sigptr, ...) ;\ - .if NARG(__VA_ARGS__) == 1 ;\ - .set offset,_ARG1(__VA_OPT__(__VA_ARGS__,0)) ;\ - .endif ;\ - LI(reg2, MASK_XLEN(origval)) ;\ - RVTEST_SIGUPD(sigptr, reg2) /*Write original AMO src */ ;\ - LI(reg2, MASK_XLEN(updval)) ;\ - addi origptr, sigptr, offset-REGWIDTH /* Calculate where orig AMO src is stored */ ;\ - inst destreg, reg2, (origptr) /*origval -> destreg; updated val -> (origptr) */ ;\ - RVTEST_SIGUPD(sigptr, destreg) /* write original AMO val */ - #define TEST_AUIPC(inst, destreg, correctval, imm, swreg, offset, testreg) ;\ TEST_CASE(testreg, destreg, correctval, swreg, offset, \ LA testreg, 1f ;\