2 void glue(do_lsw, MEMSUFFIX) (int dst);
3 void glue(do_stsw, MEMSUFFIX) (int src);
5 static inline uint16_t glue(ld16r, MEMSUFFIX) (void *EA)
7 uint16_t tmp = glue(lduw, MEMSUFFIX)(EA);
8 return ((tmp & 0xFF00) >> 8) | ((tmp & 0x00FF) << 8);
11 static inline uint32_t glue(ld32r, MEMSUFFIX) (void *EA)
13 uint32_t tmp = glue(ldl, MEMSUFFIX)(EA);
14 return ((tmp & 0xFF000000) >> 24) | ((tmp & 0x00FF0000) >> 8) |
15 ((tmp & 0x0000FF00) << 8) | ((tmp & 0x000000FF) << 24);
18 static inline void glue(st16r, MEMSUFFIX) (void *EA, uint16_t data)
20 uint16_t tmp = ((data & 0xFF00) >> 8) | ((data & 0x00FF) << 8);
21 glue(stw, MEMSUFFIX)(EA, tmp);
24 static inline void glue(st32r, MEMSUFFIX) (void *EA, uint32_t data)
26 uint32_t tmp = ((data & 0xFF000000) >> 24) | ((data & 0x00FF0000) >> 8) |
27 ((data & 0x0000FF00) << 8) | ((data & 0x000000FF) << 24);
28 glue(stl, MEMSUFFIX)(EA, tmp);
/*** Integer load ***/
/* Emit a load micro-op named l<name><MEMSUFFIX>: loads into T1 from the
 * effective address held in T0, using accessor op<MEMSUFFIX>.
 * NOTE(review): trailing RETURN() restored per the standard dyngen op
 * shape — the original closing lines were lost in extraction. */
#define PPC_LD_OP(name, op)                                                   \
PPC_OP(glue(glue(l, name), MEMSUFFIX))                                        \
{                                                                             \
    T1 = glue(op, MEMSUFFIX)((void *)T0);                                     \
    RETURN();                                                                 \
}
/* Emit a store micro-op named st<name><MEMSUFFIX>: stores T1 to the
 * effective address held in T0, using accessor op<MEMSUFFIX>.
 * NOTE(review): trailing RETURN() restored per the standard dyngen op
 * shape — the original closing lines were lost in extraction. */
#define PPC_ST_OP(name, op)                                                   \
PPC_OP(glue(glue(st, name), MEMSUFFIX))                                       \
{                                                                             \
    glue(op, MEMSUFFIX)((void *)T0, T1);                                      \
    RETURN();                                                                 \
}
/*** Integer store ***/
/*** Integer load and store with byte reverse ***/
/* lhbrx/lwbrx and sthbrx/stwbrx micro-ops, built from the byte-swapping
 * helpers above. */
PPC_LD_OP(hbr, ld16r);
PPC_LD_OP(wbr, ld32r);
PPC_ST_OP(hbr, st16r);
PPC_ST_OP(wbr, st32r);
62 /*** Integer load and store multiple ***/
63 PPC_OP(glue(lmw, MEMSUFFIX))
67 for (; dst < 32; dst++, T0 += 4) {
68 ugpr(dst) = glue(ldl, MEMSUFFIX)((void *)T0);
73 PPC_OP(glue(stmw, MEMSUFFIX))
77 for (; src < 32; src++, T0 += 4) {
78 glue(stl, MEMSUFFIX)((void *)T0, ugpr(src));
83 /*** Integer load and store strings ***/
84 PPC_OP(glue(lswi, MEMSUFFIX))
86 glue(do_lsw, MEMSUFFIX)(PARAM(1));
90 /* PPC32 specification says we must generate an exception if
91 * rA is in the range of registers to be loaded.
92 * In an other hand, IBM says this is valid, but rA won't be loaded.
93 * For now, I'll follow the spec...
95 PPC_OP(glue(lswx, MEMSUFFIX))
98 if ((PARAM(1) < PARAM(2) && (PARAM(1) + T1) > PARAM(2)) ||
99 (PARAM(1) < PARAM(3) && (PARAM(1) + T1) > PARAM(3))) {
100 do_queue_exception_err(EXCP_PROGRAM, EXCP_INVAL | EXCP_INVAL_LSWX);
101 do_process_exceptions();
103 glue(do_lsw, MEMSUFFIX)(PARAM(1));
109 PPC_OP(glue(stsw, MEMSUFFIX))
111 glue(do_stsw, MEMSUFFIX)(PARAM(1));
/*** Floating-point store ***/
/* Emit a floating-point store micro-op st<name><MEMSUFFIX>: stores FT1
 * to the effective address in T0.
 * NOTE(review): trailing RETURN() restored per the standard dyngen op
 * shape — the original closing lines were lost in extraction. */
#define PPC_STF_OP(name, op)                                                  \
PPC_OP(glue(glue(st, name), MEMSUFFIX))                                       \
{                                                                             \
    glue(op, MEMSUFFIX)((void *)T0, FT1);                                     \
    RETURN();                                                                 \
}
/* stfd (double) and stfs (single) micro-ops. */
PPC_STF_OP(fd, stfq);
PPC_STF_OP(fs, stfl);
/*** Floating-point load ***/
/* Emit a floating-point load micro-op l<name><MEMSUFFIX>: loads FT1
 * from the effective address in T0.
 * NOTE(review): trailing RETURN() restored per the standard dyngen op
 * shape — the original closing lines were lost in extraction. */
#define PPC_LDF_OP(name, op)                                                  \
PPC_OP(glue(glue(l, name), MEMSUFFIX))                                        \
{                                                                             \
    FT1 = glue(op, MEMSUFFIX)((void *)T0);                                    \
    RETURN();                                                                 \
}
/* lfd (double) and lfs (single) micro-ops. */
PPC_LDF_OP(fd, ldfq);
PPC_LDF_OP(fs, ldfl);
137 /* Load and set reservation */
138 PPC_OP(glue(lwarx, MEMSUFFIX))
141 do_queue_exception(EXCP_ALIGN);
142 do_process_exceptions();
144 T1 = glue(ldl, MEMSUFFIX)((void *)T0);
150 /* Store with reservation */
151 PPC_OP(glue(stwcx, MEMSUFFIX))
154 do_queue_exception(EXCP_ALIGN);
155 do_process_exceptions();
157 if (regs->reserve != T0) {
158 env->crf[0] = xer_ov;
160 glue(stl, MEMSUFFIX)((void *)T0, T1);
161 env->crf[0] = xer_ov | 0x02;
168 PPC_OP(glue(dcbz, MEMSUFFIX))
170 glue(stl, MEMSUFFIX)((void *)(T0 + 0x00), 0);
171 glue(stl, MEMSUFFIX)((void *)(T0 + 0x04), 0);
172 glue(stl, MEMSUFFIX)((void *)(T0 + 0x08), 0);
173 glue(stl, MEMSUFFIX)((void *)(T0 + 0x0C), 0);
174 glue(stl, MEMSUFFIX)((void *)(T0 + 0x10), 0);
175 glue(stl, MEMSUFFIX)((void *)(T0 + 0x14), 0);
176 glue(stl, MEMSUFFIX)((void *)(T0 + 0x18), 0);
177 glue(stl, MEMSUFFIX)((void *)(T0 + 0x1C), 0);
181 /* External access */
182 PPC_OP(glue(eciwx, MEMSUFFIX))
184 T1 = glue(ldl, MEMSUFFIX)((void *)T0);
188 PPC_OP(glue(ecowx, MEMSUFFIX))
190 glue(stl, MEMSUFFIX)((void *)T0, T1);