2 * PowerPC emulation micro-operations for qemu.
4 * Copyright (c) 2003-2007 Jocelyn Mayer
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
/* Load 16 bits with byte reversal (little-endian access helper).
 * NOTE(review): opening/closing braces of these helpers appear to have been
 * lost in extraction — the code tokens below are kept untouched. */
21 static inline uint16_t glue(ld16r, MEMSUFFIX) (target_ulong EA)
23 uint16_t tmp = glue(lduw, MEMSUFFIX)(EA);
24 return ((tmp & 0xFF00) >> 8) | ((tmp & 0x00FF) << 8);
27 static inline int32_t glue(ld16rs, MEMSUFFIX) (target_ulong EA)
29 int16_t tmp = glue(lduw, MEMSUFFIX)(EA);
30 return (int16_t)((tmp & 0xFF00) >> 8) | ((tmp & 0x00FF) << 8);
/* Load 32 bits with byte reversal (little-endian access helper). */
33 static inline uint32_t glue(ld32r, MEMSUFFIX) (target_ulong EA)
35 uint32_t tmp = glue(ldl, MEMSUFFIX)(EA);
36 return ((tmp & 0xFF000000) >> 24) | ((tmp & 0x00FF0000) >> 8) |
37 ((tmp & 0x0000FF00) << 8) | ((tmp & 0x000000FF) << 24);
#if defined(TARGET_PPC64) || defined(TARGET_PPCEMB)
/* Load 64 bits with byte reversal (little-endian access helper).
 * Bug fix: the least-significant byte was shifted left by 54 instead of 56,
 * corrupting every byte-reversed 64-bit load (the mirror st64r helper
 * correctly uses << 56). */
static inline uint64_t glue(ld64r, MEMSUFFIX) (target_ulong EA)
{
    uint64_t tmp = glue(ldq, MEMSUFFIX)(EA);
    return ((tmp & 0xFF00000000000000ULL) >> 56) |
           ((tmp & 0x00FF000000000000ULL) >> 40) |
           ((tmp & 0x0000FF0000000000ULL) >> 24) |
           ((tmp & 0x000000FF00000000ULL) >> 8) |
           ((tmp & 0x00000000FF000000ULL) << 8) |
           ((tmp & 0x0000000000FF0000ULL) << 24) |
           ((tmp & 0x000000000000FF00ULL) << 40) |
           ((tmp & 0x00000000000000FFULL) << 56);
}
#endif
55 #if defined(TARGET_PPC64)
/* Load 32 bits and sign-extend to 64 bits (lwa-style load, PPC64 only). */
56 static inline int64_t glue(ldsl, MEMSUFFIX) (target_ulong EA)
58 return (int32_t)glue(ldl, MEMSUFFIX)(EA);
61 static inline int64_t glue(ld32rs, MEMSUFFIX) (target_ulong EA)
63 uint32_t tmp = glue(ldl, MEMSUFFIX)(EA);
64 return (int32_t)((tmp & 0xFF000000) >> 24) | ((tmp & 0x00FF0000) >> 8) |
65 ((tmp & 0x0000FF00) << 8) | ((tmp & 0x000000FF) << 24);
/* Store 16 bits with byte reversal (little-endian access helper). */
69 static inline void glue(st16r, MEMSUFFIX) (target_ulong EA, uint16_t data)
71 uint16_t tmp = ((data & 0xFF00) >> 8) | ((data & 0x00FF) << 8);
72 glue(stw, MEMSUFFIX)(EA, tmp);
/* Store 32 bits with byte reversal. */
75 static inline void glue(st32r, MEMSUFFIX) (target_ulong EA, uint32_t data)
77 uint32_t tmp = ((data & 0xFF000000) >> 24) | ((data & 0x00FF0000) >> 8) |
78 ((data & 0x0000FF00) << 8) | ((data & 0x000000FF) << 24);
79 glue(stl, MEMSUFFIX)(EA, tmp);
/* Store 64 bits with byte reversal (64-bit and embedded targets only). */
82 #if defined(TARGET_PPC64) || defined(TARGET_PPCEMB)
83 static inline void glue(st64r, MEMSUFFIX) (target_ulong EA, uint64_t data)
85 uint64_t tmp = ((data & 0xFF00000000000000ULL) >> 56) |
86 ((data & 0x00FF000000000000ULL) >> 40) |
87 ((data & 0x0000FF0000000000ULL) >> 24) |
88 ((data & 0x000000FF00000000ULL) >> 8) |
89 ((data & 0x00000000FF000000ULL) << 8) |
90 ((data & 0x0000000000FF0000ULL) << 24) |
91 ((data & 0x000000000000FF00ULL) << 40) |
92 ((data & 0x00000000000000FFULL) << 56);
93 glue(stq, MEMSUFFIX)(EA, tmp);
97 /*** Integer load ***/
/* Expand to a micro-op that loads via `op` from the EA in T0 into T1;
 * the 32-bit address-mode variant truncates T0 to 32 bits. */
98 #define PPC_LD_OP(name, op) \
99 void OPPROTO glue(glue(op_l, name), MEMSUFFIX) (void) \
101 T1 = glue(op, MEMSUFFIX)((uint32_t)T0); \
105 #if defined(TARGET_PPC64)
/* Same, but for 64-bit address mode (full 64-bit EA, "_64" name suffix). */
106 #define PPC_LD_OP_64(name, op) \
107 void OPPROTO glue(glue(glue(op_l, name), _64), MEMSUFFIX) (void) \
109 T1 = glue(op, MEMSUFFIX)((uint64_t)T0); \
/* Store counterparts: write T1 via `op` to the EA in T0. */
114 #define PPC_ST_OP(name, op) \
115 void OPPROTO glue(glue(op_st, name), MEMSUFFIX) (void) \
117 glue(op, MEMSUFFIX)((uint32_t)T0, T1); \
121 #if defined(TARGET_PPC64)
122 #define PPC_ST_OP_64(name, op) \
123 void OPPROTO glue(glue(glue(op_st, name), _64), MEMSUFFIX) (void) \
125 glue(op, MEMSUFFIX)((uint64_t)T0, T1); \
/* Instantiate the integer load micro-ops (big-endian, 64-bit address mode). */
134 #if defined(TARGET_PPC64)
137 PPC_LD_OP_64(d, ldq);
138 PPC_LD_OP_64(wa, ldsl);
139 PPC_LD_OP_64(bz, ldub);
140 PPC_LD_OP_64(ha, ldsw);
141 PPC_LD_OP_64(hz, lduw);
142 PPC_LD_OP_64(wz, ldl);
/* Little-endian ("_le") loads go through the byte-reversing helpers. */
145 PPC_LD_OP(ha_le, ld16rs);
146 PPC_LD_OP(hz_le, ld16r);
147 PPC_LD_OP(wz_le, ld32r);
148 #if defined(TARGET_PPC64)
149 PPC_LD_OP(d_le, ld64r);
150 PPC_LD_OP(wa_le, ld32rs);
151 PPC_LD_OP_64(d_le, ld64r);
152 PPC_LD_OP_64(wa_le, ld32rs);
153 PPC_LD_OP_64(ha_le, ld16rs);
154 PPC_LD_OP_64(hz_le, ld16r);
155 PPC_LD_OP_64(wz_le, ld32r);
158 /*** Integer store ***/
162 #if defined(TARGET_PPC64)
164 PPC_ST_OP_64(d, stq);
165 PPC_ST_OP_64(b, stb);
166 PPC_ST_OP_64(h, stw);
167 PPC_ST_OP_64(w, stl);
/* Little-endian stores via byte-reversing helpers. */
170 PPC_ST_OP(h_le, st16r);
171 PPC_ST_OP(w_le, st32r);
172 #if defined(TARGET_PPC64)
173 PPC_ST_OP(d_le, st64r);
174 PPC_ST_OP_64(d_le, st64r);
175 PPC_ST_OP_64(h_le, st16r);
176 PPC_ST_OP_64(w_le, st32r);
179 /*** Integer load and store with byte reverse ***/
/* lhbrx/lwbrx/sthbrx/stwbrx: in big-endian mode these byte-reverse ... */
180 PPC_LD_OP(hbr, ld16r);
181 PPC_LD_OP(wbr, ld32r);
182 PPC_ST_OP(hbr, st16r);
183 PPC_ST_OP(wbr, st32r);
184 #if defined(TARGET_PPC64)
185 PPC_LD_OP_64(hbr, ld16r);
186 PPC_LD_OP_64(wbr, ld32r);
187 PPC_ST_OP_64(hbr, st16r);
188 PPC_ST_OP_64(wbr, st32r);
/* ... while in little-endian mode the reversals cancel out, so the
 * natural-order accessors are used. */
191 PPC_LD_OP(hbr_le, lduw);
192 PPC_LD_OP(wbr_le, ldl);
193 PPC_ST_OP(hbr_le, stw);
194 PPC_ST_OP(wbr_le, stl);
195 #if defined(TARGET_PPC64)
196 PPC_LD_OP_64(hbr_le, lduw);
197 PPC_LD_OP_64(wbr_le, ldl);
198 PPC_ST_OP_64(hbr_le, stw);
199 PPC_ST_OP_64(wbr_le, stl);
202 /*** Integer load and store multiple ***/
/* lmw: load multiple words starting at GPR PARAM1; work is done in the
 * out-of-line do_lmw* helpers. */
203 void OPPROTO glue(op_lmw, MEMSUFFIX) (void)
205 glue(do_lmw, MEMSUFFIX)(PARAM1);
209 #if defined(TARGET_PPC64)
210 void OPPROTO glue(op_lmw_64, MEMSUFFIX) (void)
212 glue(do_lmw_64, MEMSUFFIX)(PARAM1);
217 void OPPROTO glue(op_lmw_le, MEMSUFFIX) (void)
219 glue(do_lmw_le, MEMSUFFIX)(PARAM1);
223 #if defined(TARGET_PPC64)
224 void OPPROTO glue(op_lmw_le_64, MEMSUFFIX) (void)
226 glue(do_lmw_le_64, MEMSUFFIX)(PARAM1);
/* stmw: store multiple words starting at GPR PARAM1. */
231 void OPPROTO glue(op_stmw, MEMSUFFIX) (void)
233 glue(do_stmw, MEMSUFFIX)(PARAM1);
237 #if defined(TARGET_PPC64)
238 void OPPROTO glue(op_stmw_64, MEMSUFFIX) (void)
240 glue(do_stmw_64, MEMSUFFIX)(PARAM1);
245 void OPPROTO glue(op_stmw_le, MEMSUFFIX) (void)
247 glue(do_stmw_le, MEMSUFFIX)(PARAM1);
251 #if defined(TARGET_PPC64)
252 void OPPROTO glue(op_stmw_le_64, MEMSUFFIX) (void)
254 glue(do_stmw_le_64, MEMSUFFIX)(PARAM1);
259 /*** Integer load and store strings ***/
/* lswi: load string word immediate; PARAM1 is the starting register. */
260 void OPPROTO glue(op_lswi, MEMSUFFIX) (void)
262 glue(do_lsw, MEMSUFFIX)(PARAM1);
266 #if defined(TARGET_PPC64)
267 void OPPROTO glue(op_lswi_64, MEMSUFFIX) (void)
269 glue(do_lsw_64, MEMSUFFIX)(PARAM1);
274 void OPPROTO glue(op_lswi_le, MEMSUFFIX) (void)
276 glue(do_lsw_le, MEMSUFFIX)(PARAM1);
280 #if defined(TARGET_PPC64)
281 void OPPROTO glue(op_lswi_le_64, MEMSUFFIX) (void)
283 glue(do_lsw_le_64, MEMSUFFIX)(PARAM1);
288 /* PPC32 specification says we must generate an exception if
289 * rA is in the range of registers to be loaded.
290 * In an other hand, IBM says this is valid, but rA won't be loaded.
291 * For now, I'll follow the spec...
/* lswx: load string word indexed. PARAM1 = first target GPR, PARAM2/PARAM3 =
 * rA/rB; the unlikely() test raises a program exception when the destination
 * register range wraps over rA or rB, as required by the spec above. */
293 void OPPROTO glue(op_lswx, MEMSUFFIX) (void)
295 /* Note: T1 comes from xer_bc then no cast is needed */
296 if (likely(T1 != 0)) {
297 if (unlikely((PARAM1 < PARAM2 && (PARAM1 + T1) > PARAM2) ||
298 (PARAM1 < PARAM3 && (PARAM1 + T1) > PARAM3))) {
299 do_raise_exception_err(POWERPC_EXCP_PROGRAM,
301 POWERPC_EXCP_INVAL_LSWX);
303 glue(do_lsw, MEMSUFFIX)(PARAM1);
309 #if defined(TARGET_PPC64)
310 void OPPROTO glue(op_lswx_64, MEMSUFFIX) (void)
312 /* Note: T1 comes from xer_bc then no cast is needed */
313 if (likely(T1 != 0)) {
314 if (unlikely((PARAM1 < PARAM2 && (PARAM1 + T1) > PARAM2) ||
315 (PARAM1 < PARAM3 && (PARAM1 + T1) > PARAM3))) {
316 do_raise_exception_err(POWERPC_EXCP_PROGRAM,
318 POWERPC_EXCP_INVAL_LSWX);
320 glue(do_lsw_64, MEMSUFFIX)(PARAM1);
/* Little-endian variants: identical range check, byte-reversed helper. */
327 void OPPROTO glue(op_lswx_le, MEMSUFFIX) (void)
329 /* Note: T1 comes from xer_bc then no cast is needed */
330 if (likely(T1 != 0)) {
331 if (unlikely((PARAM1 < PARAM2 && (PARAM1 + T1) > PARAM2) ||
332 (PARAM1 < PARAM3 && (PARAM1 + T1) > PARAM3))) {
333 do_raise_exception_err(POWERPC_EXCP_PROGRAM,
335 POWERPC_EXCP_INVAL_LSWX);
337 glue(do_lsw_le, MEMSUFFIX)(PARAM1);
343 #if defined(TARGET_PPC64)
344 void OPPROTO glue(op_lswx_le_64, MEMSUFFIX) (void)
346 /* Note: T1 comes from xer_bc then no cast is needed */
347 if (likely(T1 != 0)) {
348 if (unlikely((PARAM1 < PARAM2 && (PARAM1 + T1) > PARAM2) ||
349 (PARAM1 < PARAM3 && (PARAM1 + T1) > PARAM3))) {
350 do_raise_exception_err(POWERPC_EXCP_PROGRAM,
352 POWERPC_EXCP_INVAL_LSWX);
354 glue(do_lsw_le_64, MEMSUFFIX)(PARAM1);
/* stswi/stswx: store string word; helpers take the starting GPR in PARAM1. */
361 void OPPROTO glue(op_stsw, MEMSUFFIX) (void)
363 glue(do_stsw, MEMSUFFIX)(PARAM1);
367 #if defined(TARGET_PPC64)
368 void OPPROTO glue(op_stsw_64, MEMSUFFIX) (void)
370 glue(do_stsw_64, MEMSUFFIX)(PARAM1);
375 void OPPROTO glue(op_stsw_le, MEMSUFFIX) (void)
377 glue(do_stsw_le, MEMSUFFIX)(PARAM1);
381 #if defined(TARGET_PPC64)
382 void OPPROTO glue(op_stsw_le_64, MEMSUFFIX) (void)
384 glue(do_stsw_le_64, MEMSUFFIX)(PARAM1);
389 /*** Floating-point store ***/
/* Store FT0 via `op` to the EA in T0 (32-bit / 64-bit address modes). */
390 #define PPC_STF_OP(name, op) \
391 void OPPROTO glue(glue(op_st, name), MEMSUFFIX) (void) \
393 glue(op, MEMSUFFIX)((uint32_t)T0, FT0); \
397 #if defined(TARGET_PPC64)
398 #define PPC_STF_OP_64(name, op) \
399 void OPPROTO glue(glue(glue(op_st, name), _64), MEMSUFFIX) (void) \
401 glue(op, MEMSUFFIX)((uint64_t)T0, FT0); \
/* stfs: convert the 64-bit value to single precision, then store 32 bits. */
406 static inline void glue(stfs, MEMSUFFIX) (target_ulong EA, double d)
408 glue(stfl, MEMSUFFIX)(EA, float64_to_float32(d, &env->fp_status));
/* stfiwx: store the low 32 bits of the raw FP image, no conversion.
 * NOTE(review): the union { double d; uint64_t u; } declaration and the
 * `u.d = d;` assignment appear to have been lost in extraction — `u` is
 * otherwise undeclared here; verify against the upstream file. */
411 static inline void glue(stfiwx, MEMSUFFIX) (target_ulong EA, double d)
418 /* Store the low order 32 bits without any conversion */
420 glue(stl, MEMSUFFIX)(EA, u.u);
423 PPC_STF_OP(fd, stfq);
424 PPC_STF_OP(fs, stfs);
425 PPC_STF_OP(fiwx, stfiwx);
426 #if defined(TARGET_PPC64)
427 PPC_STF_OP_64(fd, stfq);
428 PPC_STF_OP_64(fs, stfs);
429 PPC_STF_OP_64(fiwx, stfiwx);
/* Byte-reversed (little-endian) FP stores: swap the raw image through a
 * union, then store. NOTE(review): the union declarations for `u` appear
 * to have been lost in extraction here as well. */
432 static inline void glue(stfqr, MEMSUFFIX) (target_ulong EA, double d)
440 u.u = ((u.u & 0xFF00000000000000ULL) >> 56) |
441 ((u.u & 0x00FF000000000000ULL) >> 40) |
442 ((u.u & 0x0000FF0000000000ULL) >> 24) |
443 ((u.u & 0x000000FF00000000ULL) >> 8) |
444 ((u.u & 0x00000000FF000000ULL) << 8) |
445 ((u.u & 0x0000000000FF0000ULL) << 24) |
446 ((u.u & 0x000000000000FF00ULL) << 40) |
447 ((u.u & 0x00000000000000FFULL) << 56);
448 glue(stfq, MEMSUFFIX)(EA, u.d);
451 static inline void glue(stfsr, MEMSUFFIX) (target_ulong EA, double d)
458 u.f = float64_to_float32(d, &env->fp_status);
459 u.u = ((u.u & 0xFF000000UL) >> 24) |
460 ((u.u & 0x00FF0000ULL) >> 8) |
461 ((u.u & 0x0000FF00UL) << 8) |
462 ((u.u & 0x000000FFULL) << 24);
463 glue(stfl, MEMSUFFIX)(EA, u.f);
466 static inline void glue(stfiwxr, MEMSUFFIX) (target_ulong EA, double d)
473 /* Store the low order 32 bits without any conversion */
475 u.u = ((u.u & 0xFF000000UL) >> 24) |
476 ((u.u & 0x00FF0000ULL) >> 8) |
477 ((u.u & 0x0000FF00UL) << 8) |
478 ((u.u & 0x000000FFULL) << 24);
479 glue(stl, MEMSUFFIX)(EA, u.u);
483 PPC_STF_OP(fd_le, stfqr);
484 PPC_STF_OP(fs_le, stfsr);
485 PPC_STF_OP(fiwx_le, stfiwxr);
486 #if defined(TARGET_PPC64)
487 PPC_STF_OP_64(fd_le, stfqr);
488 PPC_STF_OP_64(fs_le, stfsr);
489 PPC_STF_OP_64(fiwx_le, stfiwxr);
492 /*** Floating-point load ***/
/* Load via `op` from the EA in T0 into FT0. */
493 #define PPC_LDF_OP(name, op) \
494 void OPPROTO glue(glue(op_l, name), MEMSUFFIX) (void) \
496 FT0 = glue(op, MEMSUFFIX)((uint32_t)T0); \
500 #if defined(TARGET_PPC64)
501 #define PPC_LDF_OP_64(name, op) \
502 void OPPROTO glue(glue(glue(op_l, name), _64), MEMSUFFIX) (void) \
504 FT0 = glue(op, MEMSUFFIX)((uint64_t)T0); \
/* lfs: load 32-bit float and widen to double precision. */
509 static inline double glue(ldfs, MEMSUFFIX) (target_ulong EA)
511 return float32_to_float64(glue(ldfl, MEMSUFFIX)(EA), &env->fp_status);
514 PPC_LDF_OP(fd, ldfq);
515 PPC_LDF_OP(fs, ldfs);
516 #if defined(TARGET_PPC64)
517 PPC_LDF_OP_64(fd, ldfq);
518 PPC_LDF_OP_64(fs, ldfs);
/* Byte-reversed (little-endian) FP loads. NOTE(review): the union
 * declarations for `u` and the final `return u.d;` appear to have been lost
 * in extraction — verify against the upstream file. */
521 static inline double glue(ldfqr, MEMSUFFIX) (target_ulong EA)
528 u.d = glue(ldfq, MEMSUFFIX)(EA);
529 u.u = ((u.u & 0xFF00000000000000ULL) >> 56) |
530 ((u.u & 0x00FF000000000000ULL) >> 40) |
531 ((u.u & 0x0000FF0000000000ULL) >> 24) |
532 ((u.u & 0x000000FF00000000ULL) >> 8) |
533 ((u.u & 0x00000000FF000000ULL) << 8) |
534 ((u.u & 0x0000000000FF0000ULL) << 24) |
535 ((u.u & 0x000000000000FF00ULL) << 40) |
536 ((u.u & 0x00000000000000FFULL) << 56);
541 static inline double glue(ldfsr, MEMSUFFIX) (target_ulong EA)
548 u.f = glue(ldfl, MEMSUFFIX)(EA);
549 u.u = ((u.u & 0xFF000000UL) >> 24) |
550 ((u.u & 0x00FF0000ULL) >> 8) |
551 ((u.u & 0x0000FF00UL) << 8) |
552 ((u.u & 0x000000FFULL) << 24);
554 return float32_to_float64(u.f, &env->fp_status);
557 PPC_LDF_OP(fd_le, ldfqr);
558 PPC_LDF_OP(fs_le, ldfsr);
559 #if defined(TARGET_PPC64)
560 PPC_LDF_OP_64(fd_le, ldfqr);
561 PPC_LDF_OP_64(fs_le, ldfsr);
564 /* Load and set reservation */
/* lwarx: word load that records a reservation address in env->reserve.
 * Word alignment (low 2 bits, mask 0x03) is required, else ALIGN exception. */
565 void OPPROTO glue(op_lwarx, MEMSUFFIX) (void)
567 if (unlikely(T0 & 0x03)) {
568 do_raise_exception(POWERPC_EXCP_ALIGN);
570 T1 = glue(ldl, MEMSUFFIX)((uint32_t)T0);
571 env->reserve = (uint32_t)T0;
576 #if defined(TARGET_PPC64)
/* Same in 64-bit address mode (full T0 used as EA). */
577 void OPPROTO glue(op_lwarx_64, MEMSUFFIX) (void)
579 if (unlikely(T0 & 0x03)) {
580 do_raise_exception(POWERPC_EXCP_ALIGN);
582 T1 = glue(ldl, MEMSUFFIX)((uint64_t)T0);
583 env->reserve = (uint64_t)T0;
588 void OPPROTO glue(op_ldarx, MEMSUFFIX) (void)
590 if (unlikely(T0 & 0x03)) {
591 do_raise_exception(POWERPC_EXCP_ALIGN);
593 T1 = glue(ldq, MEMSUFFIX)((uint32_t)T0);
594 env->reserve = (uint32_t)T0;
599 void OPPROTO glue(op_ldarx_64, MEMSUFFIX) (void)
601 if (unlikely(T0 & 0x03)) {
602 do_raise_exception(POWERPC_EXCP_ALIGN);
604 T1 = glue(ldq, MEMSUFFIX)((uint64_t)T0);
605 env->reserve = (uint64_t)T0;
/* Little-endian lwarx: byte-reversed word load plus reservation. */
611 void OPPROTO glue(op_lwarx_le, MEMSUFFIX) (void)
613 if (unlikely(T0 & 0x03)) {
614 do_raise_exception(POWERPC_EXCP_ALIGN);
616 T1 = glue(ld32r, MEMSUFFIX)((uint32_t)T0);
617 env->reserve = (uint32_t)T0;
622 #if defined(TARGET_PPC64)
623 void OPPROTO glue(op_lwarx_le_64, MEMSUFFIX) (void)
625 if (unlikely(T0 & 0x03)) {
626 do_raise_exception(POWERPC_EXCP_ALIGN);
628 T1 = glue(ld32r, MEMSUFFIX)((uint64_t)T0);
629 env->reserve = (uint64_t)T0;
634 void OPPROTO glue(op_ldarx_le, MEMSUFFIX) (void)
636 if (unlikely(T0 & 0x03)) {
637 do_raise_exception(POWERPC_EXCP_ALIGN);
639 T1 = glue(ld64r, MEMSUFFIX)((uint32_t)T0);
640 env->reserve = (uint32_t)T0;
645 void OPPROTO glue(op_ldarx_le_64, MEMSUFFIX) (void)
647 if (unlikely(T0 & 0x03)) {
648 do_raise_exception(POWERPC_EXCP_ALIGN);
650 T1 = glue(ld64r, MEMSUFFIX)((uint64_t)T0);
651 env->reserve = (uint64_t)T0;
657 /* Store with reservation */
/* stwcx.: store word iff the reservation matches T0; CR0 gets xer_so plus
 * the EQ bit (0x02) on success. */
658 void OPPROTO glue(op_stwcx, MEMSUFFIX) (void)
660 if (unlikely(T0 & 0x03)) {
661 do_raise_exception(POWERPC_EXCP_ALIGN);
663 if (unlikely(env->reserve != (uint32_t)T0)) {
664 env->crf[0] = xer_so;
666 glue(stl, MEMSUFFIX)((uint32_t)T0, T1);
667 env->crf[0] = xer_so | 0x02;
674 #if defined(TARGET_PPC64)
675 void OPPROTO glue(op_stwcx_64, MEMSUFFIX) (void)
677 if (unlikely(T0 & 0x03)) {
678 do_raise_exception(POWERPC_EXCP_ALIGN);
680 if (unlikely(env->reserve != (uint64_t)T0)) {
681 env->crf[0] = xer_so;
683 glue(stl, MEMSUFFIX)((uint64_t)T0, T1);
684 env->crf[0] = xer_so | 0x02;
691 void OPPROTO glue(op_stdcx, MEMSUFFIX) (void)
693 if (unlikely(T0 & 0x03)) {
694 do_raise_exception(POWERPC_EXCP_ALIGN);
696 if (unlikely(env->reserve != (uint32_t)T0)) {
697 env->crf[0] = xer_so;
699 glue(stq, MEMSUFFIX)((uint32_t)T0, T1);
700 env->crf[0] = xer_so | 0x02;
707 void OPPROTO glue(op_stdcx_64, MEMSUFFIX) (void)
709 if (unlikely(T0 & 0x03)) {
710 do_raise_exception(POWERPC_EXCP_ALIGN);
712 if (unlikely(env->reserve != (uint64_t)T0)) {
713 env->crf[0] = xer_so;
715 glue(stq, MEMSUFFIX)((uint64_t)T0, T1);
716 env->crf[0] = xer_so | 0x02;
/* Little-endian stwcx.: byte-reversed conditional word store. */
724 void OPPROTO glue(op_stwcx_le, MEMSUFFIX) (void)
726 if (unlikely(T0 & 0x03)) {
727 do_raise_exception(POWERPC_EXCP_ALIGN);
729 if (unlikely(env->reserve != (uint32_t)T0)) {
730 env->crf[0] = xer_so;
732 glue(st32r, MEMSUFFIX)((uint32_t)T0, T1);
733 env->crf[0] = xer_so | 0x02;
740 #if defined(TARGET_PPC64)
741 void OPPROTO glue(op_stwcx_le_64, MEMSUFFIX) (void)
743 if (unlikely(T0 & 0x03)) {
744 do_raise_exception(POWERPC_EXCP_ALIGN);
746 if (unlikely(env->reserve != (uint64_t)T0)) {
747 env->crf[0] = xer_so;
749 glue(st32r, MEMSUFFIX)((uint64_t)T0, T1);
750 env->crf[0] = xer_so | 0x02;
757 void OPPROTO glue(op_stdcx_le, MEMSUFFIX) (void)
759 if (unlikely(T0 & 0x03)) {
760 do_raise_exception(POWERPC_EXCP_ALIGN);
762 if (unlikely(env->reserve != (uint32_t)T0)) {
763 env->crf[0] = xer_so;
765 glue(st64r, MEMSUFFIX)((uint32_t)T0, T1);
766 env->crf[0] = xer_so | 0x02;
773 void OPPROTO glue(op_stdcx_le_64, MEMSUFFIX) (void)
775 if (unlikely(T0 & 0x03)) {
776 do_raise_exception(POWERPC_EXCP_ALIGN);
778 if (unlikely(env->reserve != (uint64_t)T0)) {
779 env->crf[0] = xer_so;
781 glue(st64r, MEMSUFFIX)((uint64_t)T0, T1);
782 env->crf[0] = xer_so | 0x02;
/* dcbz with a 32-byte cache line (32-bit address mode): zero the line by
 * eight 4-byte stores. T0 is assumed pre-aligned to the line size by the
 * caller — TODO confirm against the translator. */
790 void OPPROTO glue(op_dcbz_l32, MEMSUFFIX) (void)
792 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x00), 0);
793 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x04), 0);
794 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x08), 0);
795 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x0C), 0);
796 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x10), 0);
797 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x14), 0);
798 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x18), 0);
799 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x1C), 0);
/* 64-byte cache line variant. */
803 void OPPROTO glue(op_dcbz_l64, MEMSUFFIX) (void)
805 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x00), 0);
806 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x04), 0);
807 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x08), 0);
808 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x0C), 0);
809 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x10), 0);
810 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x14), 0);
811 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x18), 0);
812 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x1C), 0);
813 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x20UL), 0);
814 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x24UL), 0);
815 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x28UL), 0);
816 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x2CUL), 0);
817 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x30UL), 0);
818 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x34UL), 0);
819 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x38UL), 0);
820 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x3CUL), 0);
/* 128-byte cache line variant. */
824 void OPPROTO glue(op_dcbz_l128, MEMSUFFIX) (void)
826 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x00), 0);
827 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x04), 0);
828 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x08), 0);
829 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x0C), 0);
830 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x10), 0);
831 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x14), 0);
832 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x18), 0);
833 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x1C), 0);
834 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x20UL), 0);
835 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x24UL), 0);
836 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x28UL), 0);
837 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x2CUL), 0);
838 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x30UL), 0);
839 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x34UL), 0);
840 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x38UL), 0);
841 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x3CUL), 0);
842 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x40UL), 0);
843 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x44UL), 0);
844 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x48UL), 0);
845 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x4CUL), 0);
846 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x50UL), 0);
847 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x54UL), 0);
848 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x58UL), 0);
849 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x5CUL), 0);
850 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x60UL), 0);
851 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x64UL), 0);
852 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x68UL), 0);
853 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x6CUL), 0);
854 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x70UL), 0);
855 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x74UL), 0);
856 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x78UL), 0);
857 glue(stl, MEMSUFFIX)((uint32_t)(T0 + 0x7CUL), 0);
/* Generic dcbz: dispatch via the out-of-line helper (line size from env). */
861 void OPPROTO glue(op_dcbz, MEMSUFFIX) (void)
863 glue(do_dcbz, MEMSUFFIX)();
867 #if defined(TARGET_PPC64)
/* dcbz variants for 64-bit address mode: same layouts as above but the
 * effective address keeps the full 64 bits of T0. */
868 void OPPROTO glue(op_dcbz_l32_64, MEMSUFFIX) (void)
870 glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x00), 0);
871 glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x04), 0);
872 glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x08), 0);
873 glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x0C), 0);
874 glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x10), 0);
875 glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x14), 0);
876 glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x18), 0);
877 glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x1C), 0);
881 void OPPROTO glue(op_dcbz_l64_64, MEMSUFFIX) (void)
883 glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x00), 0);
884 glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x04), 0);
885 glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x08), 0);
886 glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x0C), 0);
887 glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x10), 0);
888 glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x14), 0);
889 glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x18), 0);
890 glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x1C), 0);
891 glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x20UL), 0);
892 glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x24UL), 0);
893 glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x28UL), 0);
894 glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x2CUL), 0);
895 glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x30UL), 0);
896 glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x34UL), 0);
897 glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x38UL), 0);
898 glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x3CUL), 0);
902 void OPPROTO glue(op_dcbz_l128_64, MEMSUFFIX) (void)
904 glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x00), 0);
905 glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x04), 0);
906 glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x08), 0);
907 glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x0C), 0);
908 glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x10), 0);
909 glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x14), 0);
910 glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x18), 0);
911 glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x1C), 0);
912 glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x20UL), 0);
913 glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x24UL), 0);
914 glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x28UL), 0);
915 glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x2CUL), 0);
916 glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x30UL), 0);
917 glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x34UL), 0);
918 glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x38UL), 0);
919 glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x3CUL), 0);
920 glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x40UL), 0);
921 glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x44UL), 0);
922 glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x48UL), 0);
923 glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x4CUL), 0);
924 glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x50UL), 0);
925 glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x54UL), 0);
926 glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x58UL), 0);
927 glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x5CUL), 0);
928 glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x60UL), 0);
929 glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x64UL), 0);
930 glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x68UL), 0);
931 glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x6CUL), 0);
932 glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x70UL), 0);
933 glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x74UL), 0);
934 glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x78UL), 0);
935 glue(stl, MEMSUFFIX)((uint64_t)(T0 + 0x7CUL), 0);
/* Generic 64-bit dcbz: dispatch via the out-of-line helper. */
939 void OPPROTO glue(op_dcbz_64, MEMSUFFIX) (void)
941 glue(do_dcbz_64, MEMSUFFIX)();
946 /* Instruction cache block invalidate */
/* icbi: delegated to the out-of-line helper. */
947 void OPPROTO glue(op_icbi, MEMSUFFIX) (void)
949 glue(do_icbi, MEMSUFFIX)();
953 #if defined(TARGET_PPC64)
954 void OPPROTO glue(op_icbi_64, MEMSUFFIX) (void)
956 glue(do_icbi_64, MEMSUFFIX)();
961 /* External access */
/* eciwx: external control word load into T1 from the EA in T0. */
962 void OPPROTO glue(op_eciwx, MEMSUFFIX) (void)
964 T1 = glue(ldl, MEMSUFFIX)((uint32_t)T0);
968 #if defined(TARGET_PPC64)
969 void OPPROTO glue(op_eciwx_64, MEMSUFFIX) (void)
971 T1 = glue(ldl, MEMSUFFIX)((uint64_t)T0);
/* ecowx: external control word store of T1 to the EA in T0. */
976 void OPPROTO glue(op_ecowx, MEMSUFFIX) (void)
978 glue(stl, MEMSUFFIX)((uint32_t)T0, T1);
982 #if defined(TARGET_PPC64)
983 void OPPROTO glue(op_ecowx_64, MEMSUFFIX) (void)
985 glue(stl, MEMSUFFIX)((uint64_t)T0, T1);
/* Little-endian variants through the byte-reversing helpers. */
990 void OPPROTO glue(op_eciwx_le, MEMSUFFIX) (void)
992 T1 = glue(ld32r, MEMSUFFIX)((uint32_t)T0);
996 #if defined(TARGET_PPC64)
997 void OPPROTO glue(op_eciwx_le_64, MEMSUFFIX) (void)
999 T1 = glue(ld32r, MEMSUFFIX)((uint64_t)T0);
1004 void OPPROTO glue(op_ecowx_le, MEMSUFFIX) (void)
1006 glue(st32r, MEMSUFFIX)((uint32_t)T0, T1);
1010 #if defined(TARGET_PPC64)
1011 void OPPROTO glue(op_ecowx_le_64, MEMSUFFIX) (void)
1013 glue(st32r, MEMSUFFIX)((uint64_t)T0, T1);
1018 /* XXX: those micro-ops need tests ! */
1019 /* PowerPC 601 specific instructions (POWER bridge) */
/* lscbx: load string and compare byte; T1 holds the byte count (from XER). */
1020 void OPPROTO glue(op_POWER_lscbx, MEMSUFFIX) (void)
1022 /* When byte count is 0, do nothing */
1023 if (likely(T1 != 0)) {
1024 glue(do_POWER_lscbx, MEMSUFFIX)(PARAM1, PARAM2, PARAM3);
1029 /* POWER2 quad load and store */
1030 /* XXX: TAGs are not managed */
1031 void OPPROTO glue(op_POWER2_lfq, MEMSUFFIX) (void)
1033 glue(do_POWER2_lfq, MEMSUFFIX)();
1037 void glue(op_POWER2_lfq_le, MEMSUFFIX) (void)
1039 glue(do_POWER2_lfq_le, MEMSUFFIX)();
/* POWER2 quad float stores, big- and little-endian. */
1043 void OPPROTO glue(op_POWER2_stfq, MEMSUFFIX) (void)
1045 glue(do_POWER2_stfq, MEMSUFFIX)();
1049 void OPPROTO glue(op_POWER2_stfq_le, MEMSUFFIX) (void)
1051 glue(do_POWER2_stfq_le, MEMSUFFIX)();
1055 #if defined(TARGET_PPCEMB)
/* SPE (embedded FP/vector) extension: 64-bit loads into T1_64 / stores of
 * T1_64, instantiated for both address modes when TARGET_PPC64 is set. */
1057 #define _PPC_SPE_LD_OP(name, op) \
1058 void OPPROTO glue(glue(op_spe_l, name), MEMSUFFIX) (void) \
1060 T1_64 = glue(op, MEMSUFFIX)((uint32_t)T0); \
1064 #if defined(TARGET_PPC64)
1065 #define _PPC_SPE_LD_OP_64(name, op) \
1066 void OPPROTO glue(glue(glue(op_spe_l, name), _64), MEMSUFFIX) (void) \
1068 T1_64 = glue(op, MEMSUFFIX)((uint64_t)T0); \
/* NOTE(review): the two PPC_SPE_LD_OP definitions below are the two arms of
 * an #if/#else whose #else line appears lost in extraction — verify. */
1071 #define PPC_SPE_LD_OP(name, op) \
1072 _PPC_SPE_LD_OP(name, op); \
1073 _PPC_SPE_LD_OP_64(name, op)
1075 #define PPC_SPE_LD_OP(name, op) \
1076 _PPC_SPE_LD_OP(name, op)
1080 #define _PPC_SPE_ST_OP(name, op) \
1081 void OPPROTO glue(glue(op_spe_st, name), MEMSUFFIX) (void) \
1083 glue(op, MEMSUFFIX)((uint32_t)T0, T1_64); \
1087 #if defined(TARGET_PPC64)
1088 #define _PPC_SPE_ST_OP_64(name, op) \
1089 void OPPROTO glue(glue(glue(op_spe_st, name), _64), MEMSUFFIX) (void) \
1091 glue(op, MEMSUFFIX)((uint64_t)T0, T1_64); \
1094 #define PPC_SPE_ST_OP(name, op) \
1095 _PPC_SPE_ST_OP(name, op); \
1096 _PPC_SPE_ST_OP_64(name, op)
1098 #define PPC_SPE_ST_OP(name, op) \
1099 _PPC_SPE_ST_OP(name, op)
1102 #if !defined(TARGET_PPC64)
/* evldd/evstdd: whole 64-bit doubleword, native and byte-reversed. */
1103 PPC_SPE_LD_OP(dd, ldq);
1104 PPC_SPE_ST_OP(dd, stq);
1105 PPC_SPE_LD_OP(dd_le, ld64r);
1106 PPC_SPE_ST_OP(dd_le, st64r);
/* evldw: two 32-bit words packed high/low into one 64-bit value. */
1108 static inline uint64_t glue(spe_ldw, MEMSUFFIX) (target_ulong EA)
1111 ret = (uint64_t)glue(ldl, MEMSUFFIX)(EA) << 32;
1112 ret |= (uint64_t)glue(ldl, MEMSUFFIX)(EA + 4);
1115 PPC_SPE_LD_OP(dw, spe_ldw);
1116 static inline void glue(spe_stdw, MEMSUFFIX) (target_ulong EA, uint64_t data)
1118 glue(stl, MEMSUFFIX)(EA, data >> 32);
1119 glue(stl, MEMSUFFIX)(EA + 4, data);
1121 PPC_SPE_ST_OP(dw, spe_stdw);
/* Byte-reversed word pair (little-endian evldw/evstdw). */
1122 static inline uint64_t glue(spe_ldw_le, MEMSUFFIX) (target_ulong EA)
1125 ret = (uint64_t)glue(ld32r, MEMSUFFIX)(EA) << 32;
1126 ret |= (uint64_t)glue(ld32r, MEMSUFFIX)(EA + 4);
1129 PPC_SPE_LD_OP(dw_le, spe_ldw_le);
1130 static inline void glue(spe_stdw_le, MEMSUFFIX) (target_ulong EA,
1133 glue(st32r, MEMSUFFIX)(EA, data >> 32);
1134 glue(st32r, MEMSUFFIX)(EA + 4, data);
1136 PPC_SPE_ST_OP(dw_le, spe_stdw_le);
/* evldh: four 16-bit halfwords packed into one 64-bit value. */
1137 static inline uint64_t glue(spe_ldh, MEMSUFFIX) (target_ulong EA)
1140 ret = (uint64_t)glue(lduw, MEMSUFFIX)(EA) << 48;
1141 ret |= (uint64_t)glue(lduw, MEMSUFFIX)(EA + 2) << 32;
1142 ret |= (uint64_t)glue(lduw, MEMSUFFIX)(EA + 4) << 16;
1143 ret |= (uint64_t)glue(lduw, MEMSUFFIX)(EA + 6);
1146 PPC_SPE_LD_OP(dh, spe_ldh);
1147 static inline void glue(spe_stdh, MEMSUFFIX) (target_ulong EA, uint64_t data)
1149 glue(stw, MEMSUFFIX)(EA, data >> 48);
1150 glue(stw, MEMSUFFIX)(EA + 2, data >> 32);
1151 glue(stw, MEMSUFFIX)(EA + 4, data >> 16);
1152 glue(stw, MEMSUFFIX)(EA + 6, data);
1154 PPC_SPE_ST_OP(dh, spe_stdh);
/* Byte-reversed halfword quads (little-endian evldh/evstdh). */
1155 static inline uint64_t glue(spe_ldh_le, MEMSUFFIX) (target_ulong EA)
1158 ret = (uint64_t)glue(ld16r, MEMSUFFIX)(EA) << 48;
1159 ret |= (uint64_t)glue(ld16r, MEMSUFFIX)(EA + 2) << 32;
1160 ret |= (uint64_t)glue(ld16r, MEMSUFFIX)(EA + 4) << 16;
1161 ret |= (uint64_t)glue(ld16r, MEMSUFFIX)(EA + 6);
1164 PPC_SPE_LD_OP(dh_le, spe_ldh_le);
1165 static inline void glue(spe_stdh_le, MEMSUFFIX) (target_ulong EA,
1168 glue(st16r, MEMSUFFIX)(EA, data >> 48);
1169 glue(st16r, MEMSUFFIX)(EA + 2, data >> 32);
1170 glue(st16r, MEMSUFFIX)(EA + 4, data >> 16);
1171 glue(st16r, MEMSUFFIX)(EA + 6, data);
1173 PPC_SPE_ST_OP(dh_le, spe_stdh_le);
/* evlwhe: load two halfwords into the even (high) half of each word. */
1174 static inline uint64_t glue(spe_lwhe, MEMSUFFIX) (target_ulong EA)
1177 ret = (uint64_t)glue(lduw, MEMSUFFIX)(EA) << 48;
1178 ret |= (uint64_t)glue(lduw, MEMSUFFIX)(EA + 2) << 16;
1181 PPC_SPE_LD_OP(whe, spe_lwhe);
1182 static inline void glue(spe_stwhe, MEMSUFFIX) (target_ulong EA, uint64_t data)
1184 glue(stw, MEMSUFFIX)(EA, data >> 48);
1185 glue(stw, MEMSUFFIX)(EA + 2, data >> 16);
1187 PPC_SPE_ST_OP(whe, spe_stwhe);
1188 static inline uint64_t glue(spe_lwhe_le, MEMSUFFIX) (target_ulong EA)
1191 ret = (uint64_t)glue(ld16r, MEMSUFFIX)(EA) << 48;
1192 ret |= (uint64_t)glue(ld16r, MEMSUFFIX)(EA + 2) << 16;
1195 PPC_SPE_LD_OP(whe_le, spe_lwhe_le);
1196 static inline void glue(spe_stwhe_le, MEMSUFFIX) (target_ulong EA,
1199 glue(st16r, MEMSUFFIX)(EA, data >> 48);
1200 glue(st16r, MEMSUFFIX)(EA + 2, data >> 16);
1202 PPC_SPE_ST_OP(whe_le, spe_stwhe_le);
/* evlwhou/evlwhos: two halfwords into the odd (low) half of each word,
 * zero- or sign-extended. */
1203 static inline uint64_t glue(spe_lwhou, MEMSUFFIX) (target_ulong EA)
1206 ret = (uint64_t)glue(lduw, MEMSUFFIX)(EA) << 32;
1207 ret |= (uint64_t)glue(lduw, MEMSUFFIX)(EA + 2);
1210 PPC_SPE_LD_OP(whou, spe_lwhou);
1211 static inline uint64_t glue(spe_lwhos, MEMSUFFIX) (target_ulong EA)
1214 ret = ((uint64_t)((int32_t)glue(ldsw, MEMSUFFIX)(EA))) << 32;
1215 ret |= (uint64_t)((int32_t)glue(ldsw, MEMSUFFIX)(EA + 2));
1218 PPC_SPE_LD_OP(whos, spe_lwhos);
1219 static inline void glue(spe_stwho, MEMSUFFIX) (target_ulong EA, uint64_t data)
1221 glue(stw, MEMSUFFIX)(EA, data >> 32);
1222 glue(stw, MEMSUFFIX)(EA + 2, data);
1224 PPC_SPE_ST_OP(who, spe_stwho);
1225 static inline uint64_t glue(spe_lwhou_le, MEMSUFFIX) (target_ulong EA)
1228 ret = (uint64_t)glue(ld16r, MEMSUFFIX)(EA) << 32;
1229 ret |= (uint64_t)glue(ld16r, MEMSUFFIX)(EA + 2);
1232 PPC_SPE_LD_OP(whou_le, spe_lwhou_le);
1233 static inline uint64_t glue(spe_lwhos_le, MEMSUFFIX) (target_ulong EA)
1236 ret = ((uint64_t)((int32_t)glue(ld16rs, MEMSUFFIX)(EA))) << 32;
1237 ret |= (uint64_t)((int32_t)glue(ld16rs, MEMSUFFIX)(EA + 2));
1240 PPC_SPE_LD_OP(whos_le, spe_lwhos_le);
1241 static inline void glue(spe_stwho_le, MEMSUFFIX) (target_ulong EA,
1244 glue(st16r, MEMSUFFIX)(EA, data >> 32);
1245 glue(st16r, MEMSUFFIX)(EA + 2, data);
1247 PPC_SPE_ST_OP(who_le, spe_stwho_le);
1248 #if !defined(TARGET_PPC64)
/* evstwwo: store only the odd (low) word of the 64-bit register. */
1249 static inline void glue(spe_stwwo, MEMSUFFIX) (target_ulong EA, uint64_t data)
1251 glue(stl, MEMSUFFIX)(EA, data);
1253 PPC_SPE_ST_OP(wwo, spe_stwwo);
1254 static inline void glue(spe_stwwo_le, MEMSUFFIX) (target_ulong EA,
1257 glue(st32r, MEMSUFFIX)(EA, data);
1259 PPC_SPE_ST_OP(wwo_le, spe_stwwo_le);
/* evlhhesplat-style: one halfword replicated into both word高 halves. */
1261 static inline uint64_t glue(spe_lh, MEMSUFFIX) (target_ulong EA)
1264 tmp = glue(lduw, MEMSUFFIX)(EA);
1265 return ((uint64_t)tmp << 48) | ((uint64_t)tmp << 16);
1267 PPC_SPE_LD_OP(h, spe_lh);
1268 static inline uint64_t glue(spe_lh_le, MEMSUFFIX) (target_ulong EA)
1271 tmp = glue(ld16r, MEMSUFFIX)(EA);
1272 return ((uint64_t)tmp << 48) | ((uint64_t)tmp << 16);
1274 PPC_SPE_LD_OP(h_le, spe_lh_le);
/* evlwwsplat: one word replicated into both words. */
1275 static inline uint64_t glue(spe_lwwsplat, MEMSUFFIX) (target_ulong EA)
1278 tmp = glue(ldl, MEMSUFFIX)(EA);
1279 return ((uint64_t)tmp << 32) | (uint64_t)tmp;
1281 PPC_SPE_LD_OP(wwsplat, spe_lwwsplat);
1282 static inline uint64_t glue(spe_lwwsplat_le, MEMSUFFIX) (target_ulong EA)
1285 tmp = glue(ld32r, MEMSUFFIX)(EA);
1286 return ((uint64_t)tmp << 32) | (uint64_t)tmp;
1288 PPC_SPE_LD_OP(wwsplat_le, spe_lwwsplat_le);
/* evlwhsplat: each of two halfwords replicated within its word. */
1289 static inline uint64_t glue(spe_lwhsplat, MEMSUFFIX) (target_ulong EA)
1293 tmp = glue(lduw, MEMSUFFIX)(EA);
1294 ret = ((uint64_t)tmp << 48) | ((uint64_t)tmp << 32);
1295 tmp = glue(lduw, MEMSUFFIX)(EA + 2);
1296 ret |= ((uint64_t)tmp << 16) | (uint64_t)tmp;
1299 PPC_SPE_LD_OP(whsplat, spe_lwhsplat);
1300 static inline uint64_t glue(spe_lwhsplat_le, MEMSUFFIX) (target_ulong EA)
1304 tmp = glue(ld16r, MEMSUFFIX)(EA);
1305 ret = ((uint64_t)tmp << 48) | ((uint64_t)tmp << 32);
1306 tmp = glue(ld16r, MEMSUFFIX)(EA + 2);
1307 ret |= ((uint64_t)tmp << 16) | (uint64_t)tmp;
1310 PPC_SPE_LD_OP(whsplat_le, spe_lwhsplat_le);
1311 #endif /* defined(TARGET_PPCEMB) */