|
1 /* |
|
2 * PowerPC emulation for qemu: main translation routines. |
|
3 * |
|
4 * Copyright (c) 2003-2007 Jocelyn Mayer |
|
5 * |
|
6 * This library is free software; you can redistribute it and/or |
|
7 * modify it under the terms of the GNU Lesser General Public |
|
8 * License as published by the Free Software Foundation; either |
|
9 * version 2 of the License, or (at your option) any later version. |
|
10 * |
|
11 * This library is distributed in the hope that it will be useful, |
|
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU |
|
14 * Lesser General Public License for more details. |
|
15 * |
|
16 * You should have received a copy of the GNU Lesser General Public |
|
17 * License along with this library; if not, write to the Free Software |
|
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA |
|
19 */ |
|
20 #include <stdarg.h> |
|
21 #include <stdlib.h> |
|
22 #include <stdio.h> |
|
23 #include <string.h> |
|
24 #include <inttypes.h> |
|
25 |
|
26 #include "cpu.h" |
|
27 #include "exec-all.h" |
|
28 #include "disas.h" |
|
29 #include "tcg-op.h" |
|
30 #include "qemu-common.h" |
|
31 |
|
32 #include "helper.h" |
|
33 #define GEN_HELPER 1 |
|
34 #include "helper.h" |
|
35 |
|
/* Single-step mode flags combined into ctx->singlestep_enabled */
#define CPU_SINGLE_STEP 0x1
#define CPU_BRANCH_STEP 0x2
#define GDBSTUB_SINGLE_STEP 0x4

/* Compile-time debug / statistics switches */
//#define DO_SINGLE_STEP
//#define PPC_DEBUG_DISAS
//#define DO_PPC_STATISTICS
|
44 |
|
45 /*****************************************************************************/ |
|
46 /* Code translation helpers */ |
|
47 |
|
/* global register indexes */
static TCGv_ptr cpu_env;
/* Backing storage for the name strings handed to tcg_global_mem_new*()
   in ppc_translate_init().  The size accounts for every generated name
   including its NUL terminator: e.g. "r0".."r9" take 3 bytes each and
   "r10".."r31" take 4, hence 10*3 + 22*4 for the GPRs, and likewise for
   each other register family below.  Keep these sums in sync with the
   sprintf()/p += sequences in ppc_translate_init(). */
static char cpu_reg_names[10*3 + 22*4 /* GPR */
#if !defined(TARGET_PPC64)
    + 10*4 + 22*5 /* SPE GPRh */
#endif
    + 10*4 + 22*5 /* FPR */
    + 2*(10*6 + 22*7) /* AVRh, AVRl */
    + 8*5 /* CRF */];
static TCGv cpu_gpr[32];
#if !defined(TARGET_PPC64)
/* High halves of the GPRs used by the SPE extension (32-bit targets only) */
static TCGv cpu_gprh[32];
#endif
static TCGv_i64 cpu_fpr[32];
/* Altivec registers, split into two 64-bit halves each */
static TCGv_i64 cpu_avrh[32], cpu_avrl[32];
/* Condition register, one 4-bit field per element */
static TCGv_i32 cpu_crf[8];
static TCGv cpu_nip;
static TCGv cpu_msr;
static TCGv cpu_ctr;
static TCGv cpu_lr;
static TCGv cpu_xer;
static TCGv cpu_reserve;
static TCGv_i32 cpu_fpscr;
static TCGv_i32 cpu_access_type;

#include "gen-icount.h"
|
74 |
|
/* One-time initialization of the TCG globals declared above: bind each
 * guest register to its slot in CPUState and give it a stable name.
 * The names are formatted into the static cpu_reg_names buffer; every
 * "p +=" increment must equal the length (incl. NUL) of the string just
 * written, and the total must fit the sizing expression on the buffer's
 * declaration. */
void ppc_translate_init(void)
{
    int i;
    char* p;
    static int done_init = 0;

    /* Idempotent: subsequent calls are no-ops. */
    if (done_init)
        return;

    cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");

    p = cpu_reg_names;

    for (i = 0; i < 8; i++) {
        sprintf(p, "crf%d", i);    /* always 4 chars + NUL */
        cpu_crf[i] = tcg_global_mem_new_i32(TCG_AREG0,
                                            offsetof(CPUState, crf[i]), p);
        p += 5;
    }

    for (i = 0; i < 32; i++) {
        sprintf(p, "r%d", i);
        cpu_gpr[i] = tcg_global_mem_new(TCG_AREG0,
                                        offsetof(CPUState, gpr[i]), p);
        p += (i < 10) ? 3 : 4;     /* "rN" vs "rNN" */
#if !defined(TARGET_PPC64)
        sprintf(p, "r%dH", i);
        /* NOTE(review): cpu_gprh is declared TCGv but allocated with the
         * _i32 variant — on 32-bit targets TCGv is 32-bit so this matches;
         * confirm if TCGv typing ever changes. */
        cpu_gprh[i] = tcg_global_mem_new_i32(TCG_AREG0,
                                             offsetof(CPUState, gprh[i]), p);
        p += (i < 10) ? 4 : 5;
#endif

        sprintf(p, "fp%d", i);
        cpu_fpr[i] = tcg_global_mem_new_i64(TCG_AREG0,
                                            offsetof(CPUState, fpr[i]), p);
        p += (i < 10) ? 4 : 5;

        /* Altivec registers are stored as two u64 halves; which half is
         * "high" depends on host endianness. */
        sprintf(p, "avr%dH", i);
#ifdef WORDS_BIGENDIAN
        cpu_avrh[i] = tcg_global_mem_new_i64(TCG_AREG0,
                                             offsetof(CPUState, avr[i].u64[0]), p);
#else
        cpu_avrh[i] = tcg_global_mem_new_i64(TCG_AREG0,
                                             offsetof(CPUState, avr[i].u64[1]), p);
#endif
        p += (i < 10) ? 6 : 7;

        sprintf(p, "avr%dL", i);
#ifdef WORDS_BIGENDIAN
        cpu_avrl[i] = tcg_global_mem_new_i64(TCG_AREG0,
                                             offsetof(CPUState, avr[i].u64[1]), p);
#else
        cpu_avrl[i] = tcg_global_mem_new_i64(TCG_AREG0,
                                             offsetof(CPUState, avr[i].u64[0]), p);
#endif
        p += (i < 10) ? 6 : 7;
    }

    cpu_nip = tcg_global_mem_new(TCG_AREG0,
                                 offsetof(CPUState, nip), "nip");

    cpu_msr = tcg_global_mem_new(TCG_AREG0,
                                 offsetof(CPUState, msr), "msr");

    cpu_ctr = tcg_global_mem_new(TCG_AREG0,
                                 offsetof(CPUState, ctr), "ctr");

    cpu_lr = tcg_global_mem_new(TCG_AREG0,
                                offsetof(CPUState, lr), "lr");

    cpu_xer = tcg_global_mem_new(TCG_AREG0,
                                 offsetof(CPUState, xer), "xer");

    cpu_reserve = tcg_global_mem_new(TCG_AREG0,
                                     offsetof(CPUState, reserve), "reserve");

    cpu_fpscr = tcg_global_mem_new_i32(TCG_AREG0,
                                       offsetof(CPUState, fpscr), "fpscr");

    cpu_access_type = tcg_global_mem_new_i32(TCG_AREG0,
                                             offsetof(CPUState, access_type), "access_type");

    /* register helpers (expands helper.h a second time in registration mode) */
#define GEN_HELPER 2
#include "helper.h"

    done_init = 1;
}
|
163 |
|
164 /* internal defines */ |
|
/* internal defines */
/* Per-translation-block decoder state, threaded through every gen_* routine. */
typedef struct DisasContext {
    struct TranslationBlock *tb;  /* TB currently being translated */
    target_ulong nip;             /* address of the next instruction */
    uint32_t opcode;              /* raw opcode being decoded */
    uint32_t exception;           /* POWERPC_EXCP_NONE, or the exception
                                     already raised for this instruction */
    /* Routine used to access memory */
    int mem_idx;
    int access_type;              /* last value stored to cpu_access_type,
                                     cached to elide redundant stores */
    /* Translation flags */
    int le_mode;                  /* little-endian mode */
#if defined(TARGET_PPC64)
    int sf_mode;                  /* 64-bit ("sixty-four") mode */
#endif
    int fpu_enabled;
    int altivec_enabled;
    int spe_enabled;
    ppc_spr_t *spr_cb; /* Needed to check rights for mfspr/mtspr */
    int singlestep_enabled;       /* CPU_*_STEP / GDBSTUB_SINGLE_STEP flags */
} DisasContext;
|
184 |
|
/* Decode-table entry: how to recognize and translate one opcode. */
struct opc_handler_t {
    /* invalid bits: a set bit here that is also set in the instruction
       makes the encoding illegal */
    uint32_t inval;
    /* instruction type (mask of the PPC_* flags defined below) */
    uint64_t type;
    /* handler that emits the TCG ops for this instruction */
    void (*handler)(DisasContext *ctx);
#if defined(DO_PPC_STATISTICS) || defined(PPC_DUMP_CPU)
    const char *oname;            /* opcode name, for dumps/statistics */
#endif
#if defined(DO_PPC_STATISTICS)
    uint64_t count;               /* translation count for this opcode */
#endif
};
|
199 |
|
/* Reset the softfloat exception status before an FP instruction.
   A no-op when hardware float is used instead of softfloat. */
static always_inline void gen_reset_fpstatus (void)
{
#ifdef CONFIG_SOFTFLOAT
    gen_helper_reset_fpstatus();
#endif
}
|
206 |
|
/* Compute the FPSCR FPRF field for result 'arg'.
 * set_fprf != 0: full FPRF computation plus FP status check;
 * set_rc != 0:   additionally copy the class bits into CR1 (Rc=1 forms).
 * The helper's second operand selects full vs. fpcc-only computation. */
static always_inline void gen_compute_fprf (TCGv_i64 arg, int set_fprf, int set_rc)
{
    TCGv_i32 t0 = tcg_temp_new_i32();

    if (set_fprf != 0) {
        /* This case might be optimized later */
        tcg_gen_movi_i32(t0, 1);
        gen_helper_compute_fprf(t0, arg, t0);
        if (unlikely(set_rc)) {
            tcg_gen_mov_i32(cpu_crf[1], t0);
        }
        gen_helper_float_check_status();
    } else if (unlikely(set_rc)) {
        /* We always need to compute fpcc */
        tcg_gen_movi_i32(t0, 0);
        gen_helper_compute_fprf(t0, arg, t0);
        tcg_gen_mov_i32(cpu_crf[1], t0);
    }

    tcg_temp_free_i32(t0);
}
|
228 |
|
229 static always_inline void gen_set_access_type (DisasContext *ctx, int access_type) |
|
230 { |
|
231 if (ctx->access_type != access_type) { |
|
232 tcg_gen_movi_i32(cpu_access_type, access_type); |
|
233 ctx->access_type = access_type; |
|
234 } |
|
235 } |
|
236 |
|
/* Store 'nip' into env->nip.  Outside 64-bit mode (sf_mode clear, and on
   all 32-bit targets) the address is truncated to 32 bits first.
   Note the #ifdef: on 32-bit targets only the else-branch store remains. */
static always_inline void gen_update_nip (DisasContext *ctx, target_ulong nip)
{
#if defined(TARGET_PPC64)
    if (ctx->sf_mode)
        tcg_gen_movi_tl(cpu_nip, nip);
    else
#endif
        tcg_gen_movi_tl(cpu_nip, (uint32_t)nip);
}
|
246 |
|
247 static always_inline void gen_exception_err (DisasContext *ctx, uint32_t excp, uint32_t error) |
|
248 { |
|
249 TCGv_i32 t0, t1; |
|
250 if (ctx->exception == POWERPC_EXCP_NONE) { |
|
251 gen_update_nip(ctx, ctx->nip); |
|
252 } |
|
253 t0 = tcg_const_i32(excp); |
|
254 t1 = tcg_const_i32(error); |
|
255 gen_helper_raise_exception_err(t0, t1); |
|
256 tcg_temp_free_i32(t0); |
|
257 tcg_temp_free_i32(t1); |
|
258 ctx->exception = (excp); |
|
259 } |
|
260 |
|
261 static always_inline void gen_exception (DisasContext *ctx, uint32_t excp) |
|
262 { |
|
263 TCGv_i32 t0; |
|
264 if (ctx->exception == POWERPC_EXCP_NONE) { |
|
265 gen_update_nip(ctx, ctx->nip); |
|
266 } |
|
267 t0 = tcg_const_i32(excp); |
|
268 gen_helper_raise_exception(t0); |
|
269 tcg_temp_free_i32(t0); |
|
270 ctx->exception = (excp); |
|
271 } |
|
272 |
|
273 static always_inline void gen_debug_exception (DisasContext *ctx) |
|
274 { |
|
275 TCGv_i32 t0; |
|
276 gen_update_nip(ctx, ctx->nip); |
|
277 t0 = tcg_const_i32(EXCP_DEBUG); |
|
278 gen_helper_raise_exception(t0); |
|
279 tcg_temp_free_i32(t0); |
|
280 } |
|
281 |
|
/* Raise a program exception for an invalid instruction form,
   merging 'error' into the POWERPC_EXCP_INVAL error code. */
static always_inline void gen_inval_exception (DisasContext *ctx, uint32_t error)
{
    gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_INVAL | error);
}
|
286 |
|
/* Stop translation */
/* End the TB after this instruction; NIP is updated so execution can
   resume at the right place.  No guest exception is raised. */
static always_inline void gen_stop_exception (DisasContext *ctx)
{
    gen_update_nip(ctx, ctx->nip);
    ctx->exception = POWERPC_EXCP_STOP;
}
|
293 |
|
/* No need to update nip here, as execution flow will change */
/* End the TB after this instruction without touching NIP (used when the
   instruction itself redirects control flow). */
static always_inline void gen_sync_exception (DisasContext *ctx)
{
    ctx->exception = POWERPC_EXCP_SYNC;
}
|
299 |
|
/* Declare handler gen_<name>, register its opcode-table entry via
   GEN_OPCODE, then open the handler's definition.  GEN_HANDLER2 is the
   variant with an explicit opcode-name string ('onam') distinct from the
   C identifier. */
#define GEN_HANDLER(name, opc1, opc2, opc3, inval, type)                      \
static void gen_##name (DisasContext *ctx);                                   \
GEN_OPCODE(name, opc1, opc2, opc3, inval, type);                              \
static void gen_##name (DisasContext *ctx)

#define GEN_HANDLER2(name, onam, opc1, opc2, opc3, inval, type)               \
static void gen_##name (DisasContext *ctx);                                   \
GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type);                       \
static void gen_##name (DisasContext *ctx)
|
/* One entry of the opcode table placed in the .opcodes section.
   Explicit padding keeps the embedded handler aligned identically on
   32- and 64-bit hosts so the section can be walked as an array. */
typedef struct opcode_t {
    unsigned char opc1, opc2, opc3;   /* the three opcode fields */
#if HOST_LONG_BITS == 64 /* Explicitly align to 64 bits */
    unsigned char pad[5];
#else
    unsigned char pad[1];
#endif
    opc_handler_t handler;
    const char *oname;                /* opcode name, for table dumps */
} opcode_t;
|
320 |
|
/*****************************************************************************/
/*** Instruction decoding ***/
/* Define a field extractor: name(opcode) returns the 'nb'-bit field at
   bit position 'shift' (little-endian bit numbering from bit 0). */
#define EXTRACT_HELPER(name, shift, nb)                                       \
static always_inline uint32_t name (uint32_t opcode)                          \
{                                                                             \
    return (opcode >> (shift)) & ((1 << (nb)) - 1);                           \
}

/* Sign-extending variant; the int16_t cast assumes nb <= 16 (only used
   with 16-bit fields below). */
#define EXTRACT_SHELPER(name, shift, nb)                                      \
static always_inline int32_t name (uint32_t opcode)                           \
{                                                                             \
    return (int16_t)((opcode >> (shift)) & ((1 << (nb)) - 1));                \
}

/* Opcode part 1 */
EXTRACT_HELPER(opc1, 26, 6);
/* Opcode part 2 */
EXTRACT_HELPER(opc2, 1, 5);
/* Opcode part 3 */
EXTRACT_HELPER(opc3, 6, 5);
/* Update Cr0 flags */
EXTRACT_HELPER(Rc, 0, 1);
/* Destination */
EXTRACT_HELPER(rD, 21, 5);
/* Source */
EXTRACT_HELPER(rS, 21, 5);
/* First operand */
EXTRACT_HELPER(rA, 16, 5);
/* Second operand */
EXTRACT_HELPER(rB, 11, 5);
/* Third operand */
EXTRACT_HELPER(rC, 6, 5);
/*** Get CRn ***/
EXTRACT_HELPER(crfD, 23, 3);
EXTRACT_HELPER(crfS, 18, 3);
EXTRACT_HELPER(crbD, 21, 5);
EXTRACT_HELPER(crbA, 16, 5);
EXTRACT_HELPER(crbB, 11, 5);
/* SPR / TBL */
EXTRACT_HELPER(_SPR, 11, 10);
/* The SPR number is encoded with its two 5-bit halves swapped;
   un-swap them here. */
static always_inline uint32_t SPR (uint32_t opcode)
{
    uint32_t sprn = _SPR(opcode);

    return ((sprn >> 5) & 0x1F) | ((sprn & 0x1F) << 5);
}
/*** Get constants ***/
EXTRACT_HELPER(IMM, 12, 8);
/* 16 bits signed immediate value */
EXTRACT_SHELPER(SIMM, 0, 16);
/* 16 bits unsigned immediate value */
EXTRACT_HELPER(UIMM, 0, 16);
/* 5 bits signed immediate value */
EXTRACT_HELPER(SIMM5, 16, 5);
/* 5 bits unsigned immediate value */
EXTRACT_HELPER(UIMM5, 16, 5);
/* Bit count */
EXTRACT_HELPER(NB, 11, 5);
/* Shift count */
EXTRACT_HELPER(SH, 11, 5);
/* Vector shift count */
EXTRACT_HELPER(VSH, 6, 4);
/* Mask start */
EXTRACT_HELPER(MB, 6, 5);
/* Mask end */
EXTRACT_HELPER(ME, 1, 5);
/* Trap operand */
EXTRACT_HELPER(TO, 21, 5);

EXTRACT_HELPER(CRM, 12, 8);
EXTRACT_HELPER(FM, 17, 8);
EXTRACT_HELPER(SR, 16, 4);
EXTRACT_HELPER(FPIMM, 12, 4);

/*** Jump target decoding ***/
/* Displacement */
EXTRACT_SHELPER(d, 0, 16);
/* Immediate address (LI field, low two bits always clear) */
static always_inline target_ulong LI (uint32_t opcode)
{
    return (opcode >> 0) & 0x03FFFFFC;
}

/* Branch displacement (BD field, low two bits always clear) */
static always_inline uint32_t BD (uint32_t opcode)
{
    return (opcode >> 0) & 0xFFFC;
}

EXTRACT_HELPER(BO, 21, 5);
EXTRACT_HELPER(BI, 16, 5);
/* Absolute/relative address */
EXTRACT_HELPER(AA, 1, 1);
/* Link */
EXTRACT_HELPER(LK, 0, 1);
|
415 |
|
/* Create a mask between <start> and <end> bits */
/* PowerPC (big-endian) bit numbering: bit 0 is the MSB.  The final
   'else' belongs to the if/else-if chain selected by the preprocessor
   above it; start > end produces the complemented (wrap-around) mask. */
static always_inline target_ulong MASK (uint32_t start, uint32_t end)
{
    target_ulong ret;

#if defined(TARGET_PPC64)
    if (likely(start == 0)) {
        ret = UINT64_MAX << (63 - end);
    } else if (likely(end == 63)) {
        ret = UINT64_MAX >> start;
    }
#else
    if (likely(start == 0)) {
        ret = UINT32_MAX << (31  - end);
    } else if (likely(end == 31)) {
        ret = UINT32_MAX >> start;
    }
#endif
    else {
        ret = (((target_ulong)(-1ULL)) >> (start)) ^
            (((target_ulong)(-1ULL) >> (end)) >> 1);
        if (unlikely(start > end))
            return ~ret;
    }

    return ret;
}
|
443 |
|
/*****************************************************************************/
/* PowerPC Instructions types definitions */
/* Feature flags stored in opc_handler_t.type; presumably matched against
   each CPU model's advertised flag mask during opcode-table setup
   (NOTE(review): confirm against the table-registration code). */
enum {
    PPC_NONE           = 0x0000000000000000ULL,
    /* PowerPC base instructions set */
    PPC_INSNS_BASE     = 0x0000000000000001ULL,
    /* integer operations instructions */
#define PPC_INTEGER PPC_INSNS_BASE
    /* flow control instructions */
#define PPC_FLOW PPC_INSNS_BASE
    /* virtual memory instructions */
#define PPC_MEM PPC_INSNS_BASE
    /* ld/st with reservation instructions */
#define PPC_RES PPC_INSNS_BASE
    /* spr/msr access instructions */
#define PPC_MISC PPC_INSNS_BASE
    /* Deprecated instruction sets */
    /* Original POWER instruction set */
    PPC_POWER          = 0x0000000000000002ULL,
    /* POWER2 instruction set extension */
    PPC_POWER2         = 0x0000000000000004ULL,
    /* Power RTC support */
    PPC_POWER_RTC      = 0x0000000000000008ULL,
    /* Power-to-PowerPC bridge (601) */
    PPC_POWER_BR       = 0x0000000000000010ULL,
    /* 64 bits PowerPC instruction set */
    PPC_64B            = 0x0000000000000020ULL,
    /* New 64 bits extensions (PowerPC 2.0x) */
    PPC_64BX           = 0x0000000000000040ULL,
    /* 64 bits hypervisor extensions */
    PPC_64H            = 0x0000000000000080ULL,
    /* New wait instruction (PowerPC 2.0x) */
    PPC_WAIT           = 0x0000000000000100ULL,
    /* Time base mftb instruction */
    PPC_MFTB           = 0x0000000000000200ULL,

    /* Fixed-point unit extensions */
    /* PowerPC 602 specific */
    PPC_602_SPEC       = 0x0000000000000400ULL,
    /* isel instruction */
    PPC_ISEL           = 0x0000000000000800ULL,
    /* popcntb instruction */
    PPC_POPCNTB        = 0x0000000000001000ULL,
    /* string load / store */
    PPC_STRING         = 0x0000000000002000ULL,

    /* Floating-point unit extensions */
    /* Optional floating point instructions */
    PPC_FLOAT          = 0x0000000000010000ULL,
    /* New floating-point extensions (PowerPC 2.0x) */
    PPC_FLOAT_EXT      = 0x0000000000020000ULL,
    PPC_FLOAT_FSQRT    = 0x0000000000040000ULL,
    PPC_FLOAT_FRES     = 0x0000000000080000ULL,
    PPC_FLOAT_FRSQRTE  = 0x0000000000100000ULL,
    PPC_FLOAT_FRSQRTES = 0x0000000000200000ULL,
    PPC_FLOAT_FSEL     = 0x0000000000400000ULL,
    PPC_FLOAT_STFIWX   = 0x0000000000800000ULL,

    /* Vector/SIMD extensions */
    /* Altivec support */
    PPC_ALTIVEC        = 0x0000000001000000ULL,
    /* PowerPC 2.03 SPE extension */
    PPC_SPE            = 0x0000000002000000ULL,
    /* PowerPC 2.03 SPE floating-point extension */
    PPC_SPEFPU         = 0x0000000004000000ULL,

    /* Optional memory control instructions */
    PPC_MEM_TLBIA      = 0x0000000010000000ULL,
    PPC_MEM_TLBIE      = 0x0000000020000000ULL,
    PPC_MEM_TLBSYNC    = 0x0000000040000000ULL,
    /* sync instruction */
    PPC_MEM_SYNC       = 0x0000000080000000ULL,
    /* eieio instruction */
    PPC_MEM_EIEIO      = 0x0000000100000000ULL,

    /* Cache control instructions */
    PPC_CACHE          = 0x0000000200000000ULL,
    /* icbi instruction */
    PPC_CACHE_ICBI     = 0x0000000400000000ULL,
    /* dcbz instruction with fixed cache line size */
    PPC_CACHE_DCBZ     = 0x0000000800000000ULL,
    /* dcbz instruction with tunable cache line size */
    PPC_CACHE_DCBZT    = 0x0000001000000000ULL,
    /* dcba instruction */
    PPC_CACHE_DCBA     = 0x0000002000000000ULL,
    /* Freescale cache locking instructions */
    PPC_CACHE_LOCK     = 0x0000004000000000ULL,

    /* MMU related extensions */
    /* external control instructions */
    PPC_EXTERN         = 0x0000010000000000ULL,
    /* segment register access instructions */
    PPC_SEGMENT        = 0x0000020000000000ULL,
    /* PowerPC 6xx TLB management instructions */
    PPC_6xx_TLB        = 0x0000040000000000ULL,
    /* PowerPC 74xx TLB management instructions */
    PPC_74xx_TLB       = 0x0000080000000000ULL,
    /* PowerPC 40x TLB management instructions */
    PPC_40x_TLB        = 0x0000100000000000ULL,
    /* segment register access instructions for PowerPC 64 "bridge" */
    PPC_SEGMENT_64B    = 0x0000200000000000ULL,
    /* SLB management */
    PPC_SLBI           = 0x0000400000000000ULL,

    /* Embedded PowerPC dedicated instructions */
    PPC_WRTEE          = 0x0001000000000000ULL,
    /* PowerPC 40x exception model */
    PPC_40x_EXCP       = 0x0002000000000000ULL,
    /* PowerPC 405 Mac instructions */
    PPC_405_MAC        = 0x0004000000000000ULL,
    /* PowerPC 440 specific instructions */
    PPC_440_SPEC       = 0x0008000000000000ULL,
    /* BookE (embedded) PowerPC specification */
    PPC_BOOKE          = 0x0010000000000000ULL,
    /* mfapidi instruction */
    PPC_MFAPIDI        = 0x0020000000000000ULL,
    /* tlbiva instruction */
    PPC_TLBIVA         = 0x0040000000000000ULL,
    /* tlbivax instruction */
    PPC_TLBIVAX        = 0x0080000000000000ULL,
    /* PowerPC 4xx dedicated instructions */
    PPC_4xx_COMMON     = 0x0100000000000000ULL,
    /* PowerPC 40x icbt instructions */
    PPC_40x_ICBT       = 0x0200000000000000ULL,
    /* rfmci is not implemented in all BookE PowerPC */
    PPC_RFMCI          = 0x0400000000000000ULL,
    /* rfdi instruction */
    PPC_RFDI           = 0x0800000000000000ULL,
    /* DCR accesses */
    PPC_DCR            = 0x1000000000000000ULL,
    /* DCR extended accesses */
    PPC_DCRX           = 0x2000000000000000ULL,
    /* user-mode DCR access, implemented in PowerPC 460 */
    PPC_DCRUX          = 0x4000000000000000ULL,
};
|
579 |
|
/*****************************************************************************/
/* PowerPC instructions table */
/* Opcode-table entries are emitted into a dedicated linker section so the
   setup code can iterate them as an array; each entry must be aligned to
   the host long size for that iteration to work. */
#if HOST_LONG_BITS == 64
#define OPC_ALIGN 8
#else
#define OPC_ALIGN 4
#endif
#if defined(__APPLE__)
#define OPCODES_SECTION                                                       \
    __attribute__ ((section("__TEXT,__opcodes"), unused, aligned (OPC_ALIGN) ))
#else
#define OPCODES_SECTION                                                       \
    __attribute__ ((section(".opcodes"), unused, aligned (OPC_ALIGN) ))
#endif

#if defined(DO_PPC_STATISTICS)
/* Statistics builds also record the opcode name inside the handler so
   per-opcode counts can be reported. */
#define GEN_OPCODE(name, op1, op2, op3, invl, _typ)                           \
OPCODES_SECTION opcode_t opc_##name = {                                       \
    .opc1 = op1,                                                              \
    .opc2 = op2,                                                              \
    .opc3 = op3,                                                              \
    .pad  = { 0, },                                                           \
    .handler = {                                                              \
        .inval   = invl,                                                      \
        .type = _typ,                                                         \
        .handler = &gen_##name,                                               \
        .oname = stringify(name),                                             \
    },                                                                        \
    .oname = stringify(name),                                                 \
}
/* Variant with an explicit name string (for aliased encodings). */
#define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ)                    \
OPCODES_SECTION opcode_t opc_##name = {                                       \
    .opc1 = op1,                                                              \
    .opc2 = op2,                                                              \
    .opc3 = op3,                                                              \
    .pad  = { 0, },                                                           \
    .handler = {                                                              \
        .inval   = invl,                                                      \
        .type = _typ,                                                         \
        .handler = &gen_##name,                                               \
        .oname = onam,                                                        \
    },                                                                        \
    .oname = onam,                                                            \
}
#else
#define GEN_OPCODE(name, op1, op2, op3, invl, _typ)                           \
OPCODES_SECTION opcode_t opc_##name = {                                       \
    .opc1 = op1,                                                              \
    .opc2 = op2,                                                              \
    .opc3 = op3,                                                              \
    .pad  = { 0, },                                                           \
    .handler = {                                                              \
        .inval   = invl,                                                      \
        .type = _typ,                                                         \
        .handler = &gen_##name,                                               \
    },                                                                        \
    .oname = stringify(name),                                                 \
}
#define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ)                    \
OPCODES_SECTION opcode_t opc_##name = {                                       \
    .opc1 = op1,                                                              \
    .opc2 = op2,                                                              \
    .opc3 = op3,                                                              \
    .pad  = { 0, },                                                           \
    .handler = {                                                              \
        .inval   = invl,                                                      \
        .type = _typ,                                                         \
        .handler = &gen_##name,                                               \
    },                                                                        \
    .oname = onam,                                                            \
}
#endif

/* Sentinel entry (all opcode fields 0xFF, NULL handler) used to mark the
   boundaries of the opcodes section. */
#define GEN_OPCODE_MARK(name)                                                 \
OPCODES_SECTION opcode_t opc_##name = {                                       \
    .opc1 = 0xFF,                                                             \
    .opc2 = 0xFF,                                                             \
    .opc3 = 0xFF,                                                             \
    .pad  = { 0, },                                                           \
    .handler = {                                                              \
        .inval   = 0x00000000,                                                \
        .type = 0x00,                                                         \
        .handler = NULL,                                                      \
    },                                                                        \
    .oname = stringify(name),                                                 \
}
|
666 |
|
/* SPR load/store helpers */
/* Copy SPR 'reg' from env into TCG value 't'. */
static always_inline void gen_load_spr(TCGv t, int reg)
{
    tcg_gen_ld_tl(t, cpu_env, offsetof(CPUState, spr[reg]));
}

/* Store TCG value 't' into SPR 'reg' in env. */
static always_inline void gen_store_spr(int reg, TCGv t)
{
    tcg_gen_st_tl(t, cpu_env, offsetof(CPUState, spr[reg]));
}
|
677 |
|
/* Start opcode list */
GEN_OPCODE_MARK(start);

/* Invalid instruction */
/* Fallback handler: raise a program exception for an illegal encoding. */
GEN_HANDLER(invalid, 0x00, 0x00, 0x00, 0xFFFFFFFF, PPC_NONE)
{
    gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
}

/* Shared table entry installed in every slot with no real handler. */
static opc_handler_t invalid_handler = {
    .inval   = 0xFFFFFFFF,
    .type    = PPC_NONE,
    .handler = gen_invalid,
};
|
692 |
|
/*** Integer comparison ***/

/* Set CR field 'crf' from comparing arg0 with arg1 ('s' selects signed
 * vs. unsigned).  The XER SO bit is first shifted down into bit 0 of the
 * field, then exactly one of the EQ/LT/GT bits is OR-ed in via a
 * three-way branch. */
static always_inline void gen_op_cmp(TCGv arg0, TCGv arg1, int s, int crf)
{
    int l1, l2, l3;

    /* crf[crf] = (xer >> XER_SO) & 1, i.e. the summary-overflow bit */
    tcg_gen_trunc_tl_i32(cpu_crf[crf], cpu_xer);
    tcg_gen_shri_i32(cpu_crf[crf], cpu_crf[crf], XER_SO);
    tcg_gen_andi_i32(cpu_crf[crf], cpu_crf[crf], 1);

    l1 = gen_new_label();   /* arg0 < arg1 */
    l2 = gen_new_label();   /* arg0 > arg1 */
    l3 = gen_new_label();   /* done */
    if (s) {
        tcg_gen_brcond_tl(TCG_COND_LT, arg0, arg1, l1);
        tcg_gen_brcond_tl(TCG_COND_GT, arg0, arg1, l2);
    } else {
        tcg_gen_brcond_tl(TCG_COND_LTU, arg0, arg1, l1);
        tcg_gen_brcond_tl(TCG_COND_GTU, arg0, arg1, l2);
    }
    tcg_gen_ori_i32(cpu_crf[crf], cpu_crf[crf], 1 << CRF_EQ);
    tcg_gen_br(l3);
    gen_set_label(l1);
    tcg_gen_ori_i32(cpu_crf[crf], cpu_crf[crf], 1 << CRF_LT);
    tcg_gen_br(l3);
    gen_set_label(l2);
    tcg_gen_ori_i32(cpu_crf[crf], cpu_crf[crf], 1 << CRF_GT);
    gen_set_label(l3);
}
|
722 |
|
723 static always_inline void gen_op_cmpi(TCGv arg0, target_ulong arg1, int s, int crf) |
|
724 { |
|
725 TCGv t0 = tcg_const_local_tl(arg1); |
|
726 gen_op_cmp(arg0, t0, s, crf); |
|
727 tcg_temp_free(t0); |
|
728 } |
|
729 |
|
#if defined(TARGET_PPC64)
/* 32-bit comparison on a 64-bit target: sign- or zero-extend the low
   32 bits of both operands into local temps, then compare as usual. */
static always_inline void gen_op_cmp32(TCGv arg0, TCGv arg1, int s, int crf)
{
    TCGv t0, t1;
    /* locals: they must survive the branches inside gen_op_cmp */
    t0 = tcg_temp_local_new();
    t1 = tcg_temp_local_new();
    if (s) {
        tcg_gen_ext32s_tl(t0, arg0);
        tcg_gen_ext32s_tl(t1, arg1);
    } else {
        tcg_gen_ext32u_tl(t0, arg0);
        tcg_gen_ext32u_tl(t1, arg1);
    }
    gen_op_cmp(t0, t1, s, crf);
    tcg_temp_free(t1);
    tcg_temp_free(t0);
}

/* Immediate variant of gen_op_cmp32. */
static always_inline void gen_op_cmpi32(TCGv arg0, target_ulong arg1, int s, int crf)
{
    TCGv t0 = tcg_const_local_tl(arg1);
    gen_op_cmp32(arg0, t0, s, crf);
    tcg_temp_free(t0);
}
#endif
|
755 |
|
/* Record-form helper: set CR0 from a signed comparison of 'reg' with 0,
   using the 32-bit comparison when not in 64-bit mode. */
static always_inline void gen_set_Rc0 (DisasContext *ctx, TCGv reg)
{
#if defined(TARGET_PPC64)
    if (!(ctx->sf_mode))
        gen_op_cmpi32(reg, 0, 1, 0);
    else
#endif
        gen_op_cmpi(reg, 0, 1, 0);
}
|
765 |
|
/* cmp */
/* Opcode bit 0x00200000 is the L field: on 64-bit targets, L=0 (or
   32-bit mode) selects the 32-bit comparison. */
GEN_HANDLER(cmp, 0x1F, 0x00, 0x00, 0x00400000, PPC_INTEGER)
{
#if defined(TARGET_PPC64)
    if (!(ctx->sf_mode && (ctx->opcode & 0x00200000)))
        gen_op_cmp32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
                     1, crfD(ctx->opcode));
    else
#endif
        gen_op_cmp(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
                   1, crfD(ctx->opcode));
}

/* cmpi — signed compare against the sign-extended 16-bit immediate */
GEN_HANDLER(cmpi, 0x0B, 0xFF, 0xFF, 0x00400000, PPC_INTEGER)
{
#if defined(TARGET_PPC64)
    if (!(ctx->sf_mode && (ctx->opcode & 0x00200000)))
        gen_op_cmpi32(cpu_gpr[rA(ctx->opcode)], SIMM(ctx->opcode),
                      1, crfD(ctx->opcode));
    else
#endif
        gen_op_cmpi(cpu_gpr[rA(ctx->opcode)], SIMM(ctx->opcode),
                    1, crfD(ctx->opcode));
}

/* cmpl — unsigned register compare */
GEN_HANDLER(cmpl, 0x1F, 0x00, 0x01, 0x00400000, PPC_INTEGER)
{
#if defined(TARGET_PPC64)
    if (!(ctx->sf_mode && (ctx->opcode & 0x00200000)))
        gen_op_cmp32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
                     0, crfD(ctx->opcode));
    else
#endif
        gen_op_cmp(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
                   0, crfD(ctx->opcode));
}

/* cmpli — unsigned compare against the zero-extended 16-bit immediate */
GEN_HANDLER(cmpli, 0x0A, 0xFF, 0xFF, 0x00400000, PPC_INTEGER)
{
#if defined(TARGET_PPC64)
    if (!(ctx->sf_mode && (ctx->opcode & 0x00200000)))
        gen_op_cmpi32(cpu_gpr[rA(ctx->opcode)], UIMM(ctx->opcode),
                      0, crfD(ctx->opcode));
    else
#endif
        gen_op_cmpi(cpu_gpr[rA(ctx->opcode)], UIMM(ctx->opcode),
                    0, crfD(ctx->opcode));
}
|
817 |
|
/* isel (PowerPC 2.03 specification) */
/* rD = (CR bit 'bi' set) ? rB : (rA == 0 ? 0 : rA).
   bi >> 2 selects the CR field, 3 - (bi & 3) the bit within it. */
GEN_HANDLER(isel, 0x1F, 0x0F, 0xFF, 0x00000001, PPC_ISEL)
{
    int l1, l2;
    uint32_t bi = rC(ctx->opcode);
    uint32_t mask;
    TCGv_i32 t0;

    l1 = gen_new_label();   /* CR bit set: take rB */
    l2 = gen_new_label();   /* done */

    mask = 1 << (3 - (bi & 0x03));
    t0 = tcg_temp_new_i32();
    tcg_gen_andi_i32(t0, cpu_crf[bi >> 2], mask);
    tcg_gen_brcondi_i32(TCG_COND_EQ, t0, 0, l1);
    if (rA(ctx->opcode) == 0)
        /* rA = 0 encodes the literal value 0 */
        tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0);
    else
        tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
    gen_set_label(l2);
    tcg_temp_free_i32(t0);
}
|
843 |
|
/*** Integer arithmetic ***/

/* Set XER OV|SO when the add/sub of arg1 and arg2 producing arg0
 * overflowed, using the sign-XOR criterion: overflow happened iff the
 * two operands that can overflow have the "dangerous" sign relationship
 * AND the result's sign disagrees.  'sub' selects subtraction rules.
 * In 32-bit mode only the low 32 bits' signs are considered. */
static always_inline void gen_op_arith_compute_ov(DisasContext *ctx, TCGv arg0, TCGv arg1, TCGv arg2, int sub)
{
    int l1;
    TCGv t0;

    l1 = gen_new_label();
    /* Start with XER OV disabled, the most likely case */
    tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
    t0 = tcg_temp_local_new();
    /* Sign of result vs. first operand */
    tcg_gen_xor_tl(t0, arg0, arg1);
#if defined(TARGET_PPC64)
    if (!ctx->sf_mode)
        tcg_gen_ext32s_tl(t0, t0);
#endif
    if (sub)
        tcg_gen_brcondi_tl(TCG_COND_LT, t0, 0, l1);
    else
        tcg_gen_brcondi_tl(TCG_COND_GE, t0, 0, l1);
    /* Sign of first operand vs. second operand */
    tcg_gen_xor_tl(t0, arg1, arg2);
#if defined(TARGET_PPC64)
    if (!ctx->sf_mode)
        tcg_gen_ext32s_tl(t0, t0);
#endif
    if (sub)
        tcg_gen_brcondi_tl(TCG_COND_GE, t0, 0, l1);
    else
        tcg_gen_brcondi_tl(TCG_COND_LT, t0, 0, l1);
    /* Overflow: set OV and sticky SO */
    tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
    gen_set_label(l1);
    tcg_temp_free(t0);
}
|
877 |
|
/* Set XER CA from an addition result: for an add, carry occurred iff
 * result (arg1) < operand (arg2), unsigned; for a subtraction ('sub'),
 * the borrow criterion is result > operand.  CA is assumed already
 * cleared by the caller.  In 32-bit mode only the low 32 bits count. */
static always_inline void gen_op_arith_compute_ca(DisasContext *ctx, TCGv arg1, TCGv arg2, int sub)
{
    int l1 = gen_new_label();

#if defined(TARGET_PPC64)
    if (!(ctx->sf_mode)) {
        TCGv t0, t1;
        t0 = tcg_temp_new();
        t1 = tcg_temp_new();

        /* Compare the zero-extended low words */
        tcg_gen_ext32u_tl(t0, arg1);
        tcg_gen_ext32u_tl(t1, arg2);
        if (sub) {
            tcg_gen_brcond_tl(TCG_COND_GTU, t0, t1, l1);
        } else {
            tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1);
        }
        tcg_gen_ori_tl(cpu_xer, cpu_xer, 1 << XER_CA);
        gen_set_label(l1);
        tcg_temp_free(t0);
        tcg_temp_free(t1);
    } else
#endif
    {
        if (sub) {
            tcg_gen_brcond_tl(TCG_COND_GTU, arg1, arg2, l1);
        } else {
            tcg_gen_brcond_tl(TCG_COND_GEU, arg1, arg2, l1);
        }
        tcg_gen_ori_tl(cpu_xer, cpu_xer, 1 << XER_CA);
        gen_set_label(l1);
    }
}
|
911 |
|
/* Common add function */
/* ret = arg1 + arg2 (+ XER CA if add_ca), optionally computing XER CA
 * and/or OV, and setting CR0 for record forms (Rc=1).
 * When flags must be computed and ret aliases an input, the sum is built
 * in a local temp so the flag computations still see the original
 * operand values; the temp is copied to ret at the end. */
static always_inline void gen_op_arith_add(DisasContext *ctx, TCGv ret, TCGv arg1, TCGv arg2,
                                           int add_ca, int compute_ca, int compute_ov)
{
    TCGv t0, t1;

    if ((!compute_ca && !compute_ov) ||
        (!TCGV_EQUAL(ret,arg1) && !TCGV_EQUAL(ret, arg2)))  {
        t0 = ret;
    } else {
        t0 = tcg_temp_local_new();
    }

    if (add_ca) {
        /* t1 = incoming carry bit (0 or 1) */
        t1 = tcg_temp_local_new();
        tcg_gen_andi_tl(t1, cpu_xer, (1 << XER_CA));
        tcg_gen_shri_tl(t1, t1, XER_CA);
    }

    if (compute_ca && compute_ov) {
        /* Start with XER CA and OV disabled, the most likely case */
        tcg_gen_andi_tl(cpu_xer, cpu_xer, ~((1 << XER_CA) | (1 << XER_OV)));
    } else if (compute_ca) {
        /* Start with XER CA disabled, the most likely case */
        tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
    } else if (compute_ov) {
        /* Start with XER OV disabled, the most likely case */
        tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
    }

    tcg_gen_add_tl(t0, arg1, arg2);

    if (compute_ca) {
        gen_op_arith_compute_ca(ctx, t0, arg1, 0);
    }
    if (add_ca) {
        /* Adding the carry can itself carry; check both steps. */
        tcg_gen_add_tl(t0, t0, t1);
        gen_op_arith_compute_ca(ctx, t0, t1, 0);
        tcg_temp_free(t1);
    }
    if (compute_ov) {
        gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 0);
    }

    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, t0);

    if (!TCGV_EQUAL(t0, ret)) {
        tcg_gen_mov_tl(ret, t0);
        tcg_temp_free(t0);
    }
}
|
/* Add functions with two operands: expands to a GEN_HANDLER (primary
   opcode 0x1F, opc2 0x0A) that forwards rD = rA op rB to
   gen_op_arith_add with the given CA/OV behavior flags. */
#define GEN_INT_ARITH_ADD(name, opc3, add_ca, compute_ca, compute_ov)         \
GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x00000000, PPC_INTEGER)                  \
{                                                                             \
    gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)],                           \
                     cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],      \
                     add_ca, compute_ca, compute_ov);                         \
}
|
/* Add functions with one operand and one immediate: like
   GEN_INT_ARITH_ADD but the second operand is the compile-time
   constant const_val (used by addme: -1, addze: 0).  The 0x0000F800
   mask reserves the unused rB field as invalid bits. */
#define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val,                        \
                                add_ca, compute_ca, compute_ov)               \
GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x0000F800, PPC_INTEGER)                  \
{                                                                             \
    TCGv t0 = tcg_const_local_tl(const_val);                                  \
    gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)],                           \
                     cpu_gpr[rA(ctx->opcode)], t0,                            \
                     add_ca, compute_ca, compute_ov);                         \
    tcg_temp_free(t0);                                                        \
}
|
983 |
|
/* add add. addo addo. */
GEN_INT_ARITH_ADD(add, 0x08, 0, 0, 0)
GEN_INT_ARITH_ADD(addo, 0x18, 0, 0, 1)
/* addc addc. addco addco. */
GEN_INT_ARITH_ADD(addc, 0x00, 0, 1, 0)
GEN_INT_ARITH_ADD(addco, 0x10, 0, 1, 1)
/* adde adde. addeo addeo. */
GEN_INT_ARITH_ADD(adde, 0x04, 1, 1, 0)
GEN_INT_ARITH_ADD(addeo, 0x14, 1, 1, 1)
/* addme addme. addmeo addmeo. (rD = rA + CA - 1) */
GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, 1, 1, 0)
GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, 1, 1, 1)
/* addze addze. addzeo addzeo. (rD = rA + CA) */
GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, 1, 1, 0)
GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, 1, 1, 1)
|
/* addi: rD = (rA|0) + SIMM; rA == 0 means the literal value 0 (li). */
GEN_HANDLER(addi, 0x0E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
{
    target_long simm = SIMM(ctx->opcode);

    if (rA(ctx->opcode) == 0) {
        /* li case */
        tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], simm);
    } else {
        tcg_gen_addi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], simm);
    }
}
|
/* addic addic.: rD = rA + SIMM, updating XER[CA]; compute_Rc0 selects
   the record (CR0-updating) form. */
static always_inline void gen_op_addic (DisasContext *ctx, TCGv ret, TCGv arg1,
                                        int compute_Rc0)
{
    target_long simm = SIMM(ctx->opcode);

    /* Start with XER CA disabled, the most likely case (OV untouched) */
    tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));

    if (likely(simm != 0)) {
        TCGv t0 = tcg_temp_local_new();
        tcg_gen_addi_tl(t0, arg1, simm);
        gen_op_arith_compute_ca(ctx, t0, arg1, 0);
        tcg_gen_mov_tl(ret, t0);
        tcg_temp_free(t0);
    } else {
        /* Adding 0 can never carry, so CA stays cleared */
        tcg_gen_mov_tl(ret, arg1);
    }
    if (compute_Rc0) {
        gen_set_Rc0(ctx, ret);
    }
}
|
/* addic: carrying add immediate, no CR0 update */
GEN_HANDLER(addic, 0x0C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
{
    gen_op_addic(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0);
}
/* addic.: carrying add immediate with CR0 update */
GEN_HANDLER2(addic_, "addic.", 0x0D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
{
    gen_op_addic(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 1);
}
|
/* addis: rD = (rA|0) + (SIMM << 16); rA == 0 means literal 0 (lis). */
GEN_HANDLER(addis, 0x0F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
{
    target_long simm = SIMM(ctx->opcode);

    if (rA(ctx->opcode) == 0) {
        /* lis case */
        tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], simm << 16);
    } else {
        tcg_gen_addi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], simm << 16);
    }
}
|
1053 |
|
/* Common 32-bit divide: generates code for divw/divwu and the o/.
 * variants.  Invalid divisions (divisor == 0, or INT32_MIN / -1 when
 * signed) branch to l1 where a fallback result is produced and, for
 * the o forms, XER[OV]/XER[SO] are set. */
static always_inline void gen_op_arith_divw (DisasContext *ctx, TCGv ret, TCGv arg1, TCGv arg2,
                                             int sign, int compute_ov)
{
    int l1 = gen_new_label();
    int l2 = gen_new_label();
    TCGv_i32 t0 = tcg_temp_local_new_i32();
    TCGv_i32 t1 = tcg_temp_local_new_i32();

    /* Operate on the low 32 bits regardless of target register width */
    tcg_gen_trunc_tl_i32(t0, arg1);
    tcg_gen_trunc_tl_i32(t1, arg2);
    tcg_gen_brcondi_i32(TCG_COND_EQ, t1, 0, l1);
    if (sign) {
        /* INT32_MIN / -1 overflows; treat it like the invalid case */
        int l3 = gen_new_label();
        tcg_gen_brcondi_i32(TCG_COND_NE, t1, -1, l3);
        tcg_gen_brcondi_i32(TCG_COND_EQ, t0, INT32_MIN, l1);
        gen_set_label(l3);
        tcg_gen_div_i32(t0, t0, t1);
    } else {
        tcg_gen_divu_i32(t0, t0, t1);
    }
    if (compute_ov) {
        tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
    }
    tcg_gen_br(l2);
    gen_set_label(l1);
    /* Invalid division: result is sign of dividend (signed) or 0 */
    if (sign) {
        tcg_gen_sari_i32(t0, t0, 31);
    } else {
        tcg_gen_movi_i32(t0, 0);
    }
    if (compute_ov) {
        tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
    }
    gen_set_label(l2);
    tcg_gen_extu_i32_tl(ret, t0);
    tcg_temp_free_i32(t0);
    tcg_temp_free_i32(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, ret);
}
|
/* Div functions: one GEN_HANDLER per 32-bit divide variant, all
   dispatching to gen_op_arith_divw. */
#define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov)                      \
GEN_HANDLER(name, 0x1F, 0x0B, opc3, 0x00000000, PPC_INTEGER)                  \
{                                                                             \
    gen_op_arith_divw(ctx, cpu_gpr[rD(ctx->opcode)],                          \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],     \
                      sign, compute_ov);                                      \
}
/* divwu divwu. divwuo divwuo. */
GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0);
GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1);
/* divw divw. divwo divwo. */
GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0);
GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1);
|
#if defined(TARGET_PPC64)
/* Common 64-bit divide: like gen_op_arith_divw but operating on full
 * 64-bit registers (divd/divdu and the o/. variants).  Invalid
 * divisions (divisor == 0, or INT64_MIN / -1 when signed) branch to
 * l1 for the fallback result and, for the o forms, set XER[OV]/[SO]. */
static always_inline void gen_op_arith_divd (DisasContext *ctx, TCGv ret, TCGv arg1, TCGv arg2,
                                             int sign, int compute_ov)
{
    int l1 = gen_new_label();
    int l2 = gen_new_label();

    tcg_gen_brcondi_i64(TCG_COND_EQ, arg2, 0, l1);
    if (sign) {
        /* INT64_MIN / -1 overflows; treat it like the invalid case */
        int l3 = gen_new_label();
        tcg_gen_brcondi_i64(TCG_COND_NE, arg2, -1, l3);
        tcg_gen_brcondi_i64(TCG_COND_EQ, arg1, INT64_MIN, l1);
        gen_set_label(l3);
        tcg_gen_div_i64(ret, arg1, arg2);
    } else {
        tcg_gen_divu_i64(ret, arg1, arg2);
    }
    if (compute_ov) {
        tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
    }
    tcg_gen_br(l2);
    gen_set_label(l1);
    /* Invalid division: result is sign of dividend (signed) or 0 */
    if (sign) {
        tcg_gen_sari_i64(ret, arg1, 63);
    } else {
        tcg_gen_movi_i64(ret, 0);
    }
    if (compute_ov) {
        tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
    }
    gen_set_label(l2);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, ret);
}
|
/* One GEN_HANDLER per 64-bit divide variant, dispatching to
   gen_op_arith_divd. */
#define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov)                      \
GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B)                      \
{                                                                             \
    gen_op_arith_divd(ctx, cpu_gpr[rD(ctx->opcode)],                          \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],     \
                      sign, compute_ov);                                      \
}
/* divdu divdu. divduo divduo. */
GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0);
GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1);
/* divd divd. divdo divdo. */
GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0);
GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1);
#endif
|
1156 |
|
/* mulhw mulhw.: rD = high 32 bits of the signed 64-bit product
   rA * rB (each source sign-extended from 32 bits). */
GEN_HANDLER(mulhw, 0x1F, 0x0B, 0x02, 0x00000400, PPC_INTEGER)
{
    TCGv_i64 t0, t1;

    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
#if defined(TARGET_PPC64)
    /* Registers are already 64-bit: sign-extend in place and keep the
       product's high word directly in rD */
    tcg_gen_ext32s_tl(t0, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_ext32s_tl(t1, cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_mul_i64(t0, t0, t1);
    tcg_gen_shri_i64(cpu_gpr[rD(ctx->opcode)], t0, 32);
#else
    /* 32-bit target: widen to i64, multiply, then truncate the high word */
    tcg_gen_ext_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_ext_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_mul_i64(t0, t0, t1);
    tcg_gen_shri_i64(t0, t0, 32);
    tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t0);
#endif
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}
|
/* mulhwu mulhwu.: rD = high 32 bits of the unsigned 64-bit product
   rA * rB (each source zero-extended from 32 bits). */
GEN_HANDLER(mulhwu, 0x1F, 0x0B, 0x00, 0x00000400, PPC_INTEGER)
{
    TCGv_i64 t0, t1;

    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
#if defined(TARGET_PPC64)
    /* Registers are already 64-bit: zero-extend and keep the high word */
    tcg_gen_ext32u_i64(t0, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_ext32u_i64(t1, cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_mul_i64(t0, t0, t1);
    tcg_gen_shri_i64(cpu_gpr[rD(ctx->opcode)], t0, 32);
#else
    /* 32-bit target: widen to i64, multiply, then truncate the high word */
    tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_mul_i64(t0, t0, t1);
    tcg_gen_shri_i64(t0, t0, 32);
    tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t0);
#endif
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}
|
/* mullw mullw.: rD = low 32 bits of rA * rB, sign-extended on 64-bit
   targets (ext32s is a no-op on 32-bit targets). */
GEN_HANDLER(mullw, 0x1F, 0x0B, 0x07, 0x00000000, PPC_INTEGER)
{
    tcg_gen_mul_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                   cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_ext32s_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}
|
/* mullwo mullwo.: like mullw but sets XER[OV]/XER[SO] when the full
   signed product does not fit in 32 bits (detected by comparing the
   64-bit product with its own 32-bit sign-extension). */
GEN_HANDLER(mullwo, 0x1F, 0x0B, 0x17, 0x00000000, PPC_INTEGER)
{
    int l1;
    TCGv_i64 t0, t1;

    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    l1 = gen_new_label();
    /* Start with XER OV disabled, the most likely case */
    tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
#if defined(TARGET_PPC64)
    tcg_gen_ext32s_i64(t0, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_ext32s_i64(t1, cpu_gpr[rB(ctx->opcode)]);
#else
    tcg_gen_ext_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_ext_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
#endif
    tcg_gen_mul_i64(t0, t0, t1);
#if defined(TARGET_PPC64)
    /* rD gets the sign-extended low word; if it equals the full
       product there was no overflow */
    tcg_gen_ext32s_i64(cpu_gpr[rD(ctx->opcode)], t0);
    tcg_gen_brcond_i64(TCG_COND_EQ, t0, cpu_gpr[rD(ctx->opcode)], l1);
#else
    tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t0);
    tcg_gen_ext32s_i64(t1, t0);
    tcg_gen_brcond_i64(TCG_COND_EQ, t0, t1, l1);
#endif
    tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
    gen_set_label(l1);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}
|
/* mulli: rD = rA * SIMM (low-order bits of the product) */
GEN_HANDLER(mulli, 0x07, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
{
    tcg_gen_muli_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                    SIMM(ctx->opcode));
}
|
#if defined(TARGET_PPC64)
/* 64-bit multiplies that need a runtime helper (128-bit intermediate
   product, or overflow detection for mulldo). */
#define GEN_INT_ARITH_MUL_HELPER(name, opc3)                                  \
GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B)                      \
{                                                                             \
    gen_helper_##name (cpu_gpr[rD(ctx->opcode)],                              \
                       cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);   \
    if (unlikely(Rc(ctx->opcode) != 0))                                       \
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);                           \
}
/* mulhdu mulhdu. */
GEN_INT_ARITH_MUL_HELPER(mulhdu, 0x00);
/* mulhd mulhd. */
GEN_INT_ARITH_MUL_HELPER(mulhd, 0x02);
/* mulld mulld.: low 64 bits of the product, no helper needed */
GEN_HANDLER(mulld, 0x1F, 0x09, 0x07, 0x00000000, PPC_64B)
{
    tcg_gen_mul_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                   cpu_gpr[rB(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}
/* mulldo mulldo. */
GEN_INT_ARITH_MUL_HELPER(mulldo, 0x17);
#endif
|
1278 |
|
/* neg neg. nego nego.: ret = -arg1.  Negating the minimum integer
 * (INT32_MIN, or INT64_MIN in 64-bit mode) is not representable: the
 * result is the operand unchanged and, for the o forms (ov_check),
 * XER[OV]/XER[SO] are set. */
static always_inline void gen_op_arith_neg (DisasContext *ctx, TCGv ret, TCGv arg1, int ov_check)
{
    int l1 = gen_new_label();
    int l2 = gen_new_label();
    TCGv t0 = tcg_temp_local_new();
#if defined(TARGET_PPC64)
    if (ctx->sf_mode) {
        tcg_gen_mov_tl(t0, arg1);
        tcg_gen_brcondi_tl(TCG_COND_EQ, t0, INT64_MIN, l1);
    } else
#endif
    {
        /* 32-bit mode: compare the sign-extended low word */
        tcg_gen_ext32s_tl(t0, arg1);
        tcg_gen_brcondi_tl(TCG_COND_EQ, t0, INT32_MIN, l1);
    }
    tcg_gen_neg_tl(ret, arg1);
    if (ov_check) {
        tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
    }
    tcg_gen_br(l2);
    gen_set_label(l1);
    /* Overflow case: result is the (extended) operand itself */
    tcg_gen_mov_tl(ret, t0);
    if (ov_check) {
        tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
    }
    gen_set_label(l2);
    tcg_temp_free(t0);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, ret);
}
|
/* neg neg.: no overflow reporting */
GEN_HANDLER(neg, 0x1F, 0x08, 0x03, 0x0000F800, PPC_INTEGER)
{
    gen_op_arith_neg(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0);
}
/* nego nego.: sets XER[OV]/[SO] on overflow */
GEN_HANDLER(nego, 0x1F, 0x08, 0x13, 0x0000F800, PPC_INTEGER)
{
    gen_op_arith_neg(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 1);
}
|
1318 |
|
/* Common subf function: generates code for the subtract-from family
 * (subf/subfc/subfe/subfme/subfze and o/. variants).
 * Semantics: ret = arg2 - arg1 (note the reversed operand order).
 *   add_ca:     compute via ~arg1 + arg2 + XER[CA] (subfe/subfme/subfze)
 *   compute_ca: update XER[CA] with the carry-out
 *   compute_ov: update XER[OV]/XER[SO] on signed overflow */
static always_inline void gen_op_arith_subf(DisasContext *ctx, TCGv ret, TCGv arg1, TCGv arg2,
                                            int add_ca, int compute_ca, int compute_ov)
{
    TCGv t0, t1;

    /* Work in a temporary when ret aliases a source operand and the old
       source value is still needed for the CA/OV computation below. */
    if ((!compute_ca && !compute_ov) ||
        (!TCGV_EQUAL(ret, arg1) && !TCGV_EQUAL(ret, arg2))) {
        t0 = ret;
    } else {
        t0 = tcg_temp_local_new();
    }

    if (add_ca) {
        /* Extract the carry-in bit from XER into t1 (value 0 or 1) */
        t1 = tcg_temp_local_new();
        tcg_gen_andi_tl(t1, cpu_xer, (1 << XER_CA));
        tcg_gen_shri_tl(t1, t1, XER_CA);
    }

    if (compute_ca && compute_ov) {
        /* Start with XER CA and OV disabled, the most likely case */
        tcg_gen_andi_tl(cpu_xer, cpu_xer, ~((1 << XER_CA) | (1 << XER_OV)));
    } else if (compute_ca) {
        /* Start with XER CA disabled, the most likely case */
        tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
    } else if (compute_ov) {
        /* Start with XER OV disabled, the most likely case */
        tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
    }

    if (add_ca) {
        /* ret = ~arg1 + arg2 + CA; carry may come from either addition */
        tcg_gen_not_tl(t0, arg1);
        tcg_gen_add_tl(t0, t0, arg2);
        gen_op_arith_compute_ca(ctx, t0, arg2, 0);
        tcg_gen_add_tl(t0, t0, t1);
        gen_op_arith_compute_ca(ctx, t0, t1, 0);
        tcg_temp_free(t1);
    } else {
        tcg_gen_sub_tl(t0, arg2, arg1);
        if (compute_ca) {
            gen_op_arith_compute_ca(ctx, t0, arg2, 1);
        }
    }
    if (compute_ov) {
        gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 1);
    }

    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, t0);

    if (!TCGV_EQUAL(t0, ret)) {
        tcg_gen_mov_tl(ret, t0);
        tcg_temp_free(t0);
    }
}
|
/* Sub functions with two operands: rD = rB - rA via gen_op_arith_subf. */
#define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov)        \
GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x00000000, PPC_INTEGER)                  \
{                                                                             \
    gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)],                          \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],     \
                      add_ca, compute_ca, compute_ov);                        \
}
/* Sub functions with one operand and one immediate: the second operand
   is const_val (subfme: -1, subfze: 0); rB field bits are invalid. */
#define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val,                       \
                                 add_ca, compute_ca, compute_ov)              \
GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x0000F800, PPC_INTEGER)                  \
{                                                                             \
    TCGv t0 = tcg_const_local_tl(const_val);                                  \
    gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)],                          \
                      cpu_gpr[rA(ctx->opcode)], t0,                           \
                      add_ca, compute_ca, compute_ov);                        \
    tcg_temp_free(t0);                                                        \
}
|
/* subf subf. subfo subfo. */
GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0)
GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1)
/* subfc subfc. subfco subfco. */
GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0)
GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1)
/* subfe subfe. subfeo subfeo. */
GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0)
GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1)
/* subfme subfme. subfmeo subfmeo. (rD = ~rA + CA - 1) */
GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0)
GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1)
/* subfze subfze. subfzeo subfzeo. (rD = ~rA + CA) */
GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0)
GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1)
|
/* subfic: rD = SIMM - rA, always updating XER[CA] (no record form). */
GEN_HANDLER(subfic, 0x08, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
{
    /* Start with XER CA disabled, the most likely case (OV untouched) */
    tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
    TCGv t0 = tcg_temp_local_new();
    TCGv t1 = tcg_const_local_tl(SIMM(ctx->opcode));
    tcg_gen_sub_tl(t0, t1, cpu_gpr[rA(ctx->opcode)]);
    gen_op_arith_compute_ca(ctx, t0, t1, 1);
    tcg_temp_free(t1);
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
    tcg_temp_free(t0);
}
|
1421 |
|
/*** Integer logical ***/
/* Two-source logical op: rA = rS tcg_op rB, optional CR0 update (Rc). */
#define GEN_LOGICAL2(name, tcg_op, opc, type)                                 \
GEN_HANDLER(name, 0x1F, 0x1C, opc, 0x00000000, type)                          \
{                                                                             \
    tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],                \
           cpu_gpr[rB(ctx->opcode)]);                                         \
    if (unlikely(Rc(ctx->opcode) != 0))                                       \
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);                           \
}

/* One-source logical op: rA = tcg_op(rS), optional CR0 update (Rc). */
#define GEN_LOGICAL1(name, tcg_op, opc, type)                                 \
GEN_HANDLER(name, 0x1F, 0x1A, opc, 0x00000000, type)                          \
{                                                                             \
    tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);               \
    if (unlikely(Rc(ctx->opcode) != 0))                                       \
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);                           \
}
|
1439 |
|
/* and & and. */
GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER);
/* andc & andc. */
GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER);
/* andi.: D-form AND with immediate; always updates CR0 (no Rc bit) */
GEN_HANDLER2(andi_, "andi.", 0x1C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
{
    tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], UIMM(ctx->opcode));
    gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
/* andis.: AND with shifted immediate; always updates CR0 */
GEN_HANDLER2(andis_, "andis.", 0x1D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
{
    tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], UIMM(ctx->opcode) << 16);
    gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
/* cntlzw: count leading zeros of the low 32 bits of rS, via helper */
GEN_HANDLER(cntlzw, 0x1F, 0x1A, 0x00, 0x00000000, PPC_INTEGER)
{
    gen_helper_cntlzw(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
/* eqv & eqv. */
GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER);
/* extsb & extsb. */
GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER);
/* extsh & extsh. */
GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER);
/* nand & nand. */
GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER);
/* nor & nor. */
GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER);
|
/* or & or.: also decodes the "mr rX,rX" no-op encodings which, on
   64-bit implementations, act as process-priority hints stored in the
   PPR special register (see Power ISA "or Rx,Rx,Rx" forms). */
GEN_HANDLER(or, 0x1F, 0x1C, 0x0D, 0x00000000, PPC_INTEGER)
{
    int rs, ra, rb;

    rs = rS(ctx->opcode);
    ra = rA(ctx->opcode);
    rb = rB(ctx->opcode);
    /* Optimisation for mr. ri case */
    if (rs != ra || rs != rb) {
        if (rs != rb)
            tcg_gen_or_tl(cpu_gpr[ra], cpu_gpr[rs], cpu_gpr[rb]);
        else
            tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rs]);
        if (unlikely(Rc(ctx->opcode) != 0))
            gen_set_Rc0(ctx, cpu_gpr[ra]);
    } else if (unlikely(Rc(ctx->opcode) != 0)) {
        /* or. rX,rX,rX still updates CR0 even though rX is unchanged */
        gen_set_Rc0(ctx, cpu_gpr[rs]);
#if defined(TARGET_PPC64)
    } else {
        /* or rX,rX,rX: priority hint encoded in the register number */
        int prio = 0;

        switch (rs) {
        case 1:
            /* Set process priority to low */
            prio = 2;
            break;
        case 6:
            /* Set process priority to medium-low */
            prio = 3;
            break;
        case 2:
            /* Set process priority to normal */
            prio = 4;
            break;
#if !defined(CONFIG_USER_ONLY)
        /* The remaining levels are privileged: only honoured when
           mem_idx indicates a non-user context */
        case 31:
            if (ctx->mem_idx > 0) {
                /* Set process priority to very low */
                prio = 1;
            }
            break;
        case 5:
            if (ctx->mem_idx > 0) {
                /* Set process priority to medium-high */
                prio = 5;
            }
            break;
        case 3:
            if (ctx->mem_idx > 0) {
                /* Set process priority to high */
                prio = 6;
            }
            break;
        case 7:
            if (ctx->mem_idx > 1) {
                /* Set process priority to very high */
                prio = 7;
            }
            break;
#endif
        default:
            /* nop */
            break;
        }
        if (prio) {
            /* Store the 3-bit priority in PPR[11:13] (bits 50-52) */
            TCGv t0 = tcg_temp_new();
            gen_load_spr(t0, SPR_PPR);
            tcg_gen_andi_tl(t0, t0, ~0x001C000000000000ULL);
            tcg_gen_ori_tl(t0, t0, ((uint64_t)prio) << 50);
            gen_store_spr(SPR_PPR, t0);
            tcg_temp_free(t0);
        }
#endif
    }
}
|
/* orc & orc. */
GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER);
/* xor & xor.: xor rX,rY,rY is the idiomatic "clear register" form */
GEN_HANDLER(xor, 0x1F, 0x1C, 0x09, 0x00000000, PPC_INTEGER)
{
    /* Optimisation for "set to zero" case */
    if (rS(ctx->opcode) != rB(ctx->opcode))
        tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
    else
        tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
|
/* ori: rA = rS | UIMM; "ori 0,0,0" is the canonical PPC nop */
GEN_HANDLER(ori, 0x18, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
{
    target_ulong uimm = UIMM(ctx->opcode);

    if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
        /* NOP */
        /* XXX: should handle special NOPs for POWER series */
        return;
    }
    tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm);
}
|
/* oris: rA = rS | (UIMM << 16) */
GEN_HANDLER(oris, 0x19, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
{
    target_ulong uimm = UIMM(ctx->opcode);

    if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
        /* NOP */
        return;
    }
    tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm << 16);
}
|
/* xori: rA = rS ^ UIMM */
GEN_HANDLER(xori, 0x1A, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
{
    target_ulong uimm = UIMM(ctx->opcode);

    if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
        /* NOP */
        return;
    }
    tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm);
}
|
/* xoris: rA = rS ^ (UIMM << 16) */
GEN_HANDLER(xoris, 0x1B, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
{
    target_ulong uimm = UIMM(ctx->opcode);

    if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
        /* NOP */
        return;
    }
    tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm << 16);
}
|
/* popcntb: per-byte population count, PowerPC 2.03 specification.
   The 64-bit helper is used only when running in 64-bit (sf) mode. */
GEN_HANDLER(popcntb, 0x1F, 0x03, 0x03, 0x0000F801, PPC_POPCNTB)
{
#if defined(TARGET_PPC64)
    if (ctx->sf_mode)
        gen_helper_popcntb_64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
    else
#endif
        gen_helper_popcntb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
}
|
1617 |
|
#if defined(TARGET_PPC64)
/* extsw & extsw.: sign-extend the low 32 bits of rS into rA */
GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B);
/* cntlzd: count leading zeros over all 64 bits, via helper */
GEN_HANDLER(cntlzd, 0x1F, 0x1A, 0x01, 0x00000000, PPC_64B)
{
    gen_helper_cntlzd(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
#endif
|
1629 |
|
/*** Integer rotate ***/
/* rlwimi & rlwimi.: rotate rS left by sh (as a 32-bit value), then
   insert the bits selected by MASK(mb, me) into rA, preserving rA's
   other bits. */
GEN_HANDLER(rlwimi, 0x14, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
{
    uint32_t mb, me, sh;

    mb = MB(ctx->opcode);
    me = ME(ctx->opcode);
    sh = SH(ctx->opcode);
    if (likely(sh == 0 && mb == 0 && me == 31)) {
        /* Full-word insert with no rotation: just a 32-bit copy */
        tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
    } else {
        target_ulong mask;
        TCGv t1;
        TCGv t0 = tcg_temp_new();
#if defined(TARGET_PPC64)
        /* The rotation is defined on 32 bits, so do it in an i32 temp
           and zero-extend back into the 64-bit value */
        TCGv_i32 t2 = tcg_temp_new_i32();
        tcg_gen_trunc_i64_i32(t2, cpu_gpr[rS(ctx->opcode)]);
        tcg_gen_rotli_i32(t2, t2, sh);
        tcg_gen_extu_i32_i64(t0, t2);
        tcg_temp_free_i32(t2);
#else
        tcg_gen_rotli_i32(t0, cpu_gpr[rS(ctx->opcode)], sh);
#endif
#if defined(TARGET_PPC64)
        /* MASK() takes 64-bit bit numbers; word bits are 32..63 */
        mb += 32;
        me += 32;
#endif
        mask = MASK(mb, me);
        t1 = tcg_temp_new();
        /* Combine: masked rotated bits from rS, the rest from rA */
        tcg_gen_andi_tl(t0, t0, mask);
        tcg_gen_andi_tl(t1, cpu_gpr[rA(ctx->opcode)], ~mask);
        tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
        tcg_temp_free(t0);
        tcg_temp_free(t1);
    }
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
|
/* rlwinm & rlwinm.: rotate rS left by sh (32-bit) and AND with
   MASK(mb, me).  The common shift-left and shift-right mask patterns
   are special-cased to plain shifts. */
GEN_HANDLER(rlwinm, 0x15, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
{
    uint32_t mb, me, sh;

    sh = SH(ctx->opcode);
    mb = MB(ctx->opcode);
    me = ME(ctx->opcode);

    if (likely(mb == 0 && me == (31 - sh))) {
        /* slwi pattern: rotate + clear low bits == shift left */
        if (likely(sh == 0)) {
            tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
        } else {
            TCGv t0 = tcg_temp_new();
            tcg_gen_ext32u_tl(t0, cpu_gpr[rS(ctx->opcode)]);
            tcg_gen_shli_tl(t0, t0, sh);
            tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], t0);
            tcg_temp_free(t0);
        }
    } else if (likely(sh != 0 && me == 31 && sh == (32 - mb))) {
        /* srwi pattern: rotate + clear high bits == logical shift right */
        TCGv t0 = tcg_temp_new();
        tcg_gen_ext32u_tl(t0, cpu_gpr[rS(ctx->opcode)]);
        tcg_gen_shri_tl(t0, t0, mb);
        tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], t0);
        tcg_temp_free(t0);
    } else {
        /* General case: 32-bit rotate then mask */
        TCGv t0 = tcg_temp_new();
#if defined(TARGET_PPC64)
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_trunc_i64_i32(t1, cpu_gpr[rS(ctx->opcode)]);
        tcg_gen_rotli_i32(t1, t1, sh);
        tcg_gen_extu_i32_i64(t0, t1);
        tcg_temp_free_i32(t1);
#else
        tcg_gen_rotli_i32(t0, cpu_gpr[rS(ctx->opcode)], sh);
#endif
#if defined(TARGET_PPC64)
        /* MASK() takes 64-bit bit numbers; word bits are 32..63 */
        mb += 32;
        me += 32;
#endif
        tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], t0, MASK(mb, me));
        tcg_temp_free(t0);
    }
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
|
/* rlwnm & rlwnm.: rotate rS left by rB[27:31] (32-bit rotation) and
   AND with MASK(mb, me). */
GEN_HANDLER(rlwnm, 0x17, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
{
    uint32_t mb, me;
    TCGv t0;
#if defined(TARGET_PPC64)
    TCGv_i32 t1, t2;
#endif

    mb = MB(ctx->opcode);
    me = ME(ctx->opcode);
    t0 = tcg_temp_new();
    /* Only the low 5 bits of rB form the rotate count */
    tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1f);
#if defined(TARGET_PPC64)
    /* The rotation is 32-bit: perform it in i32 temps */
    t1 = tcg_temp_new_i32();
    t2 = tcg_temp_new_i32();
    tcg_gen_trunc_i64_i32(t1, cpu_gpr[rS(ctx->opcode)]);
    tcg_gen_trunc_i64_i32(t2, t0);
    tcg_gen_rotl_i32(t1, t1, t2);
    tcg_gen_extu_i32_i64(t0, t1);
    tcg_temp_free_i32(t1);
    tcg_temp_free_i32(t2);
#else
    tcg_gen_rotl_i32(t0, cpu_gpr[rS(ctx->opcode)], t0);
#endif
    if (unlikely(mb != 0 || me != 31)) {
#if defined(TARGET_PPC64)
        /* MASK() takes 64-bit bit numbers; word bits are 32..63 */
        mb += 32;
        me += 32;
#endif
        tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], t0, MASK(mb, me));
    } else {
        /* Full mask: no masking needed */
        tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
    }
    tcg_temp_free(t0);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
|
1753 |
|
#if defined(TARGET_PPC64)
/* 64-bit rotate opcodes spread their immediate fields across extra
   opcode bits.  GEN_PPC64_R2 emits the two handlers for a 1-bit split
   field; GEN_PPC64_R4 emits the four handlers for a 2-bit split
   (sh bit 5 and mb/me bit 5). */
#define GEN_PPC64_R2(name, opc1, opc2)                                        \
GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B) \
{                                                                             \
    gen_##name(ctx, 0);                                                       \
}                                                                             \
GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000,   \
             PPC_64B)                                                         \
{                                                                             \
    gen_##name(ctx, 1);                                                       \
}
#define GEN_PPC64_R4(name, opc1, opc2)                                        \
GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B) \
{                                                                             \
    gen_##name(ctx, 0, 0);                                                    \
}                                                                             \
GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x01, 0xFF, 0x00000000,   \
             PPC_64B)                                                         \
{                                                                             \
    gen_##name(ctx, 0, 1);                                                    \
}                                                                             \
GEN_HANDLER2(name##2, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000,   \
             PPC_64B)                                                         \
{                                                                             \
    gen_##name(ctx, 1, 0);                                                    \
}                                                                             \
GEN_HANDLER2(name##3, stringify(name), opc1, opc2 | 0x11, 0xFF, 0x00000000,   \
             PPC_64B)                                                         \
{                                                                             \
    gen_##name(ctx, 1, 1);                                                    \
}
|
1785 |
|
/* Common 64-bit rotate-immediate-and-mask: rA = rotl64(rS, sh) &
 * MASK(mb, me).  The sldi (mb == 0, me == 63-sh) and srdi (me == 63,
 * sh == 64-mb) patterns are special-cased as plain shifts. */
static always_inline void gen_rldinm (DisasContext *ctx, uint32_t mb,
                                      uint32_t me, uint32_t sh)
{
    if (likely(sh != 0 && mb == 0 && me == (63 - sh))) {
        /* sldi pattern */
        tcg_gen_shli_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], sh);
    } else if (likely(sh != 0 && me == 63 && sh == (64 - mb))) {
        /* srdi pattern */
        tcg_gen_shri_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], mb);
    } else {
        TCGv t0 = tcg_temp_new();
        tcg_gen_rotli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
        if (likely(mb == 0 && me == 63)) {
            /* Full mask: rotation only */
            tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
        } else {
            tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], t0, MASK(mb, me));
        }
        tcg_temp_free(t0);
    }
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
|
/* rldicl - rldicl.: rotate left then clear left; mbn/shn are the high
   bits of the split 6-bit mb/sh fields. */
static always_inline void gen_rldicl (DisasContext *ctx, int mbn, int shn)
{
    uint32_t sh, mb;

    sh = SH(ctx->opcode) | (shn << 5);
    mb = MB(ctx->opcode) | (mbn << 5);
    gen_rldinm(ctx, mb, 63, sh);
}
GEN_PPC64_R4(rldicl, 0x1E, 0x00);
|
/* rldicr - rldicr.: rotate left then clear right.  The me field of
   the MD form occupies the same bit positions as mb, hence the MB()
   extractor here. */
static always_inline void gen_rldicr (DisasContext *ctx, int men, int shn)
{
    uint32_t sh, me;

    sh = SH(ctx->opcode) | (shn << 5);
    me = MB(ctx->opcode) | (men << 5);
    gen_rldinm(ctx, 0, me, sh);
}
GEN_PPC64_R4(rldicr, 0x1E, 0x02);
|
/* rldic - rldic.: rotate left then clear, mask is MASK(mb, 63 - sh). */
static always_inline void gen_rldic (DisasContext *ctx, int mbn, int shn)
{
    uint32_t sh, mb;

    sh = SH(ctx->opcode) | (shn << 5);
    mb = MB(ctx->opcode) | (mbn << 5);
    gen_rldinm(ctx, mb, 63 - sh, sh);
}
GEN_PPC64_R4(rldic, 0x1E, 0x04);
|
1836 |
|
1837 static always_inline void gen_rldnm (DisasContext *ctx, uint32_t mb, |
|
1838 uint32_t me) |
|
1839 { |
|
1840 TCGv t0; |
|
1841 |
|
1842 mb = MB(ctx->opcode); |
|
1843 me = ME(ctx->opcode); |
|
1844 t0 = tcg_temp_new(); |
|
1845 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3f); |
|
1846 tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); |
|
1847 if (unlikely(mb != 0 || me != 63)) { |
|
1848 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], t0, MASK(mb, me)); |
|
1849 } else { |
|
1850 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); |
|
1851 } |
|
1852 tcg_temp_free(t0); |
|
1853 if (unlikely(Rc(ctx->opcode) != 0)) |
|
1854 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); |
|
1855 } |
|
1856 |
|
/* rldcl - rldcl. */
/* Rotate Left Doubleword then Clear Left: rotate amount comes from rB,
 * mask is [mb, 63].  mbn supplies the high bit of the 6-bit mb operand. */
static always_inline void gen_rldcl (DisasContext *ctx, int mbn)
{
    uint32_t mb;

    mb = MB(ctx->opcode) | (mbn << 5);
    gen_rldnm(ctx, mb, 63);
}
GEN_PPC64_R2(rldcl, 0x1E, 0x08);
/* rldcr - rldcr. */
/* Rotate Left Doubleword then Clear Right: mask is [0, me]. */
static always_inline void gen_rldcr (DisasContext *ctx, int men)
{
    uint32_t me;

    /* rldcr encodes me in the same opcode bits as mb, hence MB() here */
    me = MB(ctx->opcode) | (men << 5);
    gen_rldnm(ctx, 0, me);
}
GEN_PPC64_R2(rldcr, 0x1E, 0x09);
/* rldimi - rldimi. */
/* Rotate Left Doubleword Immediate then Mask Insert: rotate rS by sh and
 * insert the bits selected by mask [mb, 63-sh] into rA, preserving the
 * rest of rA.  Sets CR0 when the Rc bit is set. */
static always_inline void gen_rldimi (DisasContext *ctx, int mbn, int shn)
{
    uint32_t sh, mb, me;

    sh = SH(ctx->opcode) | (shn << 5);
    mb = MB(ctx->opcode) | (mbn << 5);
    me = 63 - sh;
    if (unlikely(sh == 0 && mb == 0)) {
        /* Full-width insert: rA simply becomes a copy of rS */
        tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
    } else {
        TCGv t0, t1;
        target_ulong mask;

        t0 = tcg_temp_new();
        tcg_gen_rotli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
        t1 = tcg_temp_new();
        mask = MASK(mb, me);
        /* Combine the masked rotate result with the preserved rA bits */
        tcg_gen_andi_tl(t0, t0, mask);
        tcg_gen_andi_tl(t1, cpu_gpr[rA(ctx->opcode)], ~mask);
        tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
        tcg_temp_free(t0);
        tcg_temp_free(t1);
    }
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
GEN_PPC64_R4(rldimi, 0x1E, 0x06);
|
1903 #endif |
|
1904 |
|
1905 /*** Integer shift ***/ |
|
/* slw & slw. */
/* Shift Left Word: shift amount is the low 6 bits of rB; amounts 32-63
 * produce 0, otherwise the 32-bit shift result is zero-extended. */
GEN_HANDLER(slw, 0x1F, 0x18, 0x00, 0x00000000, PPC_INTEGER)
{
    TCGv t0;
    int l1, l2;
    l1 = gen_new_label();
    l2 = gen_new_label();

    /* local temp: value must survive the brcond below */
    t0 = tcg_temp_local_new();
    tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3f);
    tcg_gen_brcondi_tl(TCG_COND_LT, t0, 0x20, l1);
    tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], t0);
    tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    gen_set_label(l2);
    tcg_temp_free(t0);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
/* sraw & sraw. */
/* Shift Right Algebraic Word: delegated to a helper because it must also
 * compute the XER[CA] carry-out. */
GEN_HANDLER(sraw, 0x1F, 0x18, 0x18, 0x00000000, PPC_INTEGER)
{
    gen_helper_sraw(cpu_gpr[rA(ctx->opcode)],
                    cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
/* srawi & srawi. */
/* Shift Right Algebraic Word Immediate.  XER[CA] is set iff the source is
 * negative and any 1-bits are shifted out; cleared otherwise. */
GEN_HANDLER(srawi, 0x1F, 0x18, 0x19, 0x00000000, PPC_INTEGER)
{
    int sh = SH(ctx->opcode);
    if (sh != 0) {
        int l1, l2;
        TCGv t0;
        l1 = gen_new_label();
        l2 = gen_new_label();
        t0 = tcg_temp_local_new();
        tcg_gen_ext32s_tl(t0, cpu_gpr[rS(ctx->opcode)]);
        tcg_gen_brcondi_tl(TCG_COND_GE, t0, 0, l1);
        /* negative source: CA = (bits shifted out != 0) */
        tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], (1ULL << sh) - 1);
        tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
        tcg_gen_ori_tl(cpu_xer, cpu_xer, 1 << XER_CA);
        tcg_gen_br(l2);
        gen_set_label(l1);
        tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
        gen_set_label(l2);
        tcg_gen_ext32s_tl(t0, cpu_gpr[rS(ctx->opcode)]);
        tcg_gen_sari_tl(cpu_gpr[rA(ctx->opcode)], t0, sh);
        tcg_temp_free(t0);
    } else {
        /* sh == 0: plain copy, CA always cleared */
        tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
        tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
    }
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
/* srw & srw. */
/* Shift Right Word: amounts 32-63 produce 0; the source is zero-extended
 * to 32 bits before shifting. */
GEN_HANDLER(srw, 0x1F, 0x18, 0x10, 0x00000000, PPC_INTEGER)
{
    TCGv t0, t1;
    int l1, l2;
    l1 = gen_new_label();
    l2 = gen_new_label();

    /* local temp: value must survive the brcond below */
    t0 = tcg_temp_local_new();
    tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3f);
    tcg_gen_brcondi_tl(TCG_COND_LT, t0, 0x20, l1);
    tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
    tcg_gen_br(l2);
    gen_set_label(l1);
    t1 = tcg_temp_new();
    tcg_gen_ext32u_tl(t1, cpu_gpr[rS(ctx->opcode)]);
    tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t1, t0);
    tcg_temp_free(t1);
    gen_set_label(l2);
    tcg_temp_free(t0);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
|
1987 #if defined(TARGET_PPC64) |
|
/* sld & sld. */
/* Shift Left Doubleword: shift amount is the low 7 bits of rB; amounts
 * 64-127 produce 0. */
GEN_HANDLER(sld, 0x1F, 0x1B, 0x00, 0x00000000, PPC_64B)
{
    TCGv t0;
    int l1, l2;
    l1 = gen_new_label();
    l2 = gen_new_label();

    /* local temp: value must survive the brcond below */
    t0 = tcg_temp_local_new();
    tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x7f);
    tcg_gen_brcondi_tl(TCG_COND_LT, t0, 0x40, l1);
    tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], t0);
    gen_set_label(l2);
    tcg_temp_free(t0);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
/* srad & srad. */
/* Shift Right Algebraic Doubleword: delegated to a helper because it must
 * also compute the XER[CA] carry-out. */
GEN_HANDLER(srad, 0x1F, 0x1A, 0x18, 0x00000000, PPC_64B)
{
    gen_helper_srad(cpu_gpr[rA(ctx->opcode)],
                    cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
/* sradi & sradi. */
/* Shift Right Algebraic Doubleword Immediate.  The 6-bit shift count is
 * split across the opcode; n supplies its high bit (two handlers below).
 * XER[CA] is set iff the source is negative and 1-bits are shifted out. */
static always_inline void gen_sradi (DisasContext *ctx, int n)
{
    int sh = SH(ctx->opcode) + (n << 5);
    if (sh != 0) {
        int l1, l2;
        TCGv t0;
        l1 = gen_new_label();
        l2 = gen_new_label();
        t0 = tcg_temp_local_new();
        tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rS(ctx->opcode)], 0, l1);
        /* negative source: CA = (bits shifted out != 0) */
        tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], (1ULL << sh) - 1);
        tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
        tcg_gen_ori_tl(cpu_xer, cpu_xer, 1 << XER_CA);
        tcg_gen_br(l2);
        gen_set_label(l1);
        tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
        gen_set_label(l2);
        tcg_temp_free(t0);
        tcg_gen_sari_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], sh);
    } else {
        /* sh == 0: plain copy, CA always cleared */
        tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
        tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
    }
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
GEN_HANDLER2(sradi0, "sradi", 0x1F, 0x1A, 0x19, 0x00000000, PPC_64B)
{
    gen_sradi(ctx, 0);
}
GEN_HANDLER2(sradi1, "sradi", 0x1F, 0x1B, 0x19, 0x00000000, PPC_64B)
{
    gen_sradi(ctx, 1);
}
/* srd & srd. */
/* Shift Right Doubleword: amounts 64-127 produce 0. */
GEN_HANDLER(srd, 0x1F, 0x1B, 0x10, 0x00000000, PPC_64B)
{
    TCGv t0;
    int l1, l2;
    l1 = gen_new_label();
    l2 = gen_new_label();

    /* local temp: value must survive the brcond below */
    t0 = tcg_temp_local_new();
    tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x7f);
    tcg_gen_brcondi_tl(TCG_COND_LT, t0, 0x40, l1);
    tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], t0);
    gen_set_label(l2);
    tcg_temp_free(t0);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
|
2071 #endif |
|
2072 |
|
2073 /*** Floating-Point arithmetic ***/ |
|
/* Handler generator for 3-operand FP ops frD = op(frA, frC, frB)
 * (multiply-add family, fsel).  isfloat requests a final round to single
 * precision; set_fprf controls FPRF update in FPSCR. */
#define _GEN_FLOAT_ACB(name, op, op1, op2, isfloat, set_fprf, type)          \
GEN_HANDLER(f##name, op1, op2, 0xFF, 0x00000000, type)                        \
{                                                                             \
    if (unlikely(!ctx->fpu_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_FPU);                                 \
        return;                                                               \
    }                                                                         \
    /* NIP cannot be restored if the memory exception comes from an helper */ \
    gen_update_nip(ctx, ctx->nip - 4);                                        \
    gen_reset_fpstatus();                                                     \
    gen_helper_f##op(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rA(ctx->opcode)],      \
                     cpu_fpr[rC(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]);     \
    if (isfloat) {                                                            \
        gen_helper_frsp(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rD(ctx->opcode)]);  \
    }                                                                         \
    gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], set_fprf,                      \
                     Rc(ctx->opcode) != 0);                                   \
}

/* Emit both the double (0x3F) and single (0x3B) variants of an ACB op. */
#define GEN_FLOAT_ACB(name, op2, set_fprf, type)                              \
_GEN_FLOAT_ACB(name, name, 0x3F, op2, 0, set_fprf, type);                     \
_GEN_FLOAT_ACB(name##s, name, 0x3B, op2, 1, set_fprf, type);

/* Handler generator for 2-operand FP ops frD = op(frA, frB)
 * (fadd/fsub/fdiv). */
#define _GEN_FLOAT_AB(name, op, op1, op2, inval, isfloat, set_fprf, type)     \
GEN_HANDLER(f##name, op1, op2, 0xFF, inval, type)                             \
{                                                                             \
    if (unlikely(!ctx->fpu_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_FPU);                                 \
        return;                                                               \
    }                                                                         \
    /* NIP cannot be restored if the memory exception comes from an helper */ \
    gen_update_nip(ctx, ctx->nip - 4);                                        \
    gen_reset_fpstatus();                                                     \
    gen_helper_f##op(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rA(ctx->opcode)],      \
                     cpu_fpr[rB(ctx->opcode)]);                               \
    if (isfloat) {                                                            \
        gen_helper_frsp(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rD(ctx->opcode)]);  \
    }                                                                         \
    gen_compute_fprf(cpu_fpr[rD(ctx->opcode)],                                \
                     set_fprf, Rc(ctx->opcode) != 0);                         \
}
/* Emit both the double (0x3F) and single (0x3B) variants of an AB op. */
#define GEN_FLOAT_AB(name, op2, inval, set_fprf, type)                        \
_GEN_FLOAT_AB(name, name, 0x3F, op2, inval, 0, set_fprf, type);               \
_GEN_FLOAT_AB(name##s, name, 0x3B, op2, inval, 1, set_fprf, type);

/* Handler generator for 2-operand FP ops frD = op(frA, frC) (fmul). */
#define _GEN_FLOAT_AC(name, op, op1, op2, inval, isfloat, set_fprf, type)     \
GEN_HANDLER(f##name, op1, op2, 0xFF, inval, type)                             \
{                                                                             \
    if (unlikely(!ctx->fpu_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_FPU);                                 \
        return;                                                               \
    }                                                                         \
    /* NIP cannot be restored if the memory exception comes from an helper */ \
    gen_update_nip(ctx, ctx->nip - 4);                                        \
    gen_reset_fpstatus();                                                     \
    gen_helper_f##op(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rA(ctx->opcode)],      \
                     cpu_fpr[rC(ctx->opcode)]);                               \
    if (isfloat) {                                                            \
        gen_helper_frsp(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rD(ctx->opcode)]);  \
    }                                                                         \
    gen_compute_fprf(cpu_fpr[rD(ctx->opcode)],                                \
                     set_fprf, Rc(ctx->opcode) != 0);                         \
}
/* Emit both the double (0x3F) and single (0x3B) variants of an AC op. */
#define GEN_FLOAT_AC(name, op2, inval, set_fprf, type)                        \
_GEN_FLOAT_AC(name, name, 0x3F, op2, inval, 0, set_fprf, type);               \
_GEN_FLOAT_AC(name##s, name, 0x3B, op2, inval, 1, set_fprf, type);

/* Handler generator for 1-operand FP ops frD = op(frB) under primary
 * opcode 0x3F (round/convert, move family). */
#define GEN_FLOAT_B(name, op2, op3, set_fprf, type)                           \
GEN_HANDLER(f##name, 0x3F, op2, op3, 0x001F0000, type)                        \
{                                                                             \
    if (unlikely(!ctx->fpu_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_FPU);                                 \
        return;                                                               \
    }                                                                         \
    /* NIP cannot be restored if the memory exception comes from an helper */ \
    gen_update_nip(ctx, ctx->nip - 4);                                        \
    gen_reset_fpstatus();                                                     \
    gen_helper_f##name(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]);   \
    gen_compute_fprf(cpu_fpr[rD(ctx->opcode)],                                \
                     set_fprf, Rc(ctx->opcode) != 0);                         \
}

/* Handler generator for 1-operand FP ops with a caller-supplied primary
 * opcode (fre/fres/frsqrte estimate family). */
#define GEN_FLOAT_BS(name, op1, op2, set_fprf, type)                          \
GEN_HANDLER(f##name, op1, op2, 0xFF, 0x001F07C0, type)                        \
{                                                                             \
    if (unlikely(!ctx->fpu_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_FPU);                                 \
        return;                                                               \
    }                                                                         \
    /* NIP cannot be restored if the memory exception comes from an helper */ \
    gen_update_nip(ctx, ctx->nip - 4);                                        \
    gen_reset_fpstatus();                                                     \
    gen_helper_f##name(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]);   \
    gen_compute_fprf(cpu_fpr[rD(ctx->opcode)],                                \
                     set_fprf, Rc(ctx->opcode) != 0);                         \
}
|
2170 |
|
/* fadd - fadds */
GEN_FLOAT_AB(add, 0x15, 0x000007C0, 1, PPC_FLOAT);
/* fdiv - fdivs */
GEN_FLOAT_AB(div, 0x12, 0x000007C0, 1, PPC_FLOAT);
/* fmul - fmuls */
GEN_FLOAT_AC(mul, 0x19, 0x0000F800, 1, PPC_FLOAT);

/* fre */
GEN_FLOAT_BS(re, 0x3F, 0x18, 1, PPC_FLOAT_EXT);

/* fres */
GEN_FLOAT_BS(res, 0x3B, 0x18, 1, PPC_FLOAT_FRES);

/* frsqrte */
GEN_FLOAT_BS(rsqrte, 0x3F, 0x1A, 1, PPC_FLOAT_FRSQRTE);

/* frsqrtes */
/* Single-precision reciprocal square root estimate: open-coded (no
 * GEN_FLOAT_BS) because it reuses the double-precision frsqrte helper and
 * rounds the result to single precision afterwards. */
GEN_HANDLER(frsqrtes, 0x3B, 0x1A, 0xFF, 0x001F07C0, PPC_FLOAT_FRSQRTES)
{
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    gen_reset_fpstatus();
    gen_helper_frsqrte(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]);
    gen_helper_frsp(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rD(ctx->opcode)]);
    gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 1, Rc(ctx->opcode) != 0);
}

/* fsel */
_GEN_FLOAT_ACB(sel, sel, 0x3F, 0x17, 0, 0, PPC_FLOAT_FSEL);
/* fsub - fsubs */
GEN_FLOAT_AB(sub, 0x14, 0x000007C0, 1, PPC_FLOAT);
/* Optional: */
/* fsqrt */
GEN_HANDLER(fsqrt, 0x3F, 0x16, 0xFF, 0x001F07C0, PPC_FLOAT_FSQRT)
{
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    gen_reset_fpstatus();
    gen_helper_fsqrt(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]);
    gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 1, Rc(ctx->opcode) != 0);
}

/* fsqrts: double-precision sqrt followed by a round to single precision */
GEN_HANDLER(fsqrts, 0x3B, 0x16, 0xFF, 0x001F07C0, PPC_FLOAT_FSQRT)
{
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    gen_reset_fpstatus();
    gen_helper_fsqrt(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]);
    gen_helper_frsp(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rD(ctx->opcode)]);
    gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 1, Rc(ctx->opcode) != 0);
}

/*** Floating-Point multiply-and-add ***/
/* fmadd - fmadds */
GEN_FLOAT_ACB(madd, 0x1D, 1, PPC_FLOAT);
/* fmsub - fmsubs */
GEN_FLOAT_ACB(msub, 0x1C, 1, PPC_FLOAT);
/* fnmadd - fnmadds */
GEN_FLOAT_ACB(nmadd, 0x1F, 1, PPC_FLOAT);
/* fnmsub - fnmsubs */
GEN_FLOAT_ACB(nmsub, 0x1E, 1, PPC_FLOAT);

/*** Floating-Point round & convert ***/
/* fctiw */
GEN_FLOAT_B(ctiw, 0x0E, 0x00, 0, PPC_FLOAT);
/* fctiwz */
GEN_FLOAT_B(ctiwz, 0x0F, 0x00, 0, PPC_FLOAT);
/* frsp */
GEN_FLOAT_B(rsp, 0x0C, 0x00, 1, PPC_FLOAT);
#if defined(TARGET_PPC64)
/* fcfid */
GEN_FLOAT_B(cfid, 0x0E, 0x1A, 1, PPC_64B);
/* fctid */
GEN_FLOAT_B(ctid, 0x0E, 0x19, 0, PPC_64B);
/* fctidz */
GEN_FLOAT_B(ctidz, 0x0F, 0x19, 0, PPC_64B);
#endif

/* frin */
GEN_FLOAT_B(rin, 0x08, 0x0C, 1, PPC_FLOAT_EXT);
/* friz */
GEN_FLOAT_B(riz, 0x08, 0x0D, 1, PPC_FLOAT_EXT);
/* frip */
GEN_FLOAT_B(rip, 0x08, 0x0E, 1, PPC_FLOAT_EXT);
/* frim */
GEN_FLOAT_B(rim, 0x08, 0x0F, 1, PPC_FLOAT_EXT);
|
2269 |
|
/*** Floating-Point compare ***/
/* fcmpo */
/* Ordered compare: signals invalid-operation on any NaN operand.
 * The helper writes the result into CR field crfD. */
GEN_HANDLER(fcmpo, 0x3F, 0x00, 0x01, 0x00600001, PPC_FLOAT)
{
    TCGv_i32 crf;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    gen_reset_fpstatus();
    crf = tcg_const_i32(crfD(ctx->opcode));
    gen_helper_fcmpo(cpu_fpr[rA(ctx->opcode)], cpu_fpr[rB(ctx->opcode)], crf);
    tcg_temp_free_i32(crf);
    gen_helper_float_check_status();
}

/* fcmpu */
/* Unordered compare: quiet NaNs do not raise invalid-operation. */
GEN_HANDLER(fcmpu, 0x3F, 0x00, 0x00, 0x00600001, PPC_FLOAT)
{
    TCGv_i32 crf;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    gen_reset_fpstatus();
    crf = tcg_const_i32(crfD(ctx->opcode));
    gen_helper_fcmpu(cpu_fpr[rA(ctx->opcode)], cpu_fpr[rB(ctx->opcode)], crf);
    tcg_temp_free_i32(crf);
    gen_helper_float_check_status();
}

/*** Floating-point move ***/
/* fabs */
/* XXX: beware that fabs never checks for NaNs nor update FPSCR */
GEN_FLOAT_B(abs, 0x08, 0x08, 0, PPC_FLOAT);

/* fmr - fmr. */
/* XXX: beware that fmr never checks for NaNs nor update FPSCR */
GEN_HANDLER(fmr, 0x3F, 0x08, 0x02, 0x001F0000, PPC_FLOAT)
{
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    /* Bit-for-bit register copy; only CR1 is affected (when Rc is set) */
    tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]);
    gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 0, Rc(ctx->opcode) != 0);
}

/* fnabs */
/* XXX: beware that fnabs never checks for NaNs nor update FPSCR */
GEN_FLOAT_B(nabs, 0x08, 0x04, 0, PPC_FLOAT);
/* fneg */
/* XXX: beware that fneg never checks for NaNs nor update FPSCR */
GEN_FLOAT_B(neg, 0x08, 0x01, 0, PPC_FLOAT);
|
2328 |
|
/*** Floating-Point status & ctrl register ***/
/* mcrfs */
/* Copy FPSCR field crfS into CR field crfD, then clear the exception bits
 * of that FPSCR field. */
GEN_HANDLER(mcrfs, 0x3F, 0x00, 0x02, 0x0063F801, PPC_FLOAT)
{
    int bfa;

    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    /* bit offset of the selected 4-bit FPSCR field (field 0 is the MSB) */
    bfa = 4 * (7 - crfS(ctx->opcode));
    tcg_gen_shri_i32(cpu_crf[crfD(ctx->opcode)], cpu_fpscr, bfa);
    tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], 0xf);
    tcg_gen_andi_i32(cpu_fpscr, cpu_fpscr, ~(0xF << bfa));
}

/* mffs */
/* Move FPSCR to frD (zero-extended into the 64-bit FP register). */
GEN_HANDLER(mffs, 0x3F, 0x07, 0x12, 0x001FF800, PPC_FLOAT)
{
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    gen_reset_fpstatus();
    tcg_gen_extu_i32_i64(cpu_fpr[rD(ctx->opcode)], cpu_fpscr);
    gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 0, Rc(ctx->opcode) != 0);
}

/* mtfsb0 */
/* Clear one FPSCR bit.  FEX and VX are summary bits maintained by the
 * helper machinery, so writes to them are ignored. */
GEN_HANDLER(mtfsb0, 0x3F, 0x06, 0x02, 0x001FF800, PPC_FLOAT)
{
    uint8_t crb;

    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    /* opcode numbers bits from the MSB; convert to LSB-relative position */
    crb = 31 - crbD(ctx->opcode);
    gen_reset_fpstatus();
    if (likely(crb != FPSCR_FEX && crb != FPSCR_VX)) {
        TCGv_i32 t0;
        /* NIP cannot be restored if the memory exception comes from an helper */
        gen_update_nip(ctx, ctx->nip - 4);
        t0 = tcg_const_i32(crb);
        gen_helper_fpscr_clrbit(t0);
        tcg_temp_free_i32(t0);
    }
    if (unlikely(Rc(ctx->opcode) != 0)) {
        tcg_gen_shri_i32(cpu_crf[1], cpu_fpscr, FPSCR_OX);
    }
}

/* mtfsb1 */
/* Set one FPSCR bit; setting an exception bit may raise a deferred FP
 * exception, hence the final float_check_status call. */
GEN_HANDLER(mtfsb1, 0x3F, 0x06, 0x01, 0x001FF800, PPC_FLOAT)
{
    uint8_t crb;

    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    /* opcode numbers bits from the MSB; convert to LSB-relative position */
    crb = 31 - crbD(ctx->opcode);
    gen_reset_fpstatus();
    /* XXX: we pretend we can only do IEEE floating-point computations */
    if (likely(crb != FPSCR_FEX && crb != FPSCR_VX && crb != FPSCR_NI)) {
        TCGv_i32 t0;
        /* NIP cannot be restored if the memory exception comes from an helper */
        gen_update_nip(ctx, ctx->nip - 4);
        t0 = tcg_const_i32(crb);
        gen_helper_fpscr_setbit(t0);
        tcg_temp_free_i32(t0);
    }
    if (unlikely(Rc(ctx->opcode) != 0)) {
        tcg_gen_shri_i32(cpu_crf[1], cpu_fpscr, FPSCR_OX);
    }
    /* We can raise a deferred exception */
    gen_helper_float_check_status();
}

/* mtfsf */
/* Move frB into the FPSCR fields selected by the FM mask. */
GEN_HANDLER(mtfsf, 0x3F, 0x07, 0x16, 0x02010000, PPC_FLOAT)
{
    TCGv_i32 t0;

    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    gen_reset_fpstatus();
    t0 = tcg_const_i32(FM(ctx->opcode));
    gen_helper_store_fpscr(cpu_fpr[rB(ctx->opcode)], t0);
    tcg_temp_free_i32(t0);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        tcg_gen_shri_i32(cpu_crf[1], cpu_fpscr, FPSCR_OX);
    }
    /* We can raise a deferred exception */
    gen_helper_float_check_status();
}

/* mtfsfi */
/* Move a 4-bit immediate into one FPSCR field. */
GEN_HANDLER(mtfsfi, 0x3F, 0x06, 0x04, 0x006f0800, PPC_FLOAT)
{
    int bf, sh;
    TCGv_i64 t0;
    TCGv_i32 t1;

    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    bf = crbD(ctx->opcode) >> 2;
    /* field 0 is the most-significant nibble of FPSCR */
    sh = 7 - bf;
    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    gen_reset_fpstatus();
    /* position the immediate at the target field and select it via the mask */
    t0 = tcg_const_i64(FPIMM(ctx->opcode) << (4 * sh));
    t1 = tcg_const_i32(1 << sh);
    gen_helper_store_fpscr(t0, t1);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i32(t1);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        tcg_gen_shri_i32(cpu_crf[1], cpu_fpscr, FPSCR_OX);
    }
    /* We can raise a deferred exception */
    gen_helper_float_check_status();
}
|
2457 |
|
/*** Addressing modes ***/
/* Register indirect with immediate index : EA = (rA|0) + SIMM */
/* maskl clears low bits of the displacement (e.g. 0x03 for DS-form loads).
 * In 32-bit mode on a 64-bit target the EA is truncated to 32 bits. */
static always_inline void gen_addr_imm_index (DisasContext *ctx, TCGv EA, target_long maskl)
{
    target_long simm = SIMM(ctx->opcode);

    simm &= ~maskl;
    if (rA(ctx->opcode) == 0) {
        /* rA == 0 means literal 0, not GPR0: EA is just the displacement */
#if defined(TARGET_PPC64)
        if (!ctx->sf_mode) {
            tcg_gen_movi_tl(EA, (uint32_t)simm);
        } else
#endif
        tcg_gen_movi_tl(EA, simm);
    } else if (likely(simm != 0)) {
        tcg_gen_addi_tl(EA, cpu_gpr[rA(ctx->opcode)], simm);
#if defined(TARGET_PPC64)
        if (!ctx->sf_mode) {
            tcg_gen_ext32u_tl(EA, EA);
        }
#endif
    } else {
#if defined(TARGET_PPC64)
        if (!ctx->sf_mode) {
            tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]);
        } else
#endif
        tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]);
    }
}

/* Register indirect with index : EA = (rA|0) + rB */
static always_inline void gen_addr_reg_index (DisasContext *ctx, TCGv EA)
{
    if (rA(ctx->opcode) == 0) {
        /* rA == 0 means literal 0, not GPR0: EA is just rB */
#if defined(TARGET_PPC64)
        if (!ctx->sf_mode) {
            tcg_gen_ext32u_tl(EA, cpu_gpr[rB(ctx->opcode)]);
        } else
#endif
        tcg_gen_mov_tl(EA, cpu_gpr[rB(ctx->opcode)]);
    } else {
        tcg_gen_add_tl(EA, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
#if defined(TARGET_PPC64)
        if (!ctx->sf_mode) {
            tcg_gen_ext32u_tl(EA, EA);
        }
#endif
    }
}

/* Register indirect : EA = (rA|0) */
static always_inline void gen_addr_register (DisasContext *ctx, TCGv EA)
{
    if (rA(ctx->opcode) == 0) {
        tcg_gen_movi_tl(EA, 0);
    } else {
#if defined(TARGET_PPC64)
        if (!ctx->sf_mode) {
            tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]);
        } else
#endif
        tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]);
    }
}

/* ret = arg1 + val, truncated to 32 bits when not in 64-bit mode. */
static always_inline void gen_addr_add (DisasContext *ctx, TCGv ret, TCGv arg1, target_long val)
{
    tcg_gen_addi_tl(ret, arg1, val);
#if defined(TARGET_PPC64)
    if (!ctx->sf_mode) {
        tcg_gen_ext32u_tl(ret, ret);
    }
#endif
}
|
2531 |
|
/* Emit code that raises an alignment exception when (EA & mask) != 0. */
static always_inline void gen_check_align (DisasContext *ctx, TCGv EA, int mask)
{
    int l1 = gen_new_label();
    TCGv t0 = tcg_temp_new();
    TCGv_i32 t1, t2;
    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    tcg_gen_andi_tl(t0, EA, mask);
    tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
    t1 = tcg_const_i32(POWERPC_EXCP_ALIGN);
    t2 = tcg_const_i32(0);
    gen_helper_raise_exception_err(t1, t2);
    tcg_temp_free_i32(t1);
    tcg_temp_free_i32(t2);
    gen_set_label(l1);
    tcg_temp_free(t0);
}
|
2549 |
|
/*** Integer load ***/
/* These helpers wrap the raw tcg_gen_qemu_* ops and add the byte swap
 * required when the CPU runs in little-endian mode (ctx->le_mode). */
static always_inline void gen_qemu_ld8u(DisasContext *ctx, TCGv arg1, TCGv arg2)
{
    /* single bytes have no endianness */
    tcg_gen_qemu_ld8u(arg1, arg2, ctx->mem_idx);
}

static always_inline void gen_qemu_ld8s(DisasContext *ctx, TCGv arg1, TCGv arg2)
{
    tcg_gen_qemu_ld8s(arg1, arg2, ctx->mem_idx);
}

/* Load 16 bits zero-extended, byte-swapped in little-endian mode. */
static always_inline void gen_qemu_ld16u(DisasContext *ctx, TCGv arg1, TCGv arg2)
{
    tcg_gen_qemu_ld16u(arg1, arg2, ctx->mem_idx);
    if (unlikely(ctx->le_mode)) {
#if defined(TARGET_PPC64)
        /* bswap16 only exists on i32; bounce through a 32-bit temp */
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_trunc_tl_i32(t0, arg1);
        tcg_gen_bswap16_i32(t0, t0);
        tcg_gen_extu_i32_tl(arg1, t0);
        tcg_temp_free_i32(t0);
#else
        tcg_gen_bswap16_i32(arg1, arg1);
#endif
    }
}

/* Load 16 bits sign-extended; in little-endian mode load unsigned, swap,
 * then sign-extend from 16 bits. */
static always_inline void gen_qemu_ld16s(DisasContext *ctx, TCGv arg1, TCGv arg2)
{
    if (unlikely(ctx->le_mode)) {
#if defined(TARGET_PPC64)
        TCGv_i32 t0;
        tcg_gen_qemu_ld16u(arg1, arg2, ctx->mem_idx);
        t0 = tcg_temp_new_i32();
        tcg_gen_trunc_tl_i32(t0, arg1);
        tcg_gen_bswap16_i32(t0, t0);
        tcg_gen_extu_i32_tl(arg1, t0);
        tcg_gen_ext16s_tl(arg1, arg1);
        tcg_temp_free_i32(t0);
#else
        tcg_gen_qemu_ld16u(arg1, arg2, ctx->mem_idx);
        tcg_gen_bswap16_i32(arg1, arg1);
        tcg_gen_ext16s_i32(arg1, arg1);
#endif
    } else {
        tcg_gen_qemu_ld16s(arg1, arg2, ctx->mem_idx);
    }
}

/* Load 32 bits zero-extended, byte-swapped in little-endian mode. */
static always_inline void gen_qemu_ld32u(DisasContext *ctx, TCGv arg1, TCGv arg2)
{
    tcg_gen_qemu_ld32u(arg1, arg2, ctx->mem_idx);
    if (unlikely(ctx->le_mode)) {
#if defined(TARGET_PPC64)
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_trunc_tl_i32(t0, arg1);
        tcg_gen_bswap_i32(t0, t0);
        tcg_gen_extu_i32_tl(arg1, t0);
        tcg_temp_free_i32(t0);
#else
        tcg_gen_bswap_i32(arg1, arg1);
#endif
    }
}
|
2614 |
|
#if defined(TARGET_PPC64)
/* Load 32 bits and sign-extend to 64 bits; in little-endian mode load
 * unsigned, byte-swap, then sign-extend.
 * Bug fix: the condition previously tested ctx->mem_idx (the MMU index)
 * instead of ctx->le_mode, so byte-swapping depended on the privilege
 * level rather than on the endianness, unlike every other load/store
 * helper in this file. */
static always_inline void gen_qemu_ld32s(DisasContext *ctx, TCGv arg1, TCGv arg2)
{
    if (unlikely(ctx->le_mode)) {
        TCGv_i32 t0;
        tcg_gen_qemu_ld32u(arg1, arg2, ctx->mem_idx);
        t0 = tcg_temp_new_i32();
        tcg_gen_trunc_tl_i32(t0, arg1);
        tcg_gen_bswap_i32(t0, t0);
        tcg_gen_ext_i32_tl(arg1, t0);
        tcg_temp_free_i32(t0);
    } else
        tcg_gen_qemu_ld32s(arg1, arg2, ctx->mem_idx);
}
#endif
|
2630 |
|
/* Load 64 bits, byte-swapped in little-endian mode. */
static always_inline void gen_qemu_ld64(DisasContext *ctx, TCGv_i64 arg1, TCGv arg2)
{
    tcg_gen_qemu_ld64(arg1, arg2, ctx->mem_idx);
    if (unlikely(ctx->le_mode)) {
        tcg_gen_bswap_i64(arg1, arg1);
    }
}

/* Store 8 bits; single bytes have no endianness. */
static always_inline void gen_qemu_st8(DisasContext *ctx, TCGv arg1, TCGv arg2)
{
    tcg_gen_qemu_st8(arg1, arg2, ctx->mem_idx);
}

/* Store 16 bits, byte-swapped first in little-endian mode. */
static always_inline void gen_qemu_st16(DisasContext *ctx, TCGv arg1, TCGv arg2)
{
    if (unlikely(ctx->le_mode)) {
#if defined(TARGET_PPC64)
        /* bswap16 only exists on i32; bounce through a 32-bit temp */
        TCGv_i32 t0;
        TCGv t1;
        t0 = tcg_temp_new_i32();
        tcg_gen_trunc_tl_i32(t0, arg1);
        tcg_gen_ext16u_i32(t0, t0);
        tcg_gen_bswap16_i32(t0, t0);
        t1 = tcg_temp_new();
        tcg_gen_extu_i32_tl(t1, t0);
        tcg_temp_free_i32(t0);
        tcg_gen_qemu_st16(t1, arg2, ctx->mem_idx);
        tcg_temp_free(t1);
#else
        TCGv t0 = tcg_temp_new();
        tcg_gen_ext16u_tl(t0, arg1);
        tcg_gen_bswap16_i32(t0, t0);
        tcg_gen_qemu_st16(t0, arg2, ctx->mem_idx);
        tcg_temp_free(t0);
#endif
    } else {
        tcg_gen_qemu_st16(arg1, arg2, ctx->mem_idx);
    }
}
|
2670 |
|
2671 static always_inline void gen_qemu_st32(DisasContext *ctx, TCGv arg1, TCGv arg2) |
|
2672 { |
|
2673 if (unlikely(ctx->le_mode)) { |
|
2674 #if defined(TARGET_PPC64) |
|
2675 TCGv_i32 t0; |
|
2676 TCGv t1; |
|
2677 t0 = tcg_temp_new_i32(); |
|
2678 tcg_gen_trunc_tl_i32(t0, arg1); |
|
2679 tcg_gen_bswap_i32(t0, t0); |
|
2680 t1 = tcg_temp_new(); |
|
2681 tcg_gen_extu_i32_tl(t1, t0); |
|
2682 tcg_temp_free_i32(t0); |
|
2683 tcg_gen_qemu_st32(t1, arg2, ctx->mem_idx); |
|
2684 tcg_temp_free(t1); |
|
2685 #else |
|
2686 TCGv t0 = tcg_temp_new_i32(); |
|
2687 tcg_gen_bswap_i32(t0, arg1); |
|
2688 tcg_gen_qemu_st32(t0, arg2, ctx->mem_idx); |
|
2689 tcg_temp_free(t0); |
|
2690 #endif |
|
2691 } else { |
|
2692 tcg_gen_qemu_st32(arg1, arg2, ctx->mem_idx); |
|
2693 } |
|
2694 } |
|
2695 |
|
/* Store 64 bits, byte-swapped first in little-endian mode. */
static always_inline void gen_qemu_st64(DisasContext *ctx, TCGv_i64 arg1, TCGv arg2)
{
    if (unlikely(ctx->le_mode)) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_bswap_i64(t0, arg1);
        tcg_gen_qemu_st64(t0, arg2, ctx->mem_idx);
        tcg_temp_free_i64(t0);
    } else
        tcg_gen_qemu_st64(arg1, arg2, ctx->mem_idx);
}
|
2706 |
|
/* Handler generator for D-form loads: rD = mem[(rA|0) + SIMM]. */
#define GEN_LD(name, ldop, opc, type)                                         \
GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type)                          \
{                                                                             \
    TCGv EA;                                                                  \
    gen_set_access_type(ctx, ACCESS_INT);                                     \
    EA = tcg_temp_new();                                                      \
    gen_addr_imm_index(ctx, EA, 0);                                           \
    gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA);                       \
    tcg_temp_free(EA);                                                        \
}

/* Handler generator for D-form loads with update: also writes the EA back
 * to rA.  rA == 0 or rA == rD is an invalid form. */
#define GEN_LDU(name, ldop, opc, type)                                        \
GEN_HANDLER(name##u, opc, 0xFF, 0xFF, 0x00000000, type)                       \
{                                                                             \
    TCGv EA;                                                                  \
    if (unlikely(rA(ctx->opcode) == 0 ||                                      \
                 rA(ctx->opcode) == rD(ctx->opcode))) {                       \
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);                   \
        return;                                                               \
    }                                                                         \
    gen_set_access_type(ctx, ACCESS_INT);                                     \
    EA = tcg_temp_new();                                                      \
    if (type == PPC_64B)                                                      \
        gen_addr_imm_index(ctx, EA, 0x03);                                    \
    else                                                                      \
        gen_addr_imm_index(ctx, EA, 0);                                       \
    gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA);                       \
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA);                             \
    tcg_temp_free(EA);                                                        \
}

/* Handler generator for X-form indexed loads with update: EA = rA + rB,
 * written back to rA.  rA == 0 or rA == rD is an invalid form. */
#define GEN_LDUX(name, ldop, opc2, opc3, type)                                \
GEN_HANDLER(name##ux, 0x1F, opc2, opc3, 0x00000001, type)                     \
{                                                                             \
    TCGv EA;                                                                  \
    if (unlikely(rA(ctx->opcode) == 0 ||                                      \
                 rA(ctx->opcode) == rD(ctx->opcode))) {                       \
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);                   \
        return;                                                               \
    }                                                                         \
    gen_set_access_type(ctx, ACCESS_INT);                                     \
    EA = tcg_temp_new();                                                      \
    gen_addr_reg_index(ctx, EA);                                              \
    gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA);                       \
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA);                             \
    tcg_temp_free(EA);                                                        \
}
|
2754 |
|
2755 #define GEN_LDX(name, ldop, opc2, opc3, type) \ |
|
2756 GEN_HANDLER(name##x, 0x1F, opc2, opc3, 0x00000001, type) \ |
|
2757 { \ |
|
2758 TCGv EA; \ |
|
2759 gen_set_access_type(ctx, ACCESS_INT); \ |
|
2760 EA = tcg_temp_new(); \ |
|
2761 gen_addr_reg_index(ctx, EA); \ |
|
2762 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \ |
|
2763 tcg_temp_free(EA); \ |
|
2764 } |
|
2765 |
|
2766 #define GEN_LDS(name, ldop, op, type) \ |
|
2767 GEN_LD(name, ldop, op | 0x20, type); \ |
|
2768 GEN_LDU(name, ldop, op | 0x21, type); \ |
|
2769 GEN_LDUX(name, ldop, 0x17, op | 0x01, type); \ |
|
2770 GEN_LDX(name, ldop, 0x17, op | 0x00, type) |
|
2771 |
|
2772 /* lbz lbzu lbzux lbzx */ |
|
2773 GEN_LDS(lbz, ld8u, 0x02, PPC_INTEGER); |
|
2774 /* lha lhau lhaux lhax */ |
|
2775 GEN_LDS(lha, ld16s, 0x0A, PPC_INTEGER); |
|
2776 /* lhz lhzu lhzux lhzx */ |
|
2777 GEN_LDS(lhz, ld16u, 0x08, PPC_INTEGER); |
|
2778 /* lwz lwzu lwzux lwzx */ |
|
2779 GEN_LDS(lwz, ld32u, 0x00, PPC_INTEGER); |
|
2780 #if defined(TARGET_PPC64) |
|
2781 /* lwaux */ |
|
2782 GEN_LDUX(lwa, ld32s, 0x15, 0x0B, PPC_64B); |
|
2783 /* lwax */ |
|
2784 GEN_LDX(lwa, ld32s, 0x15, 0x0A, PPC_64B); |
|
2785 /* ldux */ |
|
2786 GEN_LDUX(ld, ld64, 0x15, 0x01, PPC_64B); |
|
2787 /* ldx */ |
|
2788 GEN_LDX(ld, ld64, 0x15, 0x00, PPC_64B); |
|
2789 GEN_HANDLER(ld, 0x3A, 0xFF, 0xFF, 0x00000000, PPC_64B) |
|
2790 { |
|
2791 TCGv EA; |
|
2792 if (Rc(ctx->opcode)) { |
|
2793 if (unlikely(rA(ctx->opcode) == 0 || |
|
2794 rA(ctx->opcode) == rD(ctx->opcode))) { |
|
2795 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); |
|
2796 return; |
|
2797 } |
|
2798 } |
|
2799 gen_set_access_type(ctx, ACCESS_INT); |
|
2800 EA = tcg_temp_new(); |
|
2801 gen_addr_imm_index(ctx, EA, 0x03); |
|
2802 if (ctx->opcode & 0x02) { |
|
2803 /* lwa (lwau is undefined) */ |
|
2804 gen_qemu_ld32s(ctx, cpu_gpr[rD(ctx->opcode)], EA); |
|
2805 } else { |
|
2806 /* ld - ldu */ |
|
2807 gen_qemu_ld64(ctx, cpu_gpr[rD(ctx->opcode)], EA); |
|
2808 } |
|
2809 if (Rc(ctx->opcode)) |
|
2810 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); |
|
2811 tcg_temp_free(EA); |
|
2812 } |
|
2813 /* lq */ |
|
2814 GEN_HANDLER(lq, 0x38, 0xFF, 0xFF, 0x00000000, PPC_64BX) |
|
2815 { |
|
2816 #if defined(CONFIG_USER_ONLY) |
|
2817 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC); |
|
2818 #else |
|
2819 int ra, rd; |
|
2820 TCGv EA; |
|
2821 |
|
2822 /* Restore CPU state */ |
|
2823 if (unlikely(ctx->mem_idx == 0)) { |
|
2824 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC); |
|
2825 return; |
|
2826 } |
|
2827 ra = rA(ctx->opcode); |
|
2828 rd = rD(ctx->opcode); |
|
2829 if (unlikely((rd & 1) || rd == ra)) { |
|
2830 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); |
|
2831 return; |
|
2832 } |
|
2833 if (unlikely(ctx->le_mode)) { |
|
2834 /* Little-endian mode is not handled */ |
|
2835 gen_exception_err(ctx, POWERPC_EXCP_ALIGN, POWERPC_EXCP_ALIGN_LE); |
|
2836 return; |
|
2837 } |
|
2838 gen_set_access_type(ctx, ACCESS_INT); |
|
2839 EA = tcg_temp_new(); |
|
2840 gen_addr_imm_index(ctx, EA, 0x0F); |
|
2841 gen_qemu_ld64(ctx, cpu_gpr[rd], EA); |
|
2842 gen_addr_add(ctx, EA, EA, 8); |
|
2843 gen_qemu_ld64(ctx, cpu_gpr[rd+1], EA); |
|
2844 tcg_temp_free(EA); |
|
2845 #endif |
|
2846 } |
|
2847 #endif |
|
2848 |
|
2849 /*** Integer store ***/ |
|
2850 #define GEN_ST(name, stop, opc, type) \ |
|
2851 GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type) \ |
|
2852 { \ |
|
2853 TCGv EA; \ |
|
2854 gen_set_access_type(ctx, ACCESS_INT); \ |
|
2855 EA = tcg_temp_new(); \ |
|
2856 gen_addr_imm_index(ctx, EA, 0); \ |
|
2857 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \ |
|
2858 tcg_temp_free(EA); \ |
|
2859 } |
|
2860 |
|
2861 #define GEN_STU(name, stop, opc, type) \ |
|
2862 GEN_HANDLER(stop##u, opc, 0xFF, 0xFF, 0x00000000, type) \ |
|
2863 { \ |
|
2864 TCGv EA; \ |
|
2865 if (unlikely(rA(ctx->opcode) == 0)) { \ |
|
2866 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \ |
|
2867 return; \ |
|
2868 } \ |
|
2869 gen_set_access_type(ctx, ACCESS_INT); \ |
|
2870 EA = tcg_temp_new(); \ |
|
2871 if (type == PPC_64B) \ |
|
2872 gen_addr_imm_index(ctx, EA, 0x03); \ |
|
2873 else \ |
|
2874 gen_addr_imm_index(ctx, EA, 0); \ |
|
2875 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \ |
|
2876 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \ |
|
2877 tcg_temp_free(EA); \ |
|
2878 } |
|
2879 |
|
2880 #define GEN_STUX(name, stop, opc2, opc3, type) \ |
|
2881 GEN_HANDLER(name##ux, 0x1F, opc2, opc3, 0x00000001, type) \ |
|
2882 { \ |
|
2883 TCGv EA; \ |
|
2884 if (unlikely(rA(ctx->opcode) == 0)) { \ |
|
2885 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \ |
|
2886 return; \ |
|
2887 } \ |
|
2888 gen_set_access_type(ctx, ACCESS_INT); \ |
|
2889 EA = tcg_temp_new(); \ |
|
2890 gen_addr_reg_index(ctx, EA); \ |
|
2891 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \ |
|
2892 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \ |
|
2893 tcg_temp_free(EA); \ |
|
2894 } |
|
2895 |
|
2896 #define GEN_STX(name, stop, opc2, opc3, type) \ |
|
2897 GEN_HANDLER(name##x, 0x1F, opc2, opc3, 0x00000001, type) \ |
|
2898 { \ |
|
2899 TCGv EA; \ |
|
2900 gen_set_access_type(ctx, ACCESS_INT); \ |
|
2901 EA = tcg_temp_new(); \ |
|
2902 gen_addr_reg_index(ctx, EA); \ |
|
2903 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \ |
|
2904 tcg_temp_free(EA); \ |
|
2905 } |
|
2906 |
|
2907 #define GEN_STS(name, stop, op, type) \ |
|
2908 GEN_ST(name, stop, op | 0x20, type); \ |
|
2909 GEN_STU(name, stop, op | 0x21, type); \ |
|
2910 GEN_STUX(name, stop, 0x17, op | 0x01, type); \ |
|
2911 GEN_STX(name, stop, 0x17, op | 0x00, type) |
|
2912 |
|
2913 /* stb stbu stbux stbx */ |
|
2914 GEN_STS(stb, st8, 0x06, PPC_INTEGER); |
|
2915 /* sth sthu sthux sthx */ |
|
2916 GEN_STS(sth, st16, 0x0C, PPC_INTEGER); |
|
2917 /* stw stwu stwux stwx */ |
|
2918 GEN_STS(stw, st32, 0x04, PPC_INTEGER); |
|
2919 #if defined(TARGET_PPC64) |
|
2920 GEN_STUX(std, st64, 0x15, 0x05, PPC_64B); |
|
2921 GEN_STX(std, st64, 0x15, 0x04, PPC_64B); |
|
2922 GEN_HANDLER(std, 0x3E, 0xFF, 0xFF, 0x00000000, PPC_64B) |
|
2923 { |
|
2924 int rs; |
|
2925 TCGv EA; |
|
2926 |
|
2927 rs = rS(ctx->opcode); |
|
2928 if ((ctx->opcode & 0x3) == 0x2) { |
|
2929 #if defined(CONFIG_USER_ONLY) |
|
2930 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC); |
|
2931 #else |
|
2932 /* stq */ |
|
2933 if (unlikely(ctx->mem_idx == 0)) { |
|
2934 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC); |
|
2935 return; |
|
2936 } |
|
2937 if (unlikely(rs & 1)) { |
|
2938 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); |
|
2939 return; |
|
2940 } |
|
2941 if (unlikely(ctx->le_mode)) { |
|
2942 /* Little-endian mode is not handled */ |
|
2943 gen_exception_err(ctx, POWERPC_EXCP_ALIGN, POWERPC_EXCP_ALIGN_LE); |
|
2944 return; |
|
2945 } |
|
2946 gen_set_access_type(ctx, ACCESS_INT); |
|
2947 EA = tcg_temp_new(); |
|
2948 gen_addr_imm_index(ctx, EA, 0x03); |
|
2949 gen_qemu_st64(ctx, cpu_gpr[rs], EA); |
|
2950 gen_addr_add(ctx, EA, EA, 8); |
|
2951 gen_qemu_st64(ctx, cpu_gpr[rs+1], EA); |
|
2952 tcg_temp_free(EA); |
|
2953 #endif |
|
2954 } else { |
|
2955 /* std / stdu */ |
|
2956 if (Rc(ctx->opcode)) { |
|
2957 if (unlikely(rA(ctx->opcode) == 0)) { |
|
2958 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); |
|
2959 return; |
|
2960 } |
|
2961 } |
|
2962 gen_set_access_type(ctx, ACCESS_INT); |
|
2963 EA = tcg_temp_new(); |
|
2964 gen_addr_imm_index(ctx, EA, 0x03); |
|
2965 gen_qemu_st64(ctx, cpu_gpr[rs], EA); |
|
2966 if (Rc(ctx->opcode)) |
|
2967 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); |
|
2968 tcg_temp_free(EA); |
|
2969 } |
|
2970 } |
|
2971 #endif |
|
2972 /*** Integer load and store with byte reverse ***/ |
|
2973 /* lhbrx */ |
|
/* lhbrx helper: load 16 bits zero-extended, then byte-reverse them.
 * In little-endian mode the memory layout already matches, so no swap. */
static void always_inline gen_qemu_ld16ur(DisasContext *ctx, TCGv arg1, TCGv arg2)
{
    tcg_gen_qemu_ld16u(arg1, arg2, ctx->mem_idx);
    if (likely(!ctx->le_mode)) {
#if defined(TARGET_PPC64)
        /* Swap in a 32-bit temp, then zero-extend back into the 64-bit reg. */
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_trunc_tl_i32(t0, arg1);
        tcg_gen_bswap16_i32(t0, t0);
        tcg_gen_extu_i32_tl(arg1, t0);
        tcg_temp_free_i32(t0);
#else
        tcg_gen_bswap16_i32(arg1, arg1);
#endif
    }
}
|
2989 GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER); |
|
2990 |
|
2991 /* lwbrx */ |
|
/* lwbrx helper: load a zero-extended 32-bit word, then byte-reverse it.
 * In little-endian mode the memory layout already matches, so no swap. */
static void always_inline gen_qemu_ld32ur(DisasContext *ctx, TCGv arg1, TCGv arg2)
{
    tcg_gen_qemu_ld32u(arg1, arg2, ctx->mem_idx);
    if (likely(!ctx->le_mode)) {
#if defined(TARGET_PPC64)
        /* Swap in a 32-bit temp, then zero-extend back into the 64-bit reg. */
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_trunc_tl_i32(t0, arg1);
        tcg_gen_bswap_i32(t0, t0);
        tcg_gen_extu_i32_tl(arg1, t0);
        tcg_temp_free_i32(t0);
#else
        tcg_gen_bswap_i32(arg1, arg1);
#endif
    }
}
|
3007 GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER); |
|
3008 |
|
3009 /* sthbrx */ |
|
/* sthbrx helper: store the low 16 bits of arg1 byte-reversed.
 * In little-endian mode a plain store already yields the reversed layout. */
static void always_inline gen_qemu_st16r(DisasContext *ctx, TCGv arg1, TCGv arg2)
{
    if (likely(!ctx->le_mode)) {
#if defined(TARGET_PPC64)
        TCGv_i32 t0;
        TCGv t1;
        /* Narrow to 32 bits, mask to 16, swap, then widen for the store. */
        t0 = tcg_temp_new_i32();
        tcg_gen_trunc_tl_i32(t0, arg1);
        tcg_gen_ext16u_i32(t0, t0);
        tcg_gen_bswap16_i32(t0, t0);
        t1 = tcg_temp_new();
        tcg_gen_extu_i32_tl(t1, t0);
        tcg_temp_free_i32(t0);
        tcg_gen_qemu_st16(t1, arg2, ctx->mem_idx);
        tcg_temp_free(t1);
#else
        TCGv t0 = tcg_temp_new();
        tcg_gen_ext16u_tl(t0, arg1);
        tcg_gen_bswap16_i32(t0, t0);
        tcg_gen_qemu_st16(t0, arg2, ctx->mem_idx);
        tcg_temp_free(t0);
#endif
    } else {
        tcg_gen_qemu_st16(arg1, arg2, ctx->mem_idx);
    }
}
|
3036 GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER); |
|
3037 |
|
3038 /* stwbrx */ |
|
3039 static void always_inline gen_qemu_st32r(DisasContext *ctx, TCGv arg1, TCGv arg2) |
|
3040 { |
|
3041 if (likely(!ctx->le_mode)) { |
|
3042 #if defined(TARGET_PPC64) |
|
3043 TCGv_i32 t0; |
|
3044 TCGv t1; |
|
3045 t0 = tcg_temp_new_i32(); |
|
3046 tcg_gen_trunc_tl_i32(t0, arg1); |
|
3047 tcg_gen_bswap_i32(t0, t0); |
|
3048 t1 = tcg_temp_new(); |
|
3049 tcg_gen_extu_i32_tl(t1, t0); |
|
3050 tcg_temp_free_i32(t0); |
|
3051 tcg_gen_qemu_st32(t1, arg2, ctx->mem_idx); |
|
3052 tcg_temp_free(t1); |
|
3053 #else |
|
3054 TCGv t0 = tcg_temp_new_i32(); |
|
3055 tcg_gen_bswap_i32(t0, arg1); |
|
3056 tcg_gen_qemu_st32(t0, arg2, ctx->mem_idx); |
|
3057 tcg_temp_free(t0); |
|
3058 #endif |
|
3059 } else { |
|
3060 tcg_gen_qemu_st32(arg1, arg2, ctx->mem_idx); |
|
3061 } |
|
3062 } |
|
3063 GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER); |
|
3064 |
|
3065 /*** Integer load and store multiple ***/ |
|
3066 /* lmw */ |
|
/* lmw: load multiple words into rD..r31.  Done in a C helper, so the NIP
 * is rewound first in case the helper raises a memory exception. */
GEN_HANDLER(lmw, 0x2E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
{
    TCGv t0;       /* effective address */
    TCGv_i32 t1;   /* first destination register number */
    gen_set_access_type(ctx, ACCESS_INT);
    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    t0 = tcg_temp_new();
    t1 = tcg_const_i32(rD(ctx->opcode));
    gen_addr_imm_index(ctx, t0, 0);
    gen_helper_lmw(t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free_i32(t1);
}
|
3081 |
|
3082 /* stmw */ |
|
/* stmw: store multiple words from rS..r31.  Done in a C helper, so the NIP
 * is rewound first in case the helper raises a memory exception. */
GEN_HANDLER(stmw, 0x2F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
{
    TCGv t0;       /* effective address */
    TCGv_i32 t1;   /* first source register number */
    gen_set_access_type(ctx, ACCESS_INT);
    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    t0 = tcg_temp_new();
    t1 = tcg_const_i32(rS(ctx->opcode));
    gen_addr_imm_index(ctx, t0, 0);
    gen_helper_stmw(t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free_i32(t1);
}
|
3097 |
|
3098 /*** Integer load and store strings ***/ |
|
3099 /* lswi */ |
|
3100 /* PowerPC32 specification says we must generate an exception if |
|
3101 * rA is in the range of registers to be loaded. |
|
 * On the other hand, IBM says this is valid, but rA won't be loaded.
|
3103 * For now, I'll follow the spec... |
|
3104 */ |
|
/* lswi: load string word immediate — loads nb bytes starting at rD,
 * wrapping from r31 back to r0.  Done in a C helper. */
GEN_HANDLER(lswi, 0x1F, 0x15, 0x12, 0x00000001, PPC_STRING)
{
    TCGv t0;                       /* effective address */
    TCGv_i32 t1, t2;               /* byte count, first register */
    int nb = NB(ctx->opcode);      /* byte count; 0 encodes 32 */
    int start = rD(ctx->opcode);   /* first destination register */
    int ra = rA(ctx->opcode);
    int nr;                        /* number of registers touched */

    if (nb == 0)
        nb = 32;
    nr = nb / 4;
    /* Per the spec, reject the form where rA lies inside the (possibly
     * wrapping) range of registers to be loaded; the two disjuncts cover
     * the wrapping and non-wrapping cases. */
    if (unlikely(((start + nr) > 32 &&
                  start <= ra && (start + nr - 32) > ra) ||
                 ((start + nr) <= 32 && start <= ra && (start + nr) > ra))) {
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_LSWX);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    t0 = tcg_temp_new();
    gen_addr_register(ctx, t0);
    t1 = tcg_const_i32(nb);
    t2 = tcg_const_i32(start);
    gen_helper_lsw(t0, t1, t2);
    tcg_temp_free(t0);
    tcg_temp_free_i32(t1);
    tcg_temp_free_i32(t2);
}
|
3135 |
|
3136 /* lswx */ |
|
3137 GEN_HANDLER(lswx, 0x1F, 0x15, 0x10, 0x00000001, PPC_STRING) |
|
3138 { |
|
3139 TCGv t0; |
|
3140 TCGv_i32 t1, t2, t3; |
|
3141 gen_set_access_type(ctx, ACCESS_INT); |
|
3142 /* NIP cannot be restored if the memory exception comes from an helper */ |
|
3143 gen_update_nip(ctx, ctx->nip - 4); |
|
3144 t0 = tcg_temp_new(); |
|
3145 gen_addr_reg_index(ctx, t0); |
|
3146 t1 = tcg_const_i32(rD(ctx->opcode)); |
|
3147 t2 = tcg_const_i32(rA(ctx->opcode)); |
|
3148 t3 = tcg_const_i32(rB(ctx->opcode)); |
|
3149 gen_helper_lswx(t0, t1, t2, t3); |
|
3150 tcg_temp_free(t0); |
|
3151 tcg_temp_free_i32(t1); |
|
3152 tcg_temp_free_i32(t2); |
|
3153 tcg_temp_free_i32(t3); |
|
3154 } |
|
3155 |
|
3156 /* stswi */ |
|
3157 GEN_HANDLER(stswi, 0x1F, 0x15, 0x16, 0x00000001, PPC_STRING) |
|
3158 { |
|
3159 TCGv t0; |
|
3160 TCGv_i32 t1, t2; |
|
3161 int nb = NB(ctx->opcode); |
|
3162 gen_set_access_type(ctx, ACCESS_INT); |
|
3163 /* NIP cannot be restored if the memory exception comes from an helper */ |
|
3164 gen_update_nip(ctx, ctx->nip - 4); |
|
3165 t0 = tcg_temp_new(); |
|
3166 gen_addr_register(ctx, t0); |
|
3167 if (nb == 0) |
|
3168 nb = 32; |
|
3169 t1 = tcg_const_i32(nb); |
|
3170 t2 = tcg_const_i32(rS(ctx->opcode)); |
|
3171 gen_helper_stsw(t0, t1, t2); |
|
3172 tcg_temp_free(t0); |
|
3173 tcg_temp_free_i32(t1); |
|
3174 tcg_temp_free_i32(t2); |
|
3175 } |
|
3176 |
|
3177 /* stswx */ |
|
3178 GEN_HANDLER(stswx, 0x1F, 0x15, 0x14, 0x00000001, PPC_STRING) |
|
3179 { |
|
3180 TCGv t0; |
|
3181 TCGv_i32 t1, t2; |
|
3182 gen_set_access_type(ctx, ACCESS_INT); |
|
3183 /* NIP cannot be restored if the memory exception comes from an helper */ |
|
3184 gen_update_nip(ctx, ctx->nip - 4); |
|
3185 t0 = tcg_temp_new(); |
|
3186 gen_addr_reg_index(ctx, t0); |
|
3187 t1 = tcg_temp_new_i32(); |
|
3188 tcg_gen_trunc_tl_i32(t1, cpu_xer); |
|
3189 tcg_gen_andi_i32(t1, t1, 0x7F); |
|
3190 t2 = tcg_const_i32(rS(ctx->opcode)); |
|
3191 gen_helper_stsw(t0, t1, t2); |
|
3192 tcg_temp_free(t0); |
|
3193 tcg_temp_free_i32(t1); |
|
3194 tcg_temp_free_i32(t2); |
|
3195 } |
|
3196 |
|
3197 /*** Memory synchronisation ***/ |
|
3198 /* eieio */ |
|
3199 GEN_HANDLER(eieio, 0x1F, 0x16, 0x1A, 0x03FFF801, PPC_MEM_EIEIO) |
|
3200 { |
|
3201 } |
|
3202 |
|
3203 /* isync */ |
|
3204 GEN_HANDLER(isync, 0x13, 0x16, 0x04, 0x03FFF801, PPC_MEM) |
|
3205 { |
|
3206 gen_stop_exception(ctx); |
|
3207 } |
|
3208 |
|
3209 /* lwarx */ |
|
3210 GEN_HANDLER(lwarx, 0x1F, 0x14, 0x00, 0x00000001, PPC_RES) |
|
3211 { |
|
3212 TCGv t0; |
|
3213 gen_set_access_type(ctx, ACCESS_RES); |
|
3214 t0 = tcg_temp_local_new(); |
|
3215 gen_addr_reg_index(ctx, t0); |
|
3216 gen_check_align(ctx, t0, 0x03); |
|
3217 gen_qemu_ld32u(ctx, cpu_gpr[rD(ctx->opcode)], t0); |
|
3218 tcg_gen_mov_tl(cpu_reserve, t0); |
|
3219 tcg_temp_free(t0); |
|
3220 } |
|
3221 |
|
3222 /* stwcx. */ |
|
3223 GEN_HANDLER2(stwcx_, "stwcx.", 0x1F, 0x16, 0x04, 0x00000000, PPC_RES) |
|
3224 { |
|
3225 int l1; |
|
3226 TCGv t0; |
|
3227 gen_set_access_type(ctx, ACCESS_RES); |
|
3228 t0 = tcg_temp_local_new(); |
|
3229 gen_addr_reg_index(ctx, t0); |
|
3230 gen_check_align(ctx, t0, 0x03); |
|
3231 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_xer); |
|
3232 tcg_gen_shri_i32(cpu_crf[0], cpu_crf[0], XER_SO); |
|
3233 tcg_gen_andi_i32(cpu_crf[0], cpu_crf[0], 1); |
|
3234 l1 = gen_new_label(); |
|
3235 tcg_gen_brcond_tl(TCG_COND_NE, t0, cpu_reserve, l1); |
|
3236 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 1 << CRF_EQ); |
|
3237 gen_qemu_st32(ctx, cpu_gpr[rS(ctx->opcode)], t0); |
|
3238 gen_set_label(l1); |
|
3239 tcg_gen_movi_tl(cpu_reserve, -1); |
|
3240 tcg_temp_free(t0); |
|
3241 } |
|
3242 |
|
3243 #if defined(TARGET_PPC64) |
|
3244 /* ldarx */ |
|
3245 GEN_HANDLER(ldarx, 0x1F, 0x14, 0x02, 0x00000001, PPC_64B) |
|
3246 { |
|
3247 TCGv t0; |
|
3248 gen_set_access_type(ctx, ACCESS_RES); |
|
3249 t0 = tcg_temp_local_new(); |
|
3250 gen_addr_reg_index(ctx, t0); |
|
3251 gen_check_align(ctx, t0, 0x07); |
|
3252 gen_qemu_ld64(ctx, cpu_gpr[rD(ctx->opcode)], t0); |
|
3253 tcg_gen_mov_tl(cpu_reserve, t0); |
|
3254 tcg_temp_free(t0); |
|
3255 } |
|
3256 |
|
3257 /* stdcx. */ |
|
3258 GEN_HANDLER2(stdcx_, "stdcx.", 0x1F, 0x16, 0x06, 0x00000000, PPC_64B) |
|
3259 { |
|
3260 int l1; |
|
3261 TCGv t0; |
|
3262 gen_set_access_type(ctx, ACCESS_RES); |
|
3263 t0 = tcg_temp_local_new(); |
|
3264 gen_addr_reg_index(ctx, t0); |
|
3265 gen_check_align(ctx, t0, 0x07); |
|
3266 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_xer); |
|
3267 tcg_gen_shri_i32(cpu_crf[0], cpu_crf[0], XER_SO); |
|
3268 tcg_gen_andi_i32(cpu_crf[0], cpu_crf[0], 1); |
|
3269 l1 = gen_new_label(); |
|
3270 tcg_gen_brcond_tl(TCG_COND_NE, t0, cpu_reserve, l1); |
|
3271 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 1 << CRF_EQ); |
|
3272 gen_qemu_st64(ctx, cpu_gpr[rS(ctx->opcode)], t0); |
|
3273 gen_set_label(l1); |
|
3274 tcg_gen_movi_tl(cpu_reserve, -1); |
|
3275 tcg_temp_free(t0); |
|
3276 } |
|
3277 #endif /* defined(TARGET_PPC64) */ |
|
3278 |
|
3279 /* sync */ |
|
3280 GEN_HANDLER(sync, 0x1F, 0x16, 0x12, 0x039FF801, PPC_MEM_SYNC) |
|
3281 { |
|
3282 } |
|
3283 |
|
3284 /* wait */ |
|
3285 GEN_HANDLER(wait, 0x1F, 0x1E, 0x01, 0x03FFF801, PPC_WAIT) |
|
3286 { |
|
3287 TCGv_i32 t0 = tcg_temp_new_i32(); |
|
3288 tcg_gen_st_i32(t0, cpu_env, offsetof(CPUState, halted)); |
|
3289 tcg_temp_free_i32(t0); |
|
3290 /* Stop translation, as the CPU is supposed to sleep from now */ |
|
3291 gen_exception_err(ctx, EXCP_HLT, 1); |
|
3292 } |
|
3293 |
|
3294 /*** Floating-point load ***/ |
|
3295 #define GEN_LDF(name, ldop, opc, type) \ |
|
3296 GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type) \ |
|
3297 { \ |
|
3298 TCGv EA; \ |
|
3299 if (unlikely(!ctx->fpu_enabled)) { \ |
|
3300 gen_exception(ctx, POWERPC_EXCP_FPU); \ |
|
3301 return; \ |
|
3302 } \ |
|
3303 gen_set_access_type(ctx, ACCESS_FLOAT); \ |
|
3304 EA = tcg_temp_new(); \ |
|
3305 gen_addr_imm_index(ctx, EA, 0); \ |
|
3306 gen_qemu_##ldop(ctx, cpu_fpr[rD(ctx->opcode)], EA); \ |
|
3307 tcg_temp_free(EA); \ |
|
3308 } |
|
3309 |
|
3310 #define GEN_LDUF(name, ldop, opc, type) \ |
|
3311 GEN_HANDLER(name##u, opc, 0xFF, 0xFF, 0x00000000, type) \ |
|
3312 { \ |
|
3313 TCGv EA; \ |
|
3314 if (unlikely(!ctx->fpu_enabled)) { \ |
|
3315 gen_exception(ctx, POWERPC_EXCP_FPU); \ |
|
3316 return; \ |
|
3317 } \ |
|
3318 if (unlikely(rA(ctx->opcode) == 0)) { \ |
|
3319 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \ |
|
3320 return; \ |
|
3321 } \ |
|
3322 gen_set_access_type(ctx, ACCESS_FLOAT); \ |
|
3323 EA = tcg_temp_new(); \ |
|
3324 gen_addr_imm_index(ctx, EA, 0); \ |
|
3325 gen_qemu_##ldop(ctx, cpu_fpr[rD(ctx->opcode)], EA); \ |
|
3326 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \ |
|
3327 tcg_temp_free(EA); \ |
|
3328 } |
|
3329 |
|
3330 #define GEN_LDUXF(name, ldop, opc, type) \ |
|
3331 GEN_HANDLER(name##ux, 0x1F, 0x17, opc, 0x00000001, type) \ |
|
3332 { \ |
|
3333 TCGv EA; \ |
|
3334 if (unlikely(!ctx->fpu_enabled)) { \ |
|
3335 gen_exception(ctx, POWERPC_EXCP_FPU); \ |
|
3336 return; \ |
|
3337 } \ |
|
3338 if (unlikely(rA(ctx->opcode) == 0)) { \ |
|
3339 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \ |
|
3340 return; \ |
|
3341 } \ |
|
3342 gen_set_access_type(ctx, ACCESS_FLOAT); \ |
|
3343 EA = tcg_temp_new(); \ |
|
3344 gen_addr_reg_index(ctx, EA); \ |
|
3345 gen_qemu_##ldop(ctx, cpu_fpr[rD(ctx->opcode)], EA); \ |
|
3346 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \ |
|
3347 tcg_temp_free(EA); \ |
|
3348 } |
|
3349 |
|
3350 #define GEN_LDXF(name, ldop, opc2, opc3, type) \ |
|
3351 GEN_HANDLER(name##x, 0x1F, opc2, opc3, 0x00000001, type) \ |
|
3352 { \ |
|
3353 TCGv EA; \ |
|
3354 if (unlikely(!ctx->fpu_enabled)) { \ |
|
3355 gen_exception(ctx, POWERPC_EXCP_FPU); \ |
|
3356 return; \ |
|
3357 } \ |
|
3358 gen_set_access_type(ctx, ACCESS_FLOAT); \ |
|
3359 EA = tcg_temp_new(); \ |
|
3360 gen_addr_reg_index(ctx, EA); \ |
|
3361 gen_qemu_##ldop(ctx, cpu_fpr[rD(ctx->opcode)], EA); \ |
|
3362 tcg_temp_free(EA); \ |
|
3363 } |
|
3364 |
|
3365 #define GEN_LDFS(name, ldop, op, type) \ |
|
3366 GEN_LDF(name, ldop, op | 0x20, type); \ |
|
3367 GEN_LDUF(name, ldop, op | 0x21, type); \ |
|
3368 GEN_LDUXF(name, ldop, op | 0x01, type); \ |
|
3369 GEN_LDXF(name, ldop, 0x17, op | 0x00, type) |
|
3370 |
|
3371 static always_inline void gen_qemu_ld32fs(DisasContext *ctx, TCGv_i64 arg1, TCGv arg2) |
|
3372 { |
|
3373 TCGv t0 = tcg_temp_new(); |
|
3374 TCGv_i32 t1 = tcg_temp_new_i32(); |
|
3375 gen_qemu_ld32u(ctx, t0, arg2); |
|
3376 tcg_gen_trunc_tl_i32(t1, t0); |
|
3377 tcg_temp_free(t0); |
|
3378 gen_helper_float32_to_float64(arg1, t1); |
|
3379 tcg_temp_free_i32(t1); |
|
3380 } |
|
3381 |
|
3382 /* lfd lfdu lfdux lfdx */ |
|
3383 GEN_LDFS(lfd, ld64, 0x12, PPC_FLOAT); |
|
3384 /* lfs lfsu lfsux lfsx */ |
|
3385 GEN_LDFS(lfs, ld32fs, 0x10, PPC_FLOAT); |
|
3386 |
|
3387 /*** Floating-point store ***/ |
|
3388 #define GEN_STF(name, stop, opc, type) \ |
|
3389 GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type) \ |
|
3390 { \ |
|
3391 TCGv EA; \ |
|
3392 if (unlikely(!ctx->fpu_enabled)) { \ |
|
3393 gen_exception(ctx, POWERPC_EXCP_FPU); \ |
|
3394 return; \ |
|
3395 } \ |
|
3396 gen_set_access_type(ctx, ACCESS_FLOAT); \ |
|
3397 EA = tcg_temp_new(); \ |
|
3398 gen_addr_imm_index(ctx, EA, 0); \ |
|
3399 gen_qemu_##stop(ctx, cpu_fpr[rS(ctx->opcode)], EA); \ |
|
3400 tcg_temp_free(EA); \ |
|
3401 } |
|
3402 |
|
3403 #define GEN_STUF(name, stop, opc, type) \ |
|
3404 GEN_HANDLER(name##u, opc, 0xFF, 0xFF, 0x00000000, type) \ |
|
3405 { \ |
|
3406 TCGv EA; \ |
|
3407 if (unlikely(!ctx->fpu_enabled)) { \ |
|
3408 gen_exception(ctx, POWERPC_EXCP_FPU); \ |
|
3409 return; \ |
|
3410 } \ |
|
3411 if (unlikely(rA(ctx->opcode) == 0)) { \ |
|
3412 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \ |
|
3413 return; \ |
|
3414 } \ |
|
3415 gen_set_access_type(ctx, ACCESS_FLOAT); \ |
|
3416 EA = tcg_temp_new(); \ |
|
3417 gen_addr_imm_index(ctx, EA, 0); \ |
|
3418 gen_qemu_##stop(ctx, cpu_fpr[rS(ctx->opcode)], EA); \ |
|
3419 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \ |
|
3420 tcg_temp_free(EA); \ |
|
3421 } |
|
3422 |
|
3423 #define GEN_STUXF(name, stop, opc, type) \ |
|
3424 GEN_HANDLER(name##ux, 0x1F, 0x17, opc, 0x00000001, type) \ |
|
3425 { \ |
|
3426 TCGv EA; \ |
|
3427 if (unlikely(!ctx->fpu_enabled)) { \ |
|
3428 gen_exception(ctx, POWERPC_EXCP_FPU); \ |
|
3429 return; \ |
|
3430 } \ |
|
3431 if (unlikely(rA(ctx->opcode) == 0)) { \ |
|
3432 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \ |
|
3433 return; \ |
|
3434 } \ |
|
3435 gen_set_access_type(ctx, ACCESS_FLOAT); \ |
|
3436 EA = tcg_temp_new(); \ |
|
3437 gen_addr_reg_index(ctx, EA); \ |
|
3438 gen_qemu_##stop(ctx, cpu_fpr[rS(ctx->opcode)], EA); \ |
|
3439 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \ |
|
3440 tcg_temp_free(EA); \ |
|
3441 } |
|
3442 |
|
3443 #define GEN_STXF(name, stop, opc2, opc3, type) \ |
|
3444 GEN_HANDLER(name##x, 0x1F, opc2, opc3, 0x00000001, type) \ |
|
3445 { \ |
|
3446 TCGv EA; \ |
|
3447 if (unlikely(!ctx->fpu_enabled)) { \ |
|
3448 gen_exception(ctx, POWERPC_EXCP_FPU); \ |
|
3449 return; \ |
|
3450 } \ |
|
3451 gen_set_access_type(ctx, ACCESS_FLOAT); \ |
|
3452 EA = tcg_temp_new(); \ |
|
3453 gen_addr_reg_index(ctx, EA); \ |
|
3454 gen_qemu_##stop(ctx, cpu_fpr[rS(ctx->opcode)], EA); \ |
|
3455 tcg_temp_free(EA); \ |
|
3456 } |
|
3457 |
|
3458 #define GEN_STFS(name, stop, op, type) \ |
|
3459 GEN_STF(name, stop, op | 0x20, type); \ |
|
3460 GEN_STUF(name, stop, op | 0x21, type); \ |
|
3461 GEN_STUXF(name, stop, op | 0x01, type); \ |
|
3462 GEN_STXF(name, stop, 0x17, op | 0x00, type) |
|
3463 |
|
/* stfs family helper: convert the 64-bit FPR value to single precision
 * and store it as a 32-bit word (byte-swapped by gen_qemu_st32 in LE mode). */
static always_inline void gen_qemu_st32fs(DisasContext *ctx, TCGv_i64 arg1, TCGv arg2)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGv t1 = tcg_temp_new();
    gen_helper_float64_to_float32(t0, arg1);
    tcg_gen_extu_i32_tl(t1, t0);
    tcg_temp_free_i32(t0);
    gen_qemu_st32(ctx, t1, arg2);
    tcg_temp_free(t1);
}
|
3474 |
|
3475 /* stfd stfdu stfdux stfdx */ |
|
3476 GEN_STFS(stfd, st64, 0x16, PPC_FLOAT); |
|
3477 /* stfs stfsu stfsux stfsx */ |
|
3478 GEN_STFS(stfs, st32fs, 0x14, PPC_FLOAT); |
|
3479 |
|
3480 /* Optional: */ |
|
3481 static always_inline void gen_qemu_st32fiw(DisasContext *ctx, TCGv_i64 arg1, TCGv arg2) |
|
3482 { |
|
3483 TCGv t0 = tcg_temp_new(); |
|
3484 tcg_gen_trunc_i64_tl(t0, arg1), |
|
3485 gen_qemu_st32(ctx, t0, arg2); |
|
3486 tcg_temp_free(t0); |
|
3487 } |
|
3488 /* stfiwx */ |
|
3489 GEN_STXF(stfiw, st32fiw, 0x17, 0x1E, PPC_FLOAT_STFIWX); |
|
3490 |
|
3491 /*** Branch ***/ |
|
/* Jump to 'dest', chaining through translation-block slot 'n' when the
 * target lies on the same guest page and single-stepping is off; otherwise
 * do a full TB exit, raising trace/debug exceptions as configured. */
static always_inline void gen_goto_tb (DisasContext *ctx, int n,
                                       target_ulong dest)
{
    TranslationBlock *tb;
    tb = ctx->tb;
#if defined(TARGET_PPC64)
    if (!ctx->sf_mode)
        dest = (uint32_t) dest;  /* 32-bit mode: truncate the target address */
#endif
    if ((tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK) &&
        likely(!ctx->singlestep_enabled)) {
        /* Same page: direct TB chaining is safe. */
        tcg_gen_goto_tb(n);
        tcg_gen_movi_tl(cpu_nip, dest & ~3);  /* instructions are 4-aligned */
        tcg_gen_exit_tb((long)tb + n);
    } else {
        tcg_gen_movi_tl(cpu_nip, dest & ~3);
        if (unlikely(ctx->singlestep_enabled)) {
            if ((ctx->singlestep_enabled &
                (CPU_BRANCH_STEP | CPU_SINGLE_STEP)) &&
                ctx->exception == POWERPC_EXCP_BRANCH) {
                /* Raise the trace exception with NIP set to the branch
                 * target, then restore it for the caller. */
                target_ulong tmp = ctx->nip;
                ctx->nip = dest;
                gen_exception(ctx, POWERPC_EXCP_TRACE);
                ctx->nip = tmp;
            }
            if (ctx->singlestep_enabled & GDBSTUB_SINGLE_STEP) {
                gen_debug_exception(ctx);
            }
        }
        tcg_gen_exit_tb(0);
    }
}
|
3524 |
|
3525 static always_inline void gen_setlr (DisasContext *ctx, target_ulong nip) |
|
3526 { |
|
3527 #if defined(TARGET_PPC64) |
|
3528 if (ctx->sf_mode == 0) |
|
3529 tcg_gen_movi_tl(cpu_lr, (uint32_t)nip); |
|
3530 else |
|
3531 #endif |
|
3532 tcg_gen_movi_tl(cpu_lr, nip); |
|
3533 } |
|
3534 |
|
3535 /* b ba bl bla */ |
|
3536 GEN_HANDLER(b, 0x12, 0xFF, 0xFF, 0x00000000, PPC_FLOW) |
|
3537 { |
|
3538 target_ulong li, target; |
|
3539 |
|
3540 ctx->exception = POWERPC_EXCP_BRANCH; |
|
3541 /* sign extend LI */ |
|
3542 #if defined(TARGET_PPC64) |
|
3543 if (ctx->sf_mode) |
|
3544 li = ((int64_t)LI(ctx->opcode) << 38) >> 38; |
|
3545 else |
|
3546 #endif |
|
3547 li = ((int32_t)LI(ctx->opcode) << 6) >> 6; |
|
3548 if (likely(AA(ctx->opcode) == 0)) |
|
3549 target = ctx->nip + li - 4; |
|
3550 else |
|
3551 target = li; |
|
3552 if (LK(ctx->opcode)) |
|
3553 gen_setlr(ctx, ctx->nip); |
|
3554 gen_goto_tb(ctx, 0, target); |
|
3555 } |
|
3556 |
|
3557 #define BCOND_IM 0 |
|
3558 #define BCOND_LR 1 |
|
3559 #define BCOND_CTR 2 |
|
3560 |
|
3561 static always_inline void gen_bcond (DisasContext *ctx, int type) |
|
3562 { |
|
3563 uint32_t bo = BO(ctx->opcode); |
|
3564 int l1 = gen_new_label(); |
|
3565 TCGv target; |
|
3566 |
|
3567 ctx->exception = POWERPC_EXCP_BRANCH; |
|
3568 if (type == BCOND_LR || type == BCOND_CTR) { |
|
3569 target = tcg_temp_local_new(); |
|
3570 if (type == BCOND_CTR) |
|
3571 tcg_gen_mov_tl(target, cpu_ctr); |
|
3572 else |
|
3573 tcg_gen_mov_tl(target, cpu_lr); |
|
3574 } |
|
3575 if (LK(ctx->opcode)) |
|
3576 gen_setlr(ctx, ctx->nip); |
|
3577 l1 = gen_new_label(); |
|
3578 if ((bo & 0x4) == 0) { |
|
3579 /* Decrement and test CTR */ |
|
3580 TCGv temp = tcg_temp_new(); |
|
3581 if (unlikely(type == BCOND_CTR)) { |
|
3582 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); |
|
3583 return; |
|
3584 } |
|
3585 tcg_gen_subi_tl(cpu_ctr, cpu_ctr, 1); |
|
3586 #if defined(TARGET_PPC64) |
|
3587 if (!ctx->sf_mode) |
|
3588 tcg_gen_ext32u_tl(temp, cpu_ctr); |
|
3589 else |
|
3590 #endif |
|
3591 tcg_gen_mov_tl(temp, cpu_ctr); |
|
3592 if (bo & 0x2) { |
|
3593 tcg_gen_brcondi_tl(TCG_COND_NE, temp, 0, l1); |
|
3594 } else { |
|
3595 tcg_gen_brcondi_tl(TCG_COND_EQ, temp, 0, l1); |
|
3596 } |
|
3597 tcg_temp_free(temp); |
|
3598 } |
|
3599 if ((bo & 0x10) == 0) { |
|
3600 /* Test CR */ |
|
3601 uint32_t bi = BI(ctx->opcode); |
|
3602 uint32_t mask = 1 << (3 - (bi & 0x03)); |
|
3603 TCGv_i32 temp = tcg_temp_new_i32(); |
|
3604 |
|
3605 if (bo & 0x8) { |
|
3606 tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask); |
|
3607 tcg_gen_brcondi_i32(TCG_COND_EQ, temp, 0, l1); |
|
3608 } else { |
|
3609 tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask); |
|
3610 tcg_gen_brcondi_i32(TCG_COND_NE, temp, 0, l1); |
|
3611 } |
|
3612 tcg_temp_free_i32(temp); |
|
3613 } |
|
3614 if (type == BCOND_IM) { |
|
3615 target_ulong li = (target_long)((int16_t)(BD(ctx->opcode))); |
|
3616 if (likely(AA(ctx->opcode) == 0)) { |
|
3617 gen_goto_tb(ctx, 0, ctx->nip + li - 4); |
|
3618 } else { |
|
3619 gen_goto_tb(ctx, 0, li); |
|
3620 } |
|
3621 gen_set_label(l1); |
|
3622 gen_goto_tb(ctx, 1, ctx->nip); |
|
3623 } else { |
|
3624 #if defined(TARGET_PPC64) |
|
3625 if (!(ctx->sf_mode)) |
|
3626 tcg_gen_andi_tl(cpu_nip, target, (uint32_t)~3); |
|
3627 else |
|
3628 #endif |
|
3629 tcg_gen_andi_tl(cpu_nip, target, ~3); |
|
3630 if (ctx->singlestep_enabled & GDBSTUB_SINGLE_STEP) |
|
3631 gen_debug_exception(ctx); |
|
3632 tcg_gen_exit_tb(0); |
|
3633 gen_set_label(l1); |
|
3634 #if defined(TARGET_PPC64) |
|
3635 if (!(ctx->sf_mode)) |
|
3636 tcg_gen_movi_tl(cpu_nip, (uint32_t)ctx->nip); |
|
3637 else |
|
3638 #endif |
|
3639 tcg_gen_movi_tl(cpu_nip, ctx->nip); |
|
3640 if (ctx->singlestep_enabled & GDBSTUB_SINGLE_STEP) |
|
3641 gen_debug_exception(ctx); |
|
3642 tcg_gen_exit_tb(0); |
|
3643 } |
|
3644 } |
|
3645 |
|
/* bc - branch conditional with immediate displacement (covers AA/LK forms) */
GEN_HANDLER(bc, 0x10, 0xFF, 0xFF, 0x00000000, PPC_FLOW)
{
    gen_bcond(ctx, BCOND_IM);
}
|
3650 |
|
/* bcctr - branch conditional to CTR (bcctr/bcctrl) */
GEN_HANDLER(bcctr, 0x13, 0x10, 0x10, 0x00000000, PPC_FLOW)
{
    gen_bcond(ctx, BCOND_CTR);
}
|
3655 |
|
/* bclr - branch conditional to LR (bclr/bclrl) */
GEN_HANDLER(bclr, 0x13, 0x10, 0x00, 0x00000000, PPC_FLOW)
{
    gen_bcond(ctx, BCOND_LR);
}
|
3660 |
|
/*** Condition register logical ***/
/* Generate one CR-bit logical operation: crbD = crbA <tcg_op> crbB.
 * Each cpu_crf[] element holds a 4-bit CR field, with bit 3 of the value
 * being the first (lowest-numbered) bit of the field.  The two source
 * bits are shifted so they line up with the destination bit position,
 * combined with tcg_op, masked down to the single destination bit, and
 * merged into the destination field so its other three bits survive.
 */
#define GEN_CRLOGIC(name, tcg_op, opc)                                        \
GEN_HANDLER(name, 0x13, 0x01, opc, 0x00000001, PPC_INTEGER)                   \
{                                                                             \
    uint8_t bitmask;                                                          \
    int sh;                                                                   \
    TCGv_i32 t0, t1;                                                          \
    sh = (crbD(ctx->opcode) & 0x03) - (crbA(ctx->opcode) & 0x03);             \
    t0 = tcg_temp_new_i32();                                                  \
    if (sh > 0)                                                               \
        tcg_gen_shri_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], sh);            \
    else if (sh < 0)                                                          \
        tcg_gen_shli_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], -sh);           \
    else                                                                      \
        tcg_gen_mov_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2]);                 \
    t1 = tcg_temp_new_i32();                                                  \
    sh = (crbD(ctx->opcode) & 0x03) - (crbB(ctx->opcode) & 0x03);             \
    if (sh > 0)                                                               \
        tcg_gen_shri_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], sh);            \
    else if (sh < 0)                                                          \
        tcg_gen_shli_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], -sh);           \
    else                                                                      \
        tcg_gen_mov_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2]);                 \
    tcg_op(t0, t0, t1);                                                       \
    bitmask = 1 << (3 - (crbD(ctx->opcode) & 0x03));                          \
    tcg_gen_andi_i32(t0, t0, bitmask);                                        \
    tcg_gen_andi_i32(t1, cpu_crf[crbD(ctx->opcode) >> 2], ~bitmask);          \
    tcg_gen_or_i32(cpu_crf[crbD(ctx->opcode) >> 2], t0, t1);                  \
    tcg_temp_free_i32(t0);                                                    \
    tcg_temp_free_i32(t1);                                                    \
}

/* crand */
GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08);
/* crandc */
GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04);
/* creqv */
GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09);
/* crnand */
GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07);
/* crnor */
GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01);
/* cror */
GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E);
/* crorc */
GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D);
/* crxor */
GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06);
|
/* mcrf - copy CR field crfS into CR field crfD */
GEN_HANDLER(mcrf, 0x13, 0x00, 0xFF, 0x00000001, PPC_INTEGER)
{
    tcg_gen_mov_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfS(ctx->opcode)]);
}
|
3714 |
|
/*** System linkage ***/
/* rfi (mem_idx only) - return from interrupt; privileged */
GEN_HANDLER(rfi, 0x13, 0x12, 0x01, 0x03FF8001, PPC_FLOW)
{
#if defined(CONFIG_USER_ONLY)
    /* Always a privileged-opcode program exception in user emulation */
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
#else
    /* Restore CPU state */
    if (unlikely(!ctx->mem_idx)) {
        /* mem_idx == 0: user mode, rfi is privileged */
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
        return;
    }
    gen_helper_rfi();
    /* MSR and NIP have changed: end the translation block here */
    gen_sync_exception(ctx);
#endif
}
|
3731 |
|
#if defined(TARGET_PPC64)
/* rfid - 64-bit return from interrupt; privileged */
GEN_HANDLER(rfid, 0x13, 0x12, 0x00, 0x03FF8001, PPC_64B)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
#else
    /* Restore CPU state */
    if (unlikely(!ctx->mem_idx)) {
        /* mem_idx == 0: user mode, rfid is privileged */
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
        return;
    }
    gen_helper_rfid();
    /* MSR and NIP have changed: end the translation block here */
    gen_sync_exception(ctx);
#endif
}

/* hrfid - hypervisor return from interrupt */
GEN_HANDLER(hrfid, 0x13, 0x12, 0x08, 0x03FF8001, PPC_64H)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
#else
    /* Restore CPU state */
    if (unlikely(ctx->mem_idx <= 1)) {
        /* NOTE(review): mem_idx <= 1 apparently means "not hypervisor";
         * hrfid needs hypervisor state — confirm the mem_idx encoding */
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
        return;
    }
    gen_helper_hrfid();
    gen_sync_exception(ctx);
#endif
}
#endif
|
3763 |
|
/* sc - system call */
#if defined(CONFIG_USER_ONLY)
#define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL_USER
#else
#define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL
#endif
GEN_HANDLER(sc, 0x11, 0xFF, 0xFF, 0x03FFF01D, PPC_FLOW)
{
    uint32_t lev;

    /* Extract the LEV field and hand it to the exception code as the
     * error argument of the syscall exception */
    lev = (ctx->opcode >> 5) & 0x7F;
    gen_exception_err(ctx, POWERPC_SYSCALL, lev);
}
|
3777 |
|
3778 /*** Trap ***/ |
|
3779 /* tw */ |
|
3780 GEN_HANDLER(tw, 0x1F, 0x04, 0x00, 0x00000001, PPC_FLOW) |
|
3781 { |
|
3782 TCGv_i32 t0 = tcg_const_i32(TO(ctx->opcode)); |
|
3783 /* Update the nip since this might generate a trap exception */ |
|
3784 gen_update_nip(ctx, ctx->nip); |
|
3785 gen_helper_tw(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0); |
|
3786 tcg_temp_free_i32(t0); |
|
3787 } |
|
3788 |
|
3789 /* twi */ |
|
3790 GEN_HANDLER(twi, 0x03, 0xFF, 0xFF, 0x00000000, PPC_FLOW) |
|
3791 { |
|
3792 TCGv t0 = tcg_const_tl(SIMM(ctx->opcode)); |
|
3793 TCGv_i32 t1 = tcg_const_i32(TO(ctx->opcode)); |
|
3794 /* Update the nip since this might generate a trap exception */ |
|
3795 gen_update_nip(ctx, ctx->nip); |
|
3796 gen_helper_tw(cpu_gpr[rA(ctx->opcode)], t0, t1); |
|
3797 tcg_temp_free(t0); |
|
3798 tcg_temp_free_i32(t1); |
|
3799 } |
|
3800 |
|
#if defined(TARGET_PPC64)
/* td - trap doubleword: conditionally trap on (rA ? rB) per the TO mask */
GEN_HANDLER(td, 0x1F, 0x04, 0x02, 0x00000001, PPC_64B)
{
    TCGv_i32 t0 = tcg_const_i32(TO(ctx->opcode));
    /* Update the nip since this might generate a trap exception */
    gen_update_nip(ctx, ctx->nip);
    gen_helper_td(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0);
    tcg_temp_free_i32(t0);
}

/* tdi - trap doubleword immediate: conditionally trap on (rA ? SIMM) */
GEN_HANDLER(tdi, 0x02, 0xFF, 0xFF, 0x00000000, PPC_64B)
{
    TCGv t0 = tcg_const_tl(SIMM(ctx->opcode));
    TCGv_i32 t1 = tcg_const_i32(TO(ctx->opcode));
    /* Update the nip since this might generate a trap exception */
    gen_update_nip(ctx, ctx->nip);
    gen_helper_td(cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free_i32(t1);
}
#endif
|
3824 |
|
3825 /*** Processor control ***/ |
|
3826 /* mcrxr */ |
|
3827 GEN_HANDLER(mcrxr, 0x1F, 0x00, 0x10, 0x007FF801, PPC_MISC) |
|
3828 { |
|
3829 tcg_gen_trunc_tl_i32(cpu_crf[crfD(ctx->opcode)], cpu_xer); |
|
3830 tcg_gen_shri_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], XER_CA); |
|
3831 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_SO | 1 << XER_OV | 1 << XER_CA)); |
|
3832 } |
|
3833 |
|
3834 /* mfcr */ |
|
3835 GEN_HANDLER(mfcr, 0x1F, 0x13, 0x00, 0x00000801, PPC_MISC) |
|
3836 { |
|
3837 uint32_t crm, crn; |
|
3838 |
|
3839 if (likely(ctx->opcode & 0x00100000)) { |
|
3840 crm = CRM(ctx->opcode); |
|
3841 if (likely((crm ^ (crm - 1)) == 0)) { |
|
3842 crn = ffs(crm); |
|
3843 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], cpu_crf[7 - crn]); |
|
3844 } |
|
3845 } else { |
|
3846 gen_helper_load_cr(cpu_gpr[rD(ctx->opcode)]); |
|
3847 } |
|
3848 } |
|
3849 |
|
/* mfmsr - move MSR to rD; privileged */
GEN_HANDLER(mfmsr, 0x1F, 0x13, 0x02, 0x001FF801, PPC_MISC)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
#else
    if (unlikely(!ctx->mem_idx)) {
        /* mem_idx == 0: user mode, mfmsr is privileged */
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
        return;
    }
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_msr);
#endif
}
|
3863 |
|
/* Sentinel value for SPR callback tables: the SPR exists but may not be
 * accessed at the current privilege level.  The disabled alternative is
 * a debug stub that logs the offending SPR number instead. */
#if 1
#define SPR_NOACCESS ((void *)(-1UL))
#else
static void spr_noaccess (void *opaque, int sprn)
{
    /* Undo the split SPR-number encoding before printing */
    sprn = ((sprn >> 5) & 0x1F) | ((sprn & 0x1F) << 5);
    printf("ERROR: try to access SPR %d !\n", sprn);
}
#define SPR_NOACCESS (&spr_noaccess)
#endif
|
3874 |
|
/* mfspr */
/* Common code for mfspr and mftb: look up the per-SPR read callback for
 * the current privilege level and invoke it, or raise the appropriate
 * exception if the SPR is inaccessible (privilege) or undefined.
 */
static always_inline void gen_op_mfspr (DisasContext *ctx)
{
    void (*read_cb)(void *opaque, int gprn, int sprn);
    uint32_t sprn = SPR(ctx->opcode);

#if !defined(CONFIG_USER_ONLY)
    /* Pick the callback matching the current privilege level:
     * mem_idx == 2 → hypervisor table, other non-zero → supervisor table */
    if (ctx->mem_idx == 2)
        read_cb = ctx->spr_cb[sprn].hea_read;
    else if (ctx->mem_idx)
        read_cb = ctx->spr_cb[sprn].oea_read;
    else
#endif
        read_cb = ctx->spr_cb[sprn].uea_read;
    if (likely(read_cb != NULL)) {
        if (likely(read_cb != SPR_NOACCESS)) {
            (*read_cb)(ctx, rD(ctx->opcode), sprn);
        } else {
            /* Privilege exception */
            /* This is a hack to avoid warnings when running Linux:
             * this OS breaks the PowerPC virtualisation model,
             * allowing userland application to read the PVR
             */
            if (sprn != SPR_PVR) {
                if (loglevel != 0) {
                    fprintf(logfile, "Trying to read privileged spr %d %03x at "
                            ADDRX "\n", sprn, sprn, ctx->nip);
                }
                printf("Trying to read privileged spr %d %03x at " ADDRX "\n",
                       sprn, sprn, ctx->nip);
            }
            gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
        }
    } else {
        /* Not defined */
        if (loglevel != 0) {
            fprintf(logfile, "Trying to read invalid spr %d %03x at "
                    ADDRX "\n", sprn, sprn, ctx->nip);
        }
        printf("Trying to read invalid spr %d %03x at " ADDRX "\n",
               sprn, sprn, ctx->nip);
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_SPR);
    }
}
|
3919 |
|
/* mfspr - move from special purpose register */
GEN_HANDLER(mfspr, 0x1F, 0x13, 0x0A, 0x00000001, PPC_MISC)
{
    gen_op_mfspr(ctx);
}
|
3924 |
|
/* mftb - move from time base; the TB registers are reached through the
 * same SPR dispatch as mfspr */
GEN_HANDLER(mftb, 0x1F, 0x13, 0x0B, 0x00000001, PPC_MFTB)
{
    gen_op_mfspr(ctx);
}
|
3930 |
|
3931 /* mtcrf */ |
|
3932 GEN_HANDLER(mtcrf, 0x1F, 0x10, 0x04, 0x00000801, PPC_MISC) |
|
3933 { |
|
3934 uint32_t crm, crn; |
|
3935 |
|
3936 crm = CRM(ctx->opcode); |
|
3937 if (likely((ctx->opcode & 0x00100000) || (crm ^ (crm - 1)) == 0)) { |
|
3938 TCGv_i32 temp = tcg_temp_new_i32(); |
|
3939 crn = ffs(crm); |
|
3940 tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]); |
|
3941 tcg_gen_shri_i32(cpu_crf[7 - crn], temp, crn * 4); |
|
3942 tcg_gen_andi_i32(cpu_crf[7 - crn], cpu_crf[7 - crn], 0xf); |
|
3943 tcg_temp_free_i32(temp); |
|
3944 } else { |
|
3945 TCGv_i32 temp = tcg_const_i32(crm); |
|
3946 gen_helper_store_cr(cpu_gpr[rS(ctx->opcode)], temp); |
|
3947 tcg_temp_free_i32(temp); |
|
3948 } |
|
3949 } |
|
3950 |
|
/* mtmsr */
#if defined(TARGET_PPC64)
/* mtmsrd - 64-bit move to MSR; privileged */
GEN_HANDLER(mtmsrd, 0x1F, 0x12, 0x05, 0x001EF801, PPC_64B)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
#else
    if (unlikely(!ctx->mem_idx)) {
        /* mem_idx == 0: user mode, mtmsrd is privileged */
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
        return;
    }
    if (ctx->opcode & 0x00010000) {
        /* Special form that does not need any synchronisation */
        /* L=1: only MSR[RI] and MSR[EE] are replaced, inline */
        TCGv t0 = tcg_temp_new();
        tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], (1 << MSR_RI) | (1 << MSR_EE));
        tcg_gen_andi_tl(cpu_msr, cpu_msr, ~((1 << MSR_RI) | (1 << MSR_EE)));
        tcg_gen_or_tl(cpu_msr, cpu_msr, t0);
        tcg_temp_free(t0);
    } else {
        /* XXX: we need to update nip before the store
         * if we enter power saving mode, we will exit the loop
         * directly from ppc_store_msr
         */
        gen_update_nip(ctx, ctx->nip);
        gen_helper_store_msr(cpu_gpr[rS(ctx->opcode)]);
        /* Must stop the translation as machine state (may have) changed */
        /* Note that mtmsr is not always defined as context-synchronizing */
        gen_stop_exception(ctx);
    }
#endif
}
#endif
|
3983 |
|
/* mtmsr - move to MSR (32-bit form); privileged */
GEN_HANDLER(mtmsr, 0x1F, 0x12, 0x04, 0x001FF801, PPC_MISC)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
#else
    if (unlikely(!ctx->mem_idx)) {
        /* mem_idx == 0: user mode, mtmsr is privileged */
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
        return;
    }
    if (ctx->opcode & 0x00010000) {
        /* Special form that does not need any synchronisation */
        /* L=1: only MSR[RI] and MSR[EE] are replaced, inline */
        TCGv t0 = tcg_temp_new();
        tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], (1 << MSR_RI) | (1 << MSR_EE));
        tcg_gen_andi_tl(cpu_msr, cpu_msr, ~((1 << MSR_RI) | (1 << MSR_EE)));
        tcg_gen_or_tl(cpu_msr, cpu_msr, t0);
        tcg_temp_free(t0);
    } else {
        /* XXX: we need to update nip before the store
         * if we enter power saving mode, we will exit the loop
         * directly from ppc_store_msr
         */
        gen_update_nip(ctx, ctx->nip);
#if defined(TARGET_PPC64)
        if (!ctx->sf_mode) {
            /* 32-bit mode on a 64-bit CPU: keep the upper MSR half and
             * replace only the low 32 bits from rS */
            TCGv t0 = tcg_temp_new();
            TCGv t1 = tcg_temp_new();
            tcg_gen_andi_tl(t0, cpu_msr, 0xFFFFFFFF00000000ULL);
            tcg_gen_ext32u_tl(t1, cpu_gpr[rS(ctx->opcode)]);
            tcg_gen_or_tl(t0, t0, t1);
            tcg_temp_free(t1);
            gen_helper_store_msr(t0);
            tcg_temp_free(t0);
        } else
#endif
            gen_helper_store_msr(cpu_gpr[rS(ctx->opcode)]);
        /* Must stop the translation as machine state (may have) changed */
        /* Note that mtmsr is not always defined as context-synchronizing */
        gen_stop_exception(ctx);
    }
#endif
}
|
4025 |
|
/* mtspr - move to special purpose register: look up the per-SPR write
 * callback for the current privilege level and invoke it, or raise the
 * appropriate exception if the SPR is inaccessible or undefined. */
GEN_HANDLER(mtspr, 0x1F, 0x13, 0x0E, 0x00000001, PPC_MISC)
{
    void (*write_cb)(void *opaque, int sprn, int gprn);
    uint32_t sprn = SPR(ctx->opcode);

#if !defined(CONFIG_USER_ONLY)
    /* mem_idx == 2 → hypervisor table, other non-zero → supervisor table */
    if (ctx->mem_idx == 2)
        write_cb = ctx->spr_cb[sprn].hea_write;
    else if (ctx->mem_idx)
        write_cb = ctx->spr_cb[sprn].oea_write;
    else
#endif
        write_cb = ctx->spr_cb[sprn].uea_write;
    if (likely(write_cb != NULL)) {
        if (likely(write_cb != SPR_NOACCESS)) {
            (*write_cb)(ctx, sprn, rS(ctx->opcode));
        } else {
            /* Privilege exception */
            if (loglevel != 0) {
                fprintf(logfile, "Trying to write privileged spr %d %03x at "
                        ADDRX "\n", sprn, sprn, ctx->nip);
            }
            printf("Trying to write privileged spr %d %03x at " ADDRX "\n",
                   sprn, sprn, ctx->nip);
            gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
        }
    } else {
        /* Not defined */
        if (loglevel != 0) {
            fprintf(logfile, "Trying to write invalid spr %d %03x at "
                    ADDRX "\n", sprn, sprn, ctx->nip);
        }
        printf("Trying to write invalid spr %d %03x at " ADDRX "\n",
               sprn, sprn, ctx->nip);
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_SPR);
    }
}
|
4064 |
|
4065 /*** Cache management ***/ |
|
4066 /* dcbf */ |
|
4067 GEN_HANDLER(dcbf, 0x1F, 0x16, 0x02, 0x03C00001, PPC_CACHE) |
|
4068 { |
|
4069 /* XXX: specification says this is treated as a load by the MMU */ |
|
4070 TCGv t0; |
|
4071 gen_set_access_type(ctx, ACCESS_CACHE); |
|
4072 t0 = tcg_temp_new(); |
|
4073 gen_addr_reg_index(ctx, t0); |
|
4074 gen_qemu_ld8u(ctx, t0, t0); |
|
4075 tcg_temp_free(t0); |
|
4076 } |
|
4077 |
|
/* dcbi (Supervisor only) - data cache block invalidate: emulated as a
 * load followed by a store of the same byte to trigger MMU checks */
GEN_HANDLER(dcbi, 0x1F, 0x16, 0x0E, 0x03E00001, PPC_CACHE)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
#else
    TCGv EA, val;
    if (unlikely(!ctx->mem_idx)) {
        /* mem_idx == 0: user mode, dcbi is privileged */
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
        return;
    }
    EA = tcg_temp_new();
    gen_set_access_type(ctx, ACCESS_CACHE);
    gen_addr_reg_index(ctx, EA);
    val = tcg_temp_new();
    /* XXX: specification says this should be treated as a store by the MMU */
    gen_qemu_ld8u(ctx, val, EA);
    gen_qemu_st8(ctx, val, EA);
    tcg_temp_free(val);
    tcg_temp_free(EA);
#endif
}
|
4100 |
|
4101 /* dcdst */ |
|
4102 GEN_HANDLER(dcbst, 0x1F, 0x16, 0x01, 0x03E00001, PPC_CACHE) |
|
4103 { |
|
4104 /* XXX: specification say this is treated as a load by the MMU */ |
|
4105 TCGv t0; |
|
4106 gen_set_access_type(ctx, ACCESS_CACHE); |
|
4107 t0 = tcg_temp_new(); |
|
4108 gen_addr_reg_index(ctx, t0); |
|
4109 gen_qemu_ld8u(ctx, t0, t0); |
|
4110 tcg_temp_free(t0); |
|
4111 } |
|
4112 |
|
/* dcbt - data cache block touch (prefetch hint) */
GEN_HANDLER(dcbt, 0x1F, 0x16, 0x08, 0x02000001, PPC_CACHE)
{
    /* interpreted as no-op */
    /* XXX: specification say this is treated as a load by the MMU
     * but does not generate any exception
     */
}
|
4121 |
|
/* dcbtst - data cache block touch for store (prefetch hint) */
GEN_HANDLER(dcbtst, 0x1F, 0x16, 0x07, 0x02000001, PPC_CACHE)
{
    /* interpreted as no-op */
    /* XXX: specification say this is treated as a load by the MMU
     * but does not generate any exception
     */
}
|
4130 |
|
4131 /* dcbz */ |
|
4132 GEN_HANDLER(dcbz, 0x1F, 0x16, 0x1F, 0x03E00001, PPC_CACHE_DCBZ) |
|
4133 { |
|
4134 TCGv t0; |
|
4135 gen_set_access_type(ctx, ACCESS_CACHE); |
|
4136 /* NIP cannot be restored if the memory exception comes from an helper */ |
|
4137 gen_update_nip(ctx, ctx->nip - 4); |
|
4138 t0 = tcg_temp_new(); |
|
4139 gen_addr_reg_index(ctx, t0); |
|
4140 gen_helper_dcbz(t0); |
|
4141 tcg_temp_free(t0); |
|
4142 } |
|
4143 |
|
4144 GEN_HANDLER2(dcbz_970, "dcbz", 0x1F, 0x16, 0x1F, 0x03C00001, PPC_CACHE_DCBZT) |
|
4145 { |
|
4146 TCGv t0; |
|
4147 gen_set_access_type(ctx, ACCESS_CACHE); |
|
4148 /* NIP cannot be restored if the memory exception comes from an helper */ |
|
4149 gen_update_nip(ctx, ctx->nip - 4); |
|
4150 t0 = tcg_temp_new(); |
|
4151 gen_addr_reg_index(ctx, t0); |
|
4152 if (ctx->opcode & 0x00200000) |
|
4153 gen_helper_dcbz(t0); |
|
4154 else |
|
4155 gen_helper_dcbz_970(t0); |
|
4156 tcg_temp_free(t0); |
|
4157 } |
|
4158 |
|
4159 /* icbi */ |
|
4160 GEN_HANDLER(icbi, 0x1F, 0x16, 0x1E, 0x03E00001, PPC_CACHE_ICBI) |
|
4161 { |
|
4162 TCGv t0; |
|
4163 gen_set_access_type(ctx, ACCESS_CACHE); |
|
4164 /* NIP cannot be restored if the memory exception comes from an helper */ |
|
4165 gen_update_nip(ctx, ctx->nip - 4); |
|
4166 t0 = tcg_temp_new(); |
|
4167 gen_addr_reg_index(ctx, t0); |
|
4168 gen_helper_icbi(t0); |
|
4169 tcg_temp_free(t0); |
|
4170 } |
|
4171 |
|
/* Optional: */
/* dcba - data cache block allocate (hint) */
GEN_HANDLER(dcba, 0x1F, 0x16, 0x17, 0x03E00001, PPC_CACHE_DCBA)
{
    /* interpreted as no-op */
    /* XXX: specification say this is treated as a store by the MMU
     * but does not generate any exception
     */
}
|
4181 |
|
/*** Segment register manipulation ***/
/* Supervisor only: */
/* mfsr - move from segment register SR(opcode) into rD */
GEN_HANDLER(mfsr, 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
#else
    TCGv t0;
    if (unlikely(!ctx->mem_idx)) {
        /* mem_idx == 0: user mode, mfsr is privileged */
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
        return;
    }
    t0 = tcg_const_tl(SR(ctx->opcode));
    gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], t0);
    tcg_temp_free(t0);
#endif
}
|
4200 |
|
/* mfsrin - move from segment register indexed by rB[0:3] into rD */
GEN_HANDLER(mfsrin, 0x1F, 0x13, 0x14, 0x001F0001, PPC_SEGMENT)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
#else
    TCGv t0;
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
        return;
    }
    t0 = tcg_temp_new();
    /* SR number is the top nibble of rB */
    tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 28);
    tcg_gen_andi_tl(t0, t0, 0xF);
    gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], t0);
    tcg_temp_free(t0);
#endif
}
|
4219 |
|
/* mtsr - move rS into segment register SR(opcode) */
GEN_HANDLER(mtsr, 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
#else
    TCGv t0;
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
        return;
    }
    t0 = tcg_const_tl(SR(ctx->opcode));
    gen_helper_store_sr(t0, cpu_gpr[rS(ctx->opcode)]);
    tcg_temp_free(t0);
#endif
}
|
4236 |
|
4237 /* mtsrin */ |
|
4238 GEN_HANDLER(mtsrin, 0x1F, 0x12, 0x07, 0x001F0001, PPC_SEGMENT) |
|
4239 { |
|
4240 #if defined(CONFIG_USER_ONLY) |
|
4241 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG); |
|
4242 #else |
|
4243 TCGv t0; |
|
4244 if (unlikely(!ctx->mem_idx)) { |
|
4245 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG); |
|
4246 return; |
|
4247 } |
|
4248 t0 = tcg_temp_new(); |
|
4249 tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 28); |
|
4250 tcg_gen_andi_tl(t0, t0, 0xF); |
|
4251 gen_helper_store_sr(t0, cpu_gpr[rD(ctx->opcode)]); |
|
4252 tcg_temp_free(t0); |
|
4253 #endif |
|
4254 } |
|
4255 |
|
#if defined(TARGET_PPC64)
/* Specific implementation for PowerPC 64 "bridge" emulation using SLB */
/* mfsr - bridge form, backed by the SLB instead of segment registers */
GEN_HANDLER2(mfsr_64b, "mfsr", 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT_64B)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
#else
    TCGv t0;
    if (unlikely(!ctx->mem_idx)) {
        /* mem_idx == 0: user mode, privileged instruction */
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
        return;
    }
    t0 = tcg_const_tl(SR(ctx->opcode));
    gen_helper_load_slb(cpu_gpr[rD(ctx->opcode)], t0);
    tcg_temp_free(t0);
#endif
}

/* mfsrin - bridge form, SR number taken from rB[0:3] */
GEN_HANDLER2(mfsrin_64b, "mfsrin", 0x1F, 0x13, 0x14, 0x001F0001,
             PPC_SEGMENT_64B)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
#else
    TCGv t0;
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
        return;
    }
    t0 = tcg_temp_new();
    /* SR number is the top nibble of the low word of rB */
    tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 28);
    tcg_gen_andi_tl(t0, t0, 0xF);
    gen_helper_load_slb(cpu_gpr[rD(ctx->opcode)], t0);
    tcg_temp_free(t0);
#endif
}

/* mtsr - bridge form, writes through to the SLB */
GEN_HANDLER2(mtsr_64b, "mtsr", 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT_64B)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
#else
    TCGv t0;
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
        return;
    }
    t0 = tcg_const_tl(SR(ctx->opcode));
    gen_helper_store_slb(t0, cpu_gpr[rS(ctx->opcode)]);
    tcg_temp_free(t0);
#endif
}

/* mtsrin - bridge form, SR number taken from rB[0:3] */
GEN_HANDLER2(mtsrin_64b, "mtsrin", 0x1F, 0x12, 0x07, 0x001F0001,
             PPC_SEGMENT_64B)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
#else
    TCGv t0;
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
        return;
    }
    t0 = tcg_temp_new();
    tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 28);
    tcg_gen_andi_tl(t0, t0, 0xF);
    gen_helper_store_slb(t0, cpu_gpr[rS(ctx->opcode)]);
    tcg_temp_free(t0);
#endif
}
#endif /* defined(TARGET_PPC64) */
|
4332 |
|
/*** Lookaside buffer management ***/
/* Optional & mem_idx only: */
/* tlbia - invalidate all TLB entries; privileged */
GEN_HANDLER(tlbia, 0x1F, 0x12, 0x0B, 0x03FFFC01, PPC_MEM_TLBIA)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
#else
    if (unlikely(!ctx->mem_idx)) {
        /* mem_idx == 0: user mode, tlbia is privileged */
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
        return;
    }
    gen_helper_tlbia();
#endif
}
|
4348 |
|
/* tlbie - invalidate the TLB entry for the effective address in rB;
 * privileged */
GEN_HANDLER(tlbie, 0x1F, 0x12, 0x09, 0x03FF0001, PPC_MEM_TLBIE)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
#else
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
        return;
    }
#if defined(TARGET_PPC64)
    if (!ctx->sf_mode) {
        /* 32-bit mode: only the low word of rB is a valid address */
        TCGv t0 = tcg_temp_new();
        tcg_gen_ext32u_tl(t0, cpu_gpr[rB(ctx->opcode)]);
        gen_helper_tlbie(t0);
        tcg_temp_free(t0);
    } else
#endif
        gen_helper_tlbie(cpu_gpr[rB(ctx->opcode)]);
#endif
}
|
4370 |
|
/* tlbsync - wait for TLB invalidations to complete; privileged */
GEN_HANDLER(tlbsync, 0x1F, 0x16, 0x11, 0x03FFF801, PPC_MEM_TLBSYNC)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
#else
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
        return;
    }
    /* This has no effect: it should ensure that all previous
     * tlbie have completed
     */
    gen_stop_exception(ctx);
#endif
}
|
4387 |
|
#if defined(TARGET_PPC64)
/* slbia - invalidate all SLB entries; privileged */
GEN_HANDLER(slbia, 0x1F, 0x12, 0x0F, 0x03FFFC01, PPC_SLBI)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
#else
    if (unlikely(!ctx->mem_idx)) {
        /* mem_idx == 0: user mode, slbia is privileged */
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
        return;
    }
    gen_helper_slbia();
#endif
}

/* slbie - invalidate the SLB entry for the effective address in rB;
 * privileged */
GEN_HANDLER(slbie, 0x1F, 0x12, 0x0D, 0x03FF0001, PPC_SLBI)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
#else
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
        return;
    }
    gen_helper_slbie(cpu_gpr[rB(ctx->opcode)]);
#endif
}
#endif
|
4417 |
|
4418 /*** External control ***/ |
|
4419 /* Optional: */ |
|
4420 /* eciwx */ |
|
4421 GEN_HANDLER(eciwx, 0x1F, 0x16, 0x0D, 0x00000001, PPC_EXTERN) |
|
4422 { |
|
4423 TCGv t0; |
|
4424 /* Should check EAR[E] ! */ |
|
4425 gen_set_access_type(ctx, ACCESS_EXT); |
|
4426 t0 = tcg_temp_new(); |
|
4427 gen_addr_reg_index(ctx, t0); |
|
4428 gen_check_align(ctx, t0, 0x03); |
|
4429 gen_qemu_ld32u(ctx, cpu_gpr[rD(ctx->opcode)], t0); |
|
4430 tcg_temp_free(t0); |
|
4431 } |
|
4432 |
|
4433 /* ecowx */ |
|
4434 GEN_HANDLER(ecowx, 0x1F, 0x16, 0x09, 0x00000001, PPC_EXTERN) |
|
4435 { |
|
4436 TCGv t0; |
|
4437 /* Should check EAR[E] ! */ |
|
4438 gen_set_access_type(ctx, ACCESS_EXT); |
|
4439 t0 = tcg_temp_new(); |
|
4440 gen_addr_reg_index(ctx, t0); |
|
4441 gen_check_align(ctx, t0, 0x03); |
|
4442 gen_qemu_st32(ctx, cpu_gpr[rD(ctx->opcode)], t0); |
|
4443 tcg_temp_free(t0); |
|
4444 } |
|
4445 |
|
/* PowerPC 601 specific instructions */
/* abs - abs. : rD = |rA| */
GEN_HANDLER(abs, 0x1F, 0x08, 0x0B, 0x0000F800, PPC_POWER_BR)
{
    int l1 = gen_new_label();
    int l2 = gen_new_label();
    /* rA >= 0: copy; rA < 0: negate */
    tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rA(ctx->opcode)], 0, l1);
    tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    gen_set_label(l2);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}
|
4461 |
|
/* abso - abso. : rD = |rA|, setting XER[OV,SO] when rA is the most
 * negative 32-bit value (whose absolute value is unrepresentable) */
GEN_HANDLER(abso, 0x1F, 0x08, 0x1B, 0x0000F800, PPC_POWER_BR)
{
    int l1 = gen_new_label();
    int l2 = gen_new_label();
    int l3 = gen_new_label();
    /* Start with XER OV disabled, the most likely case */
    tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
    /* rA >= 0: result is rA itself */
    tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rA(ctx->opcode)], 0, l2);
    /* rA < 0 and not INT32_MIN: negate */
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_gpr[rA(ctx->opcode)], 0x80000000, l1);
    /* rA == INT32_MIN: overflow, result stays rA (falls through to l2) */
    tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_br(l3);
    gen_set_label(l2);
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    gen_set_label(l3);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}
|
4483 |
|
4484 /* clcs */ |
|
4485 GEN_HANDLER(clcs, 0x1F, 0x10, 0x13, 0x0000F800, PPC_POWER_BR) |
|
4486 { |
|
4487 TCGv_i32 t0 = tcg_const_i32(rA(ctx->opcode)); |
|
4488 gen_helper_clcs(cpu_gpr[rD(ctx->opcode)], t0); |
|
4489 tcg_temp_free_i32(t0); |
|
4490 /* Rc=1 sets CR0 to an undefined state */ |
|
4491 } |
|
4492 |
|
/* div - div. : POWER divide (uses MQ), implemented in a helper */
GEN_HANDLER(div, 0x1F, 0x0B, 0x0A, 0x00000000, PPC_POWER_BR)
{
    gen_helper_div(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}
|
4500 |
|
/* divo - divo. : POWER divide, overflow-recording variant */
GEN_HANDLER(divo, 0x1F, 0x0B, 0x1A, 0x00000000, PPC_POWER_BR)
{
    gen_helper_divo(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}
|
4508 |
|
/* divs - divs. : POWER divide short, implemented in a helper */
GEN_HANDLER(divs, 0x1F, 0x0B, 0x0B, 0x00000000, PPC_POWER_BR)
{
    gen_helper_divs(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}
|
4516 |
|
/* divso - divso. : POWER divide short, overflow-recording variant */
GEN_HANDLER(divso, 0x1F, 0x0B, 0x1B, 0x00000000, PPC_POWER_BR)
{
    gen_helper_divso(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}
|
4524 |
|
4525 /* doz - doz. */ |
|
4526 GEN_HANDLER(doz, 0x1F, 0x08, 0x08, 0x00000000, PPC_POWER_BR) |
|
4527 { |
|
4528 int l1 = gen_new_label(); |
|
4529 int l2 = gen_new_label(); |
|
4530 tcg_gen_brcond_tl(TCG_COND_GE, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], l1); |
|
4531 tcg_gen_sub_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); |
|
4532 tcg_gen_br(l2); |
|
4533 gen_set_label(l1); |
|
4534 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0); |
|
4535 gen_set_label(l2); |
|
4536 if (unlikely(Rc(ctx->opcode) != 0)) |
|
4537 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); |
|
4538 } |
|
4539 |
|
4540 /* dozo - dozo. */ |
|
4541 GEN_HANDLER(dozo, 0x1F, 0x08, 0x18, 0x00000000, PPC_POWER_BR) |
|
4542 { |
|
4543 int l1 = gen_new_label(); |
|
4544 int l2 = gen_new_label(); |
|
4545 TCGv t0 = tcg_temp_new(); |
|
4546 TCGv t1 = tcg_temp_new(); |
|
4547 TCGv t2 = tcg_temp_new(); |
|
4548 /* Start with XER OV disabled, the most likely case */ |
|
4549 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV)); |
|
4550 tcg_gen_brcond_tl(TCG_COND_GE, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], l1); |
|
4551 tcg_gen_sub_tl(t0, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); |
|
4552 tcg_gen_xor_tl(t1, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); |
|
4553 tcg_gen_xor_tl(t2, cpu_gpr[rA(ctx->opcode)], t0); |
|
4554 tcg_gen_andc_tl(t1, t1, t2); |
|
4555 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0); |
|
4556 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l2); |
|
4557 tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO)); |
|
4558 tcg_gen_br(l2); |
|
4559 gen_set_label(l1); |
|
4560 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0); |
|
4561 gen_set_label(l2); |
|
4562 tcg_temp_free(t0); |
|
4563 tcg_temp_free(t1); |
|
4564 tcg_temp_free(t2); |
|
4565 if (unlikely(Rc(ctx->opcode) != 0)) |
|
4566 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); |
|
4567 } |
|
4568 |
|
4569 /* dozi */ |
|
4570 GEN_HANDLER(dozi, 0x09, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR) |
|
4571 { |
|
4572 target_long simm = SIMM(ctx->opcode); |
|
4573 int l1 = gen_new_label(); |
|
4574 int l2 = gen_new_label(); |
|
4575 tcg_gen_brcondi_tl(TCG_COND_LT, cpu_gpr[rA(ctx->opcode)], simm, l1); |
|
4576 tcg_gen_subfi_tl(cpu_gpr[rD(ctx->opcode)], simm, cpu_gpr[rA(ctx->opcode)]); |
|
4577 tcg_gen_br(l2); |
|
4578 gen_set_label(l1); |
|
4579 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0); |
|
4580 gen_set_label(l2); |
|
4581 if (unlikely(Rc(ctx->opcode) != 0)) |
|
4582 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); |
|
4583 } |
|
4584 |
|
/* lscbx - lscbx. */
GEN_HANDLER(lscbx, 0x1F, 0x15, 0x08, 0x00000000, PPC_POWER_BR)
{
    /* Load String and Compare Byte Indexed: the helper does the string
     * load/compare and returns the byte count in t0. */
    TCGv t0 = tcg_temp_new();
    TCGv_i32 t1 = tcg_const_i32(rD(ctx->opcode));
    TCGv_i32 t2 = tcg_const_i32(rA(ctx->opcode));
    TCGv_i32 t3 = tcg_const_i32(rB(ctx->opcode));

    gen_addr_reg_index(ctx, t0);
    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    gen_helper_lscbx(t0, t0, t1, t2, t3);
    tcg_temp_free_i32(t1);
    tcg_temp_free_i32(t2);
    tcg_temp_free_i32(t3);
    /* Merge the byte count returned by the helper into the low 7 bits of
     * XER. */
    tcg_gen_andi_tl(cpu_xer, cpu_xer, ~0x7F);
    tcg_gen_or_tl(cpu_xer, cpu_xer, t0);
    /* Record form compares the byte count, not a GPR. */
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, t0);
    tcg_temp_free(t0);
}
|
4606 |
|
/* maskg - maskg. */
GEN_HANDLER(maskg, 0x1F, 0x1D, 0x00, 0x00000000, PPC_POWER_BR)
{
    /* Mask Generate: build in rA a mask running from bit position
     * (rS & 0x1F) to bit position (rB & 0x1F), via the XOR of two
     * right-shifted all-ones values; the wrap-around case (start > stop)
     * is handled after the branch. */
    int l1 = gen_new_label();
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv t2 = tcg_temp_new();
    TCGv t3 = tcg_temp_new();
    tcg_gen_movi_tl(t3, 0xFFFFFFFF);
    tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);   /* mask end */
    tcg_gen_andi_tl(t1, cpu_gpr[rS(ctx->opcode)], 0x1F);   /* mask start */
    tcg_gen_addi_tl(t2, t0, 1);
    tcg_gen_shr_tl(t2, t3, t2);
    tcg_gen_shr_tl(t3, t3, t1);
    tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], t2, t3);
    tcg_gen_brcond_tl(TCG_COND_GE, t0, t1, l1);
    /* NOTE(review): the wrap-around case applies neg (two's complement)
     * rather than a bitwise not to the XOR result — verify against the
     * PowerPC 601 maskg definition. */
    tcg_gen_neg_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    gen_set_label(l1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
    tcg_temp_free(t3);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
|
4632 |
|
4633 /* maskir - maskir. */ |
|
4634 GEN_HANDLER(maskir, 0x1F, 0x1D, 0x10, 0x00000000, PPC_POWER_BR) |
|
4635 { |
|
4636 TCGv t0 = tcg_temp_new(); |
|
4637 TCGv t1 = tcg_temp_new(); |
|
4638 tcg_gen_and_tl(t0, cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); |
|
4639 tcg_gen_andc_tl(t1, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); |
|
4640 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); |
|
4641 tcg_temp_free(t0); |
|
4642 tcg_temp_free(t1); |
|
4643 if (unlikely(Rc(ctx->opcode) != 0)) |
|
4644 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); |
|
4645 } |
|
4646 |
|
4647 /* mul - mul. */ |
|
4648 GEN_HANDLER(mul, 0x1F, 0x0B, 0x03, 0x00000000, PPC_POWER_BR) |
|
4649 { |
|
4650 TCGv_i64 t0 = tcg_temp_new_i64(); |
|
4651 TCGv_i64 t1 = tcg_temp_new_i64(); |
|
4652 TCGv t2 = tcg_temp_new(); |
|
4653 tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]); |
|
4654 tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]); |
|
4655 tcg_gen_mul_i64(t0, t0, t1); |
|
4656 tcg_gen_trunc_i64_tl(t2, t0); |
|
4657 gen_store_spr(SPR_MQ, t2); |
|
4658 tcg_gen_shri_i64(t1, t0, 32); |
|
4659 tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t1); |
|
4660 tcg_temp_free_i64(t0); |
|
4661 tcg_temp_free_i64(t1); |
|
4662 tcg_temp_free(t2); |
|
4663 if (unlikely(Rc(ctx->opcode) != 0)) |
|
4664 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); |
|
4665 } |
|
4666 |
|
/* mulo - mulo. */
GEN_HANDLER(mulo, 0x1F, 0x0B, 0x13, 0x00000000, PPC_POWER_BR)
{
    /* As mul (64-bit product: low half to MQ, high half to rD), but also
     * sets XER[OV] and XER[SO] when the product does not fit in 32 signed
     * bits. */
    int l1 = gen_new_label();
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();
    TCGv t2 = tcg_temp_new();
    /* Start with XER OV disabled, the most likely case */
    tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
    tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_mul_i64(t0, t0, t1);
    tcg_gen_trunc_i64_tl(t2, t0);
    gen_store_spr(SPR_MQ, t2);
    tcg_gen_shri_i64(t1, t0, 32);
    tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t1);
    /* Overflow iff the 64-bit product differs from the sign-extension of
     * its low 32 bits. */
    tcg_gen_ext32s_i64(t1, t0);
    tcg_gen_brcond_i64(TCG_COND_EQ, t0, t1, l1);
    tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
    gen_set_label(l1);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free(t2);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}
|
4693 |
|
4694 /* nabs - nabs. */ |
|
4695 GEN_HANDLER(nabs, 0x1F, 0x08, 0x0F, 0x00000000, PPC_POWER_BR) |
|
4696 { |
|
4697 int l1 = gen_new_label(); |
|
4698 int l2 = gen_new_label(); |
|
4699 tcg_gen_brcondi_tl(TCG_COND_GT, cpu_gpr[rA(ctx->opcode)], 0, l1); |
|
4700 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); |
|
4701 tcg_gen_br(l2); |
|
4702 gen_set_label(l1); |
|
4703 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); |
|
4704 gen_set_label(l2); |
|
4705 if (unlikely(Rc(ctx->opcode) != 0)) |
|
4706 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); |
|
4707 } |
|
4708 |
|
4709 /* nabso - nabso. */ |
|
4710 GEN_HANDLER(nabso, 0x1F, 0x08, 0x1F, 0x00000000, PPC_POWER_BR) |
|
4711 { |
|
4712 int l1 = gen_new_label(); |
|
4713 int l2 = gen_new_label(); |
|
4714 tcg_gen_brcondi_tl(TCG_COND_GT, cpu_gpr[rA(ctx->opcode)], 0, l1); |
|
4715 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); |
|
4716 tcg_gen_br(l2); |
|
4717 gen_set_label(l1); |
|
4718 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); |
|
4719 gen_set_label(l2); |
|
4720 /* nabs never overflows */ |
|
4721 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV)); |
|
4722 if (unlikely(Rc(ctx->opcode) != 0)) |
|
4723 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); |
|
4724 } |
|
4725 |
|
4726 /* rlmi - rlmi. */ |
|
4727 GEN_HANDLER(rlmi, 0x16, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR) |
|
4728 { |
|
4729 uint32_t mb = MB(ctx->opcode); |
|
4730 uint32_t me = ME(ctx->opcode); |
|
4731 TCGv t0 = tcg_temp_new(); |
|
4732 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F); |
|
4733 tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); |
|
4734 tcg_gen_andi_tl(t0, t0, MASK(mb, me)); |
|
4735 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], ~MASK(mb, me)); |
|
4736 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], t0); |
|
4737 tcg_temp_free(t0); |
|
4738 if (unlikely(Rc(ctx->opcode) != 0)) |
|
4739 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); |
|
4740 } |
|
4741 |
|
4742 /* rrib - rrib. */ |
|
4743 GEN_HANDLER(rrib, 0x1F, 0x19, 0x10, 0x00000000, PPC_POWER_BR) |
|
4744 { |
|
4745 TCGv t0 = tcg_temp_new(); |
|
4746 TCGv t1 = tcg_temp_new(); |
|
4747 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F); |
|
4748 tcg_gen_movi_tl(t1, 0x80000000); |
|
4749 tcg_gen_shr_tl(t1, t1, t0); |
|
4750 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); |
|
4751 tcg_gen_and_tl(t0, t0, t1); |
|
4752 tcg_gen_andc_tl(t1, cpu_gpr[rA(ctx->opcode)], t1); |
|
4753 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); |
|
4754 tcg_temp_free(t0); |
|
4755 tcg_temp_free(t1); |
|
4756 if (unlikely(Rc(ctx->opcode) != 0)) |
|
4757 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); |
|
4758 } |
|
4759 |
|
4760 /* sle - sle. */ |
|
4761 GEN_HANDLER(sle, 0x1F, 0x19, 0x04, 0x00000000, PPC_POWER_BR) |
|
4762 { |
|
4763 TCGv t0 = tcg_temp_new(); |
|
4764 TCGv t1 = tcg_temp_new(); |
|
4765 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F); |
|
4766 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t1); |
|
4767 tcg_gen_subfi_tl(t1, 32, t1); |
|
4768 tcg_gen_shr_tl(t1, cpu_gpr[rS(ctx->opcode)], t1); |
|
4769 tcg_gen_or_tl(t1, t0, t1); |
|
4770 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); |
|
4771 gen_store_spr(SPR_MQ, t1); |
|
4772 tcg_temp_free(t0); |
|
4773 tcg_temp_free(t1); |
|
4774 if (unlikely(Rc(ctx->opcode) != 0)) |
|
4775 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); |
|
4776 } |
|
4777 |
|
/* sleq - sleq. */
GEN_HANDLER(sleq, 0x1F, 0x19, 0x06, 0x00000000, PPC_POWER_BR)
{
    /* Shift Left Extended with MQ: rotate rS left by sh = rB & 0x1F,
     * store the rotation into MQ, and build
     * rA = (rotated & mask) | (old MQ & ~mask) with mask = ~0u << sh. */
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv t2 = tcg_temp_new();
    tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
    tcg_gen_movi_tl(t2, 0xFFFFFFFF);
    tcg_gen_shl_tl(t2, t2, t0);                       /* t2 = mask */
    tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
    gen_load_spr(t1, SPR_MQ);                         /* old MQ */
    gen_store_spr(SPR_MQ, t0);                        /* new MQ = rotation */
    tcg_gen_and_tl(t0, t0, t2);
    tcg_gen_andc_tl(t1, t1, t2);
    tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
|
4799 |
|
4800 /* sliq - sliq. */ |
|
4801 GEN_HANDLER(sliq, 0x1F, 0x18, 0x05, 0x00000000, PPC_POWER_BR) |
|
4802 { |
|
4803 int sh = SH(ctx->opcode); |
|
4804 TCGv t0 = tcg_temp_new(); |
|
4805 TCGv t1 = tcg_temp_new(); |
|
4806 tcg_gen_shli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); |
|
4807 tcg_gen_shri_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh); |
|
4808 tcg_gen_or_tl(t1, t0, t1); |
|
4809 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); |
|
4810 gen_store_spr(SPR_MQ, t1); |
|
4811 tcg_temp_free(t0); |
|
4812 tcg_temp_free(t1); |
|
4813 if (unlikely(Rc(ctx->opcode) != 0)) |
|
4814 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); |
|
4815 } |
|
4816 |
|
4817 /* slliq - slliq. */ |
|
4818 GEN_HANDLER(slliq, 0x1F, 0x18, 0x07, 0x00000000, PPC_POWER_BR) |
|
4819 { |
|
4820 int sh = SH(ctx->opcode); |
|
4821 TCGv t0 = tcg_temp_new(); |
|
4822 TCGv t1 = tcg_temp_new(); |
|
4823 tcg_gen_rotli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); |
|
4824 gen_load_spr(t1, SPR_MQ); |
|
4825 gen_store_spr(SPR_MQ, t0); |
|
4826 tcg_gen_andi_tl(t0, t0, (0xFFFFFFFFU << sh)); |
|
4827 tcg_gen_andi_tl(t1, t1, ~(0xFFFFFFFFU << sh)); |
|
4828 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); |
|
4829 tcg_temp_free(t0); |
|
4830 tcg_temp_free(t1); |
|
4831 if (unlikely(Rc(ctx->opcode) != 0)) |
|
4832 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); |
|
4833 } |
|
4834 |
|
/* sllq - sllq. */
GEN_HANDLER(sllq, 0x1F, 0x18, 0x06, 0x00000000, PPC_POWER_BR)
{
    /* Shift Left Long with MQ: with sh = rB & 0x1F and mask = ~0u << sh,
     * if rB bit 0x20 is set the result comes entirely from MQ & mask,
     * otherwise rA = (rS << sh) | (MQ & ~mask).
     * Local temps are required because values live across brcond. */
    int l1 = gen_new_label();
    int l2 = gen_new_label();
    TCGv t0 = tcg_temp_local_new();
    TCGv t1 = tcg_temp_local_new();
    TCGv t2 = tcg_temp_local_new();
    tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F);   /* shift count */
    tcg_gen_movi_tl(t1, 0xFFFFFFFF);
    tcg_gen_shl_tl(t1, t1, t2);                            /* t1 = mask */
    tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20);
    tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
    /* Shift amount >= 32: result taken from MQ under the mask. */
    gen_load_spr(t0, SPR_MQ);
    tcg_gen_and_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_gen_br(l2);
    gen_set_label(l1);
    /* Shift amount < 32: merge shifted rS with MQ outside the mask. */
    tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t2);
    gen_load_spr(t2, SPR_MQ);
    tcg_gen_andc_tl(t1, t2, t1);
    tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    gen_set_label(l2);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
|
4863 |
|
/* slq - slq. */
GEN_HANDLER(slq, 0x1F, 0x18, 0x04, 0x00000000, PPC_POWER_BR)
{
    /* Shift Left with MQ: MQ = rS rotated left by sh = rB & 0x1F;
     * rA = rS << sh, or 0 when rB bit 0x20 is set (shift >= 32). */
    int l1 = gen_new_label();
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
    tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
    tcg_gen_subfi_tl(t1, 32, t1);
    tcg_gen_shr_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
    tcg_gen_or_tl(t1, t0, t1);                 /* rotation = shl | shr */
    gen_store_spr(SPR_MQ, t1);
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x20);
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
    /* Skip the zeroing when the 0x20 bit of rB is clear (shift < 32). */
    tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
    tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
    gen_set_label(l1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
|
4886 |
|
/* sraiq - sraiq. */
GEN_HANDLER(sraiq, 0x1F, 0x18, 0x1D, 0x00000000, PPC_POWER_BR)
{
    /* Shift Right Algebraic Immediate with MQ: MQ = rS rotated right by sh;
     * rA = rS >> sh (arithmetic); XER[CA] is set when rS is negative and
     * nonzero bits were shifted out. */
    int sh = SH(ctx->opcode);
    int l1 = gen_new_label();
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
    tcg_gen_shli_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh);  /* bits shifted out */
    tcg_gen_or_tl(t0, t0, t1);                               /* rotation */
    gen_store_spr(SPR_MQ, t0);
    tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
    /* CA stays clear when no bits were lost or rS is non-negative. */
    tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
    tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rS(ctx->opcode)], 0, l1);
    tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_CA));
    gen_set_label(l1);
    tcg_gen_sari_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], sh);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
|
4909 |
|
4910 /* sraq - sraq. */ |
|
4911 GEN_HANDLER(sraq, 0x1F, 0x18, 0x1C, 0x00000000, PPC_POWER_BR) |
|
4912 { |
|
4913 int l1 = gen_new_label(); |
|
4914 int l2 = gen_new_label(); |
|
4915 TCGv t0 = tcg_temp_new(); |
|
4916 TCGv t1 = tcg_temp_local_new(); |
|
4917 TCGv t2 = tcg_temp_local_new(); |
|
4918 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F); |
|
4919 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t2); |
|
4920 tcg_gen_sar_tl(t1, cpu_gpr[rS(ctx->opcode)], t2); |
|
4921 tcg_gen_subfi_tl(t2, 32, t2); |
|
4922 tcg_gen_shl_tl(t2, cpu_gpr[rS(ctx->opcode)], t2); |
|
4923 tcg_gen_or_tl(t0, t0, t2); |
|
4924 gen_store_spr(SPR_MQ, t0); |
|
4925 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20); |
|
4926 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, l1); |
|
4927 tcg_gen_mov_tl(t2, cpu_gpr[rS(ctx->opcode)]); |
|
4928 tcg_gen_sari_tl(t1, cpu_gpr[rS(ctx->opcode)], 31); |
|
4929 gen_set_label(l1); |
|
4930 tcg_temp_free(t0); |
|
4931 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t1); |
|
4932 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA)); |
|
4933 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l2); |
|
4934 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, l2); |
|
4935 tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_CA)); |
|
4936 gen_set_label(l2); |
|
4937 tcg_temp_free(t1); |
|
4938 tcg_temp_free(t2); |
|
4939 if (unlikely(Rc(ctx->opcode) != 0)) |
|
4940 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); |
|
4941 } |
|
4942 |
|
4943 /* sre - sre. */ |
|
4944 GEN_HANDLER(sre, 0x1F, 0x19, 0x14, 0x00000000, PPC_POWER_BR) |
|
4945 { |
|
4946 TCGv t0 = tcg_temp_new(); |
|
4947 TCGv t1 = tcg_temp_new(); |
|
4948 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F); |
|
4949 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1); |
|
4950 tcg_gen_subfi_tl(t1, 32, t1); |
|
4951 tcg_gen_shl_tl(t1, cpu_gpr[rS(ctx->opcode)], t1); |
|
4952 tcg_gen_or_tl(t1, t0, t1); |
|
4953 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); |
|
4954 gen_store_spr(SPR_MQ, t1); |
|
4955 tcg_temp_free(t0); |
|
4956 tcg_temp_free(t1); |
|
4957 if (unlikely(Rc(ctx->opcode) != 0)) |
|
4958 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); |
|
4959 } |
|
4960 |
|
4961 /* srea - srea. */ |
|
4962 GEN_HANDLER(srea, 0x1F, 0x19, 0x1C, 0x00000000, PPC_POWER_BR) |
|
4963 { |
|
4964 TCGv t0 = tcg_temp_new(); |
|
4965 TCGv t1 = tcg_temp_new(); |
|
4966 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F); |
|
4967 tcg_gen_rotr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1); |
|
4968 gen_store_spr(SPR_MQ, t0); |
|
4969 tcg_gen_sar_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], t1); |
|
4970 tcg_temp_free(t0); |
|
4971 tcg_temp_free(t1); |
|
4972 if (unlikely(Rc(ctx->opcode) != 0)) |
|
4973 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); |
|
4974 } |
|
4975 |
|
/* sreq */
GEN_HANDLER(sreq, 0x1F, 0x19, 0x16, 0x00000000, PPC_POWER_BR)
{
    /* Shift Right Extended with MQ: rotate rS right by sh = rB & 0x1F,
     * store the rotation into MQ, and build
     * rA = (rotated & mask) | (old MQ & ~mask) with mask = ~0u >> sh. */
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv t2 = tcg_temp_new();
    tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
    tcg_gen_movi_tl(t1, 0xFFFFFFFF);
    tcg_gen_shr_tl(t1, t1, t0);                       /* t1 = mask */
    tcg_gen_rotr_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
    gen_load_spr(t2, SPR_MQ);                         /* old MQ */
    gen_store_spr(SPR_MQ, t0);                        /* new MQ = rotation */
    tcg_gen_and_tl(t0, t0, t1);
    tcg_gen_andc_tl(t2, t2, t1);
    tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t2);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
|
4997 |
|
4998 /* sriq */ |
|
4999 GEN_HANDLER(sriq, 0x1F, 0x18, 0x15, 0x00000000, PPC_POWER_BR) |
|
5000 { |
|
5001 int sh = SH(ctx->opcode); |
|
5002 TCGv t0 = tcg_temp_new(); |
|
5003 TCGv t1 = tcg_temp_new(); |
|
5004 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); |
|
5005 tcg_gen_shli_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh); |
|
5006 tcg_gen_or_tl(t1, t0, t1); |
|
5007 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); |
|
5008 gen_store_spr(SPR_MQ, t1); |
|
5009 tcg_temp_free(t0); |
|
5010 tcg_temp_free(t1); |
|
5011 if (unlikely(Rc(ctx->opcode) != 0)) |
|
5012 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); |
|
5013 } |
|
5014 |
|
5015 /* srliq */ |
|
5016 GEN_HANDLER(srliq, 0x1F, 0x18, 0x17, 0x00000000, PPC_POWER_BR) |
|
5017 { |
|
5018 int sh = SH(ctx->opcode); |
|
5019 TCGv t0 = tcg_temp_new(); |
|
5020 TCGv t1 = tcg_temp_new(); |
|
5021 tcg_gen_rotri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); |
|
5022 gen_load_spr(t1, SPR_MQ); |
|
5023 gen_store_spr(SPR_MQ, t0); |
|
5024 tcg_gen_andi_tl(t0, t0, (0xFFFFFFFFU >> sh)); |
|
5025 tcg_gen_andi_tl(t1, t1, ~(0xFFFFFFFFU >> sh)); |
|
5026 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); |
|
5027 tcg_temp_free(t0); |
|
5028 tcg_temp_free(t1); |
|
5029 if (unlikely(Rc(ctx->opcode) != 0)) |
|
5030 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); |
|
5031 } |
|
5032 |
|
5033 /* srlq */ |
|
5034 GEN_HANDLER(srlq, 0x1F, 0x18, 0x16, 0x00000000, PPC_POWER_BR) |
|
5035 { |
|
5036 int l1 = gen_new_label(); |
|
5037 int l2 = gen_new_label(); |
|
5038 TCGv t0 = tcg_temp_local_new(); |
|
5039 TCGv t1 = tcg_temp_local_new(); |
|
5040 TCGv t2 = tcg_temp_local_new(); |
|
5041 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F); |
|
5042 tcg_gen_movi_tl(t1, 0xFFFFFFFF); |
|
5043 tcg_gen_shr_tl(t2, t1, t2); |
|
5044 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20); |
|
5045 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1); |
|
5046 gen_load_spr(t0, SPR_MQ); |
|
5047 tcg_gen_and_tl(cpu_gpr[rA(ctx->opcode)], t0, t2); |
|
5048 tcg_gen_br(l2); |
|
5049 gen_set_label(l1); |
|
5050 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t2); |
|
5051 tcg_gen_and_tl(t0, t0, t2); |
|
5052 gen_load_spr(t1, SPR_MQ); |
|
5053 tcg_gen_andc_tl(t1, t1, t2); |
|
5054 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); |
|
5055 gen_set_label(l2); |
|
5056 tcg_temp_free(t0); |
|
5057 tcg_temp_free(t1); |
|
5058 tcg_temp_free(t2); |
|
5059 if (unlikely(Rc(ctx->opcode) != 0)) |
|
5060 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); |
|
5061 } |
|
5062 |
|
5063 /* srq */ |
|
5064 GEN_HANDLER(srq, 0x1F, 0x18, 0x14, 0x00000000, PPC_POWER_BR) |
|
5065 { |
|
5066 int l1 = gen_new_label(); |
|
5067 TCGv t0 = tcg_temp_new(); |
|
5068 TCGv t1 = tcg_temp_new(); |
|
5069 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F); |
|
5070 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1); |
|
5071 tcg_gen_subfi_tl(t1, 32, t1); |
|
5072 tcg_gen_shl_tl(t1, cpu_gpr[rS(ctx->opcode)], t1); |
|
5073 tcg_gen_or_tl(t1, t0, t1); |
|
5074 gen_store_spr(SPR_MQ, t1); |
|
5075 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x20); |
|
5076 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); |
|
5077 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1); |
|
5078 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0); |
|
5079 gen_set_label(l1); |
|
5080 tcg_temp_free(t0); |
|
5081 tcg_temp_free(t1); |
|
5082 if (unlikely(Rc(ctx->opcode) != 0)) |
|
5083 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); |
|
5084 } |
|
5085 |
|
/* PowerPC 602 specific instructions */
/* dsa */
GEN_HANDLER(dsa, 0x1F, 0x14, 0x13, 0x03FFF801, PPC_602_SPEC)
{
    /* Not implemented: raises an invalid-instruction exception. */
    /* XXX: TODO */
    gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
}
|
5093 |
|
/* esa */
GEN_HANDLER(esa, 0x1F, 0x14, 0x12, 0x03FFF801, PPC_602_SPEC)
{
    /* Not implemented: raises an invalid-instruction exception. */
    /* XXX: TODO */
    gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
}
|
5100 |
|
/* mfrom */
GEN_HANDLER(mfrom, 0x1F, 0x09, 0x08, 0x03E0F801, PPC_602_SPEC)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
#else
    /* Supervisor-only: mem_idx == 0 is treated as an unprivileged context. */
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
        return;
    }
    /* The 602 mfrom computation lives in the helper. */
    gen_helper_602_mfrom(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
#endif
}
|
5114 |
|
/* 602 - 603 - G2 TLB management */
/* tlbld */
GEN_HANDLER2(tlbld_6xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_6xx_TLB)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
#else
    /* Supervisor-only: mem_idx == 0 is treated as an unprivileged context. */
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
        return;
    }
    /* Load a data TLB entry; the helper receives the contents of rB. */
    gen_helper_6xx_tlbd(cpu_gpr[rB(ctx->opcode)]);
#endif
}
|
5129 |
|
/* tlbli */
GEN_HANDLER2(tlbli_6xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_6xx_TLB)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
#else
    /* Supervisor-only: mem_idx == 0 is treated as an unprivileged context. */
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
        return;
    }
    /* Load an instruction TLB entry; the helper receives the contents of rB. */
    gen_helper_6xx_tlbi(cpu_gpr[rB(ctx->opcode)]);
#endif
}
|
5143 |
|
/* 74xx TLB management */
/* tlbld */
GEN_HANDLER2(tlbld_74xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_74xx_TLB)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
#else
    /* Supervisor-only: mem_idx == 0 is treated as an unprivileged context. */
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
        return;
    }
    /* Load a data TLB entry; the helper receives the contents of rB. */
    gen_helper_74xx_tlbd(cpu_gpr[rB(ctx->opcode)]);
#endif
}
|
5158 |
|
/* tlbli */
GEN_HANDLER2(tlbli_74xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_74xx_TLB)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
#else
    /* Supervisor-only: mem_idx == 0 is treated as an unprivileged context. */
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
        return;
    }
    /* Load an instruction TLB entry; the helper receives the contents of rB. */
    gen_helper_74xx_tlbi(cpu_gpr[rB(ctx->opcode)]);
#endif
}
|
5172 |
|
/* POWER instructions not in PowerPC 601 */
/* clf */
GEN_HANDLER(clf, 0x1F, 0x16, 0x03, 0x03E00000, PPC_POWER)
{
    /* Cache line flush: implemented as no-op */
}
|
5179 |
|
/* cli */
GEN_HANDLER(cli, 0x1F, 0x16, 0x0F, 0x03E00000, PPC_POWER)
{
    /* Cache line invalidate: privileged and treated as no-op */
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
#else
    /* Only the privilege check is emulated; the invalidate itself is a
     * no-op. */
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
        return;
    }
#endif
}
|
5193 |
|
/* dclst */
GEN_HANDLER(dclst, 0x1F, 0x16, 0x13, 0x03E00000, PPC_POWER)
{
    /* Data cache line store: treated as no-op */
}
|
5199 |
|
/* mfsri: move from segment register indirect (supervisor-only). */
GEN_HANDLER(mfsri, 0x1F, 0x13, 0x13, 0x00000001, PPC_POWER)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
#else
    int ra = rA(ctx->opcode);
    int rd = rD(ctx->opcode);
    TCGv t0;
    /* Supervisor-only: mem_idx == 0 is treated as an unprivileged context. */
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
        return;
    }
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    /* Segment register number = top 4 bits of the effective address. */
    tcg_gen_shri_tl(t0, t0, 28);
    tcg_gen_andi_tl(t0, t0, 0xF);
    gen_helper_load_sr(cpu_gpr[rd], t0);
    tcg_temp_free(t0);
    /* Also copy the result into rA (unless rA is r0 or aliases rD). */
    if (ra != 0 && ra != rd)
        tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rd]);
#endif
}
|
5222 |
|
/* rac: real address compute (supervisor-only); the translation of the
 * effective address is done by the rac helper. */
GEN_HANDLER(rac, 0x1F, 0x12, 0x19, 0x00000001, PPC_POWER)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
#else
    TCGv t0;
    /* Supervisor-only: mem_idx == 0 is treated as an unprivileged context. */
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
        return;
    }
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    gen_helper_rac(cpu_gpr[rD(ctx->opcode)], t0);
    tcg_temp_free(t0);
#endif
}
|
5239 |
|
/* rfsvc: return from SVC (supervisor-only); terminates the TB because the
 * helper changes the execution context.
 * NOTE(review): the invalid-bits mask 0x03FFF0001 has nine hex digits
 * (34 bits) and will be truncated to 32 bits — looks like a typo; confirm
 * the intended mask. */
GEN_HANDLER(rfsvc, 0x13, 0x12, 0x02, 0x03FFF0001, PPC_POWER)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
#else
    /* Supervisor-only: mem_idx == 0 is treated as an unprivileged context. */
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
        return;
    }
    gen_helper_rfsvc();
    gen_sync_exception(ctx);
#endif
}
|
5253 |
|
5254 /* svc is not implemented for now */ |
|
5255 |
|
5256 /* POWER2 specific instructions */ |
|
5257 /* Quad manipulation (load/store two floats at a time) */ |
|
5258 |
|
5259 /* lfq */ |
|
5260 GEN_HANDLER(lfq, 0x38, 0xFF, 0xFF, 0x00000003, PPC_POWER2) |
|
5261 { |
|
5262 int rd = rD(ctx->opcode); |
|
5263 TCGv t0; |
|
5264 gen_set_access_type(ctx, ACCESS_FLOAT); |
|
5265 t0 = tcg_temp_new(); |
|
5266 gen_addr_imm_index(ctx, t0, 0); |
|
5267 gen_qemu_ld64(ctx, cpu_fpr[rd], t0); |
|
5268 gen_addr_add(ctx, t0, t0, 8); |
|
5269 gen_qemu_ld64(ctx, cpu_fpr[(rd + 1) % 32], t0); |
|
5270 tcg_temp_free(t0); |
|
5271 } |
|
5272 |
|
5273 /* lfqu */ |
|
5274 GEN_HANDLER(lfqu, 0x39, 0xFF, 0xFF, 0x00000003, PPC_POWER2) |
|
5275 { |
|
5276 int ra = rA(ctx->opcode); |
|
5277 int rd = rD(ctx->opcode); |
|
5278 TCGv t0, t1; |
|
5279 gen_set_access_type(ctx, ACCESS_FLOAT); |
|
5280 t0 = tcg_temp_new(); |
|
5281 t1 = tcg_temp_new(); |
|
5282 gen_addr_imm_index(ctx, t0, 0); |
|
5283 gen_qemu_ld64(ctx, cpu_fpr[rd], t0); |
|
5284 gen_addr_add(ctx, t1, t0, 8); |
|
5285 gen_qemu_ld64(ctx, cpu_fpr[(rd + 1) % 32], t1); |
|
5286 if (ra != 0) |
|
5287 tcg_gen_mov_tl(cpu_gpr[ra], t0); |
|
5288 tcg_temp_free(t0); |
|
5289 tcg_temp_free(t1); |
|
5290 } |
|
5291 |
|
5292 /* lfqux */ |
|
5293 GEN_HANDLER(lfqux, 0x1F, 0x17, 0x19, 0x00000001, PPC_POWER2) |
|
5294 { |
|
5295 int ra = rA(ctx->opcode); |
|
5296 int rd = rD(ctx->opcode); |
|
5297 gen_set_access_type(ctx, ACCESS_FLOAT); |
|
5298 TCGv t0, t1; |
|
5299 t0 = tcg_temp_new(); |
|
5300 gen_addr_reg_index(ctx, t0); |
|
5301 gen_qemu_ld64(ctx, cpu_fpr[rd], t0); |
|
5302 t1 = tcg_temp_new(); |
|
5303 gen_addr_add(ctx, t1, t0, 8); |
|
5304 gen_qemu_ld64(ctx, cpu_fpr[(rd + 1) % 32], t1); |
|
5305 tcg_temp_free(t1); |
|
5306 if (ra != 0) |
|
5307 tcg_gen_mov_tl(cpu_gpr[ra], t0); |
|
5308 tcg_temp_free(t0); |
|
5309 } |
|
5310 |
|
5311 /* lfqx */ |
|
5312 GEN_HANDLER(lfqx, 0x1F, 0x17, 0x18, 0x00000001, PPC_POWER2) |
|
5313 { |
|
5314 int rd = rD(ctx->opcode); |
|
5315 TCGv t0; |
|
5316 gen_set_access_type(ctx, ACCESS_FLOAT); |
|
5317 t0 = tcg_temp_new(); |
|
5318 gen_addr_reg_index(ctx, t0); |
|
5319 gen_qemu_ld64(ctx, cpu_fpr[rd], t0); |
|
5320 gen_addr_add(ctx, t0, t0, 8); |
|
5321 gen_qemu_ld64(ctx, cpu_fpr[(rd + 1) % 32], t0); |
|
5322 tcg_temp_free(t0); |
|
5323 } |
|
5324 |
|
5325 /* stfq */ |
|
5326 GEN_HANDLER(stfq, 0x3C, 0xFF, 0xFF, 0x00000003, PPC_POWER2) |
|
5327 { |
|
5328 int rd = rD(ctx->opcode); |
|
5329 TCGv t0; |
|
5330 gen_set_access_type(ctx, ACCESS_FLOAT); |
|
5331 t0 = tcg_temp_new(); |
|
5332 gen_addr_imm_index(ctx, t0, 0); |
|
5333 gen_qemu_st64(ctx, cpu_fpr[rd], t0); |
|
5334 gen_addr_add(ctx, t0, t0, 8); |
|
5335 gen_qemu_st64(ctx, cpu_fpr[(rd + 1) % 32], t0); |
|
5336 tcg_temp_free(t0); |
|
5337 } |
|
5338 |
|
5339 /* stfqu */ |
|
5340 GEN_HANDLER(stfqu, 0x3D, 0xFF, 0xFF, 0x00000003, PPC_POWER2) |
|
5341 { |
|
5342 int ra = rA(ctx->opcode); |
|
5343 int rd = rD(ctx->opcode); |
|
5344 TCGv t0, t1; |
|
5345 gen_set_access_type(ctx, ACCESS_FLOAT); |
|
5346 t0 = tcg_temp_new(); |
|
5347 gen_addr_imm_index(ctx, t0, 0); |
|
5348 gen_qemu_st64(ctx, cpu_fpr[rd], t0); |
|
5349 t1 = tcg_temp_new(); |
|
5350 gen_addr_add(ctx, t1, t0, 8); |
|
5351 gen_qemu_st64(ctx, cpu_fpr[(rd + 1) % 32], t1); |
|
5352 tcg_temp_free(t1); |
|
5353 if (ra != 0) |
|
5354 tcg_gen_mov_tl(cpu_gpr[ra], t0); |
|
5355 tcg_temp_free(t0); |
|
5356 } |
|
5357 |
|
5358 /* stfqux */ |
|
5359 GEN_HANDLER(stfqux, 0x1F, 0x17, 0x1D, 0x00000001, PPC_POWER2) |
|
5360 { |
|
5361 int ra = rA(ctx->opcode); |
|
5362 int rd = rD(ctx->opcode); |
|
5363 TCGv t0, t1; |
|
5364 gen_set_access_type(ctx, ACCESS_FLOAT); |
|
5365 t0 = tcg_temp_new(); |
|
5366 gen_addr_reg_index(ctx, t0); |
|
5367 gen_qemu_st64(ctx, cpu_fpr[rd], t0); |
|
5368 t1 = tcg_temp_new(); |
|
5369 gen_addr_add(ctx, t1, t0, 8); |
|
5370 gen_qemu_st64(ctx, cpu_fpr[(rd + 1) % 32], t1); |
|
5371 tcg_temp_free(t1); |
|
5372 if (ra != 0) |
|
5373 tcg_gen_mov_tl(cpu_gpr[ra], t0); |
|
5374 tcg_temp_free(t0); |
|
5375 } |
|
5376 |
|
5377 /* stfqx */ |
|
5378 GEN_HANDLER(stfqx, 0x1F, 0x17, 0x1C, 0x00000001, PPC_POWER2) |
|
5379 { |
|
5380 int rd = rD(ctx->opcode); |
|
5381 TCGv t0; |
|
5382 gen_set_access_type(ctx, ACCESS_FLOAT); |
|
5383 t0 = tcg_temp_new(); |
|
5384 gen_addr_reg_index(ctx, t0); |
|
5385 gen_qemu_st64(ctx, cpu_fpr[rd], t0); |
|
5386 gen_addr_add(ctx, t0, t0, 8); |
|
5387 gen_qemu_st64(ctx, cpu_fpr[(rd + 1) % 32], t0); |
|
5388 tcg_temp_free(t0); |
|
5389 } |
|
5390 |
|
/* BookE specific instructions */
/* XXX: not implemented on 440 ? */
GEN_HANDLER(mfapidi, 0x1F, 0x13, 0x08, 0x0000F801, PPC_MFAPIDI)
{
    /* Not implemented: raises an invalid-instruction exception. */
    /* XXX: TODO */
    gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
}
|
5398 |
|
5399 /* XXX: not implemented on 440 ? */ |
|
5400 GEN_HANDLER(tlbiva, 0x1F, 0x12, 0x18, 0x03FFF801, PPC_TLBIVA) |
|
5401 { |
|
5402 #if defined(CONFIG_USER_ONLY) |
|
5403 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC); |
|
5404 #else |
|
5405 TCGv t0; |
|
5406 if (unlikely(!ctx->mem_idx)) { |
|
5407 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC); |
|
5408 return; |
|
5409 } |
|
5410 t0 = tcg_temp_new(); |
|
5411 gen_addr_reg_index(ctx, t0); |
|
5412 gen_helper_tlbie(cpu_gpr[rB(ctx->opcode)]); |
|
5413 tcg_temp_free(t0); |
|
5414 #endif |
|
5415 } |
|
5416 |
|
5417 /* All 405 MAC instructions are translated here */ |
|
5418 static always_inline void gen_405_mulladd_insn (DisasContext *ctx, |
|
5419 int opc2, int opc3, |
|
5420 int ra, int rb, int rt, int Rc) |
|
5421 { |
|
5422 TCGv t0, t1; |
|
5423 |
|
5424 t0 = tcg_temp_local_new(); |
|
5425 t1 = tcg_temp_local_new(); |
|
5426 |
|
5427 switch (opc3 & 0x0D) { |
|
5428 case 0x05: |
|
5429 /* macchw - macchw. - macchwo - macchwo. */ |
|
5430 /* macchws - macchws. - macchwso - macchwso. */ |
|
5431 /* nmacchw - nmacchw. - nmacchwo - nmacchwo. */ |
|
5432 /* nmacchws - nmacchws. - nmacchwso - nmacchwso. */ |
|
5433 /* mulchw - mulchw. */ |
|
5434 tcg_gen_ext16s_tl(t0, cpu_gpr[ra]); |
|
5435 tcg_gen_sari_tl(t1, cpu_gpr[rb], 16); |
|
5436 tcg_gen_ext16s_tl(t1, t1); |
|
5437 break; |
|
5438 case 0x04: |
|
5439 /* macchwu - macchwu. - macchwuo - macchwuo. */ |
|
5440 /* macchwsu - macchwsu. - macchwsuo - macchwsuo. */ |
|
5441 /* mulchwu - mulchwu. */ |
|
5442 tcg_gen_ext16u_tl(t0, cpu_gpr[ra]); |
|
5443 tcg_gen_shri_tl(t1, cpu_gpr[rb], 16); |
|
5444 tcg_gen_ext16u_tl(t1, t1); |
|
5445 break; |
|
5446 case 0x01: |
|
5447 /* machhw - machhw. - machhwo - machhwo. */ |
|
5448 /* machhws - machhws. - machhwso - machhwso. */ |
|
5449 /* nmachhw - nmachhw. - nmachhwo - nmachhwo. */ |
|
5450 /* nmachhws - nmachhws. - nmachhwso - nmachhwso. */ |
|
5451 /* mulhhw - mulhhw. */ |
|
5452 tcg_gen_sari_tl(t0, cpu_gpr[ra], 16); |
|
5453 tcg_gen_ext16s_tl(t0, t0); |
|
5454 tcg_gen_sari_tl(t1, cpu_gpr[rb], 16); |
|
5455 tcg_gen_ext16s_tl(t1, t1); |
|
5456 break; |
|
5457 case 0x00: |
|
5458 /* machhwu - machhwu. - machhwuo - machhwuo. */ |
|
5459 /* machhwsu - machhwsu. - machhwsuo - machhwsuo. */ |
|
5460 /* mulhhwu - mulhhwu. */ |
|
5461 tcg_gen_shri_tl(t0, cpu_gpr[ra], 16); |
|
5462 tcg_gen_ext16u_tl(t0, t0); |
|
5463 tcg_gen_shri_tl(t1, cpu_gpr[rb], 16); |
|
5464 tcg_gen_ext16u_tl(t1, t1); |
|
5465 break; |
|
5466 case 0x0D: |
|
5467 /* maclhw - maclhw. - maclhwo - maclhwo. */ |
|
5468 /* maclhws - maclhws. - maclhwso - maclhwso. */ |
|
5469 /* nmaclhw - nmaclhw. - nmaclhwo - nmaclhwo. */ |
|
5470 /* nmaclhws - nmaclhws. - nmaclhwso - nmaclhwso. */ |
|
5471 /* mullhw - mullhw. */ |
|
5472 tcg_gen_ext16s_tl(t0, cpu_gpr[ra]); |
|
5473 tcg_gen_ext16s_tl(t1, cpu_gpr[rb]); |
|
5474 break; |
|
5475 case 0x0C: |
|
5476 /* maclhwu - maclhwu. - maclhwuo - maclhwuo. */ |
|
5477 /* maclhwsu - maclhwsu. - maclhwsuo - maclhwsuo. */ |
|
5478 /* mullhwu - mullhwu. */ |
|
5479 tcg_gen_ext16u_tl(t0, cpu_gpr[ra]); |
|
5480 tcg_gen_ext16u_tl(t1, cpu_gpr[rb]); |
|
5481 break; |
|
5482 } |
|
5483 if (opc2 & 0x04) { |
|
5484 /* (n)multiply-and-accumulate (0x0C / 0x0E) */ |
|
5485 tcg_gen_mul_tl(t1, t0, t1); |
|
5486 if (opc2 & 0x02) { |
|
5487 /* nmultiply-and-accumulate (0x0E) */ |
|
5488 tcg_gen_sub_tl(t0, cpu_gpr[rt], t1); |
|
5489 } else { |
|
5490 /* multiply-and-accumulate (0x0C) */ |
|
5491 tcg_gen_add_tl(t0, cpu_gpr[rt], t1); |
|
5492 } |
|
5493 |
|
5494 if (opc3 & 0x12) { |
|
5495 /* Check overflow and/or saturate */ |
|
5496 int l1 = gen_new_label(); |
|
5497 |
|
5498 if (opc3 & 0x10) { |
|
5499 /* Start with XER OV disabled, the most likely case */ |
|
5500 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV)); |
|
5501 } |
|
5502 if (opc3 & 0x01) { |
|
5503 /* Signed */ |
|
5504 tcg_gen_xor_tl(t1, cpu_gpr[rt], t1); |
|
5505 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1); |
|
5506 tcg_gen_xor_tl(t1, cpu_gpr[rt], t0); |
|
5507 tcg_gen_brcondi_tl(TCG_COND_LT, t1, 0, l1); |
|
5508 if (opc3 & 0x02) { |
|
5509 /* Saturate */ |
|
5510 tcg_gen_sari_tl(t0, cpu_gpr[rt], 31); |
|
5511 tcg_gen_xori_tl(t0, t0, 0x7fffffff); |
|
5512 } |
|
5513 } else { |
|
5514 /* Unsigned */ |
|
5515 tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1); |
|
5516 if (opc3 & 0x02) { |
|
5517 /* Saturate */ |
|
5518 tcg_gen_movi_tl(t0, UINT32_MAX); |
|
5519 } |
|
5520 } |
|
5521 if (opc3 & 0x10) { |
|
5522 /* Check overflow */ |
|
5523 tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO)); |
|
5524 } |
|
5525 gen_set_label(l1); |
|
5526 tcg_gen_mov_tl(cpu_gpr[rt], t0); |
|
5527 } |
|
5528 } else { |
|
5529 tcg_gen_mul_tl(cpu_gpr[rt], t0, t1); |
|
5530 } |
|
5531 tcg_temp_free(t0); |
|
5532 tcg_temp_free(t1); |
|
5533 if (unlikely(Rc) != 0) { |
|
5534 /* Update Rc0 */ |
|
5535 gen_set_Rc0(ctx, cpu_gpr[rt]); |
|
5536 } |
|
5537 } |
|
5538 |
|
/* Expand one 405 MAC/halfword-multiply opcode into a handler that
 * forwards its opc2/opc3 encoding and register fields to
 * gen_405_mulladd_insn(). */
#define GEN_MAC_HANDLER(name, opc2, opc3)                                     \
GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_405_MAC)                  \
{                                                                             \
    gen_405_mulladd_insn(ctx, opc2, opc3, rA(ctx->opcode), rB(ctx->opcode),   \
                         rD(ctx->opcode), Rc(ctx->opcode));                   \
}
|
5545 |
|
/* PowerPC 405 MAC and halfword-multiply instructions, all expanded via
 * GEN_MAC_HANDLER into calls to gen_405_mulladd_insn(). */
/* macchw    - macchw.    */
GEN_MAC_HANDLER(macchw, 0x0C, 0x05);
/* macchwo   - macchwo.   */
GEN_MAC_HANDLER(macchwo, 0x0C, 0x15);
/* macchws   - macchws.   */
GEN_MAC_HANDLER(macchws, 0x0C, 0x07);
/* macchwso  - macchwso.  */
GEN_MAC_HANDLER(macchwso, 0x0C, 0x17);
/* macchwsu  - macchwsu.  */
GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06);
/* macchwsuo - macchwsuo. */
GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16);
/* macchwu   - macchwu.   */
GEN_MAC_HANDLER(macchwu, 0x0C, 0x04);
/* macchwuo  - macchwuo.  */
GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14);
/* machhw    - machhw.    */
GEN_MAC_HANDLER(machhw, 0x0C, 0x01);
/* machhwo   - machhwo.   */
GEN_MAC_HANDLER(machhwo, 0x0C, 0x11);
/* machhws   - machhws.   */
GEN_MAC_HANDLER(machhws, 0x0C, 0x03);
/* machhwso  - machhwso.  */
GEN_MAC_HANDLER(machhwso, 0x0C, 0x13);
/* machhwsu  - machhwsu.  */
GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02);
/* machhwsuo - machhwsuo. */
GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12);
/* machhwu   - machhwu.   */
GEN_MAC_HANDLER(machhwu, 0x0C, 0x00);
/* machhwuo  - machhwuo.  */
GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10);
/* maclhw    - maclhw.    */
GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D);
/* maclhwo   - maclhwo.   */
GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D);
/* maclhws   - maclhws.   */
GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F);
/* maclhwso  - maclhwso.  */
GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F);
/* maclhwu   - maclhwu.   */
GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C);
/* maclhwuo  - maclhwuo.  */
GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C);
/* maclhwsu  - maclhwsu.  */
GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E);
/* maclhwsuo - maclhwsuo. */
GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E);
/* nmacchw   - nmacchw.   */
GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05);
/* nmacchwo  - nmacchwo.  */
GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15);
/* nmacchws  - nmacchws.  */
GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07);
/* nmacchwso - nmacchwso. */
GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17);
/* nmachhw   - nmachhw.   */
GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01);
/* nmachhwo  - nmachhwo.  */
GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11);
/* nmachhws  - nmachhws.  */
GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03);
/* nmachhwso - nmachhwso. */
GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13);
/* nmaclhw   - nmaclhw.   */
GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D);
/* nmaclhwo  - nmaclhwo.  */
GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D);
/* nmaclhws  - nmaclhws.  */
GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F);
/* nmaclhwso - nmaclhwso. */
GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F);

/* mulchw  - mulchw.  */
GEN_MAC_HANDLER(mulchw, 0x08, 0x05);
/* mulchwu - mulchwu. */
GEN_MAC_HANDLER(mulchwu, 0x08, 0x04);
/* mulhhw  - mulhhw.  */
GEN_MAC_HANDLER(mulhhw, 0x08, 0x01);
/* mulhhwu - mulhhwu. */
GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00);
/* mullhw  - mullhw.  */
GEN_MAC_HANDLER(mullhw, 0x08, 0x0D);
/* mullhwu - mullhwu. */
GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C);
|
5631 |
|
/* mfdcr */
GEN_HANDLER(mfdcr, 0x1F, 0x03, 0x0A, 0x00000001, PPC_DCR)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
#else
    /* Move from device control register: privileged; the DCR number is
     * taken from the SPR field of the opcode and resolved by a helper. */
    TCGv dcrn;
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
        return;
    }
    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    dcrn = tcg_const_tl(SPR(ctx->opcode));
    gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], dcrn);
    tcg_temp_free(dcrn);
#endif
}
|
5650 |
|
/* mtdcr */
GEN_HANDLER(mtdcr, 0x1F, 0x03, 0x0E, 0x00000001, PPC_DCR)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
#else
    /* Move to device control register: privileged counterpart of mfdcr;
     * writes rS to the DCR named by the SPR field. */
    TCGv dcrn;
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
        return;
    }
    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    dcrn = tcg_const_tl(SPR(ctx->opcode));
    gen_helper_store_dcr(dcrn, cpu_gpr[rS(ctx->opcode)]);
    tcg_temp_free(dcrn);
#endif
}
|
5669 |
|
/* mfdcrx */
/* XXX: not implemented on 440 ? */
GEN_HANDLER(mfdcrx, 0x1F, 0x03, 0x08, 0x00000000, PPC_DCRX)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
#else
    /* Indexed form of mfdcr: the DCR number comes from register rA
     * instead of an immediate SPR field. Privileged. */
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
        return;
    }
    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    /* Note: Rc update flag set leads to undefined state of Rc0 */
#endif
}
|
5687 |
|
/* mtdcrx */
/* XXX: not implemented on 440 ? */
GEN_HANDLER(mtdcrx, 0x1F, 0x03, 0x0C, 0x00000000, PPC_DCRX)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
#else
    /* Indexed form of mtdcr: DCR number in rA, value in rS. Privileged. */
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
        return;
    }
    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    gen_helper_store_dcr(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
    /* Note: Rc update flag set leads to undefined state of Rc0 */
#endif
}
|
5705 |
|
/* mfdcrux (PPC 460) : user-mode access to DCR */
GEN_HANDLER(mfdcrux, 0x1F, 0x03, 0x09, 0x00000000, PPC_DCRUX)
{
    /* Same as mfdcrx but legal in user mode, so no privilege check. */
    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    /* Note: Rc update flag set leads to undefined state of Rc0 */
}
|
5714 |
|
/* mtdcrux (PPC 460) : user-mode access to DCR */
GEN_HANDLER(mtdcrux, 0x1F, 0x03, 0x0D, 0x00000000, PPC_DCRUX)
{
    /* Same as mtdcrx but legal in user mode, so no privilege check. */
    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    gen_helper_store_dcr(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
    /* Note: Rc update flag set leads to undefined state of Rc0 */
}
|
5723 |
|
/* dccci */
GEN_HANDLER(dccci, 0x1F, 0x06, 0x0E, 0x03E00001, PPC_4xx_COMMON)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
#else
    /* Data cache congruence-class invalidate: privileged. No data cache
     * is modelled, so only the privilege check is performed. */
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
        return;
    }
    /* interpreted as no-op */
#endif
}
|
5737 |
|
/* dcread */
GEN_HANDLER(dcread, 0x1F, 0x06, 0x0F, 0x00000001, PPC_4xx_COMMON)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
#else
    /* Data cache read: privileged. With no cache model, perform the
     * 32-bit load anyway (so MMU faults still occur) and discard the
     * value, returning the effective address in rD. */
    TCGv EA, val;
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
        return;
    }
    gen_set_access_type(ctx, ACCESS_CACHE);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    val = tcg_temp_new();
    gen_qemu_ld32u(ctx, val, EA);
    tcg_temp_free(val);
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], EA);
    tcg_temp_free(EA);
#endif
}
|
5759 |
|
/* icbt */
GEN_HANDLER2(icbt_40x, "icbt", 0x1F, 0x06, 0x08, 0x03E00001, PPC_40x_ICBT)
{
    /* Instruction cache block touch (40x encoding): a cache hint only. */
    /* interpreted as no-op */
    /* XXX: specification say this is treated as a load by the MMU
     *      but does not generate any exception
     */
}
|
5768 |
|
/* iccci */
GEN_HANDLER(iccci, 0x1F, 0x06, 0x1E, 0x00000001, PPC_4xx_COMMON)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
#else
    /* Instruction cache congruence-class invalidate: privileged; no
     * instruction cache is modelled, so only the privilege check runs. */
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
        return;
    }
    /* interpreted as no-op */
#endif
}
|
5782 |
|
/* icread */
GEN_HANDLER(icread, 0x1F, 0x06, 0x1F, 0x03E00001, PPC_4xx_COMMON)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
#else
    /* Instruction cache read: privileged; nothing to read without a
     * cache model, so only the privilege check runs. */
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
        return;
    }
    /* interpreted as no-op */
#endif
}
|
5796 |
|
/* rfci (mem_idx only) */
GEN_HANDLER2(rfci_40x, "rfci", 0x13, 0x13, 0x01, 0x03FF8001, PPC_40x_EXCP)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
#else
    /* Return from critical interrupt (40x flavour): privileged; all
     * state restoration is done in the helper, then translation stops
     * because NIP/MSR changed. */
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
        return;
    }
    /* Restore CPU state */
    gen_helper_40x_rfci();
    gen_sync_exception(ctx);
#endif
}
|
5812 |
|
GEN_HANDLER(rfci, 0x13, 0x13, 0x01, 0x03FF8001, PPC_BOOKE)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
#else
    /* Return from critical interrupt (BookE flavour): privileged; the
     * helper restores NIP/MSR, then translation must stop. */
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
        return;
    }
    /* Restore CPU state */
    gen_helper_rfci();
    gen_sync_exception(ctx);
#endif
}
|
5827 |
|
/* BookE specific */
/* XXX: not implemented on 440 ? */
GEN_HANDLER(rfdi, 0x13, 0x07, 0x01, 0x03FF8001, PPC_RFDI)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
#else
    /* Return from debug interrupt: privileged; helper restores state,
     * then translation stops. */
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
        return;
    }
    /* Restore CPU state */
    gen_helper_rfdi();
    gen_sync_exception(ctx);
#endif
}
|
5844 |
|
/* XXX: not implemented on 440 ? */
GEN_HANDLER(rfmci, 0x13, 0x06, 0x01, 0x03FF8001, PPC_RFMCI)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
#else
    /* Return from machine-check interrupt: privileged; helper restores
     * state, then translation stops. */
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
        return;
    }
    /* Restore CPU state */
    gen_helper_rfmci();
    gen_sync_exception(ctx);
#endif
}
|
5860 |
|
/* TLB management - PowerPC 405 implementation */
/* tlbre */
GEN_HANDLER2(tlbre_40x, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_40x_TLB)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
#else
    /* Read TLB entry rA into rD. rB selects which word: 0 for the HI
     * word, 1 for the LO word; anything else is an invalid encoding.
     * Privileged. */
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
        return;
    }
    switch (rB(ctx->opcode)) {
    case 0:
        gen_helper_4xx_tlbre_hi(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
        break;
    case 1:
        gen_helper_4xx_tlbre_lo(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
        break;
    default:
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        break;
    }
#endif
}
|
5885 |
|
/* tlbsx - tlbsx. */
GEN_HANDLER2(tlbsx_40x, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_40x_TLB)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
#else
    /* TLB search indexed: look up EA = (rA|0) + rB and put the matching
     * entry index in rD (helper returns -1 on miss). Privileged. */
    TCGv t0;
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
        return;
    }
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    gen_helper_4xx_tlbsx(cpu_gpr[rD(ctx->opcode)], t0);
    tcg_temp_free(t0);
    if (Rc(ctx->opcode)) {
        /* tlbsx.: CR0 = SO from XER, plus EQ set iff an entry was found
         * (rD != -1). */
        int l1 = gen_new_label();
        tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_xer);
        tcg_gen_shri_i32(cpu_crf[0], cpu_crf[0], XER_SO);
        tcg_gen_andi_i32(cpu_crf[0], cpu_crf[0], 1);
        tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1);
        tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02);
        gen_set_label(l1);
    }
#endif
}
|
5912 |
|
/* tlbwe */
GEN_HANDLER2(tlbwe_40x, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_40x_TLB)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
#else
    /* Write rS into TLB entry rA. rB selects the word: 0 for HI, 1 for
     * LO; anything else is an invalid encoding. Privileged. */
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
        return;
    }
    switch (rB(ctx->opcode)) {
    case 0:
        gen_helper_4xx_tlbwe_hi(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
        break;
    case 1:
        gen_helper_4xx_tlbwe_lo(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
        break;
    default:
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        break;
    }
#endif
}
|
5936 |
|
5937 /* TLB management - PowerPC 440 implementation */ |
|
5938 /* tlbre */ |
|
5939 GEN_HANDLER2(tlbre_440, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_BOOKE) |
|
5940 { |
|
5941 #if defined(CONFIG_USER_ONLY) |
|
5942 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC); |
|
5943 #else |
|
5944 if (unlikely(!ctx->mem_idx)) { |
|
5945 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC); |
|
5946 return; |
|
5947 } |
|
5948 switch (rB(ctx->opcode)) { |
|
5949 case 0: |
|
5950 case 1: |
|
5951 case 2: |
|
5952 { |
|
5953 TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode)); |
|
5954 gen_helper_440_tlbwe(t0, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); |
|
5955 tcg_temp_free_i32(t0); |
|
5956 } |
|
5957 break; |
|
5958 default: |
|
5959 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); |
|
5960 break; |
|
5961 } |
|
5962 #endif |
|
5963 } |
|
5964 |
|
/* tlbsx - tlbsx. */
GEN_HANDLER2(tlbsx_440, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_BOOKE)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
#else
    /* TLB search indexed (440): look up EA = (rA|0) + rB and put the
     * matching entry index in rD (helper returns -1 on miss).
     * Privileged. */
    TCGv t0;
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
        return;
    }
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    gen_helper_440_tlbsx(cpu_gpr[rD(ctx->opcode)], t0);
    tcg_temp_free(t0);
    if (Rc(ctx->opcode)) {
        /* tlbsx.: CR0 = SO from XER, plus EQ set iff an entry was found
         * (rD != -1). */
        int l1 = gen_new_label();
        tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_xer);
        tcg_gen_shri_i32(cpu_crf[0], cpu_crf[0], XER_SO);
        tcg_gen_andi_i32(cpu_crf[0], cpu_crf[0], 1);
        tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1);
        tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02);
        gen_set_label(l1);
    }
#endif
}
|
5991 |
|
/* tlbwe */
GEN_HANDLER2(tlbwe_440, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_BOOKE)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
#else
    /* Write rS into word rB (0..2) of TLB entry rA; other rB values
     * are an invalid encoding. Privileged. */
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
        return;
    }
    switch (rB(ctx->opcode)) {
    case 0:
    case 1:
    case 2:
        {
            TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode));
            gen_helper_440_tlbwe(t0, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
            tcg_temp_free_i32(t0);
        }
        break;
    default:
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        break;
    }
#endif
}
|
6018 |
|
/* wrtee */
GEN_HANDLER(wrtee, 0x1F, 0x03, 0x04, 0x000FFC01, PPC_WRTEE)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
#else
    /* Write MSR[EE] from bit EE of register rS (rD field): clear the
     * bit in MSR, then OR in the corresponding bit from the register.
     * Privileged. */
    TCGv t0;
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
        return;
    }
    t0 = tcg_temp_new();
    tcg_gen_andi_tl(t0, cpu_gpr[rD(ctx->opcode)], (1 << MSR_EE));
    tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE));
    tcg_gen_or_tl(cpu_msr, cpu_msr, t0);
    tcg_temp_free(t0);
    /* Stop translation to have a chance to raise an exception
     * if we just set msr_ee to 1
     */
    gen_stop_exception(ctx);
#endif
}
|
6041 |
|
6042 /* wrteei */ |
|
6043 GEN_HANDLER(wrteei, 0x1F, 0x03, 0x05, 0x000EFC01, PPC_WRTEE) |
|
6044 { |
|
6045 #if defined(CONFIG_USER_ONLY) |
|
6046 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC); |
|
6047 #else |
|
6048 if (unlikely(!ctx->mem_idx)) { |
|
6049 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC); |
|
6050 return; |
|
6051 } |
|
6052 if (ctx->opcode & 0x00010000) { |
|
6053 tcg_gen_ori_tl(cpu_msr, cpu_msr, (1 << MSR_EE)); |
|
6054 /* Stop translation to have a chance to raise an exception */ |
|
6055 gen_stop_exception(ctx); |
|
6056 } else { |
|
6057 tcg_gen_andi_tl(cpu_msr, cpu_msr, (1 << MSR_EE)); |
|
6058 } |
|
6059 #endif |
|
6060 } |
|
6061 |
|
/* PowerPC 440 specific instructions */
/* dlmzb */
GEN_HANDLER(dlmzb, 0x1F, 0x0E, 0x02, 0x00000000, PPC_440_SPEC)
{
    /* Determine leftmost zero byte: all the work is done in the helper,
     * which receives the Rc bit as a constant so it can update the
     * condition/status registers itself when needed. */
    TCGv_i32 t0 = tcg_const_i32(Rc(ctx->opcode));
    gen_helper_dlmzb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
                     cpu_gpr[rB(ctx->opcode)], t0);
    tcg_temp_free_i32(t0);
}
|
6071 |
|
/* mbar replaces eieio on 440 */
GEN_HANDLER(mbar, 0x1F, 0x16, 0x1a, 0x001FF801, PPC_BOOKE)
{
    /* Memory barrier: nothing to do with QEMU's in-order memory model. */
    /* interpreted as no-op */
}
|
6077 |
|
/* msync replaces sync on 440 */
GEN_HANDLER(msync, 0x1F, 0x16, 0x12, 0x03FFF801, PPC_BOOKE)
{
    /* Memory synchronize: nothing to do with QEMU's in-order model. */
    /* interpreted as no-op */
}
|
6083 |
|
/* icbt */
GEN_HANDLER2(icbt_440, "icbt", 0x1F, 0x16, 0x00, 0x03E00001, PPC_BOOKE)
{
    /* Instruction cache block touch (BookE encoding): a cache hint only. */
    /* interpreted as no-op */
    /* XXX: specification say this is treated as a load by the MMU
     *      but does not generate any exception
     */
}
|
6092 |
|
6093 /*** Altivec vector extension ***/ |
|
6094 /* Altivec registers moves */ |
|
6095 |
|
6096 static always_inline TCGv_ptr gen_avr_ptr(int reg) |
|
6097 { |
|
6098 TCGv_ptr r = tcg_temp_new(); |
|
6099 tcg_gen_addi_ptr(r, cpu_env, offsetof(CPUPPCState, avr[reg])); |
|
6100 return r; |
|
6101 } |
|
6102 |
|
/* Expand a 16-byte Altivec vector load (lvx family): EA = (rA|0) + rB
 * forcibly aligned to 16 bytes; the two 64-bit halves are loaded in an
 * order matching the current endianness. Raises VPU-unavailable when
 * Altivec is disabled. */
#define GEN_VR_LDX(name, opc2, opc3)                                          \
GEN_HANDLER(name, 0x1F, opc2, opc3, 0x00000001, PPC_ALTIVEC)                  \
{                                                                             \
    TCGv EA;                                                                  \
    if (unlikely(!ctx->altivec_enabled)) {                                    \
        gen_exception(ctx, POWERPC_EXCP_VPU);                                 \
        return;                                                               \
    }                                                                         \
    gen_set_access_type(ctx, ACCESS_INT);                                     \
    EA = tcg_temp_new();                                                      \
    gen_addr_reg_index(ctx, EA);                                              \
    tcg_gen_andi_tl(EA, EA, ~0xf);                                            \
    if (ctx->le_mode) {                                                       \
        gen_qemu_ld64(ctx, cpu_avrl[rD(ctx->opcode)], EA);                    \
        tcg_gen_addi_tl(EA, EA, 8);                                           \
        gen_qemu_ld64(ctx, cpu_avrh[rD(ctx->opcode)], EA);                    \
    } else {                                                                  \
        gen_qemu_ld64(ctx, cpu_avrh[rD(ctx->opcode)], EA);                    \
        tcg_gen_addi_tl(EA, EA, 8);                                           \
        gen_qemu_ld64(ctx, cpu_avrl[rD(ctx->opcode)], EA);                    \
    }                                                                         \
    tcg_temp_free(EA);                                                        \
}
|
6126 |
|
/* Expand a 16-byte Altivec vector store (stvx family); the generated
 * handler is named st<name>. Mirrors GEN_VR_LDX: EA aligned to 16
 * bytes, halves stored in endianness order. */
#define GEN_VR_STX(name, opc2, opc3)                                          \
GEN_HANDLER(st##name, 0x1F, opc2, opc3, 0x00000001, PPC_ALTIVEC)              \
{                                                                             \
    TCGv EA;                                                                  \
    if (unlikely(!ctx->altivec_enabled)) {                                    \
        gen_exception(ctx, POWERPC_EXCP_VPU);                                 \
        return;                                                               \
    }                                                                         \
    gen_set_access_type(ctx, ACCESS_INT);                                     \
    EA = tcg_temp_new();                                                      \
    gen_addr_reg_index(ctx, EA);                                              \
    tcg_gen_andi_tl(EA, EA, ~0xf);                                            \
    if (ctx->le_mode) {                                                       \
        gen_qemu_st64(ctx, cpu_avrl[rD(ctx->opcode)], EA);                    \
        tcg_gen_addi_tl(EA, EA, 8);                                           \
        gen_qemu_st64(ctx, cpu_avrh[rD(ctx->opcode)], EA);                    \
    } else {                                                                  \
        gen_qemu_st64(ctx, cpu_avrh[rD(ctx->opcode)], EA);                    \
        tcg_gen_addi_tl(EA, EA, 8);                                           \
        gen_qemu_st64(ctx, cpu_avrl[rD(ctx->opcode)], EA);                    \
    }                                                                         \
    tcg_temp_free(EA);                                                        \
}
|
6150 |
|
GEN_VR_LDX(lvx, 0x07, 0x03);
/* As we don't emulate the cache, lvxl is strictly equivalent to lvx */
GEN_VR_LDX(lvxl, 0x07, 0x0B);

GEN_VR_STX(svx, 0x07, 0x07);
/* As we don't emulate the cache, stvxl is strictly equivalent to stvx */
GEN_VR_STX(svxl, 0x07, 0x0F);
|
6158 |
|
/* Logical operations */
/* Expand an Altivec bitwise op: apply the given 64-bit TCG operation
 * independently to the high and low halves of the source vectors. */
#define GEN_VX_LOGICAL(name, tcg_op, opc2, opc3)                              \
GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_ALTIVEC)                  \
{                                                                             \
    if (unlikely(!ctx->altivec_enabled)) {                                    \
        gen_exception(ctx, POWERPC_EXCP_VPU);                                 \
        return;                                                               \
    }                                                                         \
    tcg_op(cpu_avrh[rD(ctx->opcode)], cpu_avrh[rA(ctx->opcode)], cpu_avrh[rB(ctx->opcode)]); \
    tcg_op(cpu_avrl[rD(ctx->opcode)], cpu_avrl[rA(ctx->opcode)], cpu_avrl[rB(ctx->opcode)]); \
}
|
6170 |
|
/* Altivec bitwise instructions, expanded via GEN_VX_LOGICAL. */
GEN_VX_LOGICAL(vand, tcg_gen_and_i64, 2, 16);
GEN_VX_LOGICAL(vandc, tcg_gen_andc_i64, 2, 17);
GEN_VX_LOGICAL(vor, tcg_gen_or_i64, 2, 18);
GEN_VX_LOGICAL(vxor, tcg_gen_xor_i64, 2, 19);
GEN_VX_LOGICAL(vnor, tcg_gen_nor_i64, 2, 20);
|
6176 |
|
/* FIXME: This is old dyngen-based AltiVec code. It should probably just
   be ripped out once the TCG based stuff is merged. */
|
6179 #if 0 |
|
6180 OP_VR_ST_TABLE(vebx); |
|
6181 GEN_VR_STX(vebx, 0x07, 0x04); |
|
6182 OP_VR_ST_TABLE(vehx); |
|
6183 GEN_VR_STX(vehx, 0x07, 0x05); |
|
6184 OP_VR_ST_TABLE(vewx); |
|
6185 GEN_VR_STX(vewx, 0x07, 0x06); |
|
6186 |
|
6187 GEN_HANDLER(lvsl, 0x1f, 0x06, 0x00, 0x00000001, PPC_ALTIVEC) |
|
6188 { |
|
6189 if (unlikely(!ctx->altivec_enabled)) { |
|
6190 GEN_EXCP_NO_VR(ctx); |
|
6191 return; |
|
6192 } |
|
6193 gen_addr_reg_index(cpu_T[0], ctx); |
|
6194 gen_op_lvsl (); |
|
6195 gen_store_avr(rD(ctx->opcode), 0); |
|
6196 } |
|
6197 |
|
6198 GEN_HANDLER(lvsr, 0x1f, 0x06, 0x01, 0x00000001, PPC_ALTIVEC) |
|
6199 { |
|
6200 if (unlikely(!ctx->altivec_enabled)) { |
|
6201 GEN_EXCP_NO_VR(ctx); |
|
6202 return; |
|
6203 } |
|
6204 gen_addr_reg_index(cpu_T[0], ctx); |
|
6205 gen_op_lvsr (); |
|
6206 gen_store_avr(rD(ctx->opcode), 0); |
|
6207 } |
|
6208 |
|
6209 GEN_HANDLER(mfvscr, 0x04, 0x2, 0x18, 0x001ff800, PPC_ALTIVEC) |
|
6210 { |
|
6211 if (unlikely(!ctx->altivec_enabled)) { |
|
6212 GEN_EXCP_NO_VR(ctx); |
|
6213 return; |
|
6214 } |
|
6215 gen_op_load_vscr_A0 (); |
|
6216 gen_store_avr(rD(ctx->opcode), 0); |
|
6217 } |
|
6218 |
|
6219 GEN_HANDLER(mtvscr, 0x04, 0x2, 0x19, 0x03ff0000, PPC_ALTIVEC) |
|
6220 { |
|
6221 if (unlikely(!ctx->altivec_enabled)) { |
|
6222 GEN_EXCP_NO_VR(ctx); |
|
6223 return; |
|
6224 } |
|
6225 gen_load_avr(0, rB(ctx->opcode)); |
|
6226 gen_op_store_vscr_A0 (); |
|
6227 } |
|
6228 |
|
6229 #define GEN_VR_VXFORM(name, xo) \ |
|
6230 GEN_HANDLER(name, 0x04, (xo >> 1) & 0x1f, (xo >> 6) & 0x1f, 0x00000000, PPC_ALTIVEC) \ |
|
6231 { \ |
|
6232 if (unlikely(!ctx->altivec_enabled)) { \ |
|
6233 GEN_EXCP_NO_VR(ctx); \ |
|
6234 return; \ |
|
6235 } \ |
|
6236 gen_load_avr(0, rA(ctx->opcode)); \ |
|
6237 gen_load_avr(1, rB(ctx->opcode)); \ |
|
6238 gen_op_##name (); \ |
|
6239 gen_store_avr(rD(ctx->opcode), 0); \ |
|
6240 } |
|
6241 |
|
6242 #define GEN_VR_VXFORM_UIMM(name, xo) \ |
|
6243 GEN_HANDLER(name, 0x04, (xo >> 1) & 0x1f, (xo >> 6) & 0x1f, 0x00000000, PPC_ALTIVEC) \ |
|
6244 { \ |
|
6245 target_ulong uimm = UIMM5(ctx->opcode); \ |
|
6246 if (unlikely(!ctx->altivec_enabled)) { \ |
|
6247 GEN_EXCP_NO_VR(ctx); \ |
|
6248 return; \ |
|
6249 } \ |
|
6250 tcg_gen_movi_tl(cpu_T[0], uimm); \ |
|
6251 gen_load_avr(1, rB(ctx->opcode)); \ |
|
6252 gen_op_##name (); \ |
|
6253 gen_store_avr(rD(ctx->opcode), 0); \ |
|
6254 } |
|
6255 |
|
6256 #define GEN_VR_VXFORM_SIMM(name, xo) \ |
|
6257 GEN_HANDLER(name, 0x04, (xo >> 1) & 0x1f, (xo >> 6) & 0x1f, 0x0000f800, PPC_ALTIVEC) \ |
|
6258 { \ |
|
6259 target_ulong simm = SIMM5(ctx->opcode); \ |
|
6260 if (unlikely(!ctx->altivec_enabled)) { \ |
|
6261 GEN_EXCP_NO_VR(ctx); \ |
|
6262 return; \ |
|
6263 } \ |
|
6264 tcg_gen_movi_tl(cpu_T[0], simm); \ |
|
6265 gen_op_##name (); \ |
|
6266 gen_store_avr(rD(ctx->opcode), 0); \ |
|
6267 } |
|
6268 |
|
6269 #define GEN_VR_VXFORM_NOA(name, xo) \ |
|
6270 GEN_HANDLER(name, 0x04, (xo >> 1) & 0x1f, (xo >> 6) & 0x1f, 0x001f0000, PPC_ALTIVEC) \ |
|
6271 { \ |
|
6272 if (unlikely(!ctx->altivec_enabled)) { \ |
|
6273 GEN_EXCP_NO_VR(ctx); \ |
|
6274 return; \ |
|
6275 } \ |
|
6276 gen_load_avr(1, rB(ctx->opcode)); \ |
|
6277 gen_op_##name (); \ |
|
6278 gen_store_avr(rD(ctx->opcode), 0); \ |
|
6279 } |
|
6280 |
|
/* AltiVec VX-form instruction table.  Each invocation expands (via the
 * GEN_VR_VXFORM* macros defined above) into a handler for primary opcode 4;
 * the second argument is the instruction's extended opcode (xo) field.
 * _NOA variants take no vA operand, _UIMM/_SIMM variants take an immediate. */
GEN_VR_VXFORM(vaddubm, 0);
GEN_VR_VXFORM(vadduhm, 64);
GEN_VR_VXFORM(vadduwm, 128);
GEN_VR_VXFORM(vaddcuw, 384);
GEN_VR_VXFORM(vaddubs, 512);
GEN_VR_VXFORM(vadduhs, 576);
GEN_VR_VXFORM(vadduws, 640);
GEN_VR_VXFORM(vaddsbs, 768);
GEN_VR_VXFORM(vaddshs, 832);
GEN_VR_VXFORM(vaddsws, 896);
GEN_VR_VXFORM(vsububm, 1024);
GEN_VR_VXFORM(vsubuhm, 1088);
GEN_VR_VXFORM(vsubuwm, 1152);
GEN_VR_VXFORM(vsubcuw, 1408);
GEN_VR_VXFORM(vsububs, 1536);
GEN_VR_VXFORM(vsubuhs, 1600);
GEN_VR_VXFORM(vsubuws, 1664);
GEN_VR_VXFORM(vsubsbs, 1792);
GEN_VR_VXFORM(vsubshs, 1856);
GEN_VR_VXFORM(vsubsws, 1920);
GEN_VR_VXFORM(vmaxub, 2);
GEN_VR_VXFORM(vmaxuh, 66);
GEN_VR_VXFORM(vmaxuw, 130);
GEN_VR_VXFORM(vmaxsb, 258);
GEN_VR_VXFORM(vmaxsh, 322);
GEN_VR_VXFORM(vmaxsw, 386);
GEN_VR_VXFORM(vminub, 514);
GEN_VR_VXFORM(vminuh, 578);
GEN_VR_VXFORM(vminuw, 642);
GEN_VR_VXFORM(vminsb, 770);
GEN_VR_VXFORM(vminsh, 834);
GEN_VR_VXFORM(vminsw, 898);
GEN_VR_VXFORM(vavgub, 1026);
GEN_VR_VXFORM(vavguh, 1090);
GEN_VR_VXFORM(vavguw, 1154);
GEN_VR_VXFORM(vavgsb, 1282);
GEN_VR_VXFORM(vavgsh, 1346);
GEN_VR_VXFORM(vavgsw, 1410);
GEN_VR_VXFORM(vrlb, 4);
GEN_VR_VXFORM(vrlh, 68);
GEN_VR_VXFORM(vrlw, 132);
GEN_VR_VXFORM(vslb, 260);
GEN_VR_VXFORM(vslh, 324);
GEN_VR_VXFORM(vslw, 388);
GEN_VR_VXFORM(vsl, 452);
GEN_VR_VXFORM(vsrb, 516);
GEN_VR_VXFORM(vsrh, 580);
GEN_VR_VXFORM(vsrw, 644);
GEN_VR_VXFORM(vsr, 708);
GEN_VR_VXFORM(vsrab, 772);
GEN_VR_VXFORM(vsrah, 836);
GEN_VR_VXFORM(vsraw, 900);
GEN_VR_VXFORM(vand, 1028);
GEN_VR_VXFORM(vandc, 1092);
GEN_VR_VXFORM(vor, 1156);
GEN_VR_VXFORM(vnor, 1284);
/* mfvscr and mtvscr implemented separately */
GEN_VR_VXFORM(vmuloub, 8);
GEN_VR_VXFORM(vmulouh, 72);
GEN_VR_VXFORM(vmulosb, 264);
GEN_VR_VXFORM(vmulosh, 328);
GEN_VR_VXFORM(vmuleub, 520);
GEN_VR_VXFORM(vmuleuh, 584);
GEN_VR_VXFORM(vmulesb, 776);
GEN_VR_VXFORM(vmulesh, 840);
GEN_VR_VXFORM(vsum4ubs, 1544);
GEN_VR_VXFORM(vsum4sbs, 1800);
GEN_VR_VXFORM(vsum4shs, 1608);
GEN_VR_VXFORM(vsum2sws, 1672);
GEN_VR_VXFORM(vsumsws, 1928);
GEN_VR_VXFORM(vaddfp, 10);
GEN_VR_VXFORM(vsubfp, 74);
GEN_VR_VXFORM_NOA(vrefp, 266);
GEN_VR_VXFORM_NOA(vrsqrtefp, 330);
GEN_VR_VXFORM_NOA(vexptefp, 394);
GEN_VR_VXFORM_NOA(vlogefp, 458);
GEN_VR_VXFORM_NOA(vrfin, 522);
GEN_VR_VXFORM_NOA(vrfiz, 586);
GEN_VR_VXFORM_NOA(vrfip, 650);
GEN_VR_VXFORM_NOA(vrfim, 714);
GEN_VR_VXFORM_UIMM(vcfux, 778);
GEN_VR_VXFORM_UIMM(vcfsx, 842);
GEN_VR_VXFORM_UIMM(vctuxs, 906);
GEN_VR_VXFORM_UIMM(vctsxs, 970);
GEN_VR_VXFORM(vmaxfp, 1034);
GEN_VR_VXFORM(vminfp, 1098);
GEN_VR_VXFORM(vmrghb, 12);
GEN_VR_VXFORM(vmrghh, 76);
GEN_VR_VXFORM(vmrghw, 140);
GEN_VR_VXFORM(vmrglb, 268);
GEN_VR_VXFORM(vmrglh, 332);
GEN_VR_VXFORM(vmrglw, 396);
GEN_VR_VXFORM_UIMM(vspltb, 524);
GEN_VR_VXFORM_UIMM(vsplth, 588);
GEN_VR_VXFORM_UIMM(vspltw, 652);
GEN_VR_VXFORM_SIMM(vspltisb, 780);
GEN_VR_VXFORM_SIMM(vspltish, 844);
GEN_VR_VXFORM_SIMM(vspltisw, 908);
GEN_VR_VXFORM(vslo, 1036);
GEN_VR_VXFORM(vsro, 1100);
GEN_VR_VXFORM(vpkuhum, 14);
GEN_VR_VXFORM(vpkuwum, 78);
GEN_VR_VXFORM(vpkuhus, 142);
GEN_VR_VXFORM(vpkuwus, 206);
GEN_VR_VXFORM(vpkshus, 270);
GEN_VR_VXFORM(vpkswus, 334);
GEN_VR_VXFORM(vpkshss, 398);
GEN_VR_VXFORM(vpkswss, 462);
GEN_VR_VXFORM_NOA(vupkhsb, 526);
GEN_VR_VXFORM_NOA(vupkhsh, 590);
GEN_VR_VXFORM_NOA(vupklsb, 654);
GEN_VR_VXFORM_NOA(vupklsh, 718);
GEN_VR_VXFORM(vpkpx, 782);
GEN_VR_VXFORM_NOA(vupkhpx, 846);
GEN_VR_VXFORM_NOA(vupklpx, 974);
GEN_VR_VXFORM(vxor, 1220);
|
6397 |
|
/* VXR-form (vector compare) handler generator.
 * Expands to a handler that loads vA/vB, runs gen_op_<opname>, stores vD,
 * and — for the record ('.') form, rc == 1 — copies cpu_T[0] into CR6.
 * The xo field is split across the opc2/opc3 decode slots, with the Rc bit
 * folded into opc3 (bit 4), which is why the same xo produces two handlers. */
#define GEN_VR_VXRFORM1(opname, name, str, xo, rc)                      \
GEN_HANDLER2(name, str, 0x4, (xo >> 1) & 0x1f, ((xo >> 6) & 0x1f) | (rc << 4), 0x00000000, PPC_ALTIVEC) \
{                                                                       \
    if (unlikely(!ctx->altivec_enabled)) {                              \
        GEN_EXCP_NO_VR(ctx);                                            \
        return;                                                         \
    }                                                                   \
    gen_load_avr(0, rA(ctx->opcode));                                   \
    gen_load_avr(1, rB(ctx->opcode));                                   \
    gen_op_##opname ();                                                 \
    gen_store_avr(rD(ctx->opcode), 0);                                  \
    if (rc) {                                                           \
        tcg_gen_mov_i32(cpu_crf[6], cpu_T[0]);                          \
    }                                                                   \
}

/* Emit both the plain and the record ('.') form of a vector compare. */
#define GEN_VR_VXRFORM(name, xo)                                        \
    GEN_VR_VXRFORM1(name, name, #name, xo, 0)                           \
    GEN_VR_VXRFORM1(name, name##_, #name ".", xo, 1)

GEN_VR_VXRFORM(vcmpbfp, 966);
GEN_VR_VXRFORM(vcmpeqfp, 198);
GEN_VR_VXRFORM(vcmpequb, 6)
GEN_VR_VXRFORM(vcmpequh, 70)
GEN_VR_VXRFORM(vcmpequw, 134)
GEN_VR_VXRFORM(vcmpgefp, 454)
GEN_VR_VXRFORM(vcmpgtfp, 710)
GEN_VR_VXRFORM(vcmpgtsb, 774)
GEN_VR_VXRFORM(vcmpgtsh, 838)
GEN_VR_VXRFORM(vcmpgtsw, 902)
GEN_VR_VXRFORM(vcmpgtub, 518)
GEN_VR_VXRFORM(vcmpgtuh, 582)
GEN_VR_VXRFORM(vcmpgtuw, 646)
|
6431 |
|
/* VA-form handler generator for instruction pairs that share an opcode slot
 * and are distinguished by the Rc bit: Rc == 0 selects name0, Rc == 1 selects
 * name1.  Three vector sources (vA, vB, vC) are loaded; the result is vD. */
#define GEN_VR_VAFORM_PAIRED(name0, name1, xo)                          \
GEN_HANDLER(name0##_##name1, 0x04, xo>>1, 0xFF, 0x00000000, PPC_ALTIVEC) \
{                                                                       \
    if (unlikely(!ctx->altivec_enabled)) {                              \
        GEN_EXCP_NO_VR(ctx);                                            \
        return;                                                         \
    }                                                                   \
    gen_load_avr(0, rA(ctx->opcode));                                   \
    gen_load_avr(1, rB(ctx->opcode));                                   \
    gen_load_avr(2, rC(ctx->opcode));                                   \
    if (Rc(ctx->opcode)) {                                              \
        gen_op_##name1 ();                                              \
    } else {                                                            \
        gen_op_##name0 ();                                              \
    }                                                                   \
    gen_store_avr(rD(ctx->opcode), 0);                                  \
}
GEN_VR_VAFORM_PAIRED(vmhaddshs, vmhraddshs, 32)
|
6450 |
|
/* vmladduhm: VA-form multiply-low/add with three vector sources; it does not
 * pair with another instruction on the Rc bit, hence the dedicated handler. */
GEN_HANDLER(vmladduhm, 0x04, 0x11, 0xFF, 0x00000000, PPC_ALTIVEC)
{
    if (unlikely(!ctx->altivec_enabled)) {
        GEN_EXCP_NO_VR(ctx);
        return;
    }
    gen_load_avr(0, rA(ctx->opcode));
    gen_load_avr(1, rB(ctx->opcode));
    gen_load_avr(2, rC(ctx->opcode));
    gen_op_vmladduhm();
    gen_store_avr(rD(ctx->opcode), 0);
}

GEN_VR_VAFORM_PAIRED(vmsumubm, vmsummbm, 36)
GEN_VR_VAFORM_PAIRED(vmsumuhm, vmsumuhs, 38)
GEN_VR_VAFORM_PAIRED(vmsumshm, vmsumshs, 40)
GEN_VR_VAFORM_PAIRED(vsel, vperm, 42)
GEN_VR_VAFORM_PAIRED(vmaddfp, vnmsubfp, 46)

/* vsldoi: shift-left double by octet immediate.  The 4-bit shift count (VSH
 * field) is passed to the helper through cpu_T[0]. */
GEN_HANDLER(vsldoi, 0x04, 0x16, 0xFF, 0x00000400, PPC_ALTIVEC)
{
    target_ulong sh = VSH(ctx->opcode);
    if (unlikely(!ctx->altivec_enabled)) {
        GEN_EXCP_NO_VR(ctx);
        return;
    }
    gen_load_avr(0, rA(ctx->opcode));
    gen_load_avr(1, rB(ctx->opcode));
    tcg_gen_movi_tl(cpu_T[0], sh);
    gen_op_vsldoi();
    gen_store_avr(rD(ctx->opcode), 0);
}
|
6483 #endif |
|
6484 |
|
6485 /*** SPE extension ***/ |
|
6486 /* Register moves */ |
|
6487 |
|
/* Read SPE GPR 'reg' as a 64-bit value into t.  On 32-bit targets the 64-bit
 * register is split across cpu_gpr (low word) and cpu_gprh (high word). */
static always_inline void gen_load_gpr64(TCGv_i64 t, int reg) {
#if defined(TARGET_PPC64)
    tcg_gen_mov_i64(t, cpu_gpr[reg]);
#else
    /* low word from cpu_gpr, high word from cpu_gprh */
    tcg_gen_concat_i32_i64(t, cpu_gpr[reg], cpu_gprh[reg]);
#endif
}
|
6495 |
|
6496 static always_inline void gen_store_gpr64(int reg, TCGv_i64 t) { |
|
6497 #if defined(TARGET_PPC64) |
|
6498 tcg_gen_mov_i64(cpu_gpr[reg], t); |
|
6499 #else |
|
6500 TCGv_i64 tmp = tcg_temp_new_i64(); |
|
6501 tcg_gen_trunc_i64_i32(cpu_gpr[reg], t); |
|
6502 tcg_gen_shri_i64(tmp, t, 32); |
|
6503 tcg_gen_trunc_i64_i32(cpu_gprh[reg], tmp); |
|
6504 tcg_temp_free_i64(tmp); |
|
6505 #endif |
|
6506 } |
|
6507 |
|
/* SPE handler generator: two instructions share one opcode slot and are
 * selected by the Rc bit (Rc == 0 -> name0, Rc == 1 -> name1). */
#define GEN_SPE(name0, name1, opc2, opc3, inval, type)                  \
GEN_HANDLER(name0##_##name1, 0x04, opc2, opc3, inval, type)             \
{                                                                       \
    if (Rc(ctx->opcode))                                                \
        gen_##name1(ctx);                                               \
    else                                                                \
        gen_##name0(ctx);                                               \
}

/* Handler for undefined SPE opcodes */
static always_inline void gen_speundef (DisasContext *ctx)
{
    gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
}
|
6522 |
|
6523 /* SPE logic */ |
|
/* SPE logic */
/* SPE two-operand logic: apply tcg_op element-wise to both 32-bit halves of
 * the 64-bit SPE registers.  On PPC64 the whole 64-bit GPR is operated on at
 * once (the _tl op is 64-bit); on 32-bit targets the op is applied to the
 * low (cpu_gpr) and high (cpu_gprh) words separately. */
#if defined(TARGET_PPC64)
#define GEN_SPEOP_LOGIC2(name, tcg_op)                                  \
static always_inline void gen_##name (DisasContext *ctx)                \
{                                                                       \
    if (unlikely(!ctx->spe_enabled)) {                                  \
        gen_exception(ctx, POWERPC_EXCP_APU);                           \
        return;                                                         \
    }                                                                   \
    tcg_op(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],          \
           cpu_gpr[rB(ctx->opcode)]);                                   \
}
#else
#define GEN_SPEOP_LOGIC2(name, tcg_op)                                  \
static always_inline void gen_##name (DisasContext *ctx)                \
{                                                                       \
    if (unlikely(!ctx->spe_enabled)) {                                  \
        gen_exception(ctx, POWERPC_EXCP_APU);                           \
        return;                                                         \
    }                                                                   \
    tcg_op(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],          \
           cpu_gpr[rB(ctx->opcode)]);                                   \
    tcg_op(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)],        \
           cpu_gprh[rB(ctx->opcode)]);                                  \
}
#endif

GEN_SPEOP_LOGIC2(evand, tcg_gen_and_tl);
GEN_SPEOP_LOGIC2(evandc, tcg_gen_andc_tl);
GEN_SPEOP_LOGIC2(evxor, tcg_gen_xor_tl);
GEN_SPEOP_LOGIC2(evor, tcg_gen_or_tl);
GEN_SPEOP_LOGIC2(evnor, tcg_gen_nor_tl);
GEN_SPEOP_LOGIC2(eveqv, tcg_gen_eqv_tl);
GEN_SPEOP_LOGIC2(evorc, tcg_gen_orc_tl);
GEN_SPEOP_LOGIC2(evnand, tcg_gen_nand_tl);
|
6558 |
|
6559 /* SPE logic immediate */ |
|
/* SPE logic immediate */
/* SPE shift/rotate-by-immediate: apply tcg_opi with the rB field as the
 * immediate count to both 32-bit halves of rA, result in rD.  On PPC64 the
 * halves are extracted from / recombined into the single 64-bit GPR. */
#if defined(TARGET_PPC64)
#define GEN_SPEOP_TCG_LOGIC_IMM2(name, tcg_opi)                         \
static always_inline void gen_##name (DisasContext *ctx)                \
{                                                                       \
    if (unlikely(!ctx->spe_enabled)) {                                  \
        gen_exception(ctx, POWERPC_EXCP_APU);                           \
        return;                                                         \
    }                                                                   \
    TCGv_i32 t0 = tcg_temp_local_new_i32();                             \
    TCGv_i32 t1 = tcg_temp_local_new_i32();                             \
    TCGv_i64 t2 = tcg_temp_local_new_i64();                             \
    tcg_gen_trunc_i64_i32(t0, cpu_gpr[rA(ctx->opcode)]);                \
    tcg_opi(t0, t0, rB(ctx->opcode));                                   \
    tcg_gen_shri_i64(t2, cpu_gpr[rA(ctx->opcode)], 32);                 \
    tcg_gen_trunc_i64_i32(t1, t2);                                      \
    tcg_temp_free_i64(t2);                                              \
    tcg_opi(t1, t1, rB(ctx->opcode));                                   \
    tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1);           \
    tcg_temp_free_i32(t0);                                              \
    tcg_temp_free_i32(t1);                                              \
}
#else
#define GEN_SPEOP_TCG_LOGIC_IMM2(name, tcg_opi)                         \
static always_inline void gen_##name (DisasContext *ctx)                \
{                                                                       \
    if (unlikely(!ctx->spe_enabled)) {                                  \
        gen_exception(ctx, POWERPC_EXCP_APU);                           \
        return;                                                         \
    }                                                                   \
    tcg_opi(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],         \
            rA(ctx->opcode));                                           \
    tcg_opi(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rB(ctx->opcode)],       \
            rA(ctx->opcode));                                           \
}
#endif
GEN_SPEOP_TCG_LOGIC_IMM2(evslwi, tcg_gen_shli_i32);
GEN_SPEOP_TCG_LOGIC_IMM2(evsrwiu, tcg_gen_shri_i32);
GEN_SPEOP_TCG_LOGIC_IMM2(evsrwis, tcg_gen_sari_i32);
GEN_SPEOP_TCG_LOGIC_IMM2(evrlwi, tcg_gen_rotli_i32);
|
6599 |
|
/* SPE arithmetic */
/* SPE one-operand arithmetic: apply tcg_op to both 32-bit halves of rA,
 * result in rD.  PPC64 splits/recombines the single 64-bit GPR; 32-bit
 * targets operate on cpu_gpr/cpu_gprh directly. */
#if defined(TARGET_PPC64)
#define GEN_SPEOP_ARITH1(name, tcg_op)                                  \
static always_inline void gen_##name (DisasContext *ctx)                \
{                                                                       \
    if (unlikely(!ctx->spe_enabled)) {                                  \
        gen_exception(ctx, POWERPC_EXCP_APU);                           \
        return;                                                         \
    }                                                                   \
    TCGv_i32 t0 = tcg_temp_local_new_i32();                             \
    TCGv_i32 t1 = tcg_temp_local_new_i32();                             \
    TCGv_i64 t2 = tcg_temp_local_new_i64();                             \
    tcg_gen_trunc_i64_i32(t0, cpu_gpr[rA(ctx->opcode)]);                \
    tcg_op(t0, t0);                                                     \
    tcg_gen_shri_i64(t2, cpu_gpr[rA(ctx->opcode)], 32);                 \
    tcg_gen_trunc_i64_i32(t1, t2);                                      \
    tcg_temp_free_i64(t2);                                              \
    tcg_op(t1, t1);                                                     \
    tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1);           \
    tcg_temp_free_i32(t0);                                              \
    tcg_temp_free_i32(t1);                                              \
}
#else
#define GEN_SPEOP_ARITH1(name, tcg_op)                                  \
static always_inline void gen_##name (DisasContext *ctx)                \
{                                                                       \
    if (unlikely(!ctx->spe_enabled)) {                                  \
        gen_exception(ctx, POWERPC_EXCP_APU);                           \
        return;                                                         \
    }                                                                   \
    tcg_op(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);         \
    tcg_op(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]);       \
}
#endif

/* Per-word absolute value: negate when negative, copy otherwise. */
static always_inline void gen_op_evabs (TCGv_i32 ret, TCGv_i32 arg1)
{
    int l1 = gen_new_label();
    int l2 = gen_new_label();

    tcg_gen_brcondi_i32(TCG_COND_GE, arg1, 0, l1);
    tcg_gen_neg_i32(ret, arg1);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_mov_i32(ret, arg1);
    gen_set_label(l2);
}
GEN_SPEOP_ARITH1(evabs, gen_op_evabs);
GEN_SPEOP_ARITH1(evneg, tcg_gen_neg_i32);
GEN_SPEOP_ARITH1(evextsb, tcg_gen_ext8s_i32);
GEN_SPEOP_ARITH1(evextsh, tcg_gen_ext16s_i32);
/* Per-word round-to-nearest of the upper halfword: add 0x8000 then keep the
 * low 16 bits (zero-extended). */
static always_inline void gen_op_evrndw (TCGv_i32 ret, TCGv_i32 arg1)
{
    tcg_gen_addi_i32(ret, arg1, 0x8000);
    tcg_gen_ext16u_i32(ret, ret);
}
GEN_SPEOP_ARITH1(evrndw, gen_op_evrndw);
GEN_SPEOP_ARITH1(evcntlsw, gen_helper_cntlsw32);
GEN_SPEOP_ARITH1(evcntlzw, gen_helper_cntlzw32);
|
6659 |
|
/* SPE two-operand arithmetic: apply tcg_op element-wise to both 32-bit
 * halves of rA and rB, result in rD.  PPC64 splits/recombines the single
 * 64-bit GPR; 32-bit targets operate on cpu_gpr/cpu_gprh directly. */
#if defined(TARGET_PPC64)
#define GEN_SPEOP_ARITH2(name, tcg_op)                                  \
static always_inline void gen_##name (DisasContext *ctx)                \
{                                                                       \
    if (unlikely(!ctx->spe_enabled)) {                                  \
        gen_exception(ctx, POWERPC_EXCP_APU);                           \
        return;                                                         \
    }                                                                   \
    TCGv_i32 t0 = tcg_temp_local_new_i32();                             \
    TCGv_i32 t1 = tcg_temp_local_new_i32();                             \
    TCGv_i32 t2 = tcg_temp_local_new_i32();                             \
    /* use the typed i64 allocator, matching the TCGv_i64 declaration   \
       and the sibling GEN_SPEOP_* macros (was the obsolete             \
       tcg_temp_local_new(TCG_TYPE_I64)) */                             \
    TCGv_i64 t3 = tcg_temp_local_new_i64();                             \
    tcg_gen_trunc_i64_i32(t0, cpu_gpr[rA(ctx->opcode)]);                \
    tcg_gen_trunc_i64_i32(t2, cpu_gpr[rB(ctx->opcode)]);                \
    tcg_op(t0, t0, t2);                                                 \
    tcg_gen_shri_i64(t3, cpu_gpr[rA(ctx->opcode)], 32);                 \
    tcg_gen_trunc_i64_i32(t1, t3);                                      \
    tcg_gen_shri_i64(t3, cpu_gpr[rB(ctx->opcode)], 32);                 \
    tcg_gen_trunc_i64_i32(t2, t3);                                      \
    tcg_temp_free_i64(t3);                                              \
    tcg_op(t1, t1, t2);                                                 \
    tcg_temp_free_i32(t2);                                              \
    tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1);           \
    tcg_temp_free_i32(t0);                                              \
    tcg_temp_free_i32(t1);                                              \
}
#else
#define GEN_SPEOP_ARITH2(name, tcg_op)                                  \
static always_inline void gen_##name (DisasContext *ctx)                \
{                                                                       \
    if (unlikely(!ctx->spe_enabled)) {                                  \
        gen_exception(ctx, POWERPC_EXCP_APU);                           \
        return;                                                         \
    }                                                                   \
    tcg_op(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],          \
           cpu_gpr[rB(ctx->opcode)]);                                   \
    tcg_op(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)],        \
           cpu_gprh[rB(ctx->opcode)]);                                  \
}
#endif
|
6700 |
|
6701 static always_inline void gen_op_evsrwu (TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2) |
|
6702 { |
|
6703 TCGv_i32 t0; |
|
6704 int l1, l2; |
|
6705 |
|
6706 l1 = gen_new_label(); |
|
6707 l2 = gen_new_label(); |
|
6708 t0 = tcg_temp_local_new_i32(); |
|
6709 /* No error here: 6 bits are used */ |
|
6710 tcg_gen_andi_i32(t0, arg2, 0x3F); |
|
6711 tcg_gen_brcondi_i32(TCG_COND_GE, t0, 32, l1); |
|
6712 tcg_gen_shr_i32(ret, arg1, t0); |
|
6713 tcg_gen_br(l2); |
|
6714 gen_set_label(l1); |
|
6715 tcg_gen_movi_i32(ret, 0); |
|
6716 tcg_gen_br(l2); |
|
6717 tcg_temp_free_i32(t0); |
|
6718 } |
|
6719 GEN_SPEOP_ARITH2(evsrwu, gen_op_evsrwu); |
|
6720 static always_inline void gen_op_evsrws (TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2) |
|
6721 { |
|
6722 TCGv_i32 t0; |
|
6723 int l1, l2; |
|
6724 |
|
6725 l1 = gen_new_label(); |
|
6726 l2 = gen_new_label(); |
|
6727 t0 = tcg_temp_local_new_i32(); |
|
6728 /* No error here: 6 bits are used */ |
|
6729 tcg_gen_andi_i32(t0, arg2, 0x3F); |
|
6730 tcg_gen_brcondi_i32(TCG_COND_GE, t0, 32, l1); |
|
6731 tcg_gen_sar_i32(ret, arg1, t0); |
|
6732 tcg_gen_br(l2); |
|
6733 gen_set_label(l1); |
|
6734 tcg_gen_movi_i32(ret, 0); |
|
6735 tcg_gen_br(l2); |
|
6736 tcg_temp_free_i32(t0); |
|
6737 } |
|
6738 GEN_SPEOP_ARITH2(evsrws, gen_op_evsrws); |
|
6739 static always_inline void gen_op_evslw (TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2) |
|
6740 { |
|
6741 TCGv_i32 t0; |
|
6742 int l1, l2; |
|
6743 |
|
6744 l1 = gen_new_label(); |
|
6745 l2 = gen_new_label(); |
|
6746 t0 = tcg_temp_local_new_i32(); |
|
6747 /* No error here: 6 bits are used */ |
|
6748 tcg_gen_andi_i32(t0, arg2, 0x3F); |
|
6749 tcg_gen_brcondi_i32(TCG_COND_GE, t0, 32, l1); |
|
6750 tcg_gen_shl_i32(ret, arg1, t0); |
|
6751 tcg_gen_br(l2); |
|
6752 gen_set_label(l1); |
|
6753 tcg_gen_movi_i32(ret, 0); |
|
6754 tcg_gen_br(l2); |
|
6755 tcg_temp_free_i32(t0); |
|
6756 } |
|
6757 GEN_SPEOP_ARITH2(evslw, gen_op_evslw); |
|
6758 static always_inline void gen_op_evrlw (TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2) |
|
6759 { |
|
6760 TCGv_i32 t0 = tcg_temp_new_i32(); |
|
6761 tcg_gen_andi_i32(t0, arg2, 0x1F); |
|
6762 tcg_gen_rotl_i32(ret, arg1, t0); |
|
6763 tcg_temp_free_i32(t0); |
|
6764 } |
|
6765 GEN_SPEOP_ARITH2(evrlw, gen_op_evrlw); |
|
6766 static always_inline void gen_evmergehi (DisasContext *ctx) |
|
6767 { |
|
6768 if (unlikely(!ctx->spe_enabled)) { |
|
6769 gen_exception(ctx, POWERPC_EXCP_APU); |
|
6770 return; |
|
6771 } |
|
6772 #if defined(TARGET_PPC64) |
|
6773 TCGv t0 = tcg_temp_new(); |
|
6774 TCGv t1 = tcg_temp_new(); |
|
6775 tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 32); |
|
6776 tcg_gen_andi_tl(t1, cpu_gpr[rA(ctx->opcode)], 0xFFFFFFFF0000000ULL); |
|
6777 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t0, t1); |
|
6778 tcg_temp_free(t0); |
|
6779 tcg_temp_free(t1); |
|
6780 #else |
|
6781 tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], cpu_gprh[rB(ctx->opcode)]); |
|
6782 tcg_gen_mov_i32(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]); |
|
6783 #endif |
|
6784 } |
|
GEN_SPEOP_ARITH2(evaddw, tcg_gen_add_i32);
/* Per-word subtract-from: ret = arg2 - arg1 (operand order is reversed,
 * matching the "subtract from" semantics of evsubfw). */
static always_inline void gen_op_evsubf (TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    tcg_gen_sub_i32(ret, arg2, arg1);
}
GEN_SPEOP_ARITH2(evsubfw, gen_op_evsubf);
|
6791 |
|
6792 /* SPE arithmetic immediate */ |
|
/* SPE arithmetic immediate */
/* SPE arithmetic with immediate: apply tcg_op with the rA field as the
 * immediate to both 32-bit halves of rB, result in rD.  Note the operand
 * roles: rB is the source register, rA holds the 5-bit immediate. */
#if defined(TARGET_PPC64)
#define GEN_SPEOP_ARITH_IMM2(name, tcg_op)                              \
static always_inline void gen_##name (DisasContext *ctx)                \
{                                                                       \
    if (unlikely(!ctx->spe_enabled)) {                                  \
        gen_exception(ctx, POWERPC_EXCP_APU);                           \
        return;                                                         \
    }                                                                   \
    TCGv_i32 t0 = tcg_temp_local_new_i32();                             \
    TCGv_i32 t1 = tcg_temp_local_new_i32();                             \
    TCGv_i64 t2 = tcg_temp_local_new_i64();                             \
    tcg_gen_trunc_i64_i32(t0, cpu_gpr[rB(ctx->opcode)]);                \
    tcg_op(t0, t0, rA(ctx->opcode));                                    \
    tcg_gen_shri_i64(t2, cpu_gpr[rB(ctx->opcode)], 32);                 \
    tcg_gen_trunc_i64_i32(t1, t2);                                      \
    tcg_temp_free_i64(t2);                                              \
    tcg_op(t1, t1, rA(ctx->opcode));                                    \
    tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1);           \
    tcg_temp_free_i32(t0);                                              \
    tcg_temp_free_i32(t1);                                              \
}
#else
#define GEN_SPEOP_ARITH_IMM2(name, tcg_op)                              \
static always_inline void gen_##name (DisasContext *ctx)                \
{                                                                       \
    if (unlikely(!ctx->spe_enabled)) {                                  \
        gen_exception(ctx, POWERPC_EXCP_APU);                           \
        return;                                                         \
    }                                                                   \
    tcg_op(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],          \
           rA(ctx->opcode));                                            \
    tcg_op(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rB(ctx->opcode)],        \
           rA(ctx->opcode));                                            \
}
#endif
GEN_SPEOP_ARITH_IMM2(evaddiw, tcg_gen_addi_i32);
GEN_SPEOP_ARITH_IMM2(evsubifw, tcg_gen_subi_i32);
|
6830 |
|
6831 /* SPE comparison */ |
|
/* SPE comparison */
/* SPE element-wise compare: compares the low halves of rA/rB, then the high
 * halves, and builds the CR field crfD from the two outcomes: the low-half
 * result sets CRF_CL (plus the OR/AND summary bits), the high-half result
 * then sets/clears CRF_CH and updates the summary bits accordingly. */
#if defined(TARGET_PPC64)
#define GEN_SPEOP_COMP(name, tcg_cond)                                  \
static always_inline void gen_##name (DisasContext *ctx)                \
{                                                                       \
    if (unlikely(!ctx->spe_enabled)) {                                  \
        gen_exception(ctx, POWERPC_EXCP_APU);                           \
        return;                                                         \
    }                                                                   \
    int l1 = gen_new_label();                                           \
    int l2 = gen_new_label();                                           \
    int l3 = gen_new_label();                                           \
    int l4 = gen_new_label();                                           \
    TCGv_i32 t0 = tcg_temp_local_new_i32();                             \
    TCGv_i32 t1 = tcg_temp_local_new_i32();                             \
    TCGv_i64 t2 = tcg_temp_local_new_i64();                             \
    tcg_gen_trunc_i64_i32(t0, cpu_gpr[rA(ctx->opcode)]);                \
    tcg_gen_trunc_i64_i32(t1, cpu_gpr[rB(ctx->opcode)]);                \
    tcg_gen_brcond_i32(tcg_cond, t0, t1, l1);                           \
    tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)], 0);                    \
    tcg_gen_br(l2);                                                     \
    gen_set_label(l1);                                                  \
    tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)],                        \
                     CRF_CL | CRF_CH_OR_CL | CRF_CH_AND_CL);            \
    gen_set_label(l2);                                                  \
    tcg_gen_shri_i64(t2, cpu_gpr[rA(ctx->opcode)], 32);                 \
    tcg_gen_trunc_i64_i32(t0, t2);                                      \
    tcg_gen_shri_i64(t2, cpu_gpr[rB(ctx->opcode)], 32);                 \
    tcg_gen_trunc_i64_i32(t1, t2);                                      \
    tcg_temp_free_i64(t2);                                              \
    tcg_gen_brcond_i32(tcg_cond, t0, t1, l3);                           \
    tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], \
                     ~(CRF_CH | CRF_CH_AND_CL));                        \
    tcg_gen_br(l4);                                                     \
    gen_set_label(l3);                                                  \
    tcg_gen_ori_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], \
                    CRF_CH | CRF_CH_OR_CL);                             \
    gen_set_label(l4);                                                  \
    tcg_temp_free_i32(t0);                                              \
    tcg_temp_free_i32(t1);                                              \
}
#else
#define GEN_SPEOP_COMP(name, tcg_cond)                                  \
static always_inline void gen_##name (DisasContext *ctx)                \
{                                                                       \
    if (unlikely(!ctx->spe_enabled)) {                                  \
        gen_exception(ctx, POWERPC_EXCP_APU);                           \
        return;                                                         \
    }                                                                   \
    int l1 = gen_new_label();                                           \
    int l2 = gen_new_label();                                           \
    int l3 = gen_new_label();                                           \
    int l4 = gen_new_label();                                           \
                                                                        \
    tcg_gen_brcond_i32(tcg_cond, cpu_gpr[rA(ctx->opcode)],              \
                       cpu_gpr[rB(ctx->opcode)], l1);                   \
    /* NOTE(review): uses tcg_gen_movi_tl where the PPC64 variant uses  \
       tcg_gen_movi_i32; equivalent here since this branch is 32-bit    \
       only (tl == i32), but inconsistent — confirm before unifying. */ \
    tcg_gen_movi_tl(cpu_crf[crfD(ctx->opcode)], 0);                     \
    tcg_gen_br(l2);                                                     \
    gen_set_label(l1);                                                  \
    tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)],                        \
                     CRF_CL | CRF_CH_OR_CL | CRF_CH_AND_CL);            \
    gen_set_label(l2);                                                  \
    tcg_gen_brcond_i32(tcg_cond, cpu_gprh[rA(ctx->opcode)],             \
                       cpu_gprh[rB(ctx->opcode)], l3);                  \
    tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], \
                     ~(CRF_CH | CRF_CH_AND_CL));                        \
    tcg_gen_br(l4);                                                     \
    gen_set_label(l3);                                                  \
    tcg_gen_ori_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], \
                    CRF_CH | CRF_CH_OR_CL);                             \
    gen_set_label(l4);                                                  \
}
#endif
GEN_SPEOP_COMP(evcmpgtu, TCG_COND_GTU);
GEN_SPEOP_COMP(evcmpgts, TCG_COND_GT);
GEN_SPEOP_COMP(evcmpltu, TCG_COND_LTU);
GEN_SPEOP_COMP(evcmplts, TCG_COND_LT);
GEN_SPEOP_COMP(evcmpeq, TCG_COND_EQ);
|
6909 |
|
6910 /* SPE misc */ |
|
/* SPE misc */
/* brinc: bit-reversed increment, delegated entirely to the helper. */
static always_inline void gen_brinc (DisasContext *ctx)
{
    /* Note: brinc is usable even if SPE is disabled */
    gen_helper_brinc(cpu_gpr[rD(ctx->opcode)],
                     cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
}
/* evmergelo: rD.high = rA.low, rD.low = rB.low. */
static always_inline void gen_evmergelo (DisasContext *ctx)
{
    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_APU);
        return;
    }
#if defined(TARGET_PPC64)
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x00000000FFFFFFFFLL);
    tcg_gen_shli_tl(t1, cpu_gpr[rA(ctx->opcode)], 32);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
#else
    tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_mov_i32(cpu_gprh[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
#endif
}
|
6936 static always_inline void gen_evmergehilo (DisasContext *ctx) |
|
6937 { |
|
6938 if (unlikely(!ctx->spe_enabled)) { |
|
6939 gen_exception(ctx, POWERPC_EXCP_APU); |
|
6940 return; |
|
6941 } |
|
6942 #if defined(TARGET_PPC64) |
|
6943 TCGv t0 = tcg_temp_new(); |
|
6944 TCGv t1 = tcg_temp_new(); |
|
6945 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x00000000FFFFFFFFLL); |
|
6946 tcg_gen_andi_tl(t1, cpu_gpr[rA(ctx->opcode)], 0xFFFFFFFF0000000ULL); |
|
6947 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t0, t1); |
|
6948 tcg_temp_free(t0); |
|
6949 tcg_temp_free(t1); |
|
6950 #else |
|
6951 tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); |
|
6952 tcg_gen_mov_i32(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]); |
|
6953 #endif |
|
6954 } |
|
/* evmergelohi: rD.high = rA.low, rD.low = rB.high. */
static always_inline void gen_evmergelohi (DisasContext *ctx)
{
    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_APU);
        return;
    }
#if defined(TARGET_PPC64)
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 32);
    tcg_gen_shli_tl(t1, cpu_gpr[rA(ctx->opcode)], 32);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
#else
    tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], cpu_gprh[rB(ctx->opcode)]);
    tcg_gen_mov_i32(cpu_gprh[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
#endif
}
|
6974 static always_inline void gen_evsplati (DisasContext *ctx) |
|
6975 { |
|
6976 uint64_t imm = ((int32_t)(rA(ctx->opcode) << 11)) >> 27; |
|
6977 |
|
6978 #if defined(TARGET_PPC64) |
|
6979 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], (imm << 32) | imm); |
|
6980 #else |
|
6981 tcg_gen_movi_i32(cpu_gpr[rD(ctx->opcode)], imm); |
|
6982 tcg_gen_movi_i32(cpu_gprh[rD(ctx->opcode)], imm); |
|
6983 #endif |
|
6984 } |
|
6985 static always_inline void gen_evsplatfi (DisasContext *ctx) |
|
6986 { |
|
6987 uint64_t imm = rA(ctx->opcode) << 11; |
|
6988 |
|
6989 #if defined(TARGET_PPC64) |
|
6990 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], (imm << 32) | imm); |
|
6991 #else |
|
6992 tcg_gen_movi_i32(cpu_gpr[rD(ctx->opcode)], imm); |
|
6993 tcg_gen_movi_i32(cpu_gprh[rD(ctx->opcode)], imm); |
|
6994 #endif |
|
6995 } |
|
6996 |
|
/* evsel: select each 32-bit half of rD from rA or rB depending on two bits
 * of the CR field named by the low 3 opcode bits (bit 3 selects the high
 * half source, bit 2 the low half).  On PPC64 the halves are masked out of
 * the 64-bit GPRs into t1/t2 and OR-combined at the end. */
static always_inline void gen_evsel (DisasContext *ctx)
{
    int l1 = gen_new_label();
    int l2 = gen_new_label();
    int l3 = gen_new_label();
    int l4 = gen_new_label();
    TCGv_i32 t0 = tcg_temp_local_new_i32();
#if defined(TARGET_PPC64)
    TCGv t1 = tcg_temp_local_new();
    TCGv t2 = tcg_temp_local_new();
#endif
    /* high half: take rA if CR bit 3 is set, else rB */
    tcg_gen_andi_i32(t0, cpu_crf[ctx->opcode & 0x07], 1 << 3);
    tcg_gen_brcondi_i32(TCG_COND_EQ, t0, 0, l1);
#if defined(TARGET_PPC64)
    tcg_gen_andi_tl(t1, cpu_gpr[rA(ctx->opcode)], 0xFFFFFFFF00000000ULL);
#else
    tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]);
#endif
    tcg_gen_br(l2);
    gen_set_label(l1);
#if defined(TARGET_PPC64)
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0xFFFFFFFF00000000ULL);
#else
    tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rB(ctx->opcode)]);
#endif
    gen_set_label(l2);
    /* low half: take rA if CR bit 2 is set, else rB */
    tcg_gen_andi_i32(t0, cpu_crf[ctx->opcode & 0x07], 1 << 2);
    tcg_gen_brcondi_i32(TCG_COND_EQ, t0, 0, l3);
#if defined(TARGET_PPC64)
    tcg_gen_andi_tl(t2, cpu_gpr[rA(ctx->opcode)], 0x00000000FFFFFFFFULL);
#else
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
#endif
    tcg_gen_br(l4);
    gen_set_label(l3);
#if defined(TARGET_PPC64)
    tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x00000000FFFFFFFFULL);
#else
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
#endif
    gen_set_label(l4);
    tcg_temp_free_i32(t0);
#if defined(TARGET_PPC64)
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t1, t2);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
#endif
}
/* evsel occupies four opc2 slots (0x1c-0x1f) because the CR-field selector
 * overlaps the decode bits; all four dispatch to the same implementation. */
GEN_HANDLER2(evsel0, "evsel", 0x04, 0x1c, 0x09, 0x00000000, PPC_SPE)
{
    gen_evsel(ctx);
}
GEN_HANDLER2(evsel1, "evsel", 0x04, 0x1d, 0x09, 0x00000000, PPC_SPE)
{
    gen_evsel(ctx);
}
GEN_HANDLER2(evsel2, "evsel", 0x04, 0x1e, 0x09, 0x00000000, PPC_SPE)
{
    gen_evsel(ctx);
}
GEN_HANDLER2(evsel3, "evsel", 0x04, 0x1f, 0x09, 0x00000000, PPC_SPE)
{
    gen_evsel(ctx);
}
|
7061 |
|
/* SPE dispatch table: each GEN_SPE entry registers a handler for an
 * (opc2, opc3) slot of primary opcode 4; the Rc bit selects between the
 * two named implementations (speundef marks an unallocated encoding). */
GEN_SPE(evaddw,         speundef,      0x00, 0x08, 0x00000000, PPC_SPE); ////
GEN_SPE(evaddiw,        speundef,      0x01, 0x08, 0x00000000, PPC_SPE);
GEN_SPE(evsubfw,        speundef,      0x02, 0x08, 0x00000000, PPC_SPE); ////
GEN_SPE(evsubifw,       speundef,      0x03, 0x08, 0x00000000, PPC_SPE);
GEN_SPE(evabs,          evneg,         0x04, 0x08, 0x0000F800, PPC_SPE); ////
GEN_SPE(evextsb,        evextsh,       0x05, 0x08, 0x0000F800, PPC_SPE); ////
GEN_SPE(evrndw,         evcntlzw,      0x06, 0x08, 0x0000F800, PPC_SPE); ////
GEN_SPE(evcntlsw,       brinc,         0x07, 0x08, 0x00000000, PPC_SPE); //
GEN_SPE(speundef,       evand,         0x08, 0x08, 0x00000000, PPC_SPE); ////
GEN_SPE(evandc,         speundef,      0x09, 0x08, 0x00000000, PPC_SPE); ////
GEN_SPE(evxor,          evor,          0x0B, 0x08, 0x00000000, PPC_SPE); ////
GEN_SPE(evnor,          eveqv,         0x0C, 0x08, 0x00000000, PPC_SPE); ////
GEN_SPE(speundef,       evorc,         0x0D, 0x08, 0x00000000, PPC_SPE); ////
GEN_SPE(evnand,         speundef,      0x0F, 0x08, 0x00000000, PPC_SPE); ////
GEN_SPE(evsrwu,         evsrws,        0x10, 0x08, 0x00000000, PPC_SPE); ////
GEN_SPE(evsrwiu,        evsrwis,       0x11, 0x08, 0x00000000, PPC_SPE);
GEN_SPE(evslw,          speundef,      0x12, 0x08, 0x00000000, PPC_SPE); ////
GEN_SPE(evslwi,         speundef,      0x13, 0x08, 0x00000000, PPC_SPE);
GEN_SPE(evrlw,          evsplati,      0x14, 0x08, 0x00000000, PPC_SPE); //
GEN_SPE(evrlwi,         evsplatfi,     0x15, 0x08, 0x00000000, PPC_SPE);
GEN_SPE(evmergehi,      evmergelo,     0x16, 0x08, 0x00000000, PPC_SPE); ////
GEN_SPE(evmergehilo,    evmergelohi,   0x17, 0x08, 0x00000000, PPC_SPE); ////
GEN_SPE(evcmpgtu,       evcmpgts,      0x18, 0x08, 0x00600000, PPC_SPE); ////
GEN_SPE(evcmpltu,       evcmplts,      0x19, 0x08, 0x00600000, PPC_SPE); ////
GEN_SPE(evcmpeq,        speundef,      0x1A, 0x08, 0x00600000, PPC_SPE); ////
|
7087 |
|
7088 /* SPE load and stores */ |
|
7089 static always_inline void gen_addr_spe_imm_index (DisasContext *ctx, TCGv EA, int sh) |
|
7090 { |
|
7091 target_ulong uimm = rB(ctx->opcode); |
|
7092 |
|
7093 if (rA(ctx->opcode) == 0) { |
|
7094 tcg_gen_movi_tl(EA, uimm << sh); |
|
7095 } else { |
|
7096 tcg_gen_addi_tl(EA, cpu_gpr[rA(ctx->opcode)], uimm << sh); |
|
7097 #if defined(TARGET_PPC64) |
|
7098 if (!ctx->sf_mode) { |
|
7099 tcg_gen_ext32u_tl(EA, EA); |
|
7100 } |
|
7101 #endif |
|
7102 } |
|
7103 } |
|
7104 |
|
7105 static always_inline void gen_op_evldd(DisasContext *ctx, TCGv addr) |
|
7106 { |
|
7107 #if defined(TARGET_PPC64) |
|
7108 gen_qemu_ld64(ctx, cpu_gpr[rD(ctx->opcode)], addr); |
|
7109 #else |
|
7110 TCGv_i64 t0 = tcg_temp_new_i64(); |
|
7111 gen_qemu_ld64(ctx, t0, addr); |
|
7112 tcg_gen_trunc_i64_i32(cpu_gpr[rD(ctx->opcode)], t0); |
|
7113 tcg_gen_shri_i64(t0, t0, 32); |
|
7114 tcg_gen_trunc_i64_i32(cpu_gprh[rD(ctx->opcode)], t0); |
|
7115 tcg_temp_free_i64(t0); |
|
7116 #endif |
|
7117 } |
|
7118 |
|
7119 static always_inline void gen_op_evldw(DisasContext *ctx, TCGv addr) |
|
7120 { |
|
7121 #if defined(TARGET_PPC64) |
|
7122 TCGv t0 = tcg_temp_new(); |
|
7123 gen_qemu_ld32u(ctx, t0, addr); |
|
7124 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 32); |
|
7125 gen_addr_add(ctx, addr, addr, 4); |
|
7126 gen_qemu_ld32u(ctx, t0, addr); |
|
7127 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0); |
|
7128 tcg_temp_free(t0); |
|
7129 #else |
|
7130 gen_qemu_ld32u(ctx, cpu_gprh[rD(ctx->opcode)], addr); |
|
7131 gen_addr_add(ctx, addr, addr, 4); |
|
7132 gen_qemu_ld32u(ctx, cpu_gpr[rD(ctx->opcode)], addr); |
|
7133 #endif |
|
7134 } |
|
7135 |
|
7136 static always_inline void gen_op_evldh(DisasContext *ctx, TCGv addr) |
|
7137 { |
|
7138 TCGv t0 = tcg_temp_new(); |
|
7139 #if defined(TARGET_PPC64) |
|
7140 gen_qemu_ld16u(ctx, t0, addr); |
|
7141 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 48); |
|
7142 gen_addr_add(ctx, addr, addr, 2); |
|
7143 gen_qemu_ld16u(ctx, t0, addr); |
|
7144 tcg_gen_shli_tl(t0, t0, 32); |
|
7145 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0); |
|
7146 gen_addr_add(ctx, addr, addr, 2); |
|
7147 gen_qemu_ld16u(ctx, t0, addr); |
|
7148 tcg_gen_shli_tl(t0, t0, 16); |
|
7149 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0); |
|
7150 gen_addr_add(ctx, addr, addr, 2); |
|
7151 gen_qemu_ld16u(ctx, t0, addr); |
|
7152 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0); |
|
7153 #else |
|
7154 gen_qemu_ld16u(ctx, t0, addr); |
|
7155 tcg_gen_shli_tl(cpu_gprh[rD(ctx->opcode)], t0, 16); |
|
7156 gen_addr_add(ctx, addr, addr, 2); |
|
7157 gen_qemu_ld16u(ctx, t0, addr); |
|
7158 tcg_gen_or_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rD(ctx->opcode)], t0); |
|
7159 gen_addr_add(ctx, addr, addr, 2); |
|
7160 gen_qemu_ld16u(ctx, t0, addr); |
|
7161 tcg_gen_shli_tl(cpu_gprh[rD(ctx->opcode)], t0, 16); |
|
7162 gen_addr_add(ctx, addr, addr, 2); |
|
7163 gen_qemu_ld16u(ctx, t0, addr); |
|
7164 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0); |
|
7165 #endif |
|
7166 tcg_temp_free(t0); |
|
7167 } |
|
7168 |
|
/* evlhhesplat: load one halfword and splat it into the EVEN (upper)
 * halfword position of both 32-bit elements; odd halves become zero. */
static always_inline void gen_op_evlhhesplat(DisasContext *ctx, TCGv addr)
{
    TCGv t0 = tcg_temp_new();
    gen_qemu_ld16u(ctx, t0, addr);
#if defined(TARGET_PPC64)
    tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 48);  /* bits 63..48 */
    tcg_gen_shli_tl(t0, t0, 16);                        /* bits 31..16 */
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
#else
    tcg_gen_shli_tl(t0, t0, 16);
    tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], t0);
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
#endif
    tcg_temp_free(t0);
}
|
7184 |
|
/* evlhhousplat: load one halfword, zero-extend, splat it into both
 * 32-bit elements (odd/low halfword position; upper halves zero). */
static always_inline void gen_op_evlhhousplat(DisasContext *ctx, TCGv addr)
{
    TCGv t0 = tcg_temp_new();
    gen_qemu_ld16u(ctx, t0, addr);
#if defined(TARGET_PPC64)
    tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 32);  /* high element */
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
#else
    tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], t0);
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
#endif
    tcg_temp_free(t0);
}
|
7198 |
|
/* evlhhossplat: load one halfword, sign-extend to 32 bits, splat the
 * result into both 32-bit elements. */
static always_inline void gen_op_evlhhossplat(DisasContext *ctx, TCGv addr)
{
    TCGv t0 = tcg_temp_new();
    gen_qemu_ld16s(ctx, t0, addr);
#if defined(TARGET_PPC64)
    /* Shift carries the sign extension into bits 63..32 for the high
     * element; clip t0 to 32 bits before ORing in the low element so the
     * sign bits don't corrupt the high one. */
    tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 32);
    tcg_gen_ext32u_tl(t0, t0);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
#else
    tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], t0);
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
#endif
    tcg_temp_free(t0);
}
|
7213 |
|
/* evlwhe: load two halfwords into the EVEN (upper) halfword position of
 * each 32-bit element; the lower halves are zeroed by the shifts. */
static always_inline void gen_op_evlwhe(DisasContext *ctx, TCGv addr)
{
    TCGv t0 = tcg_temp_new();
#if defined(TARGET_PPC64)
    gen_qemu_ld16u(ctx, t0, addr);
    tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 48);  /* h0 -> bits 63..48 */
    gen_addr_add(ctx, addr, addr, 2);
    gen_qemu_ld16u(ctx, t0, addr);
    tcg_gen_shli_tl(t0, t0, 16);                        /* h1 -> bits 31..16 */
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
#else
    gen_qemu_ld16u(ctx, t0, addr);
    tcg_gen_shli_tl(cpu_gprh[rD(ctx->opcode)], t0, 16);
    gen_addr_add(ctx, addr, addr, 2);
    gen_qemu_ld16u(ctx, t0, addr);
    tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 16);
#endif
    tcg_temp_free(t0);
}
|
7233 |
|
7234 static always_inline void gen_op_evlwhou(DisasContext *ctx, TCGv addr) |
|
7235 { |
|
7236 #if defined(TARGET_PPC64) |
|
7237 TCGv t0 = tcg_temp_new(); |
|
7238 gen_qemu_ld16u(ctx, cpu_gpr[rD(ctx->opcode)], addr); |
|
7239 gen_addr_add(ctx, addr, addr, 2); |
|
7240 gen_qemu_ld16u(ctx, t0, addr); |
|
7241 tcg_gen_shli_tl(t0, t0, 32); |
|
7242 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0); |
|
7243 tcg_temp_free(t0); |
|
7244 #else |
|
7245 gen_qemu_ld16u(ctx, cpu_gprh[rD(ctx->opcode)], addr); |
|
7246 gen_addr_add(ctx, addr, addr, 2); |
|
7247 gen_qemu_ld16u(ctx, cpu_gpr[rD(ctx->opcode)], addr); |
|
7248 #endif |
|
7249 } |
|
7250 |
|
7251 static always_inline void gen_op_evlwhos(DisasContext *ctx, TCGv addr) |
|
7252 { |
|
7253 #if defined(TARGET_PPC64) |
|
7254 TCGv t0 = tcg_temp_new(); |
|
7255 gen_qemu_ld16s(ctx, t0, addr); |
|
7256 tcg_gen_ext32u_tl(cpu_gpr[rD(ctx->opcode)], t0); |
|
7257 gen_addr_add(ctx, addr, addr, 2); |
|
7258 gen_qemu_ld16s(ctx, t0, addr); |
|
7259 tcg_gen_shli_tl(t0, t0, 32); |
|
7260 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0); |
|
7261 tcg_temp_free(t0); |
|
7262 #else |
|
7263 gen_qemu_ld16s(ctx, cpu_gprh[rD(ctx->opcode)], addr); |
|
7264 gen_addr_add(ctx, addr, addr, 2); |
|
7265 gen_qemu_ld16s(ctx, cpu_gpr[rD(ctx->opcode)], addr); |
|
7266 #endif |
|
7267 } |
|
7268 |
|
/* evlwwsplat: load one 32-bit word and splat it into both elements. */
static always_inline void gen_op_evlwwsplat(DisasContext *ctx, TCGv addr)
{
    TCGv t0 = tcg_temp_new();
    gen_qemu_ld32u(ctx, t0, addr);
#if defined(TARGET_PPC64)
    tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 32);  /* high element */
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
#else
    tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], t0);
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
#endif
    tcg_temp_free(t0);
}
|
7282 |
|
7283 static always_inline void gen_op_evlwhsplat(DisasContext *ctx, TCGv addr) |
|
7284 { |
|
7285 TCGv t0 = tcg_temp_new(); |
|
7286 #if defined(TARGET_PPC64) |
|
7287 gen_qemu_ld16u(ctx, t0, addr); |
|
7288 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 48); |
|
7289 tcg_gen_shli_tl(t0, t0, 32); |
|
7290 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0); |
|
7291 gen_addr_add(ctx, addr, addr, 2); |
|
7292 gen_qemu_ld16u(ctx, t0, addr); |
|
7293 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0); |
|
7294 tcg_gen_shli_tl(t0, t0, 16); |
|
7295 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0); |
|
7296 #else |
|
7297 gen_qemu_ld16u(ctx, t0, addr); |
|
7298 tcg_gen_shli_tl(cpu_gprh[rD(ctx->opcode)], t0, 16); |
|
7299 tcg_gen_or_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rD(ctx->opcode)], t0); |
|
7300 gen_addr_add(ctx, addr, addr, 2); |
|
7301 gen_qemu_ld16u(ctx, t0, addr); |
|
7302 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 16); |
|
7303 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gprh[rD(ctx->opcode)], t0); |
|
7304 #endif |
|
7305 tcg_temp_free(t0); |
|
7306 } |
|
7307 |
|
/* evstdd: store the full 64-bit SPE register rS as one doubleword. */
static always_inline void gen_op_evstdd(DisasContext *ctx, TCGv addr)
{
#if defined(TARGET_PPC64)
    gen_qemu_st64(ctx, cpu_gpr[rS(ctx->opcode)], addr);
#else
    /* Reassemble the 64-bit value from the gprh:gpr pair (gprh = high). */
    TCGv_i64 t0 = tcg_temp_new_i64();
    tcg_gen_concat_i32_i64(t0, cpu_gpr[rS(ctx->opcode)], cpu_gprh[rS(ctx->opcode)]);
    gen_qemu_st64(ctx, t0, addr);
    tcg_temp_free_i64(t0);
#endif
}
|
7319 |
|
/* evstdw: store both 32-bit elements, high element first. */
static always_inline void gen_op_evstdw(DisasContext *ctx, TCGv addr)
{
#if defined(TARGET_PPC64)
    TCGv t0 = tcg_temp_new();
    tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 32);  /* high element */
    gen_qemu_st32(ctx, t0, addr);
    tcg_temp_free(t0);
#else
    gen_qemu_st32(ctx, cpu_gprh[rS(ctx->opcode)], addr);
#endif
    /* Low element is stored the same way on both target widths. */
    gen_addr_add(ctx, addr, addr, 4);
    gen_qemu_st32(ctx, cpu_gpr[rS(ctx->opcode)], addr);
}
|
7333 |
|
/* evstdh: store the four halfword lanes of rS, most significant first. */
static always_inline void gen_op_evstdh(DisasContext *ctx, TCGv addr)
{
    TCGv t0 = tcg_temp_new();
    /* Halfword 0: top of the high element. */
#if defined(TARGET_PPC64)
    tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 48);
#else
    tcg_gen_shri_tl(t0, cpu_gprh[rS(ctx->opcode)], 16);
#endif
    gen_qemu_st16(ctx, t0, addr);
    gen_addr_add(ctx, addr, addr, 2);
    /* Halfword 1: bottom of the high element (st16 keeps the low 16 bits). */
#if defined(TARGET_PPC64)
    tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 32);
    gen_qemu_st16(ctx, t0, addr);
#else
    gen_qemu_st16(ctx, cpu_gprh[rS(ctx->opcode)], addr);
#endif
    gen_addr_add(ctx, addr, addr, 2);
    /* Halfwords 2 and 3: top and bottom of the low element. */
    tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 16);
    gen_qemu_st16(ctx, t0, addr);
    tcg_temp_free(t0);
    gen_addr_add(ctx, addr, addr, 2);
    gen_qemu_st16(ctx, cpu_gpr[rS(ctx->opcode)], addr);
}
|
7357 |
|
/* evstwhe: store the EVEN (upper) halfword of each 32-bit element. */
static always_inline void gen_op_evstwhe(DisasContext *ctx, TCGv addr)
{
    TCGv t0 = tcg_temp_new();
#if defined(TARGET_PPC64)
    tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 48);
#else
    tcg_gen_shri_tl(t0, cpu_gprh[rS(ctx->opcode)], 16);
#endif
    gen_qemu_st16(ctx, t0, addr);
    gen_addr_add(ctx, addr, addr, 2);
    tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 16);
    gen_qemu_st16(ctx, t0, addr);
    tcg_temp_free(t0);
}
|
7372 |
|
/* evstwho: store the ODD (lower) halfword of each 32-bit element. */
static always_inline void gen_op_evstwho(DisasContext *ctx, TCGv addr)
{
#if defined(TARGET_PPC64)
    TCGv t0 = tcg_temp_new();
    tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 32);  /* high element */
    gen_qemu_st16(ctx, t0, addr);
    tcg_temp_free(t0);
#else
    gen_qemu_st16(ctx, cpu_gprh[rS(ctx->opcode)], addr);
#endif
    gen_addr_add(ctx, addr, addr, 2);
    gen_qemu_st16(ctx, cpu_gpr[rS(ctx->opcode)], addr);
}
|
7386 |
|
/* evstwwe: store only the EVEN (high) 32-bit element. */
static always_inline void gen_op_evstwwe(DisasContext *ctx, TCGv addr)
{
#if defined(TARGET_PPC64)
    TCGv t0 = tcg_temp_new();
    tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 32);
    gen_qemu_st32(ctx, t0, addr);
    tcg_temp_free(t0);
#else
    gen_qemu_st32(ctx, cpu_gprh[rS(ctx->opcode)], addr);
#endif
}
|
7398 |
|
/* evstwwo: store only the ODD (low) 32-bit element. */
static always_inline void gen_op_evstwwo(DisasContext *ctx, TCGv addr)
{
    gen_qemu_st32(ctx, cpu_gpr[rS(ctx->opcode)], addr);
}
|
7403 |
|
/* Common wrapper for SPE load/store handlers: checks SPE availability,
 * computes the effective address (immediate-indexed when Rc is set,
 * register-indexed otherwise) and dispatches to gen_op_<name>.
 * 'sh' scales the immediate index (passed to gen_addr_spe_imm_index). */
#define GEN_SPEOP_LDST(name, opc2, sh)                                        \
GEN_HANDLER(name, 0x04, opc2, 0x0C, 0x00000000, PPC_SPE)                      \
{                                                                             \
    TCGv t0;                                                                  \
    if (unlikely(!ctx->spe_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_APU);                                 \
        return;                                                               \
    }                                                                         \
    gen_set_access_type(ctx, ACCESS_INT);                                     \
    t0 = tcg_temp_new();                                                      \
    if (Rc(ctx->opcode)) {                                                    \
        gen_addr_spe_imm_index(ctx, t0, sh);                                  \
    } else {                                                                  \
        gen_addr_reg_index(ctx, t0);                                          \
    }                                                                         \
    gen_op_##name(ctx, t0);                                                   \
    tcg_temp_free(t0);                                                        \
}
|
7422 |
|
/* SPE vector loads (opcode 0x04, xo group 0x0C). */
GEN_SPEOP_LDST(evldd, 0x00, 3);
GEN_SPEOP_LDST(evldw, 0x01, 3);
GEN_SPEOP_LDST(evldh, 0x02, 3);
GEN_SPEOP_LDST(evlhhesplat, 0x04, 1);
GEN_SPEOP_LDST(evlhhousplat, 0x06, 1);
GEN_SPEOP_LDST(evlhhossplat, 0x07, 1);
GEN_SPEOP_LDST(evlwhe, 0x08, 2);
GEN_SPEOP_LDST(evlwhou, 0x0A, 2);
GEN_SPEOP_LDST(evlwhos, 0x0B, 2);
GEN_SPEOP_LDST(evlwwsplat, 0x0C, 2);
GEN_SPEOP_LDST(evlwhsplat, 0x0E, 2);

/* SPE vector stores. */
GEN_SPEOP_LDST(evstdd, 0x10, 3);
GEN_SPEOP_LDST(evstdw, 0x11, 3);
GEN_SPEOP_LDST(evstdh, 0x12, 3);
GEN_SPEOP_LDST(evstwhe, 0x18, 2);
GEN_SPEOP_LDST(evstwho, 0x1A, 2);
GEN_SPEOP_LDST(evstwwe, 0x1C, 2);
GEN_SPEOP_LDST(evstwwo, 0x1E, 2);

/* Multiply and add - TODO */
/* NOTE(review): the following opcode registrations are compiled out
 * pending implementation of the SPE multiply-accumulate helpers. */
#if 0
GEN_SPE(speundef, evmhessf, 0x01, 0x10, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhossf, 0x03, 0x10, 0x00000000, PPC_SPE);
GEN_SPE(evmheumi, evmhesmi, 0x04, 0x10, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhesmf, 0x05, 0x10, 0x00000000, PPC_SPE);
GEN_SPE(evmhoumi, evmhosmi, 0x06, 0x10, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhosmf, 0x07, 0x10, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhessfa, 0x11, 0x10, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhossfa, 0x13, 0x10, 0x00000000, PPC_SPE);
GEN_SPE(evmheumia, evmhesmia, 0x14, 0x10, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhesmfa, 0x15, 0x10, 0x00000000, PPC_SPE);
GEN_SPE(evmhoumia, evmhosmia, 0x16, 0x10, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhosmfa, 0x17, 0x10, 0x00000000, PPC_SPE);

GEN_SPE(speundef, evmwhssf, 0x03, 0x11, 0x00000000, PPC_SPE);
GEN_SPE(evmwlumi, speundef, 0x04, 0x11, 0x00000000, PPC_SPE);
GEN_SPE(evmwhumi, evmwhsmi, 0x06, 0x11, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmwhsmf, 0x07, 0x11, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmwssf, 0x09, 0x11, 0x00000000, PPC_SPE);
GEN_SPE(evmwumi, evmwsmi, 0x0C, 0x11, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmwsmf, 0x0D, 0x11, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmwhssfa, 0x13, 0x11, 0x00000000, PPC_SPE);
GEN_SPE(evmwlumia, speundef, 0x14, 0x11, 0x00000000, PPC_SPE);
GEN_SPE(evmwhumia, evmwhsmia, 0x16, 0x11, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmwhsmfa, 0x17, 0x11, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmwssfa, 0x19, 0x11, 0x00000000, PPC_SPE);
GEN_SPE(evmwumia, evmwsmia, 0x1C, 0x11, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmwsmfa, 0x1D, 0x11, 0x00000000, PPC_SPE);

GEN_SPE(evadduiaaw, evaddsiaaw, 0x00, 0x13, 0x0000F800, PPC_SPE);
GEN_SPE(evsubfusiaaw, evsubfssiaaw, 0x01, 0x13, 0x0000F800, PPC_SPE);
GEN_SPE(evaddumiaaw, evaddsmiaaw, 0x04, 0x13, 0x0000F800, PPC_SPE);
GEN_SPE(evsubfumiaaw, evsubfsmiaaw, 0x05, 0x13, 0x0000F800, PPC_SPE);
GEN_SPE(evdivws, evdivwu, 0x06, 0x13, 0x00000000, PPC_SPE);
GEN_SPE(evmra, speundef, 0x07, 0x13, 0x0000F800, PPC_SPE);

GEN_SPE(evmheusiaaw, evmhessiaaw, 0x00, 0x14, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhessfaaw, 0x01, 0x14, 0x00000000, PPC_SPE);
GEN_SPE(evmhousiaaw, evmhossiaaw, 0x02, 0x14, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhossfaaw, 0x03, 0x14, 0x00000000, PPC_SPE);
GEN_SPE(evmheumiaaw, evmhesmiaaw, 0x04, 0x14, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhesmfaaw, 0x05, 0x14, 0x00000000, PPC_SPE);
GEN_SPE(evmhoumiaaw, evmhosmiaaw, 0x06, 0x14, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhosmfaaw, 0x07, 0x14, 0x00000000, PPC_SPE);
GEN_SPE(evmhegumiaa, evmhegsmiaa, 0x14, 0x14, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhegsmfaa, 0x15, 0x14, 0x00000000, PPC_SPE);
GEN_SPE(evmhogumiaa, evmhogsmiaa, 0x16, 0x14, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhogsmfaa, 0x17, 0x14, 0x00000000, PPC_SPE);

GEN_SPE(evmwlusiaaw, evmwlssiaaw, 0x00, 0x15, 0x00000000, PPC_SPE);
GEN_SPE(evmwlumiaaw, evmwlsmiaaw, 0x04, 0x15, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmwssfaa, 0x09, 0x15, 0x00000000, PPC_SPE);
GEN_SPE(evmwumiaa, evmwsmiaa, 0x0C, 0x15, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmwsmfaa, 0x0D, 0x15, 0x00000000, PPC_SPE);

GEN_SPE(evmheusianw, evmhessianw, 0x00, 0x16, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhessfanw, 0x01, 0x16, 0x00000000, PPC_SPE);
GEN_SPE(evmhousianw, evmhossianw, 0x02, 0x16, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhossfanw, 0x03, 0x16, 0x00000000, PPC_SPE);
GEN_SPE(evmheumianw, evmhesmianw, 0x04, 0x16, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhesmfanw, 0x05, 0x16, 0x00000000, PPC_SPE);
GEN_SPE(evmhoumianw, evmhosmianw, 0x06, 0x16, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhosmfanw, 0x07, 0x16, 0x00000000, PPC_SPE);
GEN_SPE(evmhegumian, evmhegsmian, 0x14, 0x16, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhegsmfan, 0x15, 0x16, 0x00000000, PPC_SPE);
GEN_SPE(evmhigumian, evmhigsmian, 0x16, 0x16, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhogsmfan, 0x17, 0x16, 0x00000000, PPC_SPE);

GEN_SPE(evmwlusianw, evmwlssianw, 0x00, 0x17, 0x00000000, PPC_SPE);
GEN_SPE(evmwlumianw, evmwlsmianw, 0x04, 0x17, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmwssfan, 0x09, 0x17, 0x00000000, PPC_SPE);
GEN_SPE(evmwumian, evmwsmian, 0x0C, 0x17, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmwsmfan, 0x0D, 0x17, 0x00000000, PPC_SPE);
#endif
|
7518 |
|
/*** SPE floating-point extension ***/
/* Macro families generating the SPE FP translators.  Naming encodes the
 * operand widths: CONV_A_B converts a B-bit source to an A-bit result,
 * ARITH2 is a two-source arithmetic op, COMP writes a CR field.
 * On 64-bit targets a 64-bit SPE value lives in one gpr; on 32-bit
 * targets it is split across gprh:gpr and moved through i64 temps via
 * gen_load_gpr64/gen_store_gpr64.
 * NOTE(review): unlike the ARITH2/COMP macros, the CONV macros do not
 * check ctx->spe_enabled before emitting code — confirm this is
 * intentional (it matches both branches of this #if). */
#if defined(TARGET_PPC64)
/* 32->32 conversion: result replaces the low 32 bits of rD, preserving
 * the high 32 bits. */
#define GEN_SPEFPUOP_CONV_32_32(name)                                         \
static always_inline void gen_##name (DisasContext *ctx)                      \
{                                                                             \
    TCGv_i32 t0;                                                              \
    TCGv t1;                                                                  \
    t0 = tcg_temp_new_i32();                                                  \
    tcg_gen_trunc_tl_i32(t0, cpu_gpr[rB(ctx->opcode)]);                       \
    gen_helper_##name(t0, t0);                                                \
    t1 = tcg_temp_new();                                                      \
    tcg_gen_extu_i32_tl(t1, t0);                                              \
    tcg_temp_free_i32(t0);                                                    \
    tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)],       \
                    0xFFFFFFFF00000000ULL);                                   \
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t1);    \
    tcg_temp_free(t1);                                                        \
}
/* 64->32 conversion: 32-bit result merged into the low half of rD. */
#define GEN_SPEFPUOP_CONV_32_64(name)                                         \
static always_inline void gen_##name (DisasContext *ctx)                      \
{                                                                             \
    TCGv_i32 t0;                                                              \
    TCGv t1;                                                                  \
    t0 = tcg_temp_new_i32();                                                  \
    gen_helper_##name(t0, cpu_gpr[rB(ctx->opcode)]);                          \
    t1 = tcg_temp_new();                                                      \
    tcg_gen_extu_i32_tl(t1, t0);                                              \
    tcg_temp_free_i32(t0);                                                    \
    tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)],       \
                    0xFFFFFFFF00000000ULL);                                   \
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t1);    \
    tcg_temp_free(t1);                                                        \
}
/* 32->64 conversion: helper consumes the low 32 bits of rB and writes
 * the full 64-bit rD. */
#define GEN_SPEFPUOP_CONV_64_32(name)                                         \
static always_inline void gen_##name (DisasContext *ctx)                      \
{                                                                             \
    TCGv_i32 t0 = tcg_temp_new_i32();                                         \
    tcg_gen_trunc_tl_i32(t0, cpu_gpr[rB(ctx->opcode)]);                       \
    gen_helper_##name(cpu_gpr[rD(ctx->opcode)], t0);                          \
    tcg_temp_free_i32(t0);                                                    \
}
/* 64->64 conversion: operates directly on the gprs. */
#define GEN_SPEFPUOP_CONV_64_64(name)                                         \
static always_inline void gen_##name (DisasContext *ctx)                      \
{                                                                             \
    gen_helper_##name(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);    \
}
/* 32-bit binary arithmetic: low halves of rA/rB in, low half of rD out
 * (high half of rD preserved).  Note: t2 is declared TCGv_i64 but
 * created with tcg_temp_new(); on TARGET_PPC64 TCGv == TCGv_i64. */
#define GEN_SPEFPUOP_ARITH2_32_32(name)                                       \
static always_inline void gen_##name (DisasContext *ctx)                      \
{                                                                             \
    TCGv_i32 t0, t1;                                                          \
    TCGv_i64 t2;                                                              \
    if (unlikely(!ctx->spe_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_APU);                                 \
        return;                                                               \
    }                                                                         \
    t0 = tcg_temp_new_i32();                                                  \
    t1 = tcg_temp_new_i32();                                                  \
    tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]);                       \
    tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]);                       \
    gen_helper_##name(t0, t0, t1);                                            \
    tcg_temp_free_i32(t1);                                                    \
    t2 = tcg_temp_new();                                                      \
    tcg_gen_extu_i32_tl(t2, t0);                                              \
    tcg_temp_free_i32(t0);                                                    \
    tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)],       \
                    0xFFFFFFFF00000000ULL);                                   \
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t2);    \
    tcg_temp_free(t2);                                                        \
}
/* 64-bit binary arithmetic: whole gprs in and out. */
#define GEN_SPEFPUOP_ARITH2_64_64(name)                                       \
static always_inline void gen_##name (DisasContext *ctx)                      \
{                                                                             \
    if (unlikely(!ctx->spe_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_APU);                                 \
        return;                                                               \
    }                                                                         \
    gen_helper_##name(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],     \
                      cpu_gpr[rB(ctx->opcode)]);                              \
}
/* 32-bit comparison: result written to CR field crfD. */
#define GEN_SPEFPUOP_COMP_32(name)                                            \
static always_inline void gen_##name (DisasContext *ctx)                      \
{                                                                             \
    TCGv_i32 t0, t1;                                                          \
    if (unlikely(!ctx->spe_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_APU);                                 \
        return;                                                               \
    }                                                                         \
    t0 = tcg_temp_new_i32();                                                  \
    t1 = tcg_temp_new_i32();                                                  \
    tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]);                       \
    tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]);                       \
    gen_helper_##name(cpu_crf[crfD(ctx->opcode)], t0, t1);                    \
    tcg_temp_free_i32(t0);                                                    \
    tcg_temp_free_i32(t1);                                                    \
}
/* 64-bit comparison: result written to CR field crfD. */
#define GEN_SPEFPUOP_COMP_64(name)                                            \
static always_inline void gen_##name (DisasContext *ctx)                      \
{                                                                             \
    if (unlikely(!ctx->spe_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_APU);                                 \
        return;                                                               \
    }                                                                         \
    gen_helper_##name(cpu_crf[crfD(ctx->opcode)],                             \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);    \
}
#else
/* 32-bit target variants: 64-bit operands travel through i64 temps
 * assembled from / split back into the gprh:gpr pair. */
#define GEN_SPEFPUOP_CONV_32_32(name)                                         \
static always_inline void gen_##name (DisasContext *ctx)                      \
{                                                                             \
    gen_helper_##name(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);    \
}
#define GEN_SPEFPUOP_CONV_32_64(name)                                         \
static always_inline void gen_##name (DisasContext *ctx)                      \
{                                                                             \
    TCGv_i64 t0 = tcg_temp_new_i64();                                         \
    gen_load_gpr64(t0, rB(ctx->opcode));                                      \
    gen_helper_##name(cpu_gpr[rD(ctx->opcode)], t0);                          \
    tcg_temp_free_i64(t0);                                                    \
}
#define GEN_SPEFPUOP_CONV_64_32(name)                                         \
static always_inline void gen_##name (DisasContext *ctx)                      \
{                                                                             \
    TCGv_i64 t0 = tcg_temp_new_i64();                                         \
    gen_helper_##name(t0, cpu_gpr[rB(ctx->opcode)]);                          \
    gen_store_gpr64(rD(ctx->opcode), t0);                                     \
    tcg_temp_free_i64(t0);                                                    \
}
#define GEN_SPEFPUOP_CONV_64_64(name)                                         \
static always_inline void gen_##name (DisasContext *ctx)                      \
{                                                                             \
    TCGv_i64 t0 = tcg_temp_new_i64();                                         \
    gen_load_gpr64(t0, rB(ctx->opcode));                                      \
    gen_helper_##name(t0, t0);                                                \
    gen_store_gpr64(rD(ctx->opcode), t0);                                     \
    tcg_temp_free_i64(t0);                                                    \
}
#define GEN_SPEFPUOP_ARITH2_32_32(name)                                       \
static always_inline void gen_##name (DisasContext *ctx)                      \
{                                                                             \
    if (unlikely(!ctx->spe_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_APU);                                 \
        return;                                                               \
    }                                                                         \
    gen_helper_##name(cpu_gpr[rD(ctx->opcode)],                               \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);    \
}
#define GEN_SPEFPUOP_ARITH2_64_64(name)                                       \
static always_inline void gen_##name (DisasContext *ctx)                      \
{                                                                             \
    TCGv_i64 t0, t1;                                                          \
    if (unlikely(!ctx->spe_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_APU);                                 \
        return;                                                               \
    }                                                                         \
    t0 = tcg_temp_new_i64();                                                  \
    t1 = tcg_temp_new_i64();                                                  \
    gen_load_gpr64(t0, rA(ctx->opcode));                                      \
    gen_load_gpr64(t1, rB(ctx->opcode));                                      \
    gen_helper_##name(t0, t0, t1);                                            \
    gen_store_gpr64(rD(ctx->opcode), t0);                                     \
    tcg_temp_free_i64(t0);                                                    \
    tcg_temp_free_i64(t1);                                                    \
}
#define GEN_SPEFPUOP_COMP_32(name)                                            \
static always_inline void gen_##name (DisasContext *ctx)                      \
{                                                                             \
    if (unlikely(!ctx->spe_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_APU);                                 \
        return;                                                               \
    }                                                                         \
    gen_helper_##name(cpu_crf[crfD(ctx->opcode)],                             \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);    \
}
#define GEN_SPEFPUOP_COMP_64(name)                                            \
static always_inline void gen_##name (DisasContext *ctx)                      \
{                                                                             \
    TCGv_i64 t0, t1;                                                          \
    if (unlikely(!ctx->spe_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_APU);                                 \
        return;                                                               \
    }                                                                         \
    t0 = tcg_temp_new_i64();                                                  \
    t1 = tcg_temp_new_i64();                                                  \
    gen_load_gpr64(t0, rA(ctx->opcode));                                      \
    gen_load_gpr64(t1, rB(ctx->opcode));                                      \
    gen_helper_##name(cpu_crf[crfD(ctx->opcode)], t0, t1);                    \
    tcg_temp_free_i64(t0);                                                    \
    tcg_temp_free_i64(t1);                                                    \
}
#endif
|
7709 |
|
/* Single precision floating-point vectors operations */
/* Arithmetic */
GEN_SPEFPUOP_ARITH2_64_64(evfsadd);
GEN_SPEFPUOP_ARITH2_64_64(evfssub);
GEN_SPEFPUOP_ARITH2_64_64(evfsmul);
GEN_SPEFPUOP_ARITH2_64_64(evfsdiv);
|
7716 static always_inline void gen_evfsabs (DisasContext *ctx) |
|
7717 { |
|
7718 if (unlikely(!ctx->spe_enabled)) { |
|
7719 gen_exception(ctx, POWERPC_EXCP_APU); |
|
7720 return; |
|
7721 } |
|
7722 #if defined(TARGET_PPC64) |
|
7723 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], ~0x8000000080000000LL); |
|
7724 #else |
|
7725 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], ~0x80000000); |
|
7726 tcg_gen_andi_tl(cpu_gprh[rA(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], ~0x80000000); |
|
7727 #endif |
|
7728 } |
|
7729 static always_inline void gen_evfsnabs (DisasContext *ctx) |
|
7730 { |
|
7731 if (unlikely(!ctx->spe_enabled)) { |
|
7732 gen_exception(ctx, POWERPC_EXCP_APU); |
|
7733 return; |
|
7734 } |
|
7735 #if defined(TARGET_PPC64) |
|
7736 tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x8000000080000000LL); |
|
7737 #else |
|
7738 tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x80000000); |
|
7739 tcg_gen_ori_tl(cpu_gprh[rA(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], 0x80000000); |
|
7740 #endif |
|
7741 } |
|
7742 static always_inline void gen_evfsneg (DisasContext *ctx) |
|
7743 { |
|
7744 if (unlikely(!ctx->spe_enabled)) { |
|
7745 gen_exception(ctx, POWERPC_EXCP_APU); |
|
7746 return; |
|
7747 } |
|
7748 #if defined(TARGET_PPC64) |
|
7749 tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x8000000080000000LL); |
|
7750 #else |
|
7751 tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x80000000); |
|
7752 tcg_gen_xori_tl(cpu_gprh[rA(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], 0x80000000); |
|
7753 #endif |
|
7754 } |
|
7755 |
|
/* Conversion */
GEN_SPEFPUOP_CONV_64_64(evfscfui);
GEN_SPEFPUOP_CONV_64_64(evfscfsi);
GEN_SPEFPUOP_CONV_64_64(evfscfuf);
GEN_SPEFPUOP_CONV_64_64(evfscfsf);
GEN_SPEFPUOP_CONV_64_64(evfsctui);
GEN_SPEFPUOP_CONV_64_64(evfsctsi);
GEN_SPEFPUOP_CONV_64_64(evfsctuf);
GEN_SPEFPUOP_CONV_64_64(evfsctsf);
GEN_SPEFPUOP_CONV_64_64(evfsctuiz);
GEN_SPEFPUOP_CONV_64_64(evfsctsiz);

/* Comparison */
GEN_SPEFPUOP_COMP_64(evfscmpgt);
GEN_SPEFPUOP_COMP_64(evfscmplt);
GEN_SPEFPUOP_COMP_64(evfscmpeq);
GEN_SPEFPUOP_COMP_64(evfststgt);
GEN_SPEFPUOP_COMP_64(evfststlt);
GEN_SPEFPUOP_COMP_64(evfststeq);

/* Opcodes definitions */
/* Register the vector single-precision FP handlers (opcode 0x04). */
GEN_SPE(evfsadd, evfssub, 0x00, 0x0A, 0x00000000, PPC_SPEFPU); //
GEN_SPE(evfsabs, evfsnabs, 0x02, 0x0A, 0x0000F800, PPC_SPEFPU); //
GEN_SPE(evfsneg, speundef, 0x03, 0x0A, 0x0000F800, PPC_SPEFPU); //
GEN_SPE(evfsmul, evfsdiv, 0x04, 0x0A, 0x00000000, PPC_SPEFPU); //
GEN_SPE(evfscmpgt, evfscmplt, 0x06, 0x0A, 0x00600000, PPC_SPEFPU); //
GEN_SPE(evfscmpeq, speundef, 0x07, 0x0A, 0x00600000, PPC_SPEFPU); //
GEN_SPE(evfscfui, evfscfsi, 0x08, 0x0A, 0x00180000, PPC_SPEFPU); //
GEN_SPE(evfscfuf, evfscfsf, 0x09, 0x0A, 0x00180000, PPC_SPEFPU); //
GEN_SPE(evfsctui, evfsctsi, 0x0A, 0x0A, 0x00180000, PPC_SPEFPU); //
GEN_SPE(evfsctuf, evfsctsf, 0x0B, 0x0A, 0x00180000, PPC_SPEFPU); //
GEN_SPE(evfsctuiz, speundef, 0x0C, 0x0A, 0x00180000, PPC_SPEFPU); //
GEN_SPE(evfsctsiz, speundef, 0x0D, 0x0A, 0x00180000, PPC_SPEFPU); //
GEN_SPE(evfststgt, evfststlt, 0x0E, 0x0A, 0x00600000, PPC_SPEFPU); //
GEN_SPE(evfststeq, speundef, 0x0F, 0x0A, 0x00600000, PPC_SPEFPU); //
|
7791 |
|
/* Single precision floating-point operations */
/* Arithmetic */
GEN_SPEFPUOP_ARITH2_32_32(efsadd);
GEN_SPEFPUOP_ARITH2_32_32(efssub);
GEN_SPEFPUOP_ARITH2_32_32(efsmul);
GEN_SPEFPUOP_ARITH2_32_32(efsdiv);
|
7798 static always_inline void gen_efsabs (DisasContext *ctx) |
|
7799 { |
|
7800 if (unlikely(!ctx->spe_enabled)) { |
|
7801 gen_exception(ctx, POWERPC_EXCP_APU); |
|
7802 return; |
|
7803 } |
|
7804 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], (target_long)~0x80000000LL); |
|
7805 } |
|
7806 static always_inline void gen_efsnabs (DisasContext *ctx) |
|
7807 { |
|
7808 if (unlikely(!ctx->spe_enabled)) { |
|
7809 gen_exception(ctx, POWERPC_EXCP_APU); |
|
7810 return; |
|
7811 } |
|
7812 tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x80000000); |
|
7813 } |
|
7814 static always_inline void gen_efsneg (DisasContext *ctx) |
|
7815 { |
|
7816 if (unlikely(!ctx->spe_enabled)) { |
|
7817 gen_exception(ctx, POWERPC_EXCP_APU); |
|
7818 return; |
|
7819 } |
|
7820 tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x80000000); |
|
7821 } |
|
7822 |
|
/* Conversion */
GEN_SPEFPUOP_CONV_32_32(efscfui);
GEN_SPEFPUOP_CONV_32_32(efscfsi);
GEN_SPEFPUOP_CONV_32_32(efscfuf);
GEN_SPEFPUOP_CONV_32_32(efscfsf);
GEN_SPEFPUOP_CONV_32_32(efsctui);
GEN_SPEFPUOP_CONV_32_32(efsctsi);
GEN_SPEFPUOP_CONV_32_32(efsctuf);
GEN_SPEFPUOP_CONV_32_32(efsctsf);
GEN_SPEFPUOP_CONV_32_32(efsctuiz);
GEN_SPEFPUOP_CONV_32_32(efsctsiz);
GEN_SPEFPUOP_CONV_32_64(efscfd);

/* Comparison */
GEN_SPEFPUOP_COMP_32(efscmpgt);
GEN_SPEFPUOP_COMP_32(efscmplt);
GEN_SPEFPUOP_COMP_32(efscmpeq);
GEN_SPEFPUOP_COMP_32(efststgt);
GEN_SPEFPUOP_COMP_32(efststlt);
GEN_SPEFPUOP_COMP_32(efststeq);

/* Opcodes definitions */
/* Register the scalar single-precision FP handlers. */
GEN_SPE(efsadd, efssub, 0x00, 0x0B, 0x00000000, PPC_SPEFPU); //
GEN_SPE(efsabs, efsnabs, 0x02, 0x0B, 0x0000F800, PPC_SPEFPU); //
GEN_SPE(efsneg, speundef, 0x03, 0x0B, 0x0000F800, PPC_SPEFPU); //
GEN_SPE(efsmul, efsdiv, 0x04, 0x0B, 0x00000000, PPC_SPEFPU); //
GEN_SPE(efscmpgt, efscmplt, 0x06, 0x0B, 0x00600000, PPC_SPEFPU); //
GEN_SPE(efscmpeq, efscfd, 0x07, 0x0B, 0x00600000, PPC_SPEFPU); //
GEN_SPE(efscfui, efscfsi, 0x08, 0x0B, 0x00180000, PPC_SPEFPU); //
GEN_SPE(efscfuf, efscfsf, 0x09, 0x0B, 0x00180000, PPC_SPEFPU); //
GEN_SPE(efsctui, efsctsi, 0x0A, 0x0B, 0x00180000, PPC_SPEFPU); //
GEN_SPE(efsctuf, efsctsf, 0x0B, 0x0B, 0x00180000, PPC_SPEFPU); //
GEN_SPE(efsctuiz, speundef, 0x0C, 0x0B, 0x00180000, PPC_SPEFPU); //
GEN_SPE(efsctsiz, speundef, 0x0D, 0x0B, 0x00180000, PPC_SPEFPU); //
GEN_SPE(efststgt, efststlt, 0x0E, 0x0B, 0x00600000, PPC_SPEFPU); //
GEN_SPE(efststeq, speundef, 0x0F, 0x0B, 0x00600000, PPC_SPEFPU); //
|
7859 |
|
/* Double precision floating-point operations */
/* Arithmetic */
GEN_SPEFPUOP_ARITH2_64_64(efdadd);
GEN_SPEFPUOP_ARITH2_64_64(efdsub);
GEN_SPEFPUOP_ARITH2_64_64(efdmul);
GEN_SPEFPUOP_ARITH2_64_64(efddiv);
|
7866 static always_inline void gen_efdabs (DisasContext *ctx) |
|
7867 { |
|
7868 if (unlikely(!ctx->spe_enabled)) { |
|
7869 gen_exception(ctx, POWERPC_EXCP_APU); |
|
7870 return; |
|
7871 } |
|
7872 #if defined(TARGET_PPC64) |
|
7873 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], ~0x8000000000000000LL); |
|
7874 #else |
|
7875 tcg_gen_andi_tl(cpu_gprh[rA(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], ~0x80000000); |
|
7876 #endif |
|
7877 } |
|
7878 static always_inline void gen_efdnabs (DisasContext *ctx) |
|
7879 { |
|
7880 if (unlikely(!ctx->spe_enabled)) { |
|
7881 gen_exception(ctx, POWERPC_EXCP_APU); |
|
7882 return; |
|
7883 } |
|
7884 #if defined(TARGET_PPC64) |
|
7885 tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x8000000000000000LL); |
|
7886 #else |
|
7887 tcg_gen_ori_tl(cpu_gprh[rA(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], 0x80000000); |
|
7888 #endif |
|
7889 } |
|
7890 static always_inline void gen_efdneg (DisasContext *ctx) |
|
7891 { |
|
7892 if (unlikely(!ctx->spe_enabled)) { |
|
7893 gen_exception(ctx, POWERPC_EXCP_APU); |
|
7894 return; |
|
7895 } |
|
7896 #if defined(TARGET_PPC64) |
|
7897 tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x8000000000000000LL); |
|
7898 #else |
|
7899 tcg_gen_xori_tl(cpu_gprh[rA(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], 0x80000000); |
|
7900 #endif |
|
7901 } |
|
7902 |
|
/* Conversion */
/* NOTE(review): the _A_B suffix presumably encodes destination/source
 * widths (e.g. CONV_64_32 = 64-bit result from 32-bit source) — confirm
 * against the GEN_SPEFPUOP_CONV_* macro definitions earlier in the file. */
GEN_SPEFPUOP_CONV_64_32(efdcfui);
GEN_SPEFPUOP_CONV_64_32(efdcfsi);
GEN_SPEFPUOP_CONV_64_32(efdcfuf);
GEN_SPEFPUOP_CONV_64_32(efdcfsf);
GEN_SPEFPUOP_CONV_32_64(efdctui);
GEN_SPEFPUOP_CONV_32_64(efdctsi);
GEN_SPEFPUOP_CONV_32_64(efdctuf);
GEN_SPEFPUOP_CONV_32_64(efdctsf);
GEN_SPEFPUOP_CONV_32_64(efdctuiz);
GEN_SPEFPUOP_CONV_32_64(efdctsiz);
GEN_SPEFPUOP_CONV_64_32(efdcfs);
GEN_SPEFPUOP_CONV_64_64(efdcfuid);
GEN_SPEFPUOP_CONV_64_64(efdcfsid);
GEN_SPEFPUOP_CONV_64_64(efdctuidz);
GEN_SPEFPUOP_CONV_64_64(efdctsidz);

/* Comparison */
/* Double-precision comparisons: result goes to a CR field rather than a GPR */
GEN_SPEFPUOP_COMP_64(efdcmpgt);
GEN_SPEFPUOP_COMP_64(efdcmplt);
GEN_SPEFPUOP_COMP_64(efdcmpeq);
GEN_SPEFPUOP_COMP_64(efdtstgt);
GEN_SPEFPUOP_COMP_64(efdtstlt);
GEN_SPEFPUOP_COMP_64(efdtsteq);
|
7927 |
|
/* Opcodes definitions */
/* Double-precision SPE floating-point opcode registrations (opc3 = 0x0B,
 * opc2 0x10..0x1F).  Each GEN_SPE entry registers a pair of instructions
 * sharing one slot; 'speundef' fills slots with no defined companion. */
GEN_SPE(efdadd, efdsub, 0x10, 0x0B, 0x00000000, PPC_SPEFPU); //
GEN_SPE(efdcfuid, efdcfsid, 0x11, 0x0B, 0x00180000, PPC_SPEFPU); //
GEN_SPE(efdabs, efdnabs, 0x12, 0x0B, 0x0000F800, PPC_SPEFPU); //
GEN_SPE(efdneg, speundef, 0x13, 0x0B, 0x0000F800, PPC_SPEFPU); //
GEN_SPE(efdmul, efddiv, 0x14, 0x0B, 0x00000000, PPC_SPEFPU); //
GEN_SPE(efdctuidz, efdctsidz, 0x15, 0x0B, 0x00180000, PPC_SPEFPU); //
GEN_SPE(efdcmpgt, efdcmplt, 0x16, 0x0B, 0x00600000, PPC_SPEFPU); //
GEN_SPE(efdcmpeq, efdcfs, 0x17, 0x0B, 0x00600000, PPC_SPEFPU); //
GEN_SPE(efdcfui, efdcfsi, 0x18, 0x0B, 0x00180000, PPC_SPEFPU); //
GEN_SPE(efdcfuf, efdcfsf, 0x19, 0x0B, 0x00180000, PPC_SPEFPU); //
GEN_SPE(efdctui, efdctsi, 0x1A, 0x0B, 0x00180000, PPC_SPEFPU); //
GEN_SPE(efdctuf, efdctsf, 0x1B, 0x0B, 0x00180000, PPC_SPEFPU); //
GEN_SPE(efdctuiz, speundef, 0x1C, 0x0B, 0x00180000, PPC_SPEFPU); //
GEN_SPE(efdctsiz, speundef, 0x1D, 0x0B, 0x00180000, PPC_SPEFPU); //
GEN_SPE(efdtstgt, efdtstlt, 0x1E, 0x0B, 0x00600000, PPC_SPEFPU); //
GEN_SPE(efdtsteq, speundef, 0x1F, 0x0B, 0x00600000, PPC_SPEFPU); //

/* End opcode list */
/* Sentinel marking the end of the opcode registration table. */
GEN_OPCODE_MARK(end);
|
7948 |
|
7949 #include "translate_init.c" |
|
7950 #include "helper_regs.h" |
|
7951 |
|
7952 /*****************************************************************************/ |
|
7953 /* Misc PowerPC helpers */ |
|
/* Dump the architected CPU state (NIP/LR/CTR/XER, MSR, time base, GPRs,
 * CR, reservation address, FPRs, FPSCR and — system mode only —
 * SRR0/SRR1/SDR1) through the caller-provided formatter.
 * 'flags' is accepted for interface compatibility but not used here. */
void cpu_dump_state (CPUState *env, FILE *f,
                     int (*cpu_fprintf)(FILE *f, const char *fmt, ...),
                     int flags)
{
/* registers printed per row for the GPR and FPR banks */
#define RGPL  4
#define RFPL  4

    int i;

    cpu_fprintf(f, "NIP " ADDRX " LR " ADDRX " CTR " ADDRX " XER %08x\n",
                env->nip, env->lr, env->ctr, env->xer);
    cpu_fprintf(f, "MSR " ADDRX " HID0 " ADDRX "  HF " ADDRX " idx %d\n",
                env->msr, env->spr[SPR_HID0], env->hflags, env->mmu_idx);
#if !defined(NO_TIMER_DUMP)
    /* Time base; the decrementer only exists outside user-only builds. */
    cpu_fprintf(f, "TB %08x %08x "
#if !defined(CONFIG_USER_ONLY)
                "DECR %08x"
#endif
                "\n",
                cpu_ppc_load_tbu(env), cpu_ppc_load_tbl(env)
#if !defined(CONFIG_USER_ONLY)
                , cpu_ppc_load_decr(env)
#endif
                );
#endif
    /* General-purpose registers, RGPL per row */
    for (i = 0; i < 32; i++) {
        if ((i & (RGPL - 1)) == 0)
            cpu_fprintf(f, "GPR%02d", i);
        cpu_fprintf(f, " " REGX, ppc_dump_gpr(env, i));
        if ((i & (RGPL - 1)) == (RGPL - 1))
            cpu_fprintf(f, "\n");
    }
    /* Condition register: raw nibbles, then a symbolic decode where
     * L/G/E = LT/GT/EQ bit set and a trailing O marks the SO bit. */
    cpu_fprintf(f, "CR ");
    for (i = 0; i < 8; i++)
        cpu_fprintf(f, "%01x", env->crf[i]);
    cpu_fprintf(f, "  [");
    for (i = 0; i < 8; i++) {
        char a = '-';
        if (env->crf[i] & 0x08)
            a = 'L';
        else if (env->crf[i] & 0x04)
            a = 'G';
        else if (env->crf[i] & 0x02)
            a = 'E';
        cpu_fprintf(f, " %c%c", a, env->crf[i] & 0x01 ? 'O' : ' ');
    }
    cpu_fprintf(f, " ]             RES " ADDRX "\n", env->reserve);
    /* Floating-point registers, RFPL per row, printed as raw 64-bit images.
     * NOTE(review): the pointer cast type-puns the FPR through uint64_t —
     * a strict-aliasing hazard; memcpy would be the safe idiom. */
    for (i = 0; i < 32; i++) {
        if ((i & (RFPL - 1)) == 0)
            cpu_fprintf(f, "FPR%02d", i);
        cpu_fprintf(f, " %016" PRIx64, *((uint64_t *)&env->fpr[i]));
        if ((i & (RFPL - 1)) == (RFPL - 1))
            cpu_fprintf(f, "\n");
    }
    cpu_fprintf(f, "FPSCR %08x\n", env->fpscr);
#if !defined(CONFIG_USER_ONLY)
    cpu_fprintf(f, "SRR0 " ADDRX " SRR1 " ADDRX " SDR1 " ADDRX "\n",
                env->spr[SPR_SRR0], env->spr[SPR_SRR1], env->sdr1);
#endif

#undef RGPL
#undef RFPL
}
|
8017 |
|
8018 void cpu_dump_statistics (CPUState *env, FILE*f, |
|
8019 int (*cpu_fprintf)(FILE *f, const char *fmt, ...), |
|
8020 int flags) |
|
8021 { |
|
8022 #if defined(DO_PPC_STATISTICS) |
|
8023 opc_handler_t **t1, **t2, **t3, *handler; |
|
8024 int op1, op2, op3; |
|
8025 |
|
8026 t1 = env->opcodes; |
|
8027 for (op1 = 0; op1 < 64; op1++) { |
|
8028 handler = t1[op1]; |
|
8029 if (is_indirect_opcode(handler)) { |
|
8030 t2 = ind_table(handler); |
|
8031 for (op2 = 0; op2 < 32; op2++) { |
|
8032 handler = t2[op2]; |
|
8033 if (is_indirect_opcode(handler)) { |
|
8034 t3 = ind_table(handler); |
|
8035 for (op3 = 0; op3 < 32; op3++) { |
|
8036 handler = t3[op3]; |
|
8037 if (handler->count == 0) |
|
8038 continue; |
|
8039 cpu_fprintf(f, "%02x %02x %02x (%02x %04d) %16s: " |
|
8040 "%016llx %lld\n", |
|
8041 op1, op2, op3, op1, (op3 << 5) | op2, |
|
8042 handler->oname, |
|
8043 handler->count, handler->count); |
|
8044 } |
|
8045 } else { |
|
8046 if (handler->count == 0) |
|
8047 continue; |
|
8048 cpu_fprintf(f, "%02x %02x (%02x %04d) %16s: " |
|
8049 "%016llx %lld\n", |
|
8050 op1, op2, op1, op2, handler->oname, |
|
8051 handler->count, handler->count); |
|
8052 } |
|
8053 } |
|
8054 } else { |
|
8055 if (handler->count == 0) |
|
8056 continue; |
|
8057 cpu_fprintf(f, "%02x (%02x ) %16s: %016llx %lld\n", |
|
8058 op1, op1, handler->oname, |
|
8059 handler->count, handler->count); |
|
8060 } |
|
8061 } |
|
8062 #endif |
|
8063 } |
|
8064 |
|
8065 /*****************************************************************************/ |
|
/* Core translation loop: disassemble guest instructions starting at tb->pc
 * and emit TCG ops until an exception-generating instruction, a page
 * boundary, the icount budget, or the op buffer limit stops the block.
 * When 'search_pc' is set, additionally fill the gen_opc_pc /
 * gen_opc_instr_start / gen_opc_icount side tables used to map a host PC
 * back to a guest PC after a fault (see gen_pc_load). */
static always_inline void gen_intermediate_code_internal (CPUState *env,
                                                          TranslationBlock *tb,
                                                          int search_pc)
{
    DisasContext ctx, *ctxp = &ctx;
    opc_handler_t **table, *handler;
    target_ulong pc_start;
    uint16_t *gen_opc_end;
    CPUBreakpoint *bp;
    int j, lj = -1;          /* lj: last side-table index written */
    int num_insns;
    int max_insns;

    /* Seed the disassembly context from the current CPU/translation state. */
    pc_start = tb->pc;
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
    ctx.nip = pc_start;
    ctx.tb = tb;
    ctx.exception = POWERPC_EXCP_NONE;
    ctx.spr_cb = env->spr_cb;
    ctx.mem_idx = env->mmu_idx;
    ctx.access_type = -1;
    ctx.le_mode = env->hflags & (1 << MSR_LE) ? 1 : 0;
#if defined(TARGET_PPC64)
    ctx.sf_mode = msr_sf;
#endif
    ctx.fpu_enabled = msr_fp;
    /* Optional units are only enabled when both the CPU model has them
     * (env->flags) and the corresponding MSR bit is set. */
    if ((env->flags & POWERPC_FLAG_SPE) && msr_spe)
        ctx.spe_enabled = msr_spe;
    else
        ctx.spe_enabled = 0;
    if ((env->flags & POWERPC_FLAG_VRE) && msr_vr)
        ctx.altivec_enabled = msr_vr;
    else
        ctx.altivec_enabled = 0;
    /* Single-step sources: MSR[SE] trace, MSR[BE] branch trace, gdbstub. */
    if ((env->flags & POWERPC_FLAG_SE) && msr_se)
        ctx.singlestep_enabled = CPU_SINGLE_STEP;
    else
        ctx.singlestep_enabled = 0;
    if ((env->flags & POWERPC_FLAG_BE) && msr_be)
        ctx.singlestep_enabled |= CPU_BRANCH_STEP;
    if (unlikely(env->singlestep_enabled))
        ctx.singlestep_enabled |= GDBSTUB_SINGLE_STEP;
#if defined (DO_SINGLE_STEP) && 0
    /* Single step trace mode */
    msr_se = 1;
#endif
    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0)
        max_insns = CF_COUNT_MASK;

    gen_icount_start();
    /* Set env in case of segfault during code fetch */
    while (ctx.exception == POWERPC_EXCP_NONE && gen_opc_ptr < gen_opc_end) {
        /* Emit a debug exception at any breakpoint on the current PC.
         * NOTE(review): the 'break' only leaves the FOREACH; translation
         * of this instruction continues after gen_debug_exception —
         * confirm this is the intended behavior. */
        if (unlikely(!TAILQ_EMPTY(&env->breakpoints))) {
            TAILQ_FOREACH(bp, &env->breakpoints, entry) {
                if (bp->pc == ctx.nip) {
                    gen_debug_exception(ctxp);
                    break;
                }
            }
        }
        /* Record PC/icount side-table entries for fault restoration. */
        if (unlikely(search_pc)) {
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
                gen_opc_pc[lj] = ctx.nip;
                gen_opc_instr_start[lj] = 1;
                gen_opc_icount[lj] = num_insns;
            }
        }
#if defined PPC_DEBUG_DISAS
        if (loglevel & CPU_LOG_TB_IN_ASM) {
            fprintf(logfile, "----------------\n");
            fprintf(logfile, "nip=" ADDRX " super=%d ir=%d\n",
                    ctx.nip, ctx.mem_idx, (int)msr_ir);
        }
#endif
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
            gen_io_start();
        /* Fetch the guest instruction, byte-swapping in little-endian mode. */
        if (unlikely(ctx.le_mode)) {
            ctx.opcode = bswap32(ldl_code(ctx.nip));
        } else {
            ctx.opcode = ldl_code(ctx.nip);
        }
#if defined PPC_DEBUG_DISAS
        if (loglevel & CPU_LOG_TB_IN_ASM) {
            /* NOTE(review): 'little_endian' is not declared anywhere in
             * view — this block is normally compiled out (PPC_DEBUG_DISAS
             * is commented at the top of the file) and likely should read
             * ctx.le_mode; confirm before enabling the define. */
            fprintf(logfile, "translate opcode %08x (%02x %02x %02x) (%s)\n",
                    ctx.opcode, opc1(ctx.opcode), opc2(ctx.opcode),
                    opc3(ctx.opcode), little_endian ? "little" : "big");
        }
#endif
        ctx.nip += 4;
        table = env->opcodes;
        num_insns++;
        /* Resolve the handler through up to three indirection levels
         * (opc1 -> opc2 -> opc3). */
        handler = table[opc1(ctx.opcode)];
        if (is_indirect_opcode(handler)) {
            table = ind_table(handler);
            handler = table[opc2(ctx.opcode)];
            if (is_indirect_opcode(handler)) {
                table = ind_table(handler);
                handler = table[opc3(ctx.opcode)];
            }
        }
        /* Is opcode *REALLY* valid ? */
        if (unlikely(handler->handler == &gen_invalid)) {
            if (loglevel != 0) {
                fprintf(logfile, "invalid/unsupported opcode: "
                        "%02x - %02x - %02x (%08x) " ADDRX " %d\n",
                        opc1(ctx.opcode), opc2(ctx.opcode),
                        opc3(ctx.opcode), ctx.opcode, ctx.nip - 4, (int)msr_ir);
            } else {
                printf("invalid/unsupported opcode: "
                       "%02x - %02x - %02x (%08x) " ADDRX " %d\n",
                       opc1(ctx.opcode), opc2(ctx.opcode),
                       opc3(ctx.opcode), ctx.opcode, ctx.nip - 4, (int)msr_ir);
            }
        } else {
            /* Reserved bits set in the encoding => illegal instruction. */
            if (unlikely((ctx.opcode & handler->inval) != 0)) {
                if (loglevel != 0) {
                    fprintf(logfile, "invalid bits: %08x for opcode: "
                            "%02x - %02x - %02x (%08x) " ADDRX "\n",
                            ctx.opcode & handler->inval, opc1(ctx.opcode),
                            opc2(ctx.opcode), opc3(ctx.opcode),
                            ctx.opcode, ctx.nip - 4);
                } else {
                    printf("invalid bits: %08x for opcode: "
                           "%02x - %02x - %02x (%08x) " ADDRX "\n",
                           ctx.opcode & handler->inval, opc1(ctx.opcode),
                           opc2(ctx.opcode), opc3(ctx.opcode),
                           ctx.opcode, ctx.nip - 4);
                }
                gen_inval_exception(ctxp, POWERPC_EXCP_INVAL_INVAL);
                break;
            }
        }
        /* Emit the TCG ops for this instruction. */
        (*(handler->handler))(&ctx);
#if defined(DO_PPC_STATISTICS)
        handler->count++;
#endif
        /* Check trace mode exceptions */
        /* NOTE(review): 'POWERPC_SYSCALL' does not follow the
         * POWERPC_EXCP_* naming used by the other comparands — verify it
         * is the intended constant (POWERPC_EXCP_SYSCALL?). */
        if (unlikely(ctx.singlestep_enabled & CPU_SINGLE_STEP &&
                     (ctx.nip <= 0x100 || ctx.nip > 0xF00) &&
                     ctx.exception != POWERPC_SYSCALL &&
                     ctx.exception != POWERPC_EXCP_TRAP &&
                     ctx.exception != POWERPC_EXCP_BRANCH)) {
            gen_exception(ctxp, POWERPC_EXCP_TRACE);
        } else if (unlikely(((ctx.nip & (TARGET_PAGE_SIZE - 1)) == 0) ||
                            (env->singlestep_enabled) ||
                            num_insns >= max_insns)) {
            /* if we reach a page boundary or are single stepping, stop
             * generation
             */
            break;
        }
#if defined (DO_SINGLE_STEP)
        break;
#endif
    }
    if (tb->cflags & CF_LAST_IO)
        gen_io_end();
    /* Close the block: either chain to the next TB at ctx.nip, or return
     * to the main loop when an exception was generated (branches have
     * already emitted their own exits). */
    if (ctx.exception == POWERPC_EXCP_NONE) {
        gen_goto_tb(&ctx, 0, ctx.nip);
    } else if (ctx.exception != POWERPC_EXCP_BRANCH) {
        if (unlikely(env->singlestep_enabled)) {
            gen_debug_exception(ctxp);
        }
        /* Generate the return instruction */
        tcg_gen_exit_tb(0);
    }
    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;
    if (unlikely(search_pc)) {
        /* Pad the remainder of the side table with "not an insn start". */
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
    } else {
        tb->size = ctx.nip - pc_start;
        tb->icount = num_insns;
    }
#if defined(DEBUG_DISAS)
    if (loglevel & CPU_LOG_TB_CPU) {
        fprintf(logfile, "---------------- excp: %04x\n", ctx.exception);
        cpu_dump_state(env, logfile, fprintf, 0);
    }
    if (loglevel & CPU_LOG_TB_IN_ASM) {
        int flags;
        flags = env->bfd_mach;
        flags |= ctx.le_mode << 16;
        fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
        target_disas(logfile, pc_start, ctx.nip - pc_start, flags);
        fprintf(logfile, "\n");
    }
#endif
}
|
8264 |
|
/* Public entry point: translate one TB (no PC-search side tables). */
void gen_intermediate_code (CPUState *env, struct TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 0);
}
|
8269 |
|
/* Re-translate a TB while recording the guest-PC side tables, so a host
 * fault inside the TB can be mapped back to a guest PC (see gen_pc_load). */
void gen_intermediate_code_pc (CPUState *env, struct TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 1);
}
|
8274 |
|
/* Restore the guest PC after an exception inside a TB: 'pc_pos' indexes the
 * gen_opc_pc[] table filled by gen_intermediate_code_pc; 'searched_pc' and
 * 'puc' are unused on this target. */
void gen_pc_load(CPUState *env, TranslationBlock *tb,
                unsigned long searched_pc, int pc_pos, void *puc)
{
    env->nip = gen_opc_pc[pc_pos];
}