1 /*
2 SPARC translation
3
4 Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
5 Copyright (C) 2003-2005 Fabrice Bellard
6
7 This library is free software; you can redistribute it and/or
8 modify it under the terms of the GNU Lesser General Public
9 License as published by the Free Software Foundation; either
10 version 2 of the License, or (at your option) any later version.
11
12 This library is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 Lesser General Public License for more details.
16
17 You should have received a copy of the GNU Lesser General Public
18 License along with this library; if not, write to the Free Software
19 Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
20 */
21
22 #include <stdarg.h>
23 #include <stdlib.h>
24 #include <stdio.h>
25 #include <string.h>
26 #include <inttypes.h>
27
28 #include "cpu.h"
29 #include "exec-all.h"
30 #include "disas.h"
31 #include "helper.h"
32 #include "tcg-op.h"
33
34 #define DEBUG_DISAS
35
36 #define DYNAMIC_PC 1 /* dynamic pc value */
37 #define JUMP_PC 2 /* dynamic pc value which takes only two values,
38 chosen at run time from jump_pc[0]/jump_pc[1] */
39
40 /* global register indexes */
41 static TCGv cpu_env, cpu_regwptr;
42 static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
43 static TCGv cpu_psr, cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
44 static TCGv cpu_y;
45 #ifndef CONFIG_USER_ONLY
46 static TCGv cpu_tbr;
47 #endif
48 static TCGv cpu_cond, cpu_src1, cpu_src2, cpu_dst, cpu_addr, cpu_val;
49 #ifdef TARGET_SPARC64
50 static TCGv cpu_xcc, cpu_asi, cpu_fprs, cpu_gsr;
51 static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
52 static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
53 #else
54 static TCGv cpu_wim;
55 #endif
56 /* local register indexes (only used inside old micro ops) */
57 static TCGv cpu_tmp0, cpu_tmp32, cpu_tmp64;
58
59 #include "gen-icount.h"
60
61 typedef struct DisasContext {
62 target_ulong pc; /* current Program Counter: integer or DYNAMIC_PC */
63 target_ulong npc; /* next PC: integer or DYNAMIC_PC or JUMP_PC */
64 target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
65 int is_br;
66 int mem_idx;
67 int fpu_enabled;
68 int address_mask_32bit;
69 struct TranslationBlock *tb;
70 sparc_def_t *def;
71 } DisasContext;
72
73 // This macro uses non-native bit order (bit 0 is the most significant bit)
74 #define GET_FIELD(X, FROM, TO) \
75 ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))
76
77 // This macro uses the bit order from the manuals, i.e. bit 0 is 2^0
78 #define GET_FIELD_SP(X, FROM, TO) \
79 GET_FIELD(X, 31 - (TO), 31 - (FROM))
80
81 #define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
82 #define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
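/* Illustration: with the non-native numbering bit 0 is the most significant
   bit of the 32-bit word, so GET_FIELD(insn, 2, 6) extracts instruction bits
   <29:25> (the rd field used in disas_sparc_insn below) and
   GET_FIELD(insn, 0, 1) extracts the op field from bits <31:30>. */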
83
84 #ifdef TARGET_SPARC64
85 #define FFPREG(r) (r)
86 #define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
87 #define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
88 #else
89 #define FFPREG(r) (r)
90 #define DFPREG(r) (r & 0x1e)
91 #define QFPREG(r) (r & 0x1c)
92 #endif
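/* SPARC64 provides double/quad FP registers beyond %f30 by letting the low
   bit of the 5-bit field act as bit 5 of the register number; DFPREG() and
   QFPREG() fold that bit back in with "(r & 1) << 5".  Pre-V9 CPUs only have
   %f0-%f31, so masking to an even (double) or multiple-of-four (quad) index
   suffices. */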
93
94 static int sign_extend(int x, int len)
95 {
96 len = 32 - len;
97 return (x << len) >> len;
98 }
99
100 #define IS_IMM (insn & (1<<13))
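/* Bit 13 is the "i" bit of format 3 instructions: when set, the second
   operand is a sign-extended 13-bit immediate instead of rs2 (see get_src2
   below). */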
101
102 /* floating point registers moves */
103 static void gen_op_load_fpr_FT0(unsigned int src)
104 {
105 tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[src]));
106 tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, ft0));
107 }
108
109 static void gen_op_load_fpr_FT1(unsigned int src)
110 {
111 tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[src]));
112 tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, ft1));
113 }
114
115 static void gen_op_store_FT0_fpr(unsigned int dst)
116 {
117 tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, ft0));
118 tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[dst]));
119 }
120
121 static void gen_op_load_fpr_DT0(unsigned int src)
122 {
123 tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[src]));
124 tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, dt0) +
125 offsetof(CPU_DoubleU, l.upper));
126 tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[src + 1]));
127 tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, dt0) +
128 offsetof(CPU_DoubleU, l.lower));
129 }
130
131 static void gen_op_load_fpr_DT1(unsigned int src)
132 {
133 tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[src]));
134 tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, dt1) +
135 offsetof(CPU_DoubleU, l.upper));
136 tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[src + 1]));
137 tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, dt1) +
138 offsetof(CPU_DoubleU, l.lower));
139 }
140
141 static void gen_op_store_DT0_fpr(unsigned int dst)
142 {
143 tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, dt0) +
144 offsetof(CPU_DoubleU, l.upper));
145 tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[dst]));
146 tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, dt0) +
147 offsetof(CPU_DoubleU, l.lower));
148 tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[dst + 1]));
149 }
150
151 static void gen_op_load_fpr_QT0(unsigned int src)
152 {
153 tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[src]));
154 tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, qt0) +
155 offsetof(CPU_QuadU, l.upmost));
156 tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[src + 1]));
157 tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, qt0) +
158 offsetof(CPU_QuadU, l.upper));
159 tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[src + 2]));
160 tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, qt0) +
161 offsetof(CPU_QuadU, l.lower));
162 tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[src + 3]));
163 tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, qt0) +
164 offsetof(CPU_QuadU, l.lowest));
165 }
166
167 static void gen_op_load_fpr_QT1(unsigned int src)
168 {
169 tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[src]));
170 tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, qt1) +
171 offsetof(CPU_QuadU, l.upmost));
172 tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[src + 1]));
173 tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, qt1) +
174 offsetof(CPU_QuadU, l.upper));
175 tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[src + 2]));
176 tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, qt1) +
177 offsetof(CPU_QuadU, l.lower));
178 tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[src + 3]));
179 tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, qt1) +
180 offsetof(CPU_QuadU, l.lowest));
181 }
182
183 static void gen_op_store_QT0_fpr(unsigned int dst)
184 {
185 tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, qt0) +
186 offsetof(CPU_QuadU, l.upmost));
187 tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[dst]));
188 tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, qt0) +
189 offsetof(CPU_QuadU, l.upper));
190 tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[dst + 1]));
191 tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, qt0) +
192 offsetof(CPU_QuadU, l.lower));
193 tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[dst + 2]));
194 tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, qt0) +
195 offsetof(CPU_QuadU, l.lowest));
196 tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[dst + 3]));
197 }
198
199 /* moves */
200 #ifdef CONFIG_USER_ONLY
201 #define supervisor(dc) 0
202 #ifdef TARGET_SPARC64
203 #define hypervisor(dc) 0
204 #endif
205 #else
206 #define supervisor(dc) (dc->mem_idx >= 1)
207 #ifdef TARGET_SPARC64
208 #define hypervisor(dc) (dc->mem_idx == 2)
209 #else
210 #endif
211 #endif
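/* mem_idx doubles as the privilege level: 0 = user, 1 = supervisor and, on
   SPARC64, 2 = hypervisor, which is what the two macros above test.  In
   user-only emulation both always evaluate to 0. */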
212
213 #ifdef TARGET_SPARC64
214 #ifndef TARGET_ABI32
215 #define AM_CHECK(dc) ((dc)->address_mask_32bit)
216 #else
217 #define AM_CHECK(dc) (1)
218 #endif
219 #endif
220
221 static inline void gen_address_mask(DisasContext *dc, TCGv addr)
222 {
223 #ifdef TARGET_SPARC64
224 if (AM_CHECK(dc))
225 tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
226 #endif
227 }
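/* When 32-bit address masking is in effect (dc->address_mask_32bit, set from
   the guest's PSTATE.AM bit on SPARC64, or unconditionally for the 32-bit
   user ABI), generated addresses are truncated to their low 32 bits here. */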
228
229 static inline void gen_movl_reg_TN(int reg, TCGv tn)
230 {
231 if (reg == 0)
232 tcg_gen_movi_tl(tn, 0);
233 else if (reg < 8)
234 tcg_gen_mov_tl(tn, cpu_gregs[reg]);
235 else {
236 tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
237 }
238 }
239
240 static inline void gen_movl_TN_reg(int reg, TCGv tn)
241 {
242 if (reg == 0)
243 return;
244 else if (reg < 8)
245 tcg_gen_mov_tl(cpu_gregs[reg], tn);
246 else {
247 tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
248 }
249 }
250
251 static inline void gen_goto_tb(DisasContext *s, int tb_num,
252 target_ulong pc, target_ulong npc)
253 {
254 TranslationBlock *tb;
255
256 tb = s->tb;
257 if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
258 (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK)) {
259 /* jump to same page: we can use a direct jump */
260 tcg_gen_goto_tb(tb_num);
261 tcg_gen_movi_tl(cpu_pc, pc);
262 tcg_gen_movi_tl(cpu_npc, npc);
263 tcg_gen_exit_tb((long)tb + tb_num);
264 } else {
265 /* jump to another page: currently not optimized */
266 tcg_gen_movi_tl(cpu_pc, pc);
267 tcg_gen_movi_tl(cpu_npc, npc);
268 tcg_gen_exit_tb(0);
269 }
270 }
271
272 // XXX suboptimal
273 static inline void gen_mov_reg_N(TCGv reg, TCGv src)
274 {
275 tcg_gen_extu_i32_tl(reg, src);
276 tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
277 tcg_gen_andi_tl(reg, reg, 0x1);
278 }
279
280 static inline void gen_mov_reg_Z(TCGv reg, TCGv src)
281 {
282 tcg_gen_extu_i32_tl(reg, src);
283 tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
284 tcg_gen_andi_tl(reg, reg, 0x1);
285 }
286
287 static inline void gen_mov_reg_V(TCGv reg, TCGv src)
288 {
289 tcg_gen_extu_i32_tl(reg, src);
290 tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
291 tcg_gen_andi_tl(reg, reg, 0x1);
292 }
293
294 static inline void gen_mov_reg_C(TCGv reg, TCGv src)
295 {
296 tcg_gen_extu_i32_tl(reg, src);
297 tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
298 tcg_gen_andi_tl(reg, reg, 0x1);
299 }
300
301 static inline void gen_cc_clear_icc(void)
302 {
303 tcg_gen_movi_i32(cpu_psr, 0);
304 }
305
306 #ifdef TARGET_SPARC64
307 static inline void gen_cc_clear_xcc(void)
308 {
309 tcg_gen_movi_i32(cpu_xcc, 0);
310 }
311 #endif
312
313 /* old op:
314 if (!T0)
315 env->psr |= PSR_ZERO;
316 if ((int32_t) T0 < 0)
317 env->psr |= PSR_NEG;
318 */
319 static inline void gen_cc_NZ_icc(TCGv dst)
320 {
321 TCGv r_temp;
322 int l1, l2;
323
324 l1 = gen_new_label();
325 l2 = gen_new_label();
326 r_temp = tcg_temp_new(TCG_TYPE_TL);
327 tcg_gen_andi_tl(r_temp, dst, 0xffffffffULL);
328 tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
329 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_ZERO);
330 gen_set_label(l1);
331 tcg_gen_ext_i32_tl(r_temp, dst);
332 tcg_gen_brcondi_tl(TCG_COND_GE, r_temp, 0, l2);
333 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_NEG);
334 gen_set_label(l2);
335 tcg_temp_free(r_temp);
336 }
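/* icc is computed on the low 32 bits of the result, hence the 0xffffffff
   mask for the zero test and the 32-bit sign extension for the sign test;
   the xcc variants below use the full 64-bit value. */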
337
338 #ifdef TARGET_SPARC64
339 static inline void gen_cc_NZ_xcc(TCGv dst)
340 {
341 int l1, l2;
342
343 l1 = gen_new_label();
344 l2 = gen_new_label();
345 tcg_gen_brcondi_tl(TCG_COND_NE, dst, 0, l1);
346 tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_ZERO);
347 gen_set_label(l1);
348 tcg_gen_brcondi_tl(TCG_COND_GE, dst, 0, l2);
349 tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_NEG);
350 gen_set_label(l2);
351 }
352 #endif
353
354 /* old op:
355 if (T0 < src1)
356 env->psr |= PSR_CARRY;
357 */
358 static inline void gen_cc_C_add_icc(TCGv dst, TCGv src1)
359 {
360 TCGv r_temp1, r_temp2;
361 int l1;
362
363 l1 = gen_new_label();
364 r_temp1 = tcg_temp_new(TCG_TYPE_TL);
365 r_temp2 = tcg_temp_new(TCG_TYPE_TL);
366 tcg_gen_andi_tl(r_temp1, dst, 0xffffffffULL);
367 tcg_gen_andi_tl(r_temp2, src1, 0xffffffffULL);
368 tcg_gen_brcond_tl(TCG_COND_GEU, r_temp1, r_temp2, l1);
369 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_CARRY);
370 gen_set_label(l1);
371 tcg_temp_free(r_temp1);
372 tcg_temp_free(r_temp2);
373 }
374
375 #ifdef TARGET_SPARC64
376 static inline void gen_cc_C_add_xcc(TCGv dst, TCGv src1)
377 {
378 int l1;
379
380 l1 = gen_new_label();
381 tcg_gen_brcond_tl(TCG_COND_GEU, dst, src1, l1);
382 tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_CARRY);
383 gen_set_label(l1);
384 }
385 #endif
386
387 /* old op:
388 if (((src1 ^ T1 ^ -1) & (src1 ^ T0)) & (1 << 31))
389 env->psr |= PSR_OVF;
390 */
391 static inline void gen_cc_V_add_icc(TCGv dst, TCGv src1, TCGv src2)
392 {
393 TCGv r_temp;
394
395 r_temp = tcg_temp_new(TCG_TYPE_TL);
396 tcg_gen_xor_tl(r_temp, src1, src2);
397 tcg_gen_xori_tl(r_temp, r_temp, -1);
398 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
399 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
400 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
401 tcg_gen_shri_tl(r_temp, r_temp, 31 - PSR_OVF_SHIFT);
402 tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
403 tcg_temp_free(r_temp);
404 tcg_gen_or_i32(cpu_psr, cpu_psr, cpu_tmp32);
405 }
406
407 #ifdef TARGET_SPARC64
408 static inline void gen_cc_V_add_xcc(TCGv dst, TCGv src1, TCGv src2)
409 {
410 TCGv r_temp;
411
412 r_temp = tcg_temp_new(TCG_TYPE_TL);
413 tcg_gen_xor_tl(r_temp, src1, src2);
414 tcg_gen_xori_tl(r_temp, r_temp, -1);
415 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
416 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
417 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 63));
418 tcg_gen_shri_tl(r_temp, r_temp, 63 - PSR_OVF_SHIFT);
419 tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
420 tcg_temp_free(r_temp);
421 tcg_gen_or_i32(cpu_xcc, cpu_xcc, cpu_tmp32);
422 }
423 #endif
424
425 static inline void gen_add_tv(TCGv dst, TCGv src1, TCGv src2)
426 {
427 TCGv r_temp, r_const;
428 int l1;
429
430 l1 = gen_new_label();
431
432 r_temp = tcg_temp_new(TCG_TYPE_TL);
433 tcg_gen_xor_tl(r_temp, src1, src2);
434 tcg_gen_xori_tl(r_temp, r_temp, -1);
435 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
436 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
437 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
438 tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
439 r_const = tcg_const_i32(TT_TOVF);
440 tcg_gen_helper_0_1(raise_exception, r_const);
441 tcg_temp_free(r_const);
442 gen_set_label(l1);
443 tcg_temp_free(r_temp);
444 }
445
446 static inline void gen_cc_V_tag(TCGv src1, TCGv src2)
447 {
448 int l1;
449
450 l1 = gen_new_label();
451 tcg_gen_or_tl(cpu_tmp0, src1, src2);
452 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
453 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
454 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_OVF);
455 gen_set_label(l1);
456 }
457
458 static inline void gen_tag_tv(TCGv src1, TCGv src2)
459 {
460 int l1;
461 TCGv r_const;
462
463 l1 = gen_new_label();
464 tcg_gen_or_tl(cpu_tmp0, src1, src2);
465 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
466 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
467 r_const = tcg_const_i32(TT_TOVF);
468 tcg_gen_helper_0_1(raise_exception, r_const);
469 tcg_temp_free(r_const);
470 gen_set_label(l1);
471 }
472
473 static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
474 {
475 tcg_gen_mov_tl(cpu_cc_src, src1);
476 tcg_gen_mov_tl(cpu_cc_src2, src2);
477 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
478 gen_cc_clear_icc();
479 gen_cc_NZ_icc(cpu_cc_dst);
480 gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
481 gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
482 #ifdef TARGET_SPARC64
483 gen_cc_clear_xcc();
484 gen_cc_NZ_xcc(cpu_cc_dst);
485 gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
486 gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
487 #endif
488 tcg_gen_mov_tl(dst, cpu_cc_dst);
489 }
490
491 static inline void gen_op_addx_cc(TCGv dst, TCGv src1, TCGv src2)
492 {
493 tcg_gen_mov_tl(cpu_cc_src, src1);
494 tcg_gen_mov_tl(cpu_cc_src2, src2);
495 gen_mov_reg_C(cpu_tmp0, cpu_psr);
496 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
497 gen_cc_clear_icc();
498 gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
499 #ifdef TARGET_SPARC64
500 gen_cc_clear_xcc();
501 gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
502 #endif
503 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
504 gen_cc_NZ_icc(cpu_cc_dst);
505 gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
506 gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
507 #ifdef TARGET_SPARC64
508 gen_cc_NZ_xcc(cpu_cc_dst);
509 gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
510 gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
511 #endif
512 tcg_gen_mov_tl(dst, cpu_cc_dst);
513 }
514
515 static inline void gen_op_tadd_cc(TCGv dst, TCGv src1, TCGv src2)
516 {
517 tcg_gen_mov_tl(cpu_cc_src, src1);
518 tcg_gen_mov_tl(cpu_cc_src2, src2);
519 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
520 gen_cc_clear_icc();
521 gen_cc_NZ_icc(cpu_cc_dst);
522 gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
523 gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
524 gen_cc_V_tag(cpu_cc_src, cpu_cc_src2);
525 #ifdef TARGET_SPARC64
526 gen_cc_clear_xcc();
527 gen_cc_NZ_xcc(cpu_cc_dst);
528 gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
529 gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
530 #endif
531 tcg_gen_mov_tl(dst, cpu_cc_dst);
532 }
533
534 static inline void gen_op_tadd_ccTV(TCGv dst, TCGv src1, TCGv src2)
535 {
536 tcg_gen_mov_tl(cpu_cc_src, src1);
537 tcg_gen_mov_tl(cpu_cc_src2, src2);
538 gen_tag_tv(cpu_cc_src, cpu_cc_src2);
539 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
540 gen_add_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
541 gen_cc_clear_icc();
542 gen_cc_NZ_icc(cpu_cc_dst);
543 gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
544 #ifdef TARGET_SPARC64
545 gen_cc_clear_xcc();
546 gen_cc_NZ_xcc(cpu_cc_dst);
547 gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
548 gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
549 #endif
550 tcg_gen_mov_tl(dst, cpu_cc_dst);
551 }
552
553 /* old op:
554 if (src1 < T1)
555 env->psr |= PSR_CARRY;
556 */
557 static inline void gen_cc_C_sub_icc(TCGv src1, TCGv src2)
558 {
559 TCGv r_temp1, r_temp2;
560 int l1;
561
562 l1 = gen_new_label();
563 r_temp1 = tcg_temp_new(TCG_TYPE_TL);
564 r_temp2 = tcg_temp_new(TCG_TYPE_TL);
565 tcg_gen_andi_tl(r_temp1, src1, 0xffffffffULL);
566 tcg_gen_andi_tl(r_temp2, src2, 0xffffffffULL);
567 tcg_gen_brcond_tl(TCG_COND_GEU, r_temp1, r_temp2, l1);
568 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_CARRY);
569 gen_set_label(l1);
570 tcg_temp_free(r_temp1);
571 tcg_temp_free(r_temp2);
572 }
573
574 #ifdef TARGET_SPARC64
575 static inline void gen_cc_C_sub_xcc(TCGv src1, TCGv src2)
576 {
577 int l1;
578
579 l1 = gen_new_label();
580 tcg_gen_brcond_tl(TCG_COND_GEU, src1, src2, l1);
581 tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_CARRY);
582 gen_set_label(l1);
583 }
584 #endif
585
586 /* old op:
587 if (((src1 ^ T1) & (src1 ^ T0)) & (1 << 31))
588 env->psr |= PSR_OVF;
589 */
590 static inline void gen_cc_V_sub_icc(TCGv dst, TCGv src1, TCGv src2)
591 {
592 TCGv r_temp;
593
594 r_temp = tcg_temp_new(TCG_TYPE_TL);
595 tcg_gen_xor_tl(r_temp, src1, src2);
596 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
597 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
598 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
599 tcg_gen_shri_tl(r_temp, r_temp, 31 - PSR_OVF_SHIFT);
600 tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
601 tcg_gen_or_i32(cpu_psr, cpu_psr, cpu_tmp32);
602 tcg_temp_free(r_temp);
603 }
604
605 #ifdef TARGET_SPARC64
606 static inline void gen_cc_V_sub_xcc(TCGv dst, TCGv src1, TCGv src2)
607 {
608 TCGv r_temp;
609
610 r_temp = tcg_temp_new(TCG_TYPE_TL);
611 tcg_gen_xor_tl(r_temp, src1, src2);
612 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
613 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
614 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 63));
615 tcg_gen_shri_tl(r_temp, r_temp, 63 - PSR_OVF_SHIFT);
616 tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
617 tcg_gen_or_i32(cpu_xcc, cpu_xcc, cpu_tmp32);
618 tcg_temp_free(r_temp);
619 }
620 #endif
621
622 static inline void gen_sub_tv(TCGv dst, TCGv src1, TCGv src2)
623 {
624 TCGv r_temp, r_const;
625 int l1;
626
627 l1 = gen_new_label();
628
629 r_temp = tcg_temp_new(TCG_TYPE_TL);
630 tcg_gen_xor_tl(r_temp, src1, src2);
631 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
632 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
633 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
634 tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
635 r_const = tcg_const_i32(TT_TOVF);
636 tcg_gen_helper_0_1(raise_exception, r_const);
637 tcg_temp_free(r_const);
638 gen_set_label(l1);
639 tcg_temp_free(r_temp);
640 }
641
642 static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
643 {
644 tcg_gen_mov_tl(cpu_cc_src, src1);
645 tcg_gen_mov_tl(cpu_cc_src2, src2);
646 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
647 gen_cc_clear_icc();
648 gen_cc_NZ_icc(cpu_cc_dst);
649 gen_cc_C_sub_icc(cpu_cc_src, cpu_cc_src2);
650 gen_cc_V_sub_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
651 #ifdef TARGET_SPARC64
652 gen_cc_clear_xcc();
653 gen_cc_NZ_xcc(cpu_cc_dst);
654 gen_cc_C_sub_xcc(cpu_cc_src, cpu_cc_src2);
655 gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
656 #endif
657 tcg_gen_mov_tl(dst, cpu_cc_dst);
658 }
659
660 static inline void gen_op_subx_cc(TCGv dst, TCGv src1, TCGv src2)
661 {
662 tcg_gen_mov_tl(cpu_cc_src, src1);
663 tcg_gen_mov_tl(cpu_cc_src2, src2);
664 gen_mov_reg_C(cpu_tmp0, cpu_psr);
665 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
666 gen_cc_clear_icc();
667 gen_cc_C_sub_icc(cpu_cc_dst, cpu_cc_src);
668 #ifdef TARGET_SPARC64
669 gen_cc_clear_xcc();
670 gen_cc_C_sub_xcc(cpu_cc_dst, cpu_cc_src);
671 #endif
672 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
673 gen_cc_NZ_icc(cpu_cc_dst);
674 gen_cc_C_sub_icc(cpu_cc_dst, cpu_cc_src);
675 gen_cc_V_sub_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
676 #ifdef TARGET_SPARC64
677 gen_cc_NZ_xcc(cpu_cc_dst);
678 gen_cc_C_sub_xcc(cpu_cc_dst, cpu_cc_src);
679 gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
680 #endif
681 tcg_gen_mov_tl(dst, cpu_cc_dst);
682 }
683
684 static inline void gen_op_tsub_cc(TCGv dst, TCGv src1, TCGv src2)
685 {
686 tcg_gen_mov_tl(cpu_cc_src, src1);
687 tcg_gen_mov_tl(cpu_cc_src2, src2);
688 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
689 gen_cc_clear_icc();
690 gen_cc_NZ_icc(cpu_cc_dst);
691 gen_cc_C_sub_icc(cpu_cc_src, cpu_cc_src2);
692 gen_cc_V_sub_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
693 gen_cc_V_tag(cpu_cc_src, cpu_cc_src2);
694 #ifdef TARGET_SPARC64
695 gen_cc_clear_xcc();
696 gen_cc_NZ_xcc(cpu_cc_dst);
697 gen_cc_C_sub_xcc(cpu_cc_src, cpu_cc_src2);
698 gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
699 #endif
700 tcg_gen_mov_tl(dst, cpu_cc_dst);
701 }
702
703 static inline void gen_op_tsub_ccTV(TCGv dst, TCGv src1, TCGv src2)
704 {
705 tcg_gen_mov_tl(cpu_cc_src, src1);
706 tcg_gen_mov_tl(cpu_cc_src2, src2);
707 gen_tag_tv(cpu_cc_src, cpu_cc_src2);
708 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
709 gen_sub_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
710 gen_cc_clear_icc();
711 gen_cc_NZ_icc(cpu_cc_dst);
712 gen_cc_C_sub_icc(cpu_cc_src, cpu_cc_src2);
713 #ifdef TARGET_SPARC64
714 gen_cc_clear_xcc();
715 gen_cc_NZ_xcc(cpu_cc_dst);
716 gen_cc_C_sub_xcc(cpu_cc_src, cpu_cc_src2);
717 gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
718 #endif
719 tcg_gen_mov_tl(dst, cpu_cc_dst);
720 }
721
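/* MULScc performs one step of the SPARCv8 multiply-step algorithm; roughly:
       rs2' = (Y & 1) ? rs2 : 0;
       Y    = ((rs1 & 1) << 31) | (Y >> 1);
       rs1' = ((N ^ V) << 31) | (rs1 >> 1);
       rd   = rs1' + rs2';          // icc updated as for ADDcc
   which is what the TCG ops below implement, step by step. */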
722 static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
723 {
724 TCGv r_temp;
725 int l1;
726
727 l1 = gen_new_label();
728 r_temp = tcg_temp_new(TCG_TYPE_TL);
729
730 /* old op:
731 if (!(env->y & 1))
732 T1 = 0;
733 */
734 tcg_gen_mov_tl(cpu_cc_src, src1);
735 tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
736 tcg_gen_mov_tl(cpu_cc_src2, src2);
737 tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
738 tcg_gen_movi_tl(cpu_cc_src2, 0);
739 gen_set_label(l1);
740
741 // b2 = T0 & 1;
742 // env->y = (b2 << 31) | (env->y >> 1);
743 tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
744 tcg_gen_shli_tl(r_temp, r_temp, 31);
745 tcg_gen_shri_tl(cpu_tmp0, cpu_y, 1);
746 tcg_gen_or_tl(cpu_y, cpu_tmp0, r_temp);
747
748 // b1 = N ^ V;
749 gen_mov_reg_N(cpu_tmp0, cpu_psr);
750 gen_mov_reg_V(r_temp, cpu_psr);
751 tcg_gen_xor_tl(cpu_tmp0, cpu_tmp0, r_temp);
752 tcg_temp_free(r_temp);
753
754 // T0 = (b1 << 31) | (T0 >> 1);
755 // src1 = T0;
756 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, 31);
757 tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
758 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
759
760 /* do addition and update flags */
761 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
762
763 gen_cc_clear_icc();
764 gen_cc_NZ_icc(cpu_cc_dst);
765 gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
766 gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
767 tcg_gen_mov_tl(dst, cpu_cc_dst);
768 }
769
770 static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
771 {
772 TCGv r_temp, r_temp2;
773
774 r_temp = tcg_temp_new(TCG_TYPE_I64);
775 r_temp2 = tcg_temp_new(TCG_TYPE_I64);
776
777 tcg_gen_extu_i32_i64(r_temp, src2);
778 tcg_gen_extu_i32_i64(r_temp2, src1);
779 tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);
780
781 tcg_gen_shri_i64(r_temp, r_temp2, 32);
782 tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
783 tcg_temp_free(r_temp);
784 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
785 #ifdef TARGET_SPARC64
786 tcg_gen_mov_i64(dst, r_temp2);
787 #else
788 tcg_gen_trunc_i64_tl(dst, r_temp2);
789 #endif
790 tcg_temp_free(r_temp2);
791 }
792
793 static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
794 {
795 TCGv r_temp, r_temp2;
796
797 r_temp = tcg_temp_new(TCG_TYPE_I64);
798 r_temp2 = tcg_temp_new(TCG_TYPE_I64);
799
800 tcg_gen_ext_i32_i64(r_temp, src2);
801 tcg_gen_ext_i32_i64(r_temp2, src1);
802 tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);
803
804 tcg_gen_shri_i64(r_temp, r_temp2, 32);
805 tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
806 tcg_temp_free(r_temp);
807 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
808 #ifdef TARGET_SPARC64
809 tcg_gen_mov_i64(dst, r_temp2);
810 #else
811 tcg_gen_trunc_i64_tl(dst, r_temp2);
812 #endif
813 tcg_temp_free(r_temp2);
814 }
815
816 #ifdef TARGET_SPARC64
817 static inline void gen_trap_ifdivzero_tl(TCGv divisor)
818 {
819 TCGv r_const;
820 int l1;
821
822 l1 = gen_new_label();
823 tcg_gen_brcondi_tl(TCG_COND_NE, divisor, 0, l1);
824 r_const = tcg_const_i32(TT_DIV_ZERO);
825 tcg_gen_helper_0_1(raise_exception, r_const);
826 tcg_temp_free(r_const);
827 gen_set_label(l1);
828 }
829
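/* sdivx: division by zero traps via gen_trap_ifdivzero_tl(); the extra
   INT64_MIN / -1 check forces the architecturally defined result (INT64_MIN)
   instead of letting the host division overflow. */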
830 static inline void gen_op_sdivx(TCGv dst, TCGv src1, TCGv src2)
831 {
832 int l1, l2;
833
834 l1 = gen_new_label();
835 l2 = gen_new_label();
836 tcg_gen_mov_tl(cpu_cc_src, src1);
837 tcg_gen_mov_tl(cpu_cc_src2, src2);
838 gen_trap_ifdivzero_tl(cpu_cc_src2);
839 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src, INT64_MIN, l1);
840 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src2, -1, l1);
841 tcg_gen_movi_i64(dst, INT64_MIN);
842 tcg_gen_br(l2);
843 gen_set_label(l1);
844 tcg_gen_div_i64(dst, cpu_cc_src, cpu_cc_src2);
845 gen_set_label(l2);
846 }
847 #endif
848
849 static inline void gen_op_div_cc(TCGv dst)
850 {
851 int l1;
852
853 tcg_gen_mov_tl(cpu_cc_dst, dst);
854 gen_cc_clear_icc();
855 gen_cc_NZ_icc(cpu_cc_dst);
856 l1 = gen_new_label();
857 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_cc_src2, 0, l1);
858 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_OVF);
859 gen_set_label(l1);
860 }
861
862 static inline void gen_op_logic_cc(TCGv dst)
863 {
864 tcg_gen_mov_tl(cpu_cc_dst, dst);
865
866 gen_cc_clear_icc();
867 gen_cc_NZ_icc(cpu_cc_dst);
868 #ifdef TARGET_SPARC64
869 gen_cc_clear_xcc();
870 gen_cc_NZ_xcc(cpu_cc_dst);
871 #endif
872 }
873
874 // 1
875 static inline void gen_op_eval_ba(TCGv dst)
876 {
877 tcg_gen_movi_tl(dst, 1);
878 }
879
880 // Z
881 static inline void gen_op_eval_be(TCGv dst, TCGv src)
882 {
883 gen_mov_reg_Z(dst, src);
884 }
885
886 // Z | (N ^ V)
887 static inline void gen_op_eval_ble(TCGv dst, TCGv src)
888 {
889 gen_mov_reg_N(cpu_tmp0, src);
890 gen_mov_reg_V(dst, src);
891 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
892 gen_mov_reg_Z(cpu_tmp0, src);
893 tcg_gen_or_tl(dst, dst, cpu_tmp0);
894 }
895
896 // N ^ V
897 static inline void gen_op_eval_bl(TCGv dst, TCGv src)
898 {
899 gen_mov_reg_V(cpu_tmp0, src);
900 gen_mov_reg_N(dst, src);
901 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
902 }
903
904 // C | Z
905 static inline void gen_op_eval_bleu(TCGv dst, TCGv src)
906 {
907 gen_mov_reg_Z(cpu_tmp0, src);
908 gen_mov_reg_C(dst, src);
909 tcg_gen_or_tl(dst, dst, cpu_tmp0);
910 }
911
912 // C
913 static inline void gen_op_eval_bcs(TCGv dst, TCGv src)
914 {
915 gen_mov_reg_C(dst, src);
916 }
917
918 // V
919 static inline void gen_op_eval_bvs(TCGv dst, TCGv src)
920 {
921 gen_mov_reg_V(dst, src);
922 }
923
924 // 0
925 static inline void gen_op_eval_bn(TCGv dst)
926 {
927 tcg_gen_movi_tl(dst, 0);
928 }
929
930 // N
931 static inline void gen_op_eval_bneg(TCGv dst, TCGv src)
932 {
933 gen_mov_reg_N(dst, src);
934 }
935
936 // !Z
937 static inline void gen_op_eval_bne(TCGv dst, TCGv src)
938 {
939 gen_mov_reg_Z(dst, src);
940 tcg_gen_xori_tl(dst, dst, 0x1);
941 }
942
943 // !(Z | (N ^ V))
944 static inline void gen_op_eval_bg(TCGv dst, TCGv src)
945 {
946 gen_mov_reg_N(cpu_tmp0, src);
947 gen_mov_reg_V(dst, src);
948 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
949 gen_mov_reg_Z(cpu_tmp0, src);
950 tcg_gen_or_tl(dst, dst, cpu_tmp0);
951 tcg_gen_xori_tl(dst, dst, 0x1);
952 }
953
954 // !(N ^ V)
955 static inline void gen_op_eval_bge(TCGv dst, TCGv src)
956 {
957 gen_mov_reg_V(cpu_tmp0, src);
958 gen_mov_reg_N(dst, src);
959 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
960 tcg_gen_xori_tl(dst, dst, 0x1);
961 }
962
963 // !(C | Z)
964 static inline void gen_op_eval_bgu(TCGv dst, TCGv src)
965 {
966 gen_mov_reg_Z(cpu_tmp0, src);
967 gen_mov_reg_C(dst, src);
968 tcg_gen_or_tl(dst, dst, cpu_tmp0);
969 tcg_gen_xori_tl(dst, dst, 0x1);
970 }
971
972 // !C
973 static inline void gen_op_eval_bcc(TCGv dst, TCGv src)
974 {
975 gen_mov_reg_C(dst, src);
976 tcg_gen_xori_tl(dst, dst, 0x1);
977 }
978
979 // !N
980 static inline void gen_op_eval_bpos(TCGv dst, TCGv src)
981 {
982 gen_mov_reg_N(dst, src);
983 tcg_gen_xori_tl(dst, dst, 0x1);
984 }
985
986 // !V
987 static inline void gen_op_eval_bvc(TCGv dst, TCGv src)
988 {
989 gen_mov_reg_V(dst, src);
990 tcg_gen_xori_tl(dst, dst, 0x1);
991 }
992
993 /*
994 FSR bit field FCC1 | FCC0:
995 0 =
996 1 <
997 2 >
998 3 unordered
999 */
1000 static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
1001 unsigned int fcc_offset)
1002 {
1003 tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
1004 tcg_gen_andi_tl(reg, reg, 0x1);
1005 }
1006
1007 static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
1008 unsigned int fcc_offset)
1009 {
1010 tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
1011 tcg_gen_andi_tl(reg, reg, 0x1);
1012 }
1013
1014 // !0: FCC0 | FCC1
1015 static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
1016 unsigned int fcc_offset)
1017 {
1018 gen_mov_reg_FCC0(dst, src, fcc_offset);
1019 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1020 tcg_gen_or_tl(dst, dst, cpu_tmp0);
1021 }
1022
1023 // 1 or 2: FCC0 ^ FCC1
1024 static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
1025 unsigned int fcc_offset)
1026 {
1027 gen_mov_reg_FCC0(dst, src, fcc_offset);
1028 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1029 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
1030 }
1031
1032 // 1 or 3: FCC0
1033 static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
1034 unsigned int fcc_offset)
1035 {
1036 gen_mov_reg_FCC0(dst, src, fcc_offset);
1037 }
1038
1039 // 1: FCC0 & !FCC1
1040 static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
1041 unsigned int fcc_offset)
1042 {
1043 gen_mov_reg_FCC0(dst, src, fcc_offset);
1044 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1045 tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
1046 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1047 }
1048
1049 // 2 or 3: FCC1
1050 static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
1051 unsigned int fcc_offset)
1052 {
1053 gen_mov_reg_FCC1(dst, src, fcc_offset);
1054 }
1055
1056 // 2: !FCC0 & FCC1
1057 static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
1058 unsigned int fcc_offset)
1059 {
1060 gen_mov_reg_FCC0(dst, src, fcc_offset);
1061 tcg_gen_xori_tl(dst, dst, 0x1);
1062 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1063 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1064 }
1065
1066 // 3: FCC0 & FCC1
1067 static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
1068 unsigned int fcc_offset)
1069 {
1070 gen_mov_reg_FCC0(dst, src, fcc_offset);
1071 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1072 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1073 }
1074
1075 // 0: !(FCC0 | FCC1)
1076 static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
1077 unsigned int fcc_offset)
1078 {
1079 gen_mov_reg_FCC0(dst, src, fcc_offset);
1080 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1081 tcg_gen_or_tl(dst, dst, cpu_tmp0);
1082 tcg_gen_xori_tl(dst, dst, 0x1);
1083 }
1084
1085 // 0 or 3: !(FCC0 ^ FCC1)
1086 static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
1087 unsigned int fcc_offset)
1088 {
1089 gen_mov_reg_FCC0(dst, src, fcc_offset);
1090 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1091 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
1092 tcg_gen_xori_tl(dst, dst, 0x1);
1093 }
1094
1095 // 0 or 2: !FCC0
1096 static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
1097 unsigned int fcc_offset)
1098 {
1099 gen_mov_reg_FCC0(dst, src, fcc_offset);
1100 tcg_gen_xori_tl(dst, dst, 0x1);
1101 }
1102
1103 // !1: !(FCC0 & !FCC1)
1104 static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
1105 unsigned int fcc_offset)
1106 {
1107 gen_mov_reg_FCC0(dst, src, fcc_offset);
1108 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1109 tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
1110 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1111 tcg_gen_xori_tl(dst, dst, 0x1);
1112 }
1113
1114 // 0 or 1: !FCC1
1115 static inline void gen_op_eval_fble(TCGv dst, TCGv src,
1116 unsigned int fcc_offset)
1117 {
1118 gen_mov_reg_FCC1(dst, src, fcc_offset);
1119 tcg_gen_xori_tl(dst, dst, 0x1);
1120 }
1121
1122 // !2: !(!FCC0 & FCC1)
1123 static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
1124 unsigned int fcc_offset)
1125 {
1126 gen_mov_reg_FCC0(dst, src, fcc_offset);
1127 tcg_gen_xori_tl(dst, dst, 0x1);
1128 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1129 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1130 tcg_gen_xori_tl(dst, dst, 0x1);
1131 }
1132
1133 // !3: !(FCC0 & FCC1)
1134 static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
1135 unsigned int fcc_offset)
1136 {
1137 gen_mov_reg_FCC0(dst, src, fcc_offset);
1138 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1139 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1140 tcg_gen_xori_tl(dst, dst, 0x1);
1141 }
1142
1143 static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
1144 target_ulong pc2, TCGv r_cond)
1145 {
1146 int l1;
1147
1148 l1 = gen_new_label();
1149
1150 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1151
1152 gen_goto_tb(dc, 0, pc1, pc1 + 4);
1153
1154 gen_set_label(l1);
1155 gen_goto_tb(dc, 1, pc2, pc2 + 4);
1156 }
1157
1158 static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
1159 target_ulong pc2, TCGv r_cond)
1160 {
1161 int l1;
1162
1163 l1 = gen_new_label();
1164
1165 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1166
1167 gen_goto_tb(dc, 0, pc2, pc1);
1168
1169 gen_set_label(l1);
1170 gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
1171 }
1172
1173 static inline void gen_generic_branch(target_ulong npc1, target_ulong npc2,
1174 TCGv r_cond)
1175 {
1176 int l1, l2;
1177
1178 l1 = gen_new_label();
1179 l2 = gen_new_label();
1180
1181 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1182
1183 tcg_gen_movi_tl(cpu_npc, npc1);
1184 tcg_gen_br(l2);
1185
1186 gen_set_label(l1);
1187 tcg_gen_movi_tl(cpu_npc, npc2);
1188 gen_set_label(l2);
1189 }
1190
1191 /* call this function before using the condition register as it may
1192 have been set for a jump */
1193 static inline void flush_cond(DisasContext *dc, TCGv cond)
1194 {
1195 if (dc->npc == JUMP_PC) {
1196 gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
1197 dc->npc = DYNAMIC_PC;
1198 }
1199 }
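/* While dc->npc == JUMP_PC the next PC is still undecided: it is one of
   dc->jump_pc[0]/dc->jump_pc[1] depending on the condition held in cpu_cond.
   gen_generic_branch() materializes that choice into cpu_npc, after which
   npc must be treated as DYNAMIC_PC. */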
1200
1201 static inline void save_npc(DisasContext *dc, TCGv cond)
1202 {
1203 if (dc->npc == JUMP_PC) {
1204 gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
1205 dc->npc = DYNAMIC_PC;
1206 } else if (dc->npc != DYNAMIC_PC) {
1207 tcg_gen_movi_tl(cpu_npc, dc->npc);
1208 }
1209 }
1210
1211 static inline void save_state(DisasContext *dc, TCGv cond)
1212 {
1213 tcg_gen_movi_tl(cpu_pc, dc->pc);
1214 save_npc(dc, cond);
1215 }
1216
1217 static inline void gen_mov_pc_npc(DisasContext *dc, TCGv cond)
1218 {
1219 if (dc->npc == JUMP_PC) {
1220 gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
1221 tcg_gen_mov_tl(cpu_pc, cpu_npc);
1222 dc->pc = DYNAMIC_PC;
1223 } else if (dc->npc == DYNAMIC_PC) {
1224 tcg_gen_mov_tl(cpu_pc, cpu_npc);
1225 dc->pc = DYNAMIC_PC;
1226 } else {
1227 dc->pc = dc->npc;
1228 }
1229 }
1230
1231 static inline void gen_op_next_insn(void)
1232 {
1233 tcg_gen_mov_tl(cpu_pc, cpu_npc);
1234 tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
1235 }
1236
1237 static inline void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond)
1238 {
1239 TCGv r_src;
1240
1241 #ifdef TARGET_SPARC64
1242 if (cc)
1243 r_src = cpu_xcc;
1244 else
1245 r_src = cpu_psr;
1246 #else
1247 r_src = cpu_psr;
1248 #endif
1249 switch (cond) {
1250 case 0x0:
1251 gen_op_eval_bn(r_dst);
1252 break;
1253 case 0x1:
1254 gen_op_eval_be(r_dst, r_src);
1255 break;
1256 case 0x2:
1257 gen_op_eval_ble(r_dst, r_src);
1258 break;
1259 case 0x3:
1260 gen_op_eval_bl(r_dst, r_src);
1261 break;
1262 case 0x4:
1263 gen_op_eval_bleu(r_dst, r_src);
1264 break;
1265 case 0x5:
1266 gen_op_eval_bcs(r_dst, r_src);
1267 break;
1268 case 0x6:
1269 gen_op_eval_bneg(r_dst, r_src);
1270 break;
1271 case 0x7:
1272 gen_op_eval_bvs(r_dst, r_src);
1273 break;
1274 case 0x8:
1275 gen_op_eval_ba(r_dst);
1276 break;
1277 case 0x9:
1278 gen_op_eval_bne(r_dst, r_src);
1279 break;
1280 case 0xa:
1281 gen_op_eval_bg(r_dst, r_src);
1282 break;
1283 case 0xb:
1284 gen_op_eval_bge(r_dst, r_src);
1285 break;
1286 case 0xc:
1287 gen_op_eval_bgu(r_dst, r_src);
1288 break;
1289 case 0xd:
1290 gen_op_eval_bcc(r_dst, r_src);
1291 break;
1292 case 0xe:
1293 gen_op_eval_bpos(r_dst, r_src);
1294 break;
1295 case 0xf:
1296 gen_op_eval_bvc(r_dst, r_src);
1297 break;
1298 }
1299 }
1300
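/* The fcc_offset handed to the gen_op_eval_fb* helpers is the distance from
   FSR.fcc0 to the selected field: fcc0 sits at FSR bits 11:10 while
   fcc1/fcc2/fcc3 sit at bits 33:32, 35:34 and 37:36, hence the "32 - 10"
   style offsets below. */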
1301 static inline void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
1302 {
1303 unsigned int offset;
1304
1305 switch (cc) {
1306 default:
1307 case 0x0:
1308 offset = 0;
1309 break;
1310 case 0x1:
1311 offset = 32 - 10;
1312 break;
1313 case 0x2:
1314 offset = 34 - 10;
1315 break;
1316 case 0x3:
1317 offset = 36 - 10;
1318 break;
1319 }
1320
1321 switch (cond) {
1322 case 0x0:
1323 gen_op_eval_bn(r_dst);
1324 break;
1325 case 0x1:
1326 gen_op_eval_fbne(r_dst, cpu_fsr, offset);
1327 break;
1328 case 0x2:
1329 gen_op_eval_fblg(r_dst, cpu_fsr, offset);
1330 break;
1331 case 0x3:
1332 gen_op_eval_fbul(r_dst, cpu_fsr, offset);
1333 break;
1334 case 0x4:
1335 gen_op_eval_fbl(r_dst, cpu_fsr, offset);
1336 break;
1337 case 0x5:
1338 gen_op_eval_fbug(r_dst, cpu_fsr, offset);
1339 break;
1340 case 0x6:
1341 gen_op_eval_fbg(r_dst, cpu_fsr, offset);
1342 break;
1343 case 0x7:
1344 gen_op_eval_fbu(r_dst, cpu_fsr, offset);
1345 break;
1346 case 0x8:
1347 gen_op_eval_ba(r_dst);
1348 break;
1349 case 0x9:
1350 gen_op_eval_fbe(r_dst, cpu_fsr, offset);
1351 break;
1352 case 0xa:
1353 gen_op_eval_fbue(r_dst, cpu_fsr, offset);
1354 break;
1355 case 0xb:
1356 gen_op_eval_fbge(r_dst, cpu_fsr, offset);
1357 break;
1358 case 0xc:
1359 gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
1360 break;
1361 case 0xd:
1362 gen_op_eval_fble(r_dst, cpu_fsr, offset);
1363 break;
1364 case 0xe:
1365 gen_op_eval_fbule(r_dst, cpu_fsr, offset);
1366 break;
1367 case 0xf:
1368 gen_op_eval_fbo(r_dst, cpu_fsr, offset);
1369 break;
1370 }
1371 }
1372
1373 #ifdef TARGET_SPARC64
1374 // Inverted logic
1375 static const int gen_tcg_cond_reg[8] = {
1376 -1,
1377 TCG_COND_NE,
1378 TCG_COND_GT,
1379 TCG_COND_GE,
1380 -1,
1381 TCG_COND_EQ,
1382 TCG_COND_LE,
1383 TCG_COND_LT,
1384 };
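/* "Inverted" because gen_cond_reg() branches over the "set to 1" move when
   the condition from this table holds, so each entry is the negation of the
   architectural BPr condition (e.g. BRZ maps to TCG_COND_NE).  Entries 0 and
   4 are reserved encodings. */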
1385
1386 static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
1387 {
1388 int l1;
1389
1390 l1 = gen_new_label();
1391 tcg_gen_movi_tl(r_dst, 0);
1392 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], r_src, 0, l1);
1393 tcg_gen_movi_tl(r_dst, 1);
1394 gen_set_label(l1);
1395 }
1396 #endif
1397
1398 /* XXX: potentially incorrect if dynamic npc */
1399 static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
1400 TCGv r_cond)
1401 {
1402 unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1403 target_ulong target = dc->pc + offset;
1404
1405 if (cond == 0x0) {
1406 /* unconditional not taken */
1407 if (a) {
1408 dc->pc = dc->npc + 4;
1409 dc->npc = dc->pc + 4;
1410 } else {
1411 dc->pc = dc->npc;
1412 dc->npc = dc->pc + 4;
1413 }
1414 } else if (cond == 0x8) {
1415 /* unconditional taken */
1416 if (a) {
1417 dc->pc = target;
1418 dc->npc = dc->pc + 4;
1419 } else {
1420 dc->pc = dc->npc;
1421 dc->npc = target;
1422 }
1423 } else {
1424 flush_cond(dc, r_cond);
1425 gen_cond(r_cond, cc, cond);
1426 if (a) {
1427 gen_branch_a(dc, target, dc->npc, r_cond);
1428 dc->is_br = 1;
1429 } else {
1430 dc->pc = dc->npc;
1431 dc->jump_pc[0] = target;
1432 dc->jump_pc[1] = dc->npc + 4;
1433 dc->npc = JUMP_PC;
1434 }
1435 }
1436 }
1437
1438 /* XXX: potentially incorrect if dynamic npc */
1439 static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
1440 TCGv r_cond)
1441 {
1442 unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1443 target_ulong target = dc->pc + offset;
1444
1445 if (cond == 0x0) {
1446 /* unconditional not taken */
1447 if (a) {
1448 dc->pc = dc->npc + 4;
1449 dc->npc = dc->pc + 4;
1450 } else {
1451 dc->pc = dc->npc;
1452 dc->npc = dc->pc + 4;
1453 }
1454 } else if (cond == 0x8) {
1455 /* unconditional taken */
1456 if (a) {
1457 dc->pc = target;
1458 dc->npc = dc->pc + 4;
1459 } else {
1460 dc->pc = dc->npc;
1461 dc->npc = target;
1462 }
1463 } else {
1464 flush_cond(dc, r_cond);
1465 gen_fcond(r_cond, cc, cond);
1466 if (a) {
1467 gen_branch_a(dc, target, dc->npc, r_cond);
1468 dc->is_br = 1;
1469 } else {
1470 dc->pc = dc->npc;
1471 dc->jump_pc[0] = target;
1472 dc->jump_pc[1] = dc->npc + 4;
1473 dc->npc = JUMP_PC;
1474 }
1475 }
1476 }
1477
1478 #ifdef TARGET_SPARC64
1479 /* XXX: potentially incorrect if dynamic npc */
1480 static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
1481 TCGv r_cond, TCGv r_reg)
1482 {
1483 unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
1484 target_ulong target = dc->pc + offset;
1485
1486 flush_cond(dc, r_cond);
1487 gen_cond_reg(r_cond, cond, r_reg);
1488 if (a) {
1489 gen_branch_a(dc, target, dc->npc, r_cond);
1490 dc->is_br = 1;
1491 } else {
1492 dc->pc = dc->npc;
1493 dc->jump_pc[0] = target;
1494 dc->jump_pc[1] = dc->npc + 4;
1495 dc->npc = JUMP_PC;
1496 }
1497 }
1498
1499 static GenOpFunc * const gen_fcmps[4] = {
1500 helper_fcmps,
1501 helper_fcmps_fcc1,
1502 helper_fcmps_fcc2,
1503 helper_fcmps_fcc3,
1504 };
1505
1506 static GenOpFunc * const gen_fcmpd[4] = {
1507 helper_fcmpd,
1508 helper_fcmpd_fcc1,
1509 helper_fcmpd_fcc2,
1510 helper_fcmpd_fcc3,
1511 };
1512
1513 static GenOpFunc * const gen_fcmpq[4] = {
1514 helper_fcmpq,
1515 helper_fcmpq_fcc1,
1516 helper_fcmpq_fcc2,
1517 helper_fcmpq_fcc3,
1518 };
1519
1520 static GenOpFunc * const gen_fcmpes[4] = {
1521 helper_fcmpes,
1522 helper_fcmpes_fcc1,
1523 helper_fcmpes_fcc2,
1524 helper_fcmpes_fcc3,
1525 };
1526
1527 static GenOpFunc * const gen_fcmped[4] = {
1528 helper_fcmped,
1529 helper_fcmped_fcc1,
1530 helper_fcmped_fcc2,
1531 helper_fcmped_fcc3,
1532 };
1533
1534 static GenOpFunc * const gen_fcmpeq[4] = {
1535 helper_fcmpeq,
1536 helper_fcmpeq_fcc1,
1537 helper_fcmpeq_fcc2,
1538 helper_fcmpeq_fcc3,
1539 };
1540
1541 static inline void gen_op_fcmps(int fccno)
1542 {
1543 tcg_gen_helper_0_0(gen_fcmps[fccno]);
1544 }
1545
1546 static inline void gen_op_fcmpd(int fccno)
1547 {
1548 tcg_gen_helper_0_0(gen_fcmpd[fccno]);
1549 }
1550
1551 static inline void gen_op_fcmpq(int fccno)
1552 {
1553 tcg_gen_helper_0_0(gen_fcmpq[fccno]);
1554 }
1555
1556 static inline void gen_op_fcmpes(int fccno)
1557 {
1558 tcg_gen_helper_0_0(gen_fcmpes[fccno]);
1559 }
1560
1561 static inline void gen_op_fcmped(int fccno)
1562 {
1563 tcg_gen_helper_0_0(gen_fcmped[fccno]);
1564 }
1565
1566 static inline void gen_op_fcmpeq(int fccno)
1567 {
1568 tcg_gen_helper_0_0(gen_fcmpeq[fccno]);
1569 }
1570
1571 #else
1572
1573 static inline void gen_op_fcmps(int fccno)
1574 {
1575 tcg_gen_helper_0_0(helper_fcmps);
1576 }
1577
1578 static inline void gen_op_fcmpd(int fccno)
1579 {
1580 tcg_gen_helper_0_0(helper_fcmpd);
1581 }
1582
1583 static inline void gen_op_fcmpq(int fccno)
1584 {
1585 tcg_gen_helper_0_0(helper_fcmpq);
1586 }
1587
1588 static inline void gen_op_fcmpes(int fccno)
1589 {
1590 tcg_gen_helper_0_0(helper_fcmpes);
1591 }
1592
1593 static inline void gen_op_fcmped(int fccno)
1594 {
1595 tcg_gen_helper_0_0(helper_fcmped);
1596 }
1597
1598 static inline void gen_op_fcmpeq(int fccno)
1599 {
1600 tcg_gen_helper_0_0(helper_fcmpeq);
1601 }
1602 #endif
1603
1604 static inline void gen_op_fpexception_im(int fsr_flags)
1605 {
1606 TCGv r_const;
1607
1608 tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
1609 tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
1610 r_const = tcg_const_i32(TT_FP_EXCP);
1611 tcg_gen_helper_0_1(raise_exception, r_const);
1612 tcg_temp_free(r_const);
1613 }
1614
1615 static int gen_trap_ifnofpu(DisasContext *dc, TCGv r_cond)
1616 {
1617 #if !defined(CONFIG_USER_ONLY)
1618 if (!dc->fpu_enabled) {
1619 TCGv r_const;
1620
1621 save_state(dc, r_cond);
1622 r_const = tcg_const_i32(TT_NFPU_INSN);
1623 tcg_gen_helper_0_1(raise_exception, r_const);
1624 tcg_temp_free(r_const);
1625 dc->is_br = 1;
1626 return 1;
1627 }
1628 #endif
1629 return 0;
1630 }
1631
1632 static inline void gen_op_clear_ieee_excp_and_FTT(void)
1633 {
1634 tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
1635 }
1636
1637 static inline void gen_clear_float_exceptions(void)
1638 {
1639 tcg_gen_helper_0_0(helper_clear_float_exceptions);
1640 }
1641
1642 /* asi moves */
1643 #ifdef TARGET_SPARC64
1644 static inline TCGv gen_get_asi(int insn, TCGv r_addr)
1645 {
1646 int asi;
1647 TCGv r_asi;
1648
1649 if (IS_IMM) {
1650 r_asi = tcg_temp_new(TCG_TYPE_I32);
1651 tcg_gen_mov_i32(r_asi, cpu_asi);
1652 } else {
1653 asi = GET_FIELD(insn, 19, 26);
1654 r_asi = tcg_const_i32(asi);
1655 }
1656 return r_asi;
1657 }
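/* With the i bit set the ASI comes from the %asi register (cpu_asi),
   otherwise from the immediate ASI field in insn bits <12:5>; either way the
   actual access is performed at run time by the ld/st_asi helpers. */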
1658
1659 static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
1660 int sign)
1661 {
1662 TCGv r_asi, r_size, r_sign;
1663
1664 r_asi = gen_get_asi(insn, addr);
1665 r_size = tcg_const_i32(size);
1666 r_sign = tcg_const_i32(sign);
1667 tcg_gen_helper_1_4(helper_ld_asi, dst, addr, r_asi, r_size, r_sign);
1668 tcg_temp_free(r_sign);
1669 tcg_temp_free(r_size);
1670 tcg_temp_free(r_asi);
1671 }
1672
1673 static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
1674 {
1675 TCGv r_asi, r_size;
1676
1677 r_asi = gen_get_asi(insn, addr);
1678 r_size = tcg_const_i32(size);
1679 tcg_gen_helper_0_4(helper_st_asi, addr, src, r_asi, r_size);
1680 tcg_temp_free(r_size);
1681 tcg_temp_free(r_asi);
1682 }
1683
1684 static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
1685 {
1686 TCGv r_asi, r_size, r_rd;
1687
1688 r_asi = gen_get_asi(insn, addr);
1689 r_size = tcg_const_i32(size);
1690 r_rd = tcg_const_i32(rd);
1691 tcg_gen_helper_0_4(helper_ldf_asi, addr, r_asi, r_size, r_rd);
1692 tcg_temp_free(r_rd);
1693 tcg_temp_free(r_size);
1694 tcg_temp_free(r_asi);
1695 }
1696
1697 static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
1698 {
1699 TCGv r_asi, r_size, r_rd;
1700
1701 r_asi = gen_get_asi(insn, addr);
1702 r_size = tcg_const_i32(size);
1703 r_rd = tcg_const_i32(rd);
1704 tcg_gen_helper_0_4(helper_stf_asi, addr, r_asi, r_size, r_rd);
1705 tcg_temp_free(r_rd);
1706 tcg_temp_free(r_size);
1707 tcg_temp_free(r_asi);
1708 }
1709
1710 static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
1711 {
1712 TCGv r_asi, r_size, r_sign;
1713
1714 r_asi = gen_get_asi(insn, addr);
1715 r_size = tcg_const_i32(4);
1716 r_sign = tcg_const_i32(0);
1717 tcg_gen_helper_1_4(helper_ld_asi, cpu_tmp64, addr, r_asi, r_size, r_sign);
1718 tcg_temp_free(r_sign);
1719 tcg_gen_helper_0_4(helper_st_asi, addr, dst, r_asi, r_size);
1720 tcg_temp_free(r_size);
1721 tcg_temp_free(r_asi);
1722 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1723 }
1724
1725 static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
1726 {
1727 TCGv r_asi, r_rd;
1728
1729 r_asi = gen_get_asi(insn, addr);
1730 r_rd = tcg_const_i32(rd);
1731 tcg_gen_helper_0_3(helper_ldda_asi, addr, r_asi, r_rd);
1732 tcg_temp_free(r_rd);
1733 tcg_temp_free(r_asi);
1734 }
1735
1736 static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
1737 {
1738 TCGv r_temp, r_asi, r_size;
1739
1740 r_temp = tcg_temp_new(TCG_TYPE_TL);
1741 gen_movl_reg_TN(rd + 1, r_temp);
1742 tcg_gen_helper_1_2(helper_pack64, cpu_tmp64, hi,
1743 r_temp);
1744 tcg_temp_free(r_temp);
1745 r_asi = gen_get_asi(insn, addr);
1746 r_size = tcg_const_i32(8);
1747 tcg_gen_helper_0_4(helper_st_asi, addr, cpu_tmp64, r_asi, r_size);
1748 tcg_temp_free(r_size);
1749 tcg_temp_free(r_asi);
1750 }
1751
1752 static inline void gen_cas_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
1753 int rd)
1754 {
1755 TCGv r_val1, r_asi;
1756
1757 r_val1 = tcg_temp_new(TCG_TYPE_TL);
1758 gen_movl_reg_TN(rd, r_val1);
1759 r_asi = gen_get_asi(insn, addr);
1760 tcg_gen_helper_1_4(helper_cas_asi, dst, addr, r_val1, val2, r_asi);
1761 tcg_temp_free(r_asi);
1762 tcg_temp_free(r_val1);
1763 }
1764
1765 static inline void gen_casx_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
1766 int rd)
1767 {
1768 TCGv r_asi;
1769
1770 gen_movl_reg_TN(rd, cpu_tmp64);
1771 r_asi = gen_get_asi(insn, addr);
1772 tcg_gen_helper_1_4(helper_casx_asi, dst, addr, cpu_tmp64, val2, r_asi);
1773 tcg_temp_free(r_asi);
1774 }
1775
1776 #elif !defined(CONFIG_USER_ONLY)
1777
1778 static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
1779 int sign)
1780 {
1781 TCGv r_asi, r_size, r_sign;
1782
1783 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1784 r_size = tcg_const_i32(size);
1785 r_sign = tcg_const_i32(sign);
1786 tcg_gen_helper_1_4(helper_ld_asi, cpu_tmp64, addr, r_asi, r_size, r_sign);
1787 tcg_temp_free(r_sign);
1788 tcg_temp_free(r_size);
1789 tcg_temp_free(r_asi);
1790 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1791 }
1792
1793 static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
1794 {
1795 TCGv r_asi, r_size;
1796
1797 tcg_gen_extu_tl_i64(cpu_tmp64, src);
1798 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1799 r_size = tcg_const_i32(size);
1800 tcg_gen_helper_0_4(helper_st_asi, addr, cpu_tmp64, r_asi, r_size);
1801 tcg_temp_free(r_size);
1802 tcg_temp_free(r_asi);
1803 }
1804
1805 static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
1806 {
1807 TCGv r_asi, r_size, r_sign;
1808
1809 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1810 r_size = tcg_const_i32(4);
1811 r_sign = tcg_const_i32(0);
1812 tcg_gen_helper_1_4(helper_ld_asi, cpu_tmp64, addr, r_asi, r_size, r_sign);
1813 tcg_temp_free(r_sign);
1814 tcg_gen_helper_0_4(helper_st_asi, addr, dst, r_asi, r_size);
1815 tcg_temp_free(r_size);
1816 tcg_temp_free(r_asi);
1817 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1818 }
1819
1820 static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
1821 {
1822 TCGv r_asi, r_size, r_sign;
1823
1824 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1825 r_size = tcg_const_i32(8);
1826 r_sign = tcg_const_i32(0);
1827 tcg_gen_helper_1_4(helper_ld_asi, cpu_tmp64, addr, r_asi, r_size, r_sign);
1828 tcg_temp_free(r_sign);
1829 tcg_temp_free(r_size);
1830 tcg_temp_free(r_asi);
1831 tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
1832 gen_movl_TN_reg(rd + 1, cpu_tmp0);
1833 tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
1834 tcg_gen_trunc_i64_tl(hi, cpu_tmp64);
1835 gen_movl_TN_reg(rd, hi);
1836 }
1837
1838 static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
1839 {
1840 TCGv r_temp, r_asi, r_size;
1841
1842 r_temp = tcg_temp_new(TCG_TYPE_TL);
1843 gen_movl_reg_TN(rd + 1, r_temp);
1844 tcg_gen_helper_1_2(helper_pack64, cpu_tmp64, hi, r_temp);
1845 tcg_temp_free(r_temp);
1846 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1847 r_size = tcg_const_i32(8);
1848 tcg_gen_helper_0_4(helper_st_asi, addr, cpu_tmp64, r_asi, r_size);
1849 tcg_temp_free(r_size);
1850 tcg_temp_free(r_asi);
1851 }
1852 #endif
1853
1854 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
1855 static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
1856 {
1857 TCGv r_val, r_asi, r_size;
1858
1859 gen_ld_asi(dst, addr, insn, 1, 0);
1860
1861 r_val = tcg_const_i64(0xffULL);
1862 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1863 r_size = tcg_const_i32(1);
1864 tcg_gen_helper_0_4(helper_st_asi, addr, r_val, r_asi, r_size);
1865 tcg_temp_free(r_size);
1866 tcg_temp_free(r_asi);
1867 tcg_temp_free(r_val);
1868 }
1869 #endif
1870
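/* Operand access: %g0 reads as the constant 0, %g1-%g7 live in the TCG
   globals cpu_gregs[], and windowed registers (regs 8-31: %o, %l, %i) are
   loaded through cpu_regwptr, which points at the current register window. */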
1871 static inline TCGv get_src1(unsigned int insn, TCGv def)
1872 {
1873 TCGv r_rs1 = def;
1874 unsigned int rs1;
1875
1876 rs1 = GET_FIELD(insn, 13, 17);
1877 if (rs1 == 0)
1878 r_rs1 = tcg_const_tl(0); // XXX how to free?
1879 else if (rs1 < 8)
1880 r_rs1 = cpu_gregs[rs1];
1881 else
1882 tcg_gen_ld_tl(def, cpu_regwptr, (rs1 - 8) * sizeof(target_ulong));
1883 return r_rs1;
1884 }
1885
1886 static inline TCGv get_src2(unsigned int insn, TCGv def)
1887 {
1888 TCGv r_rs2 = def;
1889 unsigned int rs2;
1890
1891 if (IS_IMM) { /* immediate */
1892 rs2 = GET_FIELDs(insn, 19, 31);
1893 r_rs2 = tcg_const_tl((int)rs2); // XXX how to free?
1894 } else { /* register */
1895 rs2 = GET_FIELD(insn, 27, 31);
1896 if (rs2 == 0)
1897 r_rs2 = tcg_const_tl(0); // XXX how to free?
1898 else if (rs2 < 8)
1899 r_rs2 = cpu_gregs[rs2];
1900 else
1901 tcg_gen_ld_tl(def, cpu_regwptr, (rs2 - 8) * sizeof(target_ulong));
1902 }
1903 return r_rs2;
1904 }
1905
1906 #define CHECK_IU_FEATURE(dc, FEATURE) \
1907 if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE)) \
1908 goto illegal_insn;
1909 #define CHECK_FPU_FEATURE(dc, FEATURE) \
1910 if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE)) \
1911 goto nfpu_insn;
1912
1913 /* before an instruction, dc->pc must be static */
1914 static void disas_sparc_insn(DisasContext * dc)
1915 {
1916 unsigned int insn, opc, rs1, rs2, rd;
1917
1918 if (unlikely(loglevel & CPU_LOG_TB_OP))
1919 tcg_gen_debug_insn_start(dc->pc);
1920 insn = ldl_code(dc->pc);
1921 opc = GET_FIELD(insn, 0, 1);
1922
1923 rd = GET_FIELD(insn, 2, 6);
1924
1925 cpu_src1 = tcg_temp_new(TCG_TYPE_TL); // const
1926 cpu_src2 = tcg_temp_new(TCG_TYPE_TL); // const
1927
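/* Top-level decode on insn<31:30> (op): 0 = branches/SETHI, 1 = CALL,
   2 = arithmetic, logical, FPU and control-register ops; format 3
   loads/stores (op = 3) are decoded further down in the same switch. */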
1928 switch (opc) {
1929 case 0: /* branches/sethi */
1930 {
1931 unsigned int xop = GET_FIELD(insn, 7, 9);
1932 int32_t target;
1933 switch (xop) {
1934 #ifdef TARGET_SPARC64
1935 case 0x1: /* V9 BPcc */
1936 {
1937 int cc;
1938
1939 target = GET_FIELD_SP(insn, 0, 18);
1940 target = sign_extend(target, 19);
1941 target <<= 2;
1942 cc = GET_FIELD_SP(insn, 20, 21);
1943 if (cc == 0)
1944 do_branch(dc, target, insn, 0, cpu_cond);
1945 else if (cc == 2)
1946 do_branch(dc, target, insn, 1, cpu_cond);
1947 else
1948 goto illegal_insn;
1949 goto jmp_insn;
1950 }
1951 case 0x3: /* V9 BPr */
1952 {
1953 target = GET_FIELD_SP(insn, 0, 13) |
1954 (GET_FIELD_SP(insn, 20, 21) << 14);
1955 target = sign_extend(target, 16);
1956 target <<= 2;
1957 cpu_src1 = get_src1(insn, cpu_src1);
1958 do_branch_reg(dc, target, insn, cpu_cond, cpu_src1);
1959 goto jmp_insn;
1960 }
1961 case 0x5: /* V9 FBPcc */
1962 {
1963 int cc = GET_FIELD_SP(insn, 20, 21);
1964 if (gen_trap_ifnofpu(dc, cpu_cond))
1965 goto jmp_insn;
1966 target = GET_FIELD_SP(insn, 0, 18);
1967 target = sign_extend(target, 19);
1968 target <<= 2;
1969 do_fbranch(dc, target, insn, cc, cpu_cond);
1970 goto jmp_insn;
1971 }
1972 #else
1973 case 0x7: /* CBN+x */
1974 {
1975 goto ncp_insn;
1976 }
1977 #endif
1978 case 0x2: /* BN+x */
1979 {
1980 target = GET_FIELD(insn, 10, 31);
1981 target = sign_extend(target, 22);
1982 target <<= 2;
1983 do_branch(dc, target, insn, 0, cpu_cond);
1984 goto jmp_insn;
1985 }
1986 case 0x6: /* FBN+x */
1987 {
1988 if (gen_trap_ifnofpu(dc, cpu_cond))
1989 goto jmp_insn;
1990 target = GET_FIELD(insn, 10, 31);
1991 target = sign_extend(target, 22);
1992 target <<= 2;
1993 do_fbranch(dc, target, insn, 0, cpu_cond);
1994 goto jmp_insn;
1995 }
1996 case 0x4: /* SETHI */
1997 if (rd) { // SETHI with rd == 0 is a nop
1998 uint32_t value = GET_FIELD(insn, 10, 31);
1999 TCGv r_const;
2000
2001 r_const = tcg_const_tl(value << 10);
2002 gen_movl_TN_reg(rd, r_const);
2003 tcg_temp_free(r_const);
2004 }
2005 break;
2006 case 0x0: /* UNIMPL */
2007 default:
2008 goto illegal_insn;
2009 }
2010 break;
2011 }
2012 break;
2013 case 1:
2014 /*CALL*/ {
2015 target_long target = GET_FIELDs(insn, 2, 31) << 2;
2016 TCGv r_const;
2017
2018 r_const = tcg_const_tl(dc->pc);
2019 gen_movl_TN_reg(15, r_const);
2020 tcg_temp_free(r_const);
2021 target += dc->pc;
2022 gen_mov_pc_npc(dc, cpu_cond);
2023 dc->npc = target;
2024 }
2025 goto jmp_insn;
2026 case 2: /* FPU & Logical Operations */
2027 {
2028 unsigned int xop = GET_FIELD(insn, 7, 12);
2029 if (xop == 0x3a) { /* generate trap */
2030 int cond;
2031
2032 cpu_src1 = get_src1(insn, cpu_src1);
2033 if (IS_IMM) {
2034 rs2 = GET_FIELD(insn, 25, 31);
2035 tcg_gen_addi_tl(cpu_dst, cpu_src1, rs2);
2036 } else {
2037 rs2 = GET_FIELD(insn, 27, 31);
2038 if (rs2 != 0) {
2039 gen_movl_reg_TN(rs2, cpu_src2);
2040 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
2041 } else
2042 tcg_gen_mov_tl(cpu_dst, cpu_src1);
2043 }
2044 cond = GET_FIELD(insn, 3, 6);
2045 if (cond == 0x8) {
2046 save_state(dc, cpu_cond);
2047 tcg_gen_helper_0_1(helper_trap, cpu_dst);
2048 } else if (cond != 0) {
2049 TCGv r_cond = tcg_temp_new(TCG_TYPE_TL);
2050 #ifdef TARGET_SPARC64
2051 /* V9 icc/xcc */
2052 int cc = GET_FIELD_SP(insn, 11, 12);
2053
2054 save_state(dc, cpu_cond);
2055 if (cc == 0)
2056 gen_cond(r_cond, 0, cond);
2057 else if (cc == 2)
2058 gen_cond(r_cond, 1, cond);
2059 else
2060 goto illegal_insn;
2061 #else
2062 save_state(dc, cpu_cond);
2063 gen_cond(r_cond, 0, cond);
2064 #endif
2065 tcg_gen_helper_0_2(helper_trapcc, cpu_dst, r_cond);
2066 tcg_temp_free(r_cond);
2067 }
2068 gen_op_next_insn();
2069 tcg_gen_exit_tb(0);
2070 dc->is_br = 1;
2071 goto jmp_insn;
2072 } else if (xop == 0x28) { /* rdy, V9 rdasr */
2073 rs1 = GET_FIELD(insn, 13, 17);
2074 switch(rs1) {
2075 case 0: /* rdy */
2076 #ifndef TARGET_SPARC64
2077 case 0x01 ... 0x0e: /* undefined in the SPARCv8
2078 manual, rdy on the microSPARC
2079 II */
2080 case 0x0f: /* stbar in the SPARCv8 manual,
2081 rdy on the microSPARC II */
2082 case 0x10 ... 0x1f: /* implementation-dependent in the
2083 SPARCv8 manual, rdy on the
2084 microSPARC II */
2085 #endif
2086 gen_movl_TN_reg(rd, cpu_y);
2087 break;
2088 #ifdef TARGET_SPARC64
2089 case 0x2: /* V9 rdccr */
2090 tcg_gen_helper_1_0(helper_rdccr, cpu_dst);
2091 gen_movl_TN_reg(rd, cpu_dst);
2092 break;
2093 case 0x3: /* V9 rdasi */
2094 tcg_gen_ext_i32_tl(cpu_dst, cpu_asi);
2095 gen_movl_TN_reg(rd, cpu_dst);
2096 break;
2097 case 0x4: /* V9 rdtick */
2098 {
2099 TCGv r_tickptr;
2100
2101 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
2102 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2103 offsetof(CPUState, tick));
2104 tcg_gen_helper_1_1(helper_tick_get_count, cpu_dst,
2105 r_tickptr);
2106 tcg_temp_free(r_tickptr);
2107 gen_movl_TN_reg(rd, cpu_dst);
2108 }
2109 break;
2110 case 0x5: /* V9 rdpc */
2111 {
2112 TCGv r_const;
2113
2114 r_const = tcg_const_tl(dc->pc);
2115 gen_movl_TN_reg(rd, r_const);
2116 tcg_temp_free(r_const);
2117 }
2118 break;
2119 case 0x6: /* V9 rdfprs */
2120 tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
2121 gen_movl_TN_reg(rd, cpu_dst);
2122 break;
2123 case 0xf: /* V9 membar */
2124 break; /* no effect */
2125 case 0x13: /* Graphics Status */
2126 if (gen_trap_ifnofpu(dc, cpu_cond))
2127 goto jmp_insn;
2128 gen_movl_TN_reg(rd, cpu_gsr);
2129 break;
2130 case 0x17: /* Tick compare */
2131 gen_movl_TN_reg(rd, cpu_tick_cmpr);
2132 break;
2133 case 0x18: /* System tick */
2134 {
2135 TCGv r_tickptr;
2136
2137 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
2138 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2139 offsetof(CPUState, stick));
2140 tcg_gen_helper_1_1(helper_tick_get_count, cpu_dst,
2141 r_tickptr);
2142 tcg_temp_free(r_tickptr);
2143 gen_movl_TN_reg(rd, cpu_dst);
2144 }
2145 break;
2146 case 0x19: /* System tick compare */
2147 gen_movl_TN_reg(rd, cpu_stick_cmpr);
2148 break;
2149 case 0x10: /* Performance Control */
2150 case 0x11: /* Performance Instrumentation Counter */
2151 case 0x12: /* Dispatch Control */
2152 case 0x14: /* Softint set, WO */
2153 case 0x15: /* Softint clear, WO */
2154 case 0x16: /* Softint write */
2155 #endif
2156 default:
2157 goto illegal_insn;
2158 }
2159 #if !defined(CONFIG_USER_ONLY)
2160 } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
2161 #ifndef TARGET_SPARC64
2162 if (!supervisor(dc))
2163 goto priv_insn;
2164 tcg_gen_helper_1_0(helper_rdpsr, cpu_dst);
2165 #else
2166 CHECK_IU_FEATURE(dc, HYPV);
2167 if (!hypervisor(dc))
2168 goto priv_insn;
2169 rs1 = GET_FIELD(insn, 13, 17);
2170 switch (rs1) {
2171 case 0: // hpstate
2172 // gen_op_rdhpstate();
2173 break;
2174 case 1: // htstate
2175 // gen_op_rdhtstate();
2176 break;
2177 case 3: // hintp
2178 tcg_gen_mov_tl(cpu_dst, cpu_hintp);
2179 break;
2180 case 5: // htba
2181 tcg_gen_mov_tl(cpu_dst, cpu_htba);
2182 break;
2183 case 6: // hver
2184 tcg_gen_mov_tl(cpu_dst, cpu_hver);
2185 break;
2186 case 31: // hstick_cmpr
2187 tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
2188 break;
2189 default:
2190 goto illegal_insn;
2191 }
2192 #endif
2193 gen_movl_TN_reg(rd, cpu_dst);
2194 break;
2195 } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
2196 if (!supervisor(dc))
2197 goto priv_insn;
2198 #ifdef TARGET_SPARC64
2199 rs1 = GET_FIELD(insn, 13, 17);
2200 switch (rs1) {
2201 case 0: // tpc
2202 {
2203 TCGv r_tsptr;
2204
2205 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
2206 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2207 offsetof(CPUState, tsptr));
2208 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2209 offsetof(trap_state, tpc));
2210 tcg_temp_free(r_tsptr);
2211 }
2212 break;
2213 case 1: // tnpc
2214 {
2215 TCGv r_tsptr;
2216
2217 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
2218 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2219 offsetof(CPUState, tsptr));
2220 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2221 offsetof(trap_state, tnpc));
2222 tcg_temp_free(r_tsptr);
2223 }
2224 break;
2225 case 2: // tstate
2226 {
2227 TCGv r_tsptr;
2228
2229 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
2230 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2231 offsetof(CPUState, tsptr));
2232 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2233 offsetof(trap_state, tstate));
2234 tcg_temp_free(r_tsptr);
2235 }
2236 break;
2237 case 3: // tt
2238 {
2239 TCGv r_tsptr;
2240
2241 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
2242 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2243 offsetof(CPUState, tsptr));
2244 tcg_gen_ld_i32(cpu_tmp0, r_tsptr,
2245 offsetof(trap_state, tt));
2246 tcg_temp_free(r_tsptr);
2247 }
2248 break;
2249 case 4: // tick
2250 {
2251 TCGv r_tickptr;
2252
2253 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
2254 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2255 offsetof(CPUState, tick));
2256 tcg_gen_helper_1_1(helper_tick_get_count, cpu_tmp0,
2257 r_tickptr);
2258 gen_movl_TN_reg(rd, cpu_tmp0);
2259 tcg_temp_free(r_tickptr);
2260 }
2261 break;
2262 case 5: // tba
2263 tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
2264 break;
2265 case 6: // pstate
2266 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2267 offsetof(CPUSPARCState, pstate));
2268 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2269 break;
2270 case 7: // tl
2271 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2272 offsetof(CPUSPARCState, tl));
2273 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2274 break;
2275 case 8: // pil
2276 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2277 offsetof(CPUSPARCState, psrpil));
2278 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2279 break;
2280 case 9: // cwp
2281 tcg_gen_helper_1_0(helper_rdcwp, cpu_tmp0);
2282 break;
2283 case 10: // cansave
2284 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2285 offsetof(CPUSPARCState, cansave));
2286 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2287 break;
2288 case 11: // canrestore
2289 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2290 offsetof(CPUSPARCState, canrestore));
2291 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2292 break;
2293 case 12: // cleanwin
2294 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2295 offsetof(CPUSPARCState, cleanwin));
2296 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2297 break;
2298 case 13: // otherwin
2299 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2300 offsetof(CPUSPARCState, otherwin));
2301 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2302 break;
2303 case 14: // wstate
2304 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2305 offsetof(CPUSPARCState, wstate));
2306 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2307 break;
2308 case 16: // UA2005 gl
2309 CHECK_IU_FEATURE(dc, GL);
2310 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2311 offsetof(CPUSPARCState, gl));
2312 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2313 break;
2314 case 26: // UA2005 strand status
2315 CHECK_IU_FEATURE(dc, HYPV);
2316 if (!hypervisor(dc))
2317 goto priv_insn;
2318 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_ssr);
2319 break;
2320 case 31: // ver
2321 tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
2322 break;
2323 case 15: // fq
2324 default:
2325 goto illegal_insn;
2326 }
2327 #else
2328 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
2329 #endif
2330 gen_movl_TN_reg(rd, cpu_tmp0);
2331 break;
2332 } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
2333 #ifdef TARGET_SPARC64
2334 save_state(dc, cpu_cond);
2335 tcg_gen_helper_0_0(helper_flushw);
2336 #else
2337 if (!supervisor(dc))
2338 goto priv_insn;
2339 gen_movl_TN_reg(rd, cpu_tbr);
2340 #endif
2341 break;
2342 #endif
2343 } else if (xop == 0x34) { /* FPU Operations */
2344 if (gen_trap_ifnofpu(dc, cpu_cond))
2345 goto jmp_insn;
2346 gen_op_clear_ieee_excp_and_FTT();
2347 rs1 = GET_FIELD(insn, 13, 17);
2348 rs2 = GET_FIELD(insn, 27, 31);
2349 xop = GET_FIELD(insn, 18, 26);
2350 switch (xop) {
2351 case 0x1: /* fmovs */
2352 gen_op_load_fpr_FT0(rs2);
2353 gen_op_store_FT0_fpr(rd);
2354 break;
2355 case 0x5: /* fnegs */
2356 gen_op_load_fpr_FT1(rs2);
2357 tcg_gen_helper_0_0(helper_fnegs);
2358 gen_op_store_FT0_fpr(rd);
2359 break;
2360 case 0x9: /* fabss */
2361 gen_op_load_fpr_FT1(rs2);
2362 tcg_gen_helper_0_0(helper_fabss);
2363 gen_op_store_FT0_fpr(rd);
2364 break;
2365 case 0x29: /* fsqrts */
2366 CHECK_FPU_FEATURE(dc, FSQRT);
2367 gen_op_load_fpr_FT1(rs2);
2368 gen_clear_float_exceptions();
2369 tcg_gen_helper_0_0(helper_fsqrts);
2370 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2371 gen_op_store_FT0_fpr(rd);
2372 break;
2373 case 0x2a: /* fsqrtd */
2374 CHECK_FPU_FEATURE(dc, FSQRT);
2375 gen_op_load_fpr_DT1(DFPREG(rs2));
2376 gen_clear_float_exceptions();
2377 tcg_gen_helper_0_0(helper_fsqrtd);
2378 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2379 gen_op_store_DT0_fpr(DFPREG(rd));
2380 break;
2381 case 0x2b: /* fsqrtq */
2382 CHECK_FPU_FEATURE(dc, FLOAT128);
2383 gen_op_load_fpr_QT1(QFPREG(rs2));
2384 gen_clear_float_exceptions();
2385 tcg_gen_helper_0_0(helper_fsqrtq);
2386 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2387 gen_op_store_QT0_fpr(QFPREG(rd));
2388 break;
2389 case 0x41: /* fadds */
2390 gen_op_load_fpr_FT0(rs1);
2391 gen_op_load_fpr_FT1(rs2);
2392 gen_clear_float_exceptions();
2393 tcg_gen_helper_0_0(helper_fadds);
2394 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2395 gen_op_store_FT0_fpr(rd);
2396 break;
2397 case 0x42: /* faddd */
2398 gen_op_load_fpr_DT0(DFPREG(rs1));
2399 gen_op_load_fpr_DT1(DFPREG(rs2));
2400 gen_clear_float_exceptions();
2401 tcg_gen_helper_0_0(helper_faddd);
2402 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2403 gen_op_store_DT0_fpr(DFPREG(rd));
2404 break;
2405 case 0x43: /* faddq */
2406 CHECK_FPU_FEATURE(dc, FLOAT128);
2407 gen_op_load_fpr_QT0(QFPREG(rs1));
2408 gen_op_load_fpr_QT1(QFPREG(rs2));
2409 gen_clear_float_exceptions();
2410 tcg_gen_helper_0_0(helper_faddq);
2411 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2412 gen_op_store_QT0_fpr(QFPREG(rd));
2413 break;
2414 case 0x45: /* fsubs */
2415 gen_op_load_fpr_FT0(rs1);
2416 gen_op_load_fpr_FT1(rs2);
2417 gen_clear_float_exceptions();
2418 tcg_gen_helper_0_0(helper_fsubs);
2419 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2420 gen_op_store_FT0_fpr(rd);
2421 break;
2422 case 0x46: /* fsubd */
2423 gen_op_load_fpr_DT0(DFPREG(rs1));
2424 gen_op_load_fpr_DT1(DFPREG(rs2));
2425 gen_clear_float_exceptions();
2426 tcg_gen_helper_0_0(helper_fsubd);
2427 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2428 gen_op_store_DT0_fpr(DFPREG(rd));
2429 break;
2430 case 0x47: /* fsubq */
2431 CHECK_FPU_FEATURE(dc, FLOAT128);
2432 gen_op_load_fpr_QT0(QFPREG(rs1));
2433 gen_op_load_fpr_QT1(QFPREG(rs2));
2434 gen_clear_float_exceptions();
2435 tcg_gen_helper_0_0(helper_fsubq);
2436 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2437 gen_op_store_QT0_fpr(QFPREG(rd));
2438 break;
2439 case 0x49: /* fmuls */
2440 CHECK_FPU_FEATURE(dc, FMUL);
2441 gen_op_load_fpr_FT0(rs1);
2442 gen_op_load_fpr_FT1(rs2);
2443 gen_clear_float_exceptions();
2444 tcg_gen_helper_0_0(helper_fmuls);
2445 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2446 gen_op_store_FT0_fpr(rd);
2447 break;
2448 case 0x4a: /* fmuld */
2449 CHECK_FPU_FEATURE(dc, FMUL);
2450 gen_op_load_fpr_DT0(DFPREG(rs1));
2451 gen_op_load_fpr_DT1(DFPREG(rs2));
2452 gen_clear_float_exceptions();
2453 tcg_gen_helper_0_0(helper_fmuld);
2454 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2455 gen_op_store_DT0_fpr(DFPREG(rd));
2456 break;
2457 case 0x4b: /* fmulq */
2458 CHECK_FPU_FEATURE(dc, FLOAT128);
2459 CHECK_FPU_FEATURE(dc, FMUL);
2460 gen_op_load_fpr_QT0(QFPREG(rs1));
2461 gen_op_load_fpr_QT1(QFPREG(rs2));
2462 gen_clear_float_exceptions();
2463 tcg_gen_helper_0_0(helper_fmulq);
2464 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2465 gen_op_store_QT0_fpr(QFPREG(rd));
2466 break;
2467 case 0x4d: /* fdivs */
2468 gen_op_load_fpr_FT0(rs1);
2469 gen_op_load_fpr_FT1(rs2);
2470 gen_clear_float_exceptions();
2471 tcg_gen_helper_0_0(helper_fdivs);
2472 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2473 gen_op_store_FT0_fpr(rd);
2474 break;
2475 case 0x4e: /* fdivd */
2476 gen_op_load_fpr_DT0(DFPREG(rs1));
2477 gen_op_load_fpr_DT1(DFPREG(rs2));
2478 gen_clear_float_exceptions();
2479 tcg_gen_helper_0_0(helper_fdivd);
2480 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2481 gen_op_store_DT0_fpr(DFPREG(rd));
2482 break;
2483 case 0x4f: /* fdivq */
2484 CHECK_FPU_FEATURE(dc, FLOAT128);
2485 gen_op_load_fpr_QT0(QFPREG(rs1));
2486 gen_op_load_fpr_QT1(QFPREG(rs2));
2487 gen_clear_float_exceptions();
2488 tcg_gen_helper_0_0(helper_fdivq);
2489 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2490 gen_op_store_QT0_fpr(QFPREG(rd));
2491 break;
2492 case 0x69: /* fsmuld */
2493 CHECK_FPU_FEATURE(dc, FSMULD);
2494 gen_op_load_fpr_FT0(rs1);
2495 gen_op_load_fpr_FT1(rs2);
2496 gen_clear_float_exceptions();
2497 tcg_gen_helper_0_0(helper_fsmuld);
2498 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2499 gen_op_store_DT0_fpr(DFPREG(rd));
2500 break;
2501 case 0x6e: /* fdmulq */
2502 CHECK_FPU_FEATURE(dc, FLOAT128);
2503 gen_op_load_fpr_DT0(DFPREG(rs1));
2504 gen_op_load_fpr_DT1(DFPREG(rs2));
2505 gen_clear_float_exceptions();
2506 tcg_gen_helper_0_0(helper_fdmulq);
2507 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2508 gen_op_store_QT0_fpr(QFPREG(rd));
2509 break;
2510 case 0xc4: /* fitos */
2511 gen_op_load_fpr_FT1(rs2);
2512 gen_clear_float_exceptions();
2513 tcg_gen_helper_0_0(helper_fitos);
2514 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2515 gen_op_store_FT0_fpr(rd);
2516 break;
2517 case 0xc6: /* fdtos */
2518 gen_op_load_fpr_DT1(DFPREG(rs2));
2519 gen_clear_float_exceptions();
2520 tcg_gen_helper_0_0(helper_fdtos);
2521 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2522 gen_op_store_FT0_fpr(rd);
2523 break;
2524 case 0xc7: /* fqtos */
2525 CHECK_FPU_FEATURE(dc, FLOAT128);
2526 gen_op_load_fpr_QT1(QFPREG(rs2));
2527 gen_clear_float_exceptions();
2528 tcg_gen_helper_0_0(helper_fqtos);
2529 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2530 gen_op_store_FT0_fpr(rd);
2531 break;
2532 case 0xc8: /* fitod */
2533 gen_op_load_fpr_FT1(rs2);
2534 tcg_gen_helper_0_0(helper_fitod);
2535 gen_op_store_DT0_fpr(DFPREG(rd));
2536 break;
2537 case 0xc9: /* fstod */
2538 gen_op_load_fpr_FT1(rs2);
2539 tcg_gen_helper_0_0(helper_fstod);
2540 gen_op_store_DT0_fpr(DFPREG(rd));
2541 break;
2542 case 0xcb: /* fqtod */
2543 CHECK_FPU_FEATURE(dc, FLOAT128);
2544 gen_op_load_fpr_QT1(QFPREG(rs2));
2545 gen_clear_float_exceptions();
2546 tcg_gen_helper_0_0(helper_fqtod);
2547 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2548 gen_op_store_DT0_fpr(DFPREG(rd));
2549 break;
2550 case 0xcc: /* fitoq */
2551 CHECK_FPU_FEATURE(dc, FLOAT128);
2552 gen_op_load_fpr_FT1(rs2);
2553 tcg_gen_helper_0_0(helper_fitoq);
2554 gen_op_store_QT0_fpr(QFPREG(rd));
2555 break;
2556 case 0xcd: /* fstoq */
2557 CHECK_FPU_FEATURE(dc, FLOAT128);
2558 gen_op_load_fpr_FT1(rs2);
2559 tcg_gen_helper_0_0(helper_fstoq);
2560 gen_op_store_QT0_fpr(QFPREG(rd));
2561 break;
2562 case 0xce: /* fdtoq */
2563 CHECK_FPU_FEATURE(dc, FLOAT128);
2564 gen_op_load_fpr_DT1(DFPREG(rs2));
2565 tcg_gen_helper_0_0(helper_fdtoq);
2566 gen_op_store_QT0_fpr(QFPREG(rd));
2567 break;
2568 case 0xd1: /* fstoi */
2569 gen_op_load_fpr_FT1(rs2);
2570 gen_clear_float_exceptions();
2571 tcg_gen_helper_0_0(helper_fstoi);
2572 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2573 gen_op_store_FT0_fpr(rd);
2574 break;
2575 case 0xd2: /* fdtoi */
2576 gen_op_load_fpr_DT1(DFPREG(rs2));
2577 gen_clear_float_exceptions();
2578 tcg_gen_helper_0_0(helper_fdtoi);
2579 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2580 gen_op_store_FT0_fpr(rd);
2581 break;
2582 case 0xd3: /* fqtoi */
2583 CHECK_FPU_FEATURE(dc, FLOAT128);
2584 gen_op_load_fpr_QT1(QFPREG(rs2));
2585 gen_clear_float_exceptions();
2586 tcg_gen_helper_0_0(helper_fqtoi);
2587 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2588 gen_op_store_FT0_fpr(rd);
2589 break;
2590 #ifdef TARGET_SPARC64
2591 case 0x2: /* V9 fmovd */
2592 gen_op_load_fpr_DT0(DFPREG(rs2));
2593 gen_op_store_DT0_fpr(DFPREG(rd));
2594 break;
2595 case 0x3: /* V9 fmovq */
2596 CHECK_FPU_FEATURE(dc, FLOAT128);
2597 gen_op_load_fpr_QT0(QFPREG(rs2));
2598 gen_op_store_QT0_fpr(QFPREG(rd));
2599 break;
2600 case 0x6: /* V9 fnegd */
2601 gen_op_load_fpr_DT1(DFPREG(rs2));
2602 tcg_gen_helper_0_0(helper_fnegd);
2603 gen_op_store_DT0_fpr(DFPREG(rd));
2604 break;
2605 case 0x7: /* V9 fnegq */
2606 CHECK_FPU_FEATURE(dc, FLOAT128);
2607 gen_op_load_fpr_QT1(QFPREG(rs2));
2608 tcg_gen_helper_0_0(helper_fnegq);
2609 gen_op_store_QT0_fpr(QFPREG(rd));
2610 break;
2611 case 0xa: /* V9 fabsd */
2612 gen_op_load_fpr_DT1(DFPREG(rs2));
2613 tcg_gen_helper_0_0(helper_fabsd);
2614 gen_op_store_DT0_fpr(DFPREG(rd));
2615 break;
2616 case 0xb: /* V9 fabsq */
2617 CHECK_FPU_FEATURE(dc, FLOAT128);
2618 gen_op_load_fpr_QT1(QFPREG(rs2));
2619 tcg_gen_helper_0_0(helper_fabsq);
2620 gen_op_store_QT0_fpr(QFPREG(rd));
2621 break;
2622 case 0x81: /* V9 fstox */
2623 gen_op_load_fpr_FT1(rs2);
2624 gen_clear_float_exceptions();
2625 tcg_gen_helper_0_0(helper_fstox);
2626 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2627 gen_op_store_DT0_fpr(DFPREG(rd));
2628 break;
2629 case 0x82: /* V9 fdtox */
2630 gen_op_load_fpr_DT1(DFPREG(rs2));
2631 gen_clear_float_exceptions();
2632 tcg_gen_helper_0_0(helper_fdtox);
2633 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2634 gen_op_store_DT0_fpr(DFPREG(rd));
2635 break;
2636 case 0x83: /* V9 fqtox */
2637 CHECK_FPU_FEATURE(dc, FLOAT128);
2638 gen_op_load_fpr_QT1(QFPREG(rs2));
2639 gen_clear_float_exceptions();
2640 tcg_gen_helper_0_0(helper_fqtox);
2641 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2642 gen_op_store_DT0_fpr(DFPREG(rd));
2643 break;
2644 case 0x84: /* V9 fxtos */
2645 gen_op_load_fpr_DT1(DFPREG(rs2));
2646 gen_clear_float_exceptions();
2647 tcg_gen_helper_0_0(helper_fxtos);
2648 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2649 gen_op_store_FT0_fpr(rd);
2650 break;
2651 case 0x88: /* V9 fxtod */
2652 gen_op_load_fpr_DT1(DFPREG(rs2));
2653 gen_clear_float_exceptions();
2654 tcg_gen_helper_0_0(helper_fxtod);
2655 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2656 gen_op_store_DT0_fpr(DFPREG(rd));
2657 break;
2658 case 0x8c: /* V9 fxtoq */
2659 CHECK_FPU_FEATURE(dc, FLOAT128);
2660 gen_op_load_fpr_DT1(DFPREG(rs2));
2661 gen_clear_float_exceptions();
2662 tcg_gen_helper_0_0(helper_fxtoq);
2663 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2664 gen_op_store_QT0_fpr(QFPREG(rd));
2665 break;
2666 #endif
2667 default:
2668 goto illegal_insn;
2669 }
2670 } else if (xop == 0x35) { /* FPU Operations */
2671 #ifdef TARGET_SPARC64
2672 int cond;
2673 #endif
2674 if (gen_trap_ifnofpu(dc, cpu_cond))
2675 goto jmp_insn;
2676 gen_op_clear_ieee_excp_and_FTT();
2677 rs1 = GET_FIELD(insn, 13, 17);
2678 rs2 = GET_FIELD(insn, 27, 31);
2679 xop = GET_FIELD(insn, 18, 26);
2680 #ifdef TARGET_SPARC64
2681 if ((xop & 0x11f) == 0x005) { // V9 fmovsr
2682 int l1;
2683
2684 l1 = gen_new_label();
2685 cond = GET_FIELD_SP(insn, 14, 17);
2686 cpu_src1 = get_src1(insn, cpu_src1);
2687 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2688 0, l1);
2689 gen_op_load_fpr_FT0(rs2);
2690 gen_op_store_FT0_fpr(rd);
2691 gen_set_label(l1);
2692 break;
2693 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
2694 int l1;
2695
2696 l1 = gen_new_label();
2697 cond = GET_FIELD_SP(insn, 14, 17);
2698 cpu_src1 = get_src1(insn, cpu_src1);
2699 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2700 0, l1);
2701 gen_op_load_fpr_DT0(DFPREG(rs2));
2702 gen_op_store_DT0_fpr(DFPREG(rd));
2703 gen_set_label(l1);
2704 break;
2705 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
2706 int l1;
2707
2708 CHECK_FPU_FEATURE(dc, FLOAT128);
2709 l1 = gen_new_label();
2710 cond = GET_FIELD_SP(insn, 14, 17);
2711 cpu_src1 = get_src1(insn, cpu_src1);
2712 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2713 0, l1);
2714 gen_op_load_fpr_QT0(QFPREG(rs2));
2715 gen_op_store_QT0_fpr(QFPREG(rd));
2716 gen_set_label(l1);
2717 break;
2718 }
2719 #endif
2720 switch (xop) {
2721 #ifdef TARGET_SPARC64
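/* FMOVCC conditionally copies an FP register: evaluate the branch
   condition for the given %fcc field (or %icc/%xcc in the second
   definition below), skip to l1 when it is false, otherwise load rs2
   and store it to rd. glue() pastes the size letter (F, D or Q) onto
   the load/store op names and the FPREG register-number macro. */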
2722 #define FMOVCC(size_FDQ, fcc) \
2723 { \
2724 TCGv r_cond; \
2725 int l1; \
2726 \
2727 l1 = gen_new_label(); \
2728 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2729 cond = GET_FIELD_SP(insn, 14, 17); \
2730 gen_fcond(r_cond, fcc, cond); \
2731 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2732 0, l1); \
2733 glue(glue(gen_op_load_fpr_, size_FDQ), T0) \
2734 (glue(size_FDQ, FPREG(rs2))); \
2735 glue(glue(gen_op_store_, size_FDQ), T0_fpr) \
2736 (glue(size_FDQ, FPREG(rd))); \
2737 gen_set_label(l1); \
2738 tcg_temp_free(r_cond); \
2739 }
2740 case 0x001: /* V9 fmovscc %fcc0 */
2741 FMOVCC(F, 0);
2742 break;
2743 case 0x002: /* V9 fmovdcc %fcc0 */
2744 FMOVCC(D, 0);
2745 break;
2746 case 0x003: /* V9 fmovqcc %fcc0 */
2747 CHECK_FPU_FEATURE(dc, FLOAT128);
2748 FMOVCC(Q, 0);
2749 break;
2750 case 0x041: /* V9 fmovscc %fcc1 */
2751 FMOVCC(F, 1);
2752 break;
2753 case 0x042: /* V9 fmovdcc %fcc1 */
2754 FMOVCC(D, 1);
2755 break;
2756 case 0x043: /* V9 fmovqcc %fcc1 */
2757 CHECK_FPU_FEATURE(dc, FLOAT128);
2758 FMOVCC(Q, 1);
2759 break;
2760 case 0x081: /* V9 fmovscc %fcc2 */
2761 FMOVCC(F, 2);
2762 break;
2763 case 0x082: /* V9 fmovdcc %fcc2 */
2764 FMOVCC(D, 2);
2765 break;
2766 case 0x083: /* V9 fmovqcc %fcc2 */
2767 CHECK_FPU_FEATURE(dc, FLOAT128);
2768 FMOVCC(Q, 2);
2769 break;
2770 case 0x0c1: /* V9 fmovscc %fcc3 */
2771 FMOVCC(F, 3);
2772 break;
2773 case 0x0c2: /* V9 fmovdcc %fcc3 */
2774 FMOVCC(D, 3);
2775 break;
2776 case 0x0c3: /* V9 fmovqcc %fcc3 */
2777 CHECK_FPU_FEATURE(dc, FLOAT128);
2778 FMOVCC(Q, 3);
2779 break;
2780 #undef FMOVCC
2781 #define FMOVCC(size_FDQ, icc) \
2782 { \
2783 TCGv r_cond; \
2784 int l1; \
2785 \
2786 l1 = gen_new_label(); \
2787 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2788 cond = GET_FIELD_SP(insn, 14, 17); \
2789 gen_cond(r_cond, icc, cond); \
2790 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2791 0, l1); \
2792 glue(glue(gen_op_load_fpr_, size_FDQ), T0) \
2793 (glue(size_FDQ, FPREG(rs2))); \
2794 glue(glue(gen_op_store_, size_FDQ), T0_fpr) \
2795 (glue(size_FDQ, FPREG(rd))); \
2796 gen_set_label(l1); \
2797 tcg_temp_free(r_cond); \
2798 }
2799
2800 case 0x101: /* V9 fmovscc %icc */
2801 FMOVCC(F, 0);
2802 break;
2803 case 0x102: /* V9 fmovdcc %icc */
2804 FMOVCC(D, 0); break;
2805 case 0x103: /* V9 fmovqcc %icc */
2806 CHECK_FPU_FEATURE(dc, FLOAT128);
2807 FMOVCC(Q, 0);
2808 break;
2809 case 0x181: /* V9 fmovscc %xcc */
2810 FMOVCC(F, 1);
2811 break;
2812 case 0x182: /* V9 fmovdcc %xcc */
2813 FMOVCC(D, 1);
2814 break;
2815 case 0x183: /* V9 fmovqcc %xcc */
2816 CHECK_FPU_FEATURE(dc, FLOAT128);
2817 FMOVCC(Q, 1);
2818 break;
2819 #undef FMOVCC
2820 #endif
2821 case 0x51: /* fcmps, V9 %fcc */
2822 gen_op_load_fpr_FT0(rs1);
2823 gen_op_load_fpr_FT1(rs2);
2824 gen_op_fcmps(rd & 3);
2825 break;
2826 case 0x52: /* fcmpd, V9 %fcc */
2827 gen_op_load_fpr_DT0(DFPREG(rs1));
2828 gen_op_load_fpr_DT1(DFPREG(rs2));
2829 gen_op_fcmpd(rd & 3);
2830 break;
2831 case 0x53: /* fcmpq, V9 %fcc */
2832 CHECK_FPU_FEATURE(dc, FLOAT128);
2833 gen_op_load_fpr_QT0(QFPREG(rs1));
2834 gen_op_load_fpr_QT1(QFPREG(rs2));
2835 gen_op_fcmpq(rd & 3);
2836 break;
2837 case 0x55: /* fcmpes, V9 %fcc */
2838 gen_op_load_fpr_FT0(rs1);
2839 gen_op_load_fpr_FT1(rs2);
2840 gen_op_fcmpes(rd & 3);
2841 break;
2842 case 0x56: /* fcmped, V9 %fcc */
2843 gen_op_load_fpr_DT0(DFPREG(rs1));
2844 gen_op_load_fpr_DT1(DFPREG(rs2));
2845 gen_op_fcmped(rd & 3);
2846 break;
2847 case 0x57: /* fcmpeq, V9 %fcc */
2848 CHECK_FPU_FEATURE(dc, FLOAT128);
2849 gen_op_load_fpr_QT0(QFPREG(rs1));
2850 gen_op_load_fpr_QT1(QFPREG(rs2));
2851 gen_op_fcmpeq(rd & 3);
2852 break;
2853 default:
2854 goto illegal_insn;
2855 }
2856 } else if (xop == 0x2) {
2857 // clr/mov shortcut
2858
2859 rs1 = GET_FIELD(insn, 13, 17);
2860 if (rs1 == 0) {
2861 // or %g0, x, y -> mov T0, x; mov y, T0
2862 if (IS_IMM) { /* immediate */
2863 TCGv r_const;
2864
2865 rs2 = GET_FIELDs(insn, 19, 31);
2866 r_const = tcg_const_tl((int)rs2);
2867 gen_movl_TN_reg(rd, r_const);
2868 tcg_temp_free(r_const);
2869 } else { /* register */
2870 rs2 = GET_FIELD(insn, 27, 31);
2871 gen_movl_reg_TN(rs2, cpu_dst);
2872 gen_movl_TN_reg(rd, cpu_dst);
2873 }
2874 } else {
2875 cpu_src1 = get_src1(insn, cpu_src1);
2876 if (IS_IMM) { /* immediate */
2877 rs2 = GET_FIELDs(insn, 19, 31);
2878 tcg_gen_ori_tl(cpu_dst, cpu_src1, (int)rs2);
2879 gen_movl_TN_reg(rd, cpu_dst);
2880 } else { /* register */
2881 // or x, %g0, y -> mov T1, x; mov y, T1
2882 rs2 = GET_FIELD(insn, 27, 31);
2883 if (rs2 != 0) {
2884 gen_movl_reg_TN(rs2, cpu_src2);
2885 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
2886 gen_movl_TN_reg(rd, cpu_dst);
2887 } else
2888 gen_movl_TN_reg(rd, cpu_src1);
2889 }
2890 }
2891 #ifdef TARGET_SPARC64
2892 } else if (xop == 0x25) { /* sll, V9 sllx */
2893 cpu_src1 = get_src1(insn, cpu_src1);
2894 if (IS_IMM) { /* immediate */
2895 rs2 = GET_FIELDs(insn, 20, 31);
2896 if (insn & (1 << 12)) {
2897 tcg_gen_shli_i64(cpu_dst, cpu_src1, rs2 & 0x3f);
2898 } else {
2899 tcg_gen_shli_i64(cpu_dst, cpu_src1, rs2 & 0x1f);
2900 }
2901 } else { /* register */
2902 rs2 = GET_FIELD(insn, 27, 31);
2903 gen_movl_reg_TN(rs2, cpu_src2);
2904 if (insn & (1 << 12)) {
2905 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
2906 } else {
2907 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
2908 }
2909 tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
2910 }
2911 gen_movl_TN_reg(rd, cpu_dst);
2912 } else if (xop == 0x26) { /* srl, V9 srlx */
2913 cpu_src1 = get_src1(insn, cpu_src1);
2914 if (IS_IMM) { /* immediate */
2915 rs2 = GET_FIELDs(insn, 20, 31);
2916 if (insn & (1 << 12)) {
2917 tcg_gen_shri_i64(cpu_dst, cpu_src1, rs2 & 0x3f);
2918 } else {
2919 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
2920 tcg_gen_shri_i64(cpu_dst, cpu_dst, rs2 & 0x1f);
2921 }
2922 } else { /* register */
2923 rs2 = GET_FIELD(insn, 27, 31);
2924 gen_movl_reg_TN(rs2, cpu_src2);
2925 if (insn & (1 << 12)) {
2926 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
2927 tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
2928 } else {
2929 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
2930 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
2931 tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
2932 }
2933 }
2934 gen_movl_TN_reg(rd, cpu_dst);
2935 } else if (xop == 0x27) { /* sra, V9 srax */
2936 cpu_src1 = get_src1(insn, cpu_src1);
2937 if (IS_IMM) { /* immediate */
2938 rs2 = GET_FIELDs(insn, 20, 31);
2939 if (insn & (1 << 12)) {
2940 tcg_gen_sari_i64(cpu_dst, cpu_src1, rs2 & 0x3f);
2941 } else {
2942 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
2943 tcg_gen_ext_i32_i64(cpu_dst, cpu_dst);
2944 tcg_gen_sari_i64(cpu_dst, cpu_dst, rs2 & 0x1f);
2945 }
2946 } else { /* register */
2947 rs2 = GET_FIELD(insn, 27, 31);
2948 gen_movl_reg_TN(rs2, cpu_src2);
2949 if (insn & (1 << 12)) {
2950 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
2951 tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
2952 } else {
2953 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
2954 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
2955 tcg_gen_ext_i32_i64(cpu_dst, cpu_dst);
2956 tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
2957 }
2958 }
2959 gen_movl_TN_reg(rd, cpu_dst);
2960 #endif
2961 } else if (xop < 0x36) {
2962 cpu_src1 = get_src1(insn, cpu_src1);
2963 cpu_src2 = get_src2(insn, cpu_src2);
2964 if (xop < 0x20) {
2965 switch (xop & ~0x10) {
2966 case 0x0: /* add */
2967 if (xop & 0x10)
2968 gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
2969 else
2970 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
2971 break;
2972 case 0x1: /* and */
2973 tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
2974 if (xop & 0x10)
2975 gen_op_logic_cc(cpu_dst);
2976 break;
2977 case 0x2: /* or */
2978 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
2979 if (xop & 0x10)
2980 gen_op_logic_cc(cpu_dst);
2981 break;
2982 case 0x3: /* xor */
2983 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
2984 if (xop & 0x10)
2985 gen_op_logic_cc(cpu_dst);
2986 break;
2987 case 0x4: /* sub */
2988 if (xop & 0x10)
2989 gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
2990 else
2991 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
2992 break;
2993 case 0x5: /* andn */
2994 tcg_gen_xori_tl(cpu_tmp0, cpu_src2, -1);
2995 tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_tmp0);
2996 if (xop & 0x10)
2997 gen_op_logic_cc(cpu_dst);
2998 break;
2999 case 0x6: /* orn */
3000 tcg_gen_xori_tl(cpu_tmp0, cpu_src2, -1);
3001 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_tmp0);
3002 if (xop & 0x10)
3003 gen_op_logic_cc(cpu_dst);
3004 break;
3005 case 0x7: /* xnor */
3006 tcg_gen_xori_tl(cpu_tmp0, cpu_src2, -1);
3007 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_tmp0);
3008 if (xop & 0x10)
3009 gen_op_logic_cc(cpu_dst);
3010 break;
3011 case 0x8: /* addx */
3012 if (xop & 0x10)
3013 gen_op_addx_cc(cpu_dst, cpu_src1, cpu_src2);
3014 else {
3015 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3016 tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
3017 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_tmp0);
3018 }
3019 break;
3020 #ifdef TARGET_SPARC64
3021 case 0x9: /* V9 mulx */
3022 tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
3023 break;
3024 #endif
3025 case 0xa: /* umul */
3026 CHECK_IU_FEATURE(dc, MUL);
3027 gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
3028 if (xop & 0x10)
3029 gen_op_logic_cc(cpu_dst);
3030 break;
3031 case 0xb: /* smul */
3032 CHECK_IU_FEATURE(dc, MUL);
3033 gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
3034 if (xop & 0x10)
3035 gen_op_logic_cc(cpu_dst);
3036 break;
3037 case 0xc: /* subx */
3038 if (xop & 0x10)
3039 gen_op_subx_cc(cpu_dst, cpu_src1, cpu_src2);
3040 else {
3041 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3042 tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
3043 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_tmp0);
3044 }
3045 break;
3046 #ifdef TARGET_SPARC64
3047 case 0xd: /* V9 udivx */
3048 tcg_gen_mov_tl(cpu_cc_src, cpu_src1);
3049 tcg_gen_mov_tl(cpu_cc_src2, cpu_src2);
3050 gen_trap_ifdivzero_tl(cpu_cc_src2);
3051 tcg_gen_divu_i64(cpu_dst, cpu_cc_src, cpu_cc_src2);
3052 break;
3053 #endif
3054 case 0xe: /* udiv */
3055 CHECK_IU_FEATURE(dc, DIV);
3056 tcg_gen_helper_1_2(helper_udiv, cpu_dst, cpu_src1,
3057 cpu_src2);
3058 if (xop & 0x10)
3059 gen_op_div_cc(cpu_dst);
3060 break;
3061 case 0xf: /* sdiv */
3062 CHECK_IU_FEATURE(dc, DIV);
3063 tcg_gen_helper_1_2(helper_sdiv, cpu_dst, cpu_src1,
3064 cpu_src2);
3065 if (xop & 0x10)
3066 gen_op_div_cc(cpu_dst);
3067 break;
3068 default:
3069 goto illegal_insn;
3070 }
3071 gen_movl_TN_reg(rd, cpu_dst);
3072 } else {
3073 switch (xop) {
3074 case 0x20: /* taddcc */
3075 gen_op_tadd_cc(cpu_dst, cpu_src1, cpu_src2);
3076 gen_movl_TN_reg(rd, cpu_dst);
3077 break;
3078 case 0x21: /* tsubcc */
3079 gen_op_tsub_cc(cpu_dst, cpu_src1, cpu_src2);
3080 gen_movl_TN_reg(rd, cpu_dst);
3081 break;
3082 case 0x22: /* taddcctv */
3083 save_state(dc, cpu_cond);
3084 gen_op_tadd_ccTV(cpu_dst, cpu_src1, cpu_src2);
3085 gen_movl_TN_reg(rd, cpu_dst);
3086 break;
3087 case 0x23: /* tsubcctv */
3088 save_state(dc, cpu_cond);
3089 gen_op_tsub_ccTV(cpu_dst, cpu_src1, cpu_src2);
3090 gen_movl_TN_reg(rd, cpu_dst);
3091 break;
3092 case 0x24: /* mulscc */
3093 gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
3094 gen_movl_TN_reg(rd, cpu_dst);
3095 break;
3096 #ifndef TARGET_SPARC64
3097 case 0x25: /* sll */
3098 if (IS_IMM) { /* immediate */
3099 rs2 = GET_FIELDs(insn, 20, 31);
3100 tcg_gen_shli_tl(cpu_dst, cpu_src1, rs2 & 0x1f);
3101 } else { /* register */
3102 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3103 tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
3104 }
3105 gen_movl_TN_reg(rd, cpu_dst);
3106 break;
3107 case 0x26: /* srl */
3108 if (IS_IMM) { /* immediate */
3109 rs2 = GET_FIELDs(insn, 20, 31);
3110 tcg_gen_shri_tl(cpu_dst, cpu_src1, rs2 & 0x1f);
3111 } else { /* register */
3112 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3113 tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
3114 }
3115 gen_movl_TN_reg(rd, cpu_dst);
3116 break;
3117 case 0x27: /* sra */
3118 if (IS_IMM) { /* immediate */
3119 rs2 = GET_FIELDs(insn, 20, 31);
3120 tcg_gen_sari_tl(cpu_dst, cpu_src1, rs2 & 0x1f);
3121 } else { /* register */
3122 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3123 tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
3124 }
3125 gen_movl_TN_reg(rd, cpu_dst);
3126 break;
3127 #endif
3128 case 0x30: /* wry, V9 wrasr */
3129 {
3130 switch(rd) {
3131 case 0: /* wry */
3132 tcg_gen_xor_tl(cpu_y, cpu_src1, cpu_src2);
3133 break;
3134 #ifndef TARGET_SPARC64
3135 case 0x01 ... 0x0f: /* undefined in the
3136 SPARCv8 manual, nop
3137 on the microSPARC
3138 II */
3139 case 0x10 ... 0x1f: /* implementation-dependent
3140 in the SPARCv8
3141 manual, nop on the
3142 microSPARC II */
3143 break;
3144 #else
3145 case 0x2: /* V9 wrccr */
3146 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3147 tcg_gen_helper_0_1(helper_wrccr, cpu_dst);
3148 break;
3149 case 0x3: /* V9 wrasi */
3150 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3151 tcg_gen_trunc_tl_i32(cpu_asi, cpu_dst);
3152 break;
3153 case 0x6: /* V9 wrfprs */
3154 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3155 tcg_gen_trunc_tl_i32(cpu_fprs, cpu_dst);
3156 save_state(dc, cpu_cond);
3157 gen_op_next_insn();
3158 tcg_gen_exit_tb(0);
3159 dc->is_br = 1;
3160 break;
3161 case 0xf: /* V9 sir, nop if user */
3162 #if !defined(CONFIG_USER_ONLY)
3163 if (supervisor(dc))
3164 ; // XXX
3165 #endif
3166 break;
3167 case 0x13: /* Graphics Status */
3168 if (gen_trap_ifnofpu(dc, cpu_cond))
3169 goto jmp_insn;
3170 tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
3171 break;
3172 case 0x17: /* Tick compare */
3173 #if !defined(CONFIG_USER_ONLY)
3174 if (!supervisor(dc))
3175 goto illegal_insn;
3176 #endif
3177 {
3178 TCGv r_tickptr;
3179
3180 tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
3181 cpu_src2);
3182 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
3183 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3184 offsetof(CPUState, tick));
3185 tcg_gen_helper_0_2(helper_tick_set_limit,
3186 r_tickptr, cpu_tick_cmpr);
3187 tcg_temp_free(r_tickptr);
3188 }
3189 break;
3190 case 0x18: /* System tick */
3191 #if !defined(CONFIG_USER_ONLY)
3192 if (!supervisor(dc))
3193 goto illegal_insn;
3194 #endif
3195 {
3196 TCGv r_tickptr;
3197
3198 tcg_gen_xor_tl(cpu_dst, cpu_src1,
3199 cpu_src2);
3200 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
3201 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3202 offsetof(CPUState, stick));
3203 tcg_gen_helper_0_2(helper_tick_set_count,
3204 r_tickptr, cpu_dst);
3205 tcg_temp_free(r_tickptr);
3206 }
3207 break;
3208 case 0x19: /* System tick compare */
3209 #if !defined(CONFIG_USER_ONLY)
3210 if (!supervisor(dc))
3211 goto illegal_insn;
3212 #endif
3213 {
3214 TCGv r_tickptr;
3215
3216 tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
3217 cpu_src2);
3218 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
3219 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3220 offsetof(CPUState, stick));
3221 tcg_gen_helper_0_2(helper_tick_set_limit,
3222 r_tickptr, cpu_stick_cmpr);
3223 tcg_temp_free(r_tickptr);
3224 }
3225 break;
3226
3227 case 0x10: /* Performance Control */
3228 case 0x11: /* Performance Instrumentation
3229 Counter */
3230 case 0x12: /* Dispatch Control */
3231 case 0x14: /* Softint set */
3232 case 0x15: /* Softint clear */
3233 case 0x16: /* Softint write */
3234 #endif
3235 default:
3236 goto illegal_insn;
3237 }
3238 }
3239 break;
3240 #if !defined(CONFIG_USER_ONLY)
3241 case 0x31: /* wrpsr, V9 saved, restored */
3242 {
3243 if (!supervisor(dc))
3244 goto priv_insn;
3245 #ifdef TARGET_SPARC64
3246 switch (rd) {
3247 case 0:
3248 tcg_gen_helper_0_0(helper_saved);
3249 break;
3250 case 1:
3251 tcg_gen_helper_0_0(helper_restored);
3252 break;
3253 case 2: /* UA2005 allclean */
3254 case 3: /* UA2005 otherw */
3255 case 4: /* UA2005 normalw */
3256 case 5: /* UA2005 invalw */
3257 // XXX
3258 default:
3259 goto illegal_insn;
3260 }
3261 #else
3262 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3263 tcg_gen_helper_0_1(helper_wrpsr, cpu_dst);
3264 save_state(dc, cpu_cond);
3265 gen_op_next_insn();
3266 tcg_gen_exit_tb(0);
3267 dc->is_br = 1;
3268 #endif
3269 }
3270 break;
3271 case 0x32: /* wrwim, V9 wrpr */
3272 {
3273 if (!supervisor(dc))
3274 goto priv_insn;
3275 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3276 #ifdef TARGET_SPARC64
3277 switch (rd) {
3278 case 0: // tpc
3279 {
3280 TCGv r_tsptr;
3281
3282 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
3283 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3284 offsetof(CPUState, tsptr));
3285 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3286 offsetof(trap_state, tpc));
3287 tcg_temp_free(r_tsptr);
3288 }
3289 break;
3290 case 1: // tnpc
3291 {
3292 TCGv r_tsptr;
3293
3294 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
3295 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3296 offsetof(CPUState, tsptr));
3297 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3298 offsetof(trap_state, tnpc));
3299 tcg_temp_free(r_tsptr);
3300 }
3301 break;
3302 case 2: // tstate
3303 {
3304 TCGv r_tsptr;
3305
3306 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
3307 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3308 offsetof(CPUState, tsptr));
3309 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3310 offsetof(trap_state,
3311 tstate));
3312 tcg_temp_free(r_tsptr);
3313 }
3314 break;
3315 case 3: // tt
3316 {
3317 TCGv r_tsptr;
3318
3319 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
3320 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3321 offsetof(CPUState, tsptr));
3322 tcg_gen_st_i32(cpu_tmp0, r_tsptr,
3323 offsetof(trap_state, tt));
3324 tcg_temp_free(r_tsptr);
3325 }
3326 break;
3327 case 4: // tick
3328 {
3329 TCGv r_tickptr;
3330
3331 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
3332 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3333 offsetof(CPUState, tick));
3334 tcg_gen_helper_0_2(helper_tick_set_count,
3335 r_tickptr, cpu_tmp0);
3336 tcg_temp_free(r_tickptr);
3337 }
3338 break;
3339 case 5: // tba
3340 tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
3341 break;
3342 case 6: // pstate
3343 save_state(dc, cpu_cond);
3344 tcg_gen_helper_0_1(helper_wrpstate, cpu_tmp0);
3345 gen_op_next_insn();
3346 tcg_gen_exit_tb(0);
3347 dc->is_br = 1;
3348 break;
3349 case 7: // tl
3350 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3351 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3352 offsetof(CPUSPARCState, tl));
3353 break;
3354 case 8: // pil
3355 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3356 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3357 offsetof(CPUSPARCState,
3358 psrpil));
3359 break;
3360 case 9: // cwp
3361 tcg_gen_helper_0_1(helper_wrcwp, cpu_tmp0);
3362 break;
3363 case 10: // cansave
3364 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3365 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3366 offsetof(CPUSPARCState,
3367 cansave));
3368 break;
3369 case 11: // canrestore
3370 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3371 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3372 offsetof(CPUSPARCState,
3373 canrestore));
3374 break;
3375 case 12: // cleanwin
3376 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3377 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3378 offsetof(CPUSPARCState,
3379 cleanwin));
3380 break;
3381 case 13: // otherwin
3382 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3383 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3384 offsetof(CPUSPARCState,
3385 otherwin));
3386 break;
3387 case 14: // wstate
3388 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3389 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3390 offsetof(CPUSPARCState,
3391 wstate));
3392 break;
3393 case 16: // UA2005 gl
3394 CHECK_IU_FEATURE(dc, GL);
3395 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3396 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3397 offsetof(CPUSPARCState, gl));
3398 break;
3399 case 26: // UA2005 strand status
3400 CHECK_IU_FEATURE(dc, HYPV);
3401 if (!hypervisor(dc))
3402 goto priv_insn;
3403 tcg_gen_trunc_tl_i32(cpu_ssr, cpu_tmp0);
3404 break;
3405 default:
3406 goto illegal_insn;
3407 }
3408 #else
3409 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3410 if (dc->def->nwindows != 32)
3411 tcg_gen_andi_tl(cpu_tmp32, cpu_tmp32,
3412 (1 << dc->def->nwindows) - 1);
3413 tcg_gen_mov_i32(cpu_wim, cpu_tmp32);
3414 #endif
3415 }
3416 break;
3417 case 0x33: /* wrtbr, UA2005 wrhpr */
3418 {
3419 #ifndef TARGET_SPARC64
3420 if (!supervisor(dc))
3421 goto priv_insn;
3422 tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
3423 #else
3424 CHECK_IU_FEATURE(dc, HYPV);
3425 if (!hypervisor(dc))
3426 goto priv_insn;
3427 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3428 switch (rd) {
3429 case 0: // hpstate
3430 // XXX gen_op_wrhpstate();
3431 save_state(dc, cpu_cond);
3432 gen_op_next_insn();
3433 tcg_gen_exit_tb(0);
3434 dc->is_br = 1;
3435 break;
3436 case 1: // htstate
3437 // XXX gen_op_wrhtstate();
3438 break;
3439 case 3: // hintp
3440 tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
3441 break;
3442 case 5: // htba
3443 tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
3444 break;
3445 case 31: // hstick_cmpr
3446 {
3447 TCGv r_tickptr;
3448
3449 tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
3450 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
3451 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3452 offsetof(CPUState, hstick));
3453 tcg_gen_helper_0_2(helper_tick_set_limit,
3454 r_tickptr, cpu_hstick_cmpr);
3455 tcg_temp_free(r_tickptr);
3456 }
3457 break;
3458 case 6: // hver readonly
3459 default:
3460 goto illegal_insn;
3461 }
3462 #endif
3463 }
3464 break;
3465 #endif
3466 #ifdef TARGET_SPARC64
3467 case 0x2c: /* V9 movcc */
3468 {
3469 int cc = GET_FIELD_SP(insn, 11, 12);
3470 int cond = GET_FIELD_SP(insn, 14, 17);
3471 TCGv r_cond;
3472 int l1;
3473
3474 r_cond = tcg_temp_new(TCG_TYPE_TL);
3475 if (insn & (1 << 18)) {
3476 if (cc == 0)
3477 gen_cond(r_cond, 0, cond);
3478 else if (cc == 2)
3479 gen_cond(r_cond, 1, cond);
3480 else
3481 goto illegal_insn;
3482 } else {
3483 gen_fcond(r_cond, cc, cond);
3484 }
3485
3486 l1 = gen_new_label();
3487
3488 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
3489 if (IS_IMM) { /* immediate */
3490 TCGv r_const;
3491
3492 rs2 = GET_FIELD_SPs(insn, 0, 10);
3493 r_const = tcg_const_tl((int)rs2);
3494 gen_movl_TN_reg(rd, r_const);
3495 tcg_temp_free(r_const);
3496 } else {
3497 rs2 = GET_FIELD_SP(insn, 0, 4);
3498 gen_movl_reg_TN(rs2, cpu_tmp0);
3499 gen_movl_TN_reg(rd, cpu_tmp0);
3500 }
3501 gen_set_label(l1);
3502 tcg_temp_free(r_cond);
3503 break;
3504 }
3505 case 0x2d: /* V9 sdivx */
3506 gen_op_sdivx(cpu_dst, cpu_src1, cpu_src2);
3507 gen_movl_TN_reg(rd, cpu_dst);
3508 break;
3509 case 0x2e: /* V9 popc */
3510 {
3511 cpu_src2 = get_src2(insn, cpu_src2);
3512 tcg_gen_helper_1_1(helper_popc, cpu_dst,
3513 cpu_src2);
3514 gen_movl_TN_reg(rd, cpu_dst); break;
3515 }
3516 case 0x2f: /* V9 movr */
3517 {
3518 int cond = GET_FIELD_SP(insn, 10, 12);
3519 int l1;
3520
3521 cpu_src1 = get_src1(insn, cpu_src1);
3522
3523 l1 = gen_new_label();
3524
3525 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond],
3526 cpu_src1, 0, l1);
3527 if (IS_IMM) { /* immediate */
3528 TCGv r_const;
3529
3530 rs2 = GET_FIELD_SPs(insn, 0, 9);
3531 r_const = tcg_const_tl((int)rs2);
3532 gen_movl_TN_reg(rd, r_const);
3533 tcg_temp_free(r_const);
3534 } else {
3535 rs2 = GET_FIELD_SP(insn, 0, 4);
3536 gen_movl_reg_TN(rs2, cpu_tmp0);
3537 gen_movl_TN_reg(rd, cpu_tmp0);
3538 }
3539 gen_set_label(l1);
3540 break;
3541 }
3542 #endif
3543 default:
3544 goto illegal_insn;
3545 }
3546 }
3547 } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
3548 #ifdef TARGET_SPARC64
3549 int opf = GET_FIELD_SP(insn, 5, 13);
3550 rs1 = GET_FIELD(insn, 13, 17);
3551 rs2 = GET_FIELD(insn, 27, 31);
3552 if (gen_trap_ifnofpu(dc, cpu_cond))
3553 goto jmp_insn;
3554
3555 switch (opf) {
3556 case 0x000: /* VIS I edge8cc */
3557 case 0x001: /* VIS II edge8n */
3558 case 0x002: /* VIS I edge8lcc */
3559 case 0x003: /* VIS II edge8ln */
3560 case 0x004: /* VIS I edge16cc */
3561 case 0x005: /* VIS II edge16n */
3562 case 0x006: /* VIS I edge16lcc */
3563 case 0x007: /* VIS II edge16ln */
3564 case 0x008: /* VIS I edge32cc */
3565 case 0x009: /* VIS II edge32n */
3566 case 0x00a: /* VIS I edge32lcc */
3567 case 0x00b: /* VIS II edge32ln */
3568 // XXX
3569 goto illegal_insn;
3570 case 0x010: /* VIS I array8 */
3571 CHECK_FPU_FEATURE(dc, VIS1);
3572 cpu_src1 = get_src1(insn, cpu_src1);
3573 gen_movl_reg_TN(rs2, cpu_src2);
3574 tcg_gen_helper_1_2(helper_array8, cpu_dst, cpu_src1,
3575 cpu_src2);
3576 gen_movl_TN_reg(rd, cpu_dst);
3577 break;
3578 case 0x012: /* VIS I array16 */
3579 CHECK_FPU_FEATURE(dc, VIS1);
3580 cpu_src1 = get_src1(insn, cpu_src1);
3581 gen_movl_reg_TN(rs2, cpu_src2);
3582 tcg_gen_helper_1_2(helper_array8, cpu_dst, cpu_src1,
3583 cpu_src2);
3584 tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
3585 gen_movl_TN_reg(rd, cpu_dst);
3586 break;
3587 case 0x014: /* VIS I array32 */
3588 CHECK_FPU_FEATURE(dc, VIS1);
3589 cpu_src1 = get_src1(insn, cpu_src1);
3590 gen_movl_reg_TN(rs2, cpu_src2);
3591 tcg_gen_helper_1_2(helper_array8, cpu_dst, cpu_src1,
3592 cpu_src2);
3593 tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
3594 gen_movl_TN_reg(rd, cpu_dst);
3595 break;
3596 case 0x018: /* VIS I alignaddr */
3597 CHECK_FPU_FEATURE(dc, VIS1);
3598 cpu_src1 = get_src1(insn, cpu_src1);
3599 gen_movl_reg_TN(rs2, cpu_src2);
3600 tcg_gen_helper_1_2(helper_alignaddr, cpu_dst, cpu_src1,
3601 cpu_src2);
3602 gen_movl_TN_reg(rd, cpu_dst);
3603 break;
3604 case 0x019: /* VIS II bmask */
3605 case 0x01a: /* VIS I alignaddrl */
3606 // XXX
3607 goto illegal_insn;
3608 case 0x020: /* VIS I fcmple16 */
3609 CHECK_FPU_FEATURE(dc, VIS1);
3610 gen_op_load_fpr_DT0(DFPREG(rs1));
3611 gen_op_load_fpr_DT1(DFPREG(rs2));
3612 tcg_gen_helper_0_0(helper_fcmple16);
3613 gen_op_store_DT0_fpr(DFPREG(rd));
3614 break;
3615 case 0x022: /* VIS I fcmpne16 */
3616 CHECK_FPU_FEATURE(dc, VIS1);
3617 gen_op_load_fpr_DT0(DFPREG(rs1));
3618 gen_op_load_fpr_DT1(DFPREG(rs2));
3619 tcg_gen_helper_0_0(helper_fcmpne16);
3620 gen_op_store_DT0_fpr(DFPREG(rd));
3621 break;
3622 case 0x024: /* VIS I fcmple32 */
3623 CHECK_FPU_FEATURE(dc, VIS1);
3624 gen_op_load_fpr_DT0(DFPREG(rs1));
3625 gen_op_load_fpr_DT1(DFPREG(rs2));
3626 tcg_gen_helper_0_0(helper_fcmple32);
3627 gen_op_store_DT0_fpr(DFPREG(rd));
3628 break;
3629 case 0x026: /* VIS I fcmpne32 */
3630 CHECK_FPU_FEATURE(dc, VIS1);
3631 gen_op_load_fpr_DT0(DFPREG(rs1));
3632 gen_op_load_fpr_DT1(DFPREG(rs2));
3633 tcg_gen_helper_0_0(helper_fcmpne32);
3634 gen_op_store_DT0_fpr(DFPREG(rd));
3635 break;
3636 case 0x028: /* VIS I fcmpgt16 */
3637 CHECK_FPU_FEATURE(dc, VIS1);
3638 gen_op_load_fpr_DT0(DFPREG(rs1));
3639 gen_op_load_fpr_DT1(DFPREG(rs2));
3640 tcg_gen_helper_0_0(helper_fcmpgt16);
3641 gen_op_store_DT0_fpr(DFPREG(rd));
3642 break;
3643 case 0x02a: /* VIS I fcmpeq16 */
3644 CHECK_FPU_FEATURE(dc, VIS1);
3645 gen_op_load_fpr_DT0(DFPREG(rs1));
3646 gen_op_load_fpr_DT1(DFPREG(rs2));
3647 tcg_gen_helper_0_0(helper_fcmpeq16);
3648 gen_op_store_DT0_fpr(DFPREG(rd));
3649 break;
3650 case 0x02c: /* VIS I fcmpgt32 */
3651 CHECK_FPU_FEATURE(dc, VIS1);
3652 gen_op_load_fpr_DT0(DFPREG(rs1));
3653 gen_op_load_fpr_DT1(DFPREG(rs2));
3654 tcg_gen_helper_0_0(helper_fcmpgt32);
3655 gen_op_store_DT0_fpr(DFPREG(rd));
3656 break;
3657 case 0x02e: /* VIS I fcmpeq32 */
3658 CHECK_FPU_FEATURE(dc, VIS1);
3659 gen_op_load_fpr_DT0(DFPREG(rs1));
3660 gen_op_load_fpr_DT1(DFPREG(rs2));
3661 tcg_gen_helper_0_0(helper_fcmpeq32);
3662 gen_op_store_DT0_fpr(DFPREG(rd));
3663 break;
3664 case 0x031: /* VIS I fmul8x16 */
3665 CHECK_FPU_FEATURE(dc, VIS1);
3666 gen_op_load_fpr_DT0(DFPREG(rs1));
3667 gen_op_load_fpr_DT1(DFPREG(rs2));
3668 tcg_gen_helper_0_0(helper_fmul8x16);
3669 gen_op_store_DT0_fpr(DFPREG(rd));
3670 break;
3671 case 0x033: /* VIS I fmul8x16au */
3672 CHECK_FPU_FEATURE(dc, VIS1);
3673 gen_op_load_fpr_DT0(DFPREG(rs1));
3674 gen_op_load_fpr_DT1(DFPREG(rs2));
3675 tcg_gen_helper_0_0(helper_fmul8x16au);
3676 gen_op_store_DT0_fpr(DFPREG(rd));
3677 break;
3678 case 0x035: /* VIS I fmul8x16al */
3679 CHECK_FPU_FEATURE(dc, VIS1);
3680 gen_op_load_fpr_DT0(DFPREG(rs1));
3681 gen_op_load_fpr_DT1(DFPREG(rs2));
3682 tcg_gen_helper_0_0(helper_fmul8x16al);
3683 gen_op_store_DT0_fpr(DFPREG(rd));
3684 break;
3685 case 0x036: /* VIS I fmul8sux16 */
3686 CHECK_FPU_FEATURE(dc, VIS1);
3687 gen_op_load_fpr_DT0(DFPREG(rs1));
3688 gen_op_load_fpr_DT1(DFPREG(rs2));
3689 tcg_gen_helper_0_0(helper_fmul8sux16);
3690 gen_op_store_DT0_fpr(DFPREG(rd));
3691 break;
3692 case 0x037: /* VIS I fmul8ulx16 */
3693 CHECK_FPU_FEATURE(dc, VIS1);
3694 gen_op_load_fpr_DT0(DFPREG(rs1));
3695 gen_op_load_fpr_DT1(DFPREG(rs2));
3696 tcg_gen_helper_0_0(helper_fmul8ulx16);
3697 gen_op_store_DT0_fpr(DFPREG(rd));
3698 break;
3699 case 0x038: /* VIS I fmuld8sux16 */
3700 CHECK_FPU_FEATURE(dc, VIS1);
3701 gen_op_load_fpr_DT0(DFPREG(rs1));
3702 gen_op_load_fpr_DT1(DFPREG(rs2));
3703 tcg_gen_helper_0_0(helper_fmuld8sux16);
3704 gen_op_store_DT0_fpr(DFPREG(rd));
3705 break;
3706 case 0x039: /* VIS I fmuld8ulx16 */
3707 CHECK_FPU_FEATURE(dc, VIS1);
3708 gen_op_load_fpr_DT0(DFPREG(rs1));
3709 gen_op_load_fpr_DT1(DFPREG(rs2));
3710 tcg_gen_helper_0_0(helper_fmuld8ulx16);
3711 gen_op_store_DT0_fpr(DFPREG(rd));
3712 break;
3713 case 0x03a: /* VIS I fpack32 */
3714 case 0x03b: /* VIS I fpack16 */
3715 case 0x03d: /* VIS I fpackfix */
3716 case 0x03e: /* VIS I pdist */
3717 // XXX
3718 goto illegal_insn;
3719 case 0x048: /* VIS I faligndata */
3720 CHECK_FPU_FEATURE(dc, VIS1);
3721 gen_op_load_fpr_DT0(DFPREG(rs1));
3722 gen_op_load_fpr_DT1(DFPREG(rs2));
3723 tcg_gen_helper_0_0(helper_faligndata);
3724 gen_op_store_DT0_fpr(DFPREG(rd));
3725 break;
3726 case 0x04b: /* VIS I fpmerge */
3727 CHECK_FPU_FEATURE(dc, VIS1);
3728 gen_op_load_fpr_DT0(DFPREG(rs1));
3729 gen_op_load_fpr_DT1(DFPREG(rs2));
3730 tcg_gen_helper_0_0(helper_fpmerge);
3731 gen_op_store_DT0_fpr(DFPREG(rd));
3732 break;
3733 case 0x04c: /* VIS II bshuffle */
3734 // XXX
3735 goto illegal_insn;
3736 case 0x04d: /* VIS I fexpand */
3737 CHECK_FPU_FEATURE(dc, VIS1);
3738 gen_op_load_fpr_DT0(DFPREG(rs1));
3739 gen_op_load_fpr_DT1(DFPREG(rs2));
3740 tcg_gen_helper_0_0(helper_fexpand);
3741 gen_op_store_DT0_fpr(DFPREG(rd));
3742 break;
3743 case 0x050: /* VIS I fpadd16 */
3744 CHECK_FPU_FEATURE(dc, VIS1);
3745 gen_op_load_fpr_DT0(DFPREG(rs1));
3746 gen_op_load_fpr_DT1(DFPREG(rs2));
3747 tcg_gen_helper_0_0(helper_fpadd16);
3748 gen_op_store_DT0_fpr(DFPREG(rd));
3749 break;
3750 case 0x051: /* VIS I fpadd16s */
3751 CHECK_FPU_FEATURE(dc, VIS1);
3752 gen_op_load_fpr_FT0(rs1);
3753 gen_op_load_fpr_FT1(rs2);
3754 tcg_gen_helper_0_0(helper_fpadd16s);
3755 gen_op_store_FT0_fpr(rd);
3756 break;
3757 case 0x052: /* VIS I fpadd32 */
3758 CHECK_FPU_FEATURE(dc, VIS1);
3759 gen_op_load_fpr_DT0(DFPREG(rs1));
3760 gen_op_load_fpr_DT1(DFPREG(rs2));
3761 tcg_gen_helper_0_0(helper_fpadd32);
3762 gen_op_store_DT0_fpr(DFPREG(rd));
3763 break;
3764 case 0x053: /* VIS I fpadd32s */
3765 CHECK_FPU_FEATURE(dc, VIS1);
3766 gen_op_load_fpr_FT0(rs1);
3767 gen_op_load_fpr_FT1(rs2);
3768 tcg_gen_helper_0_0(helper_fpadd32s);
3769 gen_op_store_FT0_fpr(rd);
3770 break;
3771 case 0x054: /* VIS I fpsub16 */
3772 CHECK_FPU_FEATURE(dc, VIS1);
3773 gen_op_load_fpr_DT0(DFPREG(rs1));
3774 gen_op_load_fpr_DT1(DFPREG(rs2));
3775 tcg_gen_helper_0_0(helper_fpsub16);
3776 gen_op_store_DT0_fpr(DFPREG(rd));
3777 break;
3778 case 0x055: /* VIS I fpsub16s */
3779 CHECK_FPU_FEATURE(dc, VIS1);
3780 gen_op_load_fpr_FT0(rs1);
3781 gen_op_load_fpr_FT1(rs2);
3782 tcg_gen_helper_0_0(helper_fpsub16s);
3783 gen_op_store_FT0_fpr(rd);
3784 break;
3785 case 0x056: /* VIS I fpsub32 */
3786 CHECK_FPU_FEATURE(dc, VIS1);
3787 gen_op_load_fpr_DT0(DFPREG(rs1));
3788 gen_op_load_fpr_DT1(DFPREG(rs2));
3789 tcg_gen_helper_0_0(helper_fpsub32);
3790 gen_op_store_DT0_fpr(DFPREG(rd));
3791 break;
3792 case 0x057: /* VIS I fpsub32s */
3793 CHECK_FPU_FEATURE(dc, VIS1);
3794 gen_op_load_fpr_FT0(rs1);
3795 gen_op_load_fpr_FT1(rs2);
3796 tcg_gen_helper_0_0(helper_fpsub32s);
3797 gen_op_store_FT0_fpr(rd);
3798 break;
3799 case 0x060: /* VIS I fzero */
3800 CHECK_FPU_FEATURE(dc, VIS1);
3801 tcg_gen_helper_0_0(helper_movl_DT0_0);
3802 gen_op_store_DT0_fpr(DFPREG(rd));
3803 break;
3804 case 0x061: /* VIS I fzeros */
3805 CHECK_FPU_FEATURE(dc, VIS1);
3806 tcg_gen_helper_0_0(helper_movl_FT0_0);
3807 gen_op_store_FT0_fpr(rd);
3808 break;
3809 case 0x062: /* VIS I fnor */
3810 CHECK_FPU_FEATURE(dc, VIS1);
3811 gen_op_load_fpr_DT0(DFPREG(rs1));
3812 gen_op_load_fpr_DT1(DFPREG(rs2));
3813 tcg_gen_helper_0_0(helper_fnor);
3814 gen_op_store_DT0_fpr(DFPREG(rd));
3815 break;
3816 case 0x063: /* VIS I fnors */
3817 CHECK_FPU_FEATURE(dc, VIS1);
3818 gen_op_load_fpr_FT0(rs1);
3819 gen_op_load_fpr_FT1(rs2);
3820 tcg_gen_helper_0_0(helper_fnors);
3821 gen_op_store_FT0_fpr(rd);
3822 break;
3823 case 0x064: /* VIS I fandnot2 */
3824 CHECK_FPU_FEATURE(dc, VIS1);
3825 gen_op_load_fpr_DT1(DFPREG(rs1));
3826 gen_op_load_fpr_DT0(DFPREG(rs2));
3827 tcg_gen_helper_0_0(helper_fandnot);
3828 gen_op_store_DT0_fpr(DFPREG(rd));
3829 break;
3830 case 0x065: /* VIS I fandnot2s */
3831 CHECK_FPU_FEATURE(dc, VIS1);
3832 gen_op_load_fpr_FT1(rs1);
3833 gen_op_load_fpr_FT0(rs2);
3834 tcg_gen_helper_0_0(helper_fandnots);
3835 gen_op_store_FT0_fpr(rd);
3836 break;
3837 case 0x066: /* VIS I fnot2 */
3838 CHECK_FPU_FEATURE(dc, VIS1);
3839 gen_op_load_fpr_DT1(DFPREG(rs2));
3840 tcg_gen_helper_0_0(helper_fnot);
3841 gen_op_store_DT0_fpr(DFPREG(rd));
3842 break;
3843 case 0x067: /* VIS I fnot2s */
3844 CHECK_FPU_FEATURE(dc, VIS1);
3845 gen_op_load_fpr_FT1(rs2);
3846 tcg_gen_helper_0_0(helper_fnot);
3847 gen_op_store_FT0_fpr(rd);
3848 break;
3849 case 0x068: /* VIS I fandnot1 */
3850 CHECK_FPU_FEATURE(dc, VIS1);
3851 gen_op_load_fpr_DT0(DFPREG(rs1));
3852 gen_op_load_fpr_DT1(DFPREG(rs2));
3853 tcg_gen_helper_0_0(helper_fandnot);
3854 gen_op_store_DT0_fpr(DFPREG(rd));
3855 break;
3856 case 0x069: /* VIS I fandnot1s */
3857 CHECK_FPU_FEATURE(dc, VIS1);
3858 gen_op_load_fpr_FT0(rs1);
3859 gen_op_load_fpr_FT1(rs2);
3860 tcg_gen_helper_0_0(helper_fandnots);
3861 gen_op_store_FT0_fpr(rd);
3862 break;
3863 case 0x06a: /* VIS I fnot1 */
3864 CHECK_FPU_FEATURE(dc, VIS1);
3865 gen_op_load_fpr_DT1(DFPREG(rs1));
3866 tcg_gen_helper_0_0(helper_fnot);
3867 gen_op_store_DT0_fpr(DFPREG(rd));
3868 break;
3869 case 0x06b: /* VIS I fnot1s */
3870 CHECK_FPU_FEATURE(dc, VIS1);
3871 gen_op_load_fpr_FT1(rs1);
3872 tcg_gen_helper_0_0(helper_fnot);
3873 gen_op_store_FT0_fpr(rd);
3874 break;
3875 case 0x06c: /* VIS I fxor */
3876 CHECK_FPU_FEATURE(dc, VIS1);
3877 gen_op_load_fpr_DT0(DFPREG(rs1));
3878 gen_op_load_fpr_DT1(DFPREG(rs2));
3879 tcg_gen_helper_0_0(helper_fxor);
3880 gen_op_store_DT0_fpr(DFPREG(rd));
3881 break;
3882 case 0x06d: /* VIS I fxors */
3883 CHECK_FPU_FEATURE(dc, VIS1);
3884 gen_op_load_fpr_FT0(rs1);
3885 gen_op_load_fpr_FT1(rs2);
3886 tcg_gen_helper_0_0(helper_fxors);
3887 gen_op_store_FT0_fpr(rd);
3888 break;
3889 case 0x06e: /* VIS I fnand */
3890 CHECK_FPU_FEATURE(dc, VIS1);
3891 gen_op_load_fpr_DT0(DFPREG(rs1));
3892 gen_op_load_fpr_DT1(DFPREG(rs2));
3893 tcg_gen_helper_0_0(helper_fnand);
3894 gen_op_store_DT0_fpr(DFPREG(rd));
3895 break;
3896 case 0x06f: /* VIS I fnands */
3897 CHECK_FPU_FEATURE(dc, VIS1);
3898 gen_op_load_fpr_FT0(rs1);
3899 gen_op_load_fpr_FT1(rs2);
3900 tcg_gen_helper_0_0(helper_fnands);
3901 gen_op_store_FT0_fpr(rd);
3902 break;
3903 case 0x070: /* VIS I fand */
3904 CHECK_FPU_FEATURE(dc, VIS1);
3905 gen_op_load_fpr_DT0(DFPREG(rs1));
3906 gen_op_load_fpr_DT1(DFPREG(rs2));
3907 tcg_gen_helper_0_0(helper_fand);
3908 gen_op_store_DT0_fpr(DFPREG(rd));
3909 break;
3910 case 0x071: /* VIS I fands */
3911 CHECK_FPU_FEATURE(dc, VIS1);
3912 gen_op_load_fpr_FT0(rs1);
3913 gen_op_load_fpr_FT1(rs2);
3914 tcg_gen_helper_0_0(helper_fands);
3915 gen_op_store_FT0_fpr(rd);
3916 break;
3917 case 0x072: /* VIS I fxnor */
3918 CHECK_FPU_FEATURE(dc, VIS1);
3919 gen_op_load_fpr_DT0(DFPREG(rs1));
3920 gen_op_load_fpr_DT1(DFPREG(rs2));
3921 tcg_gen_helper_0_0(helper_fxnor);
3922 gen_op_store_DT0_fpr(DFPREG(rd));
3923 break;
3924 case 0x073: /* VIS I fxnors */
3925 CHECK_FPU_FEATURE(dc, VIS1);
3926 gen_op_load_fpr_FT0(rs1);
3927 gen_op_load_fpr_FT1(rs2);
3928 tcg_gen_helper_0_0(helper_fxnors);
3929 gen_op_store_FT0_fpr(rd);
3930 break;
3931 case 0x074: /* VIS I fsrc1 */
3932 CHECK_FPU_FEATURE(dc, VIS1);
3933 gen_op_load_fpr_DT0(DFPREG(rs1));
3934 gen_op_store_DT0_fpr(DFPREG(rd));
3935 break;
3936 case 0x075: /* VIS I fsrc1s */
3937 CHECK_FPU_FEATURE(dc, VIS1);
3938 gen_op_load_fpr_FT0(rs1);
3939 gen_op_store_FT0_fpr(rd);
3940 break;
3941 case 0x076: /* VIS I fornot2 */
3942 CHECK_FPU_FEATURE(dc, VIS1);
3943 gen_op_load_fpr_DT1(DFPREG(rs1));
3944 gen_op_load_fpr_DT0(DFPREG(rs2));
3945 tcg_gen_helper_0_0(helper_fornot);
3946 gen_op_store_DT0_fpr(DFPREG(rd));
3947 break;
3948 case 0x077: /* VIS I fornot2s */
3949 CHECK_FPU_FEATURE(dc, VIS1);
3950 gen_op_load_fpr_FT1(rs1);
3951 gen_op_load_fpr_FT0(rs2);
3952 tcg_gen_helper_0_0(helper_fornots);
3953 gen_op_store_FT0_fpr(rd);
3954 break;
3955 case 0x078: /* VIS I fsrc2 */
3956 CHECK_FPU_FEATURE(dc, VIS1);
3957 gen_op_load_fpr_DT0(DFPREG(rs2));
3958 gen_op_store_DT0_fpr(DFPREG(rd));
3959 break;
3960 case 0x079: /* VIS I fsrc2s */
3961 CHECK_FPU_FEATURE(dc, VIS1);
3962 gen_op_load_fpr_FT0(rs2);
3963 gen_op_store_FT0_fpr(rd);
3964 break;
3965 case 0x07a: /* VIS I fornot1 */
3966 CHECK_FPU_FEATURE(dc, VIS1);
3967 gen_op_load_fpr_DT0(DFPREG(rs1));
3968 gen_op_load_fpr_DT1(DFPREG(rs2));
3969 tcg_gen_helper_0_0(helper_fornot);
3970 gen_op_store_DT0_fpr(DFPREG(rd));
3971 break;
3972 case 0x07b: /* VIS I fornot1s */
3973 CHECK_FPU_FEATURE(dc, VIS1);
3974 gen_op_load_fpr_FT0(rs1);
3975 gen_op_load_fpr_FT1(rs2);
3976 tcg_gen_helper_0_0(helper_fornots);
3977 gen_op_store_FT0_fpr(rd);
3978 break;
3979 case 0x07c: /* VIS I for */
3980 CHECK_FPU_FEATURE(dc, VIS1);
3981 gen_op_load_fpr_DT0(DFPREG(rs1));
3982 gen_op_load_fpr_DT1(DFPREG(rs2));
3983 tcg_gen_helper_0_0(helper_for);
3984 gen_op_store_DT0_fpr(DFPREG(rd));
3985 break;
3986 case 0x07d: /* VIS I fors */
3987 CHECK_FPU_FEATURE(dc, VIS1);
3988 gen_op_load_fpr_FT0(rs1);
3989 gen_op_load_fpr_FT1(rs2);
3990 tcg_gen_helper_0_0(helper_fors);
3991 gen_op_store_FT0_fpr(rd);
3992 break;
3993 case 0x07e: /* VIS I fone */
3994 CHECK_FPU_FEATURE(dc, VIS1);
3995 tcg_gen_helper_0_0(helper_movl_DT0_1);
3996 gen_op_store_DT0_fpr(DFPREG(rd));
3997 break;
3998 case 0x07f: /* VIS I fones */
3999 CHECK_FPU_FEATURE(dc, VIS1);
4000 tcg_gen_helper_0_0(helper_movl_FT0_1);
4001 gen_op_store_FT0_fpr(rd);
4002 break;
4003 case 0x080: /* VIS I shutdown */
4004 case 0x081: /* VIS II siam */
4005 // XXX
4006 goto illegal_insn;
4007 default:
4008 goto illegal_insn;
4009 }
4010 #else
4011 goto ncp_insn;
4012 #endif
4013 } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
4014 #ifdef TARGET_SPARC64
4015 goto illegal_insn;
4016 #else
4017 goto ncp_insn;
4018 #endif
4019 #ifdef TARGET_SPARC64
4020 } else if (xop == 0x39) { /* V9 return */
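/* V9 return: compute the target from rs1 + simm13 (or rs1 + rs2), pop the
   register window as RESTORE does, check the target's alignment and branch
   to it through npc. */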
4021 TCGv r_const;
4022
4023 save_state(dc, cpu_cond);
4024 cpu_src1 = get_src1(insn, cpu_src1);
4025 if (IS_IMM) { /* immediate */
4026 rs2 = GET_FIELDs(insn, 19, 31);
4027 tcg_gen_addi_tl(cpu_dst, cpu_src1, (int)rs2);
4028 } else { /* register */
4029 rs2 = GET_FIELD(insn, 27, 31);
4030 if (rs2) {
4031 gen_movl_reg_TN(rs2, cpu_src2);
4032 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4033 } else
4034 tcg_gen_mov_tl(cpu_dst, cpu_src1);
4035 }
4036 tcg_gen_helper_0_0(helper_restore);
4037 gen_mov_pc_npc(dc, cpu_cond);
4038 r_const = tcg_const_i32(3);
4039 tcg_gen_helper_0_2(helper_check_align, cpu_dst, r_const);
4040 tcg_temp_free(r_const);
4041 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4042 dc->npc = DYNAMIC_PC;
4043 goto jmp_insn;
4044 #endif
4045 } else {
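/* For the remaining format-3 control ops the operand rs1 + simm13 (or
   rs1 + rs2) is computed into cpu_dst up front; jmpl, rett, flush, save,
   restore and the V9 done/retry are then dispatched on xop below. */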
4046 cpu_src1 = get_src1(insn, cpu_src1);
4047 if (IS_IMM) { /* immediate */
4048 rs2 = GET_FIELDs(insn, 19, 31);
4049 tcg_gen_addi_tl(cpu_dst, cpu_src1, (int)rs2);
4050 } else { /* register */
4051 rs2 = GET_FIELD(insn, 27, 31);
4052 if (rs2) {
4053 gen_movl_reg_TN(rs2, cpu_src2);
4054 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4055 } else
4056 tcg_gen_mov_tl(cpu_dst, cpu_src1);
4057 }
4058 switch (xop) {
4059 case 0x38: /* jmpl */
4060 {
4061 TCGv r_const;
4062
4063 r_const = tcg_const_tl(dc->pc);
4064 gen_movl_TN_reg(rd, r_const);
4065 tcg_temp_free(r_const);
4066 gen_mov_pc_npc(dc, cpu_cond);
4067 r_const = tcg_const_i32(3);
4068 tcg_gen_helper_0_2(helper_check_align, cpu_dst,
4069 r_const);
4070 tcg_temp_free(r_const);
4071 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4072 dc->npc = DYNAMIC_PC;
4073 }
4074 goto jmp_insn;
4075 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4076 case 0x39: /* rett, V9 return */
4077 {
4078 TCGv r_const;
4079
4080 if (!supervisor(dc))
4081 goto priv_insn;
4082 gen_mov_pc_npc(dc, cpu_cond);
4083 r_const = tcg_const_i32(3);
4084 tcg_gen_helper_0_2(helper_check_align, cpu_dst,
4085 r_const);
4086 tcg_temp_free(r_const);
4087 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4088 dc->npc = DYNAMIC_PC;
4089 tcg_gen_helper_0_0(helper_rett);
4090 }
4091 goto jmp_insn;
4092 #endif
4093 case 0x3b: /* flush */
4094 if (!((dc)->def->features & CPU_FEATURE_FLUSH))
4095 goto unimp_flush;
4096 tcg_gen_helper_0_1(helper_flush, cpu_dst);
4097 break;
4098 case 0x3c: /* save */
4099 save_state(dc, cpu_cond);
4100 tcg_gen_helper_0_0(helper_save);
4101 gen_movl_TN_reg(rd, cpu_dst);
4102 break;
4103 case 0x3d: /* restore */
4104 save_state(dc, cpu_cond);
4105 tcg_gen_helper_0_0(helper_restore);
4106 gen_movl_TN_reg(rd, cpu_dst);
4107 break;
4108 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
4109 case 0x3e: /* V9 done/retry */
4110 {
4111 switch (rd) {
4112 case 0:
4113 if (!supervisor(dc))
4114 goto priv_insn;
4115 dc->npc = DYNAMIC_PC;
4116 dc->pc = DYNAMIC_PC;
4117 tcg_gen_helper_0_0(helper_done);
4118 goto jmp_insn;
4119 case 1:
4120 if (!supervisor(dc))
4121 goto priv_insn;
4122 dc->npc = DYNAMIC_PC;
4123 dc->pc = DYNAMIC_PC;
4124 tcg_gen_helper_0_0(helper_retry);
4125 goto jmp_insn;
4126 default:
4127 goto illegal_insn;
4128 }
4129 }
4130 break;
4131 #endif
4132 default:
4133 goto illegal_insn;
4134 }
4135 }
4136 break;
4137 }
4138 break;
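/* Format 3: memory instructions.  The effective address is built into
   cpu_addr (rs1 + simm13 or rs1 + rs2; casa/casxa instead keep rs2 as the
   compare value), then the xop ranges below select integer loads, FP
   loads, integer stores or FP/ASI stores. */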
4139 case 3: /* load/store instructions */
4140 {
4141 unsigned int xop = GET_FIELD(insn, 7, 12);
4142
4143 cpu_src1 = get_src1(insn, cpu_src1);
4144 if (xop == 0x3c || xop == 0x3e) { // V9 casa/casxa
4145 rs2 = GET_FIELD(insn, 27, 31);
4146 gen_movl_reg_TN(rs2, cpu_src2);
4147 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4148 } else if (IS_IMM) { /* immediate */
4149 rs2 = GET_FIELDs(insn, 19, 31);
4150 tcg_gen_addi_tl(cpu_addr, cpu_src1, (int)rs2);
4151 } else { /* register */
4152 rs2 = GET_FIELD(insn, 27, 31);
4153 if (rs2 != 0) {
4154 gen_movl_reg_TN(rs2, cpu_src2);
4155 tcg_gen_add_tl(cpu_addr, cpu_src1, cpu_src2);
4156 } else
4157 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4158 }
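/* Integer loads, ldstub and swap: the loaded value is left in cpu_val and
   written back to rd after the switch, except for the ASI doubleword/FP
   forms which jump to skip_move. */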
4159 if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
4160 (xop > 0x17 && xop <= 0x1d ) ||
4161 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
4162 switch (xop) {
4163 case 0x0: /* load unsigned word */
4164 gen_address_mask(dc, cpu_addr);
4165 tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
4166 break;
4167 case 0x1: /* load unsigned byte */
4168 gen_address_mask(dc, cpu_addr);
4169 tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
4170 break;
4171 case 0x2: /* load unsigned halfword */
4172 gen_address_mask(dc, cpu_addr);
4173 tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
4174 break;
4175 case 0x3: /* load double word */
4176 if (rd & 1)
4177 goto illegal_insn;
4178 else {
4179 TCGv r_const;
4180
4181 save_state(dc, cpu_cond);
4182 r_const = tcg_const_i32(7);
4183 tcg_gen_helper_0_2(helper_check_align, cpu_addr,
4184 r_const); // XXX remove
4185 tcg_temp_free(r_const);
4186 gen_address_mask(dc, cpu_addr);
4187 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
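/* Split the doubleword: on big-endian SPARC the word at the load address
   is the high half and goes to the even register rd, while the word at
   address + 4 goes to the odd register rd + 1. */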
4188 tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
4189 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffffULL);
4190 gen_movl_TN_reg(rd + 1, cpu_tmp0);
4191 tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
4192 tcg_gen_trunc_i64_tl(cpu_val, cpu_tmp64);
4193 tcg_gen_andi_tl(cpu_val, cpu_val, 0xffffffffULL);
4194 }
4195 break;
4196 case 0x9: /* load signed byte */
4197 gen_address_mask(dc, cpu_addr);
4198 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4199 break;
4200 case 0xa: /* load signed halfword */
4201 gen_address_mask(dc, cpu_addr);
4202 tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
4203 break;
4207 4204 case 0xd: /* ldstub -- XXX: should be atomic */
4205 {
4206 TCGv r_const;
4207
4208 gen_address_mask(dc, cpu_addr);
4212 4209 tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
4210 r_const = tcg_const_tl(0xff);
4211 tcg_gen_qemu_st8(r_const, cpu_addr, dc->mem_idx);
4212 tcg_temp_free(r_const);
4213 }
4214 break;
4218 4215 case 0x0f: /* swap register with memory -- XXX: should also
4219 4216 be atomic */
4217 CHECK_IU_FEATURE(dc, SWAP);
4218 gen_movl_reg_TN(rd, cpu_val);
4219 gen_address_mask(dc, cpu_addr);
4220 tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
4221 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4222 tcg_gen_extu_i32_tl(cpu_val, cpu_tmp32);
4223 break;
4224 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4225 case 0x10: /* load word alternate */
4226 #ifndef TARGET_SPARC64
4227 if (IS_IMM)
4228 goto illegal_insn;
4229 if (!supervisor(dc))
4230 goto priv_insn;
4231 #endif
4232 save_state(dc, cpu_cond);
4233 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
4234 break;
4235 case 0x11: /* load unsigned byte alternate */
4236 #ifndef TARGET_SPARC64
4237 if (IS_IMM)
4238 goto illegal_insn;
4239 if (!supervisor(dc))
4240 goto priv_insn;
4241 #endif
4242 save_state(dc, cpu_cond);
4243 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
4244 break;
4245 case 0x12: /* load unsigned halfword alternate */
4246 #ifndef TARGET_SPARC64
4247 if (IS_IMM)
4248 goto illegal_insn;
4249 if (!supervisor(dc))
4250 goto priv_insn;
4251 #endif
4252 save_state(dc, cpu_cond);
4253 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
4254 break;
4255 case 0x13: /* load double word alternate */
4256 #ifndef TARGET_SPARC64
4257 if (IS_IMM)
4258 goto illegal_insn;
4259 if (!supervisor(dc))
4260 goto priv_insn;
4261 #endif
4262 if (rd & 1)
4263 goto illegal_insn;
4264 save_state(dc, cpu_cond);
4265 gen_ldda_asi(cpu_val, cpu_addr, insn, rd);
4266 goto skip_move;
4267 case 0x19: /* load signed byte alternate */
4268 #ifndef TARGET_SPARC64
4269 if (IS_IMM)
4270 goto illegal_insn;
4271 if (!supervisor(dc))
4272 goto priv_insn;
4273 #endif
4274 save_state(dc, cpu_cond);
4275 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
4276 break;
4277 case 0x1a: /* load signed halfword alternate */
4278 #ifndef TARGET_SPARC64
4279 if (IS_IMM)
4280 goto illegal_insn;
4281 if (!supervisor(dc))
4282 goto priv_insn;
4283 #endif
4284 save_state(dc, cpu_cond);
4285 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
4286 break;
4290 4287 case 0x1d: /* ldstuba -- XXX: should be atomic */
4288 #ifndef TARGET_SPARC64
4289 if (IS_IMM)
4290 goto illegal_insn;
4291 if (!supervisor(dc))
4292 goto priv_insn;
4293 #endif
4294 save_state(dc, cpu_cond);
4295 gen_ldstub_asi(cpu_val, cpu_addr, insn);
4296 break;
4300 4297 case 0x1f: /* swap register with alternate-space memory -- XXX:
4301 4298 should also be atomic */
4299 CHECK_IU_FEATURE(dc, SWAP);
4300 #ifndef TARGET_SPARC64
4301 if (IS_IMM)
4302 goto illegal_insn;
4303 if (!supervisor(dc))
4304 goto priv_insn;
4305 #endif
4306 save_state(dc, cpu_cond);
4307 gen_movl_reg_TN(rd, cpu_val);
4308 gen_swap_asi(cpu_val, cpu_addr, insn);
4309 break;
4310
4311 #ifndef TARGET_SPARC64
4312 case 0x30: /* ldc */
4313 case 0x31: /* ldcsr */
4314 case 0x33: /* lddc */
4315 goto ncp_insn;
4316 #endif
4317 #endif
4318 #ifdef TARGET_SPARC64
4319 case 0x08: /* V9 ldsw */
4320 gen_address_mask(dc, cpu_addr);
4321 tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
4322 break;
4323 case 0x0b: /* V9 ldx */
4324 gen_address_mask(dc, cpu_addr);
4325 tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
4326 break;
4327 case 0x18: /* V9 ldswa */
4328 save_state(dc, cpu_cond);
4329 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
4330 break;
4331 case 0x1b: /* V9 ldxa */
4332 save_state(dc, cpu_cond);
4333 gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
4334 break;
4335 case 0x2d: /* V9 prefetch, no effect */
4336 goto skip_move;
4337 case 0x30: /* V9 ldfa */
4338 save_state(dc, cpu_cond);
4339 gen_ldf_asi(cpu_addr, insn, 4, rd);
4340 goto skip_move;
4341 case 0x33: /* V9 lddfa */
4342 save_state(dc, cpu_cond);
4343 gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
4344 goto skip_move;
4345 case 0x3d: /* V9 prefetcha, no effect */
4346 goto skip_move;
4347 case 0x32: /* V9 ldqfa */
4348 CHECK_FPU_FEATURE(dc, FLOAT128);
4349 save_state(dc, cpu_cond);
4350 gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
4351 goto skip_move;
4352 #endif
4353 default:
4354 goto illegal_insn;
4355 }
4356 gen_movl_TN_reg(rd, cpu_val);
4357 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4358 skip_move: ;
4359 #endif
4360 } else if (xop >= 0x20 && xop < 0x24) {
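/* FP loads: trap first if the FPU is disabled, then ldf writes fpr[rd]
   directly while ldfsr/ldxfsr, lddf and ldqf go through helpers so the
   FSR and the wide registers stay consistent. */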
4361 if (gen_trap_ifnofpu(dc, cpu_cond))
4362 goto jmp_insn;
4363 save_state(dc, cpu_cond);
4364 switch (xop) {
4365 case 0x20: /* load fpreg */
4366 gen_address_mask(dc, cpu_addr);
4367 tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
4368 tcg_gen_st_i32(cpu_tmp32, cpu_env,
4369 offsetof(CPUState, fpr[rd]));
4370 break;
4374 4371 case 0x21: /* ldfsr, V9 ldxfsr */
4375 4372 #ifdef TARGET_SPARC64
4376 4373 gen_address_mask(dc, cpu_addr);
4377 4374 if (rd == 1) {
4378 4375 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4379 4376 tcg_gen_helper_0_1(helper_ldxfsr, cpu_tmp64);
4380 4377 break;
4381 4378 }
4382 4379 #endif
4383 4380 {
4384 4381 tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
4385 4382 tcg_gen_helper_0_1(helper_ldfsr, cpu_tmp32);
4386 4383 }
4387 4384 break;
4385 case 0x22: /* load quad fpreg */
4386 {
4387 TCGv r_const;
4388
4389 CHECK_FPU_FEATURE(dc, FLOAT128);
4390 r_const = tcg_const_i32(dc->mem_idx);
4391 tcg_gen_helper_0_2(helper_ldqf, cpu_addr, r_const);
4392 tcg_temp_free(r_const);
4393 gen_op_store_QT0_fpr(QFPREG(rd));
4394 }
4395 break;
4396 case 0x23: /* load double fpreg */
4397 {
4398 TCGv r_const;
4399
4400 r_const = tcg_const_i32(dc->mem_idx);
4401 tcg_gen_helper_0_2(helper_lddf, cpu_addr, r_const);
4402 tcg_temp_free(r_const);
4403 gen_op_store_DT0_fpr(DFPREG(rd));
4404 }
4405 break;
4406 default:
4407 goto illegal_insn;
4408 }
4409 } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) || \
4410 xop == 0xe || xop == 0x1e) {
4411 gen_movl_reg_TN(rd, cpu_val);
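/* Integer stores: the value to store is now in cpu_val; std additionally
   packs rd and rd + 1 into a single 64-bit store, and the alternate-space
   forms go through the ASI helpers. */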
4412 switch (xop) {
4413 case 0x4: /* store word */
4414 gen_address_mask(dc, cpu_addr);
4415 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4416 break;
4417 case 0x5: /* store byte */
4418 gen_address_mask(dc, cpu_addr);
4419 tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
4420 break;
4421 case 0x6: /* store halfword */
4422 gen_address_mask(dc, cpu_addr);
4423 tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
4424 break;
4425 case 0x7: /* store double word */
4426 if (rd & 1)
4427 goto illegal_insn;
4428 else {
4429 TCGv r_low, r_const;
4430
4431 save_state(dc, cpu_cond);
4432 gen_address_mask(dc, cpu_addr);
4433 r_const = tcg_const_i32(7);
4434 tcg_gen_helper_0_2(helper_check_align, cpu_addr,
4435 r_const); // XXX remove
4436 tcg_temp_free(r_const);
4437 r_low = tcg_temp_new(TCG_TYPE_TL);
4438 gen_movl_reg_TN(rd + 1, r_low);
4439 tcg_gen_helper_1_2(helper_pack64, cpu_tmp64, cpu_val,
4440 r_low);
4441 tcg_temp_free(r_low);
4442 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4443 }
4444 break;
4445 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4446 case 0x14: /* store word alternate */
4447 #ifndef TARGET_SPARC64
4448 if (IS_IMM)
4449 goto illegal_insn;
4450 if (!supervisor(dc))
4451 goto priv_insn;
4452 #endif
4453 save_state(dc, cpu_cond);
4454 gen_st_asi(cpu_val, cpu_addr, insn, 4);
4455 break;
4456 case 0x15: /* store byte alternate */
4457 #ifndef TARGET_SPARC64
4458 if (IS_IMM)
4459 goto illegal_insn;
4460 if (!supervisor(dc))
4461 goto priv_insn;
4462 #endif
4463 save_state(dc, cpu_cond);
4464 gen_st_asi(cpu_val, cpu_addr, insn, 1);
4465 break;
4466 case 0x16: /* store halfword alternate */
4467 #ifndef TARGET_SPARC64
4468 if (IS_IMM)
4469 goto illegal_insn;
4470 if (!supervisor(dc))
4471 goto priv_insn;
4472 #endif
4473 save_state(dc, cpu_cond);
4474 gen_st_asi(cpu_val, cpu_addr, insn, 2);
4475 break;
4476 case 0x17: /* store double word alternate */
4477 #ifndef TARGET_SPARC64
4478 if (IS_IMM)
4479 goto illegal_insn;
4480 if (!supervisor(dc))
4481 goto priv_insn;
4482 #endif
4483 if (rd & 1)
4484 goto illegal_insn;
4485 else {
4486 save_state(dc, cpu_cond);
4487 gen_stda_asi(cpu_val, cpu_addr, insn, rd);
4488 }
4489 break;
4490 #endif
4491 #ifdef TARGET_SPARC64
4492 case 0x0e: /* V9 stx */
4493 gen_address_mask(dc, cpu_addr);
4494 tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
4495 break;
4496 case 0x1e: /* V9 stxa */
4497 save_state(dc, cpu_cond);
4498 gen_st_asi(cpu_val, cpu_addr, insn, 8);
4499 break;
4500 #endif
4501 default:
4502 goto illegal_insn;
4503 }
4504 } else if (xop > 0x23 && xop < 0x28) {
4505 if (gen_trap_ifnofpu(dc, cpu_cond))
4506 goto jmp_insn;
4507 save_state(dc, cpu_cond);
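/* FP stores: stf and stfsr/stxfsr store directly from the CPU state,
   stdf and the V9 stqf go through helpers, and the pre-V9 stdfq is
   supervisor-only and is reported as an FP sequence error since no FP
   queue is emulated. */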
4508 switch (xop) {
4509 case 0x24: /* store fpreg */
4510 gen_address_mask(dc, cpu_addr);
4511 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
4512 offsetof(CPUState, fpr[rd]));
4513 tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
4514 break;
4515 case 0x25: /* stfsr, V9 stxfsr */
4516 #ifdef TARGET_SPARC64
4517 gen_address_mask(dc, cpu_addr);
4518 tcg_gen_ld_i64(cpu_tmp64, cpu_env, offsetof(CPUState, fsr));
4519 if (rd == 1)
4520 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4521 else {
4522 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp64);
4523 tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
4524 }
4525 #else
4526 tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUState, fsr));
4527 tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
4528 #endif
4529 break;
4530 case 0x26:
4531 #ifdef TARGET_SPARC64
4532 /* V9 stqf, store quad fpreg */
4533 {
4534 TCGv r_const;
4535
4536 CHECK_FPU_FEATURE(dc, FLOAT128);
4537 gen_op_load_fpr_QT0(QFPREG(rd));
4538 r_const = tcg_const_i32(dc->mem_idx);
4539 tcg_gen_helper_0_2(helper_stqf, cpu_addr, r_const);
4540 tcg_temp_free(r_const);
4541 }
4542 break;
4543 #else /* !TARGET_SPARC64 */
4544 /* stdfq, store floating point queue */
4545 #if defined(CONFIG_USER_ONLY)
4546 goto illegal_insn;
4547 #else
4548 if (!supervisor(dc))
4549 goto priv_insn;
4550 if (gen_trap_ifnofpu(dc, cpu_cond))
4551 goto jmp_insn;
4552 goto nfq_insn;
4553 #endif
4554 #endif
4555 case 0x27: /* store double fpreg */
4556 {
4557 TCGv r_const;
4558
4559 gen_op_load_fpr_DT0(DFPREG(rd));
4560 r_const = tcg_const_i32(dc->mem_idx);
4561 tcg_gen_helper_0_2(helper_stdf, cpu_addr, r_const);
4562 tcg_temp_free(r_const);
4563 }
4564 break;
4565 default:
4566 goto illegal_insn;
4567 }
4568 } else if (xop > 0x33 && xop < 0x3f) {
4569 save_state(dc, cpu_cond);
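/* Alternate-space FP stores and the V9 compare-and-swap forms
   (casa/casxa); on 32-bit SPARC these opcodes are coprocessor stores and
   raise the coprocessor trap instead. */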
4570 switch (xop) {
4571 #ifdef TARGET_SPARC64
4572 case 0x34: /* V9 stfa */
4573 gen_op_load_fpr_FT0(rd);
4574 gen_stf_asi(cpu_addr, insn, 4, rd);
4575 break;
4576 case 0x36: /* V9 stqfa */
4577 {
4578 TCGv r_const;
4579
4580 CHECK_FPU_FEATURE(dc, FLOAT128);
4581 r_const = tcg_const_i32(7);
4582 tcg_gen_helper_0_2(helper_check_align, cpu_addr,
4583 r_const);
4584 tcg_temp_free(r_const);
4585 gen_op_load_fpr_QT0(QFPREG(rd));
4586 gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
4587 }
4588 break;
4589 case 0x37: /* V9 stdfa */
4590 gen_op_load_fpr_DT0(DFPREG(rd));
4591 gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
4592 break;
4593 case 0x3c: /* V9 casa */
4594 gen_cas_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
4595 gen_movl_TN_reg(rd, cpu_val);
4596 break;
4597 case 0x3e: /* V9 casxa */
4598 gen_casx_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
4599 gen_movl_TN_reg(rd, cpu_val);
4600 break;
4601 #else
4602 case 0x34: /* stc */
4603 case 0x35: /* stcsr */
4604 case 0x36: /* stdcq */
4605 case 0x37: /* stdc */
4606 goto ncp_insn;
4607 #endif
4608 default:
4609 goto illegal_insn;
4610 }
4611 }
4612 else
4613 goto illegal_insn;
4614 }
4615 break;
4616 }
4620 4617 /* default case for non-jump instructions */
4618 if (dc->npc == DYNAMIC_PC) {
4619 dc->pc = DYNAMIC_PC;
4620 gen_op_next_insn();
4621 } else if (dc->npc == JUMP_PC) {
4622 /* we can do a static jump */
4623 gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
4624 dc->is_br = 1;
4625 } else {
4626 dc->pc = dc->npc;
4627 dc->npc = dc->npc + 4;
4628 }
4629 jmp_insn:
4630 return;
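/* Exception exits: each label below saves the current pc/npc, raises the
   corresponding trap and marks the translation block finished via
   dc->is_br. */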
4631 illegal_insn:
4632 {
4633 TCGv r_const;
4634
4635 save_state(dc, cpu_cond);
4636 r_const = tcg_const_i32(TT_ILL_INSN);
4637 tcg_gen_helper_0_1(raise_exception, r_const);
4638 tcg_temp_free(r_const);
4639 dc->is_br = 1;
4640 }
4641 return;
4642 unimp_flush:
4643 {
4644 TCGv r_const;
4645
4646 save_state(dc, cpu_cond);
4647 r_const = tcg_const_i32(TT_UNIMP_FLUSH);
4648 tcg_gen_helper_0_1(raise_exception, r_const);
4649 tcg_temp_free(r_const);
4650 dc->is_br = 1;
4651 }
4652 return;
4653 #if !defined(CONFIG_USER_ONLY)
4654 priv_insn:
4655 {
4656 TCGv r_const;
4657
4658 save_state(dc, cpu_cond);
4659 r_const = tcg_const_i32(TT_PRIV_INSN);
4660 tcg_gen_helper_0_1(raise_exception, r_const);
4661 tcg_temp_free(r_const);
4662 dc->is_br = 1;
4663 }
4664 return;
4665 #endif
4666 nfpu_insn:
4667 save_state(dc, cpu_cond);
4668 gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
4669 dc->is_br = 1;
4670 return;
4671 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4672 nfq_insn:
4673 save_state(dc, cpu_cond);
4674 gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
4675 dc->is_br = 1;
4676 return;
4677 #endif
4678 #ifndef TARGET_SPARC64
4679 ncp_insn:
4680 {
4681 TCGv r_const;
4682
4683 save_state(dc, cpu_cond);
4684 r_const = tcg_const_i32(TT_NCP_INSN);
4685 tcg_gen_helper_0_1(raise_exception, r_const);
4686 tcg_temp_free(r_const);
4687 dc->is_br = 1;
4688 }
4689 return;
4690 #endif
4691 }
4692
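/* Translate one block: decode instructions from tb->pc until a branch is
   taken, a page boundary is reached or the opcode/instruction-count limits
   are hit.  When spc is set, per-instruction pc/npc values are also
   recorded so a searched host pc can be mapped back to guest state (see
   gen_pc_load). */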
4693 static inline void gen_intermediate_code_internal(TranslationBlock * tb,
4694 int spc, CPUSPARCState *env)
4695 {
4696 target_ulong pc_start, last_pc;
4697 uint16_t *gen_opc_end;
4698 DisasContext dc1, *dc = &dc1;
4699 int j, lj = -1;
4700 int num_insns;
4701 int max_insns;
4702
4703 memset(dc, 0, sizeof(DisasContext));
4704 dc->tb = tb;
4705 pc_start = tb->pc;
4706 dc->pc = pc_start;
4707 last_pc = dc->pc;
4708 dc->npc = (target_ulong) tb->cs_base;
4709 dc->mem_idx = cpu_mmu_index(env);
4710 dc->def = env->def;
4711 if ((dc->def->features & CPU_FEATURE_FLOAT))
4712 dc->fpu_enabled = cpu_fpu_enabled(env);
4713 else
4714 dc->fpu_enabled = 0;
4715 #ifdef TARGET_SPARC64
4716 dc->address_mask_32bit = env->pstate & PS_AM;
4717 #endif
4718 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
4719
4720 cpu_tmp0 = tcg_temp_new(TCG_TYPE_TL);
4721 cpu_tmp32 = tcg_temp_new(TCG_TYPE_I32);
4722 cpu_tmp64 = tcg_temp_new(TCG_TYPE_I64);
4723
4724 cpu_dst = tcg_temp_local_new(TCG_TYPE_TL);
4725
4726 // loads and stores
4727 cpu_val = tcg_temp_local_new(TCG_TYPE_TL);
4728 cpu_addr = tcg_temp_local_new(TCG_TYPE_TL);
4729
4730 num_insns = 0;
4731 max_insns = tb->cflags & CF_COUNT_MASK;
4732 if (max_insns == 0)
4733 max_insns = CF_COUNT_MASK;
4734 gen_icount_start();
4735 do {
4736 if (env->nb_breakpoints > 0) {
4737 for(j = 0; j < env->nb_breakpoints; j++) {
4738 if (env->breakpoints[j] == dc->pc) {
4739 if (dc->pc != pc_start)
4740 save_state(dc, cpu_cond);
4741 tcg_gen_helper_0_0(helper_debug);
4742 tcg_gen_exit_tb(0);
4743 dc->is_br = 1;
4744 goto exit_gen_loop;
4745 }
4746 }
4747 }
4748 if (spc) {
4749 if (loglevel > 0)
4750 fprintf(logfile, "Search PC...\n");
4751 j = gen_opc_ptr - gen_opc_buf;
4752 if (lj < j) {
4753 lj++;
4754 while (lj < j)
4755 gen_opc_instr_start[lj++] = 0;
4756 gen_opc_pc[lj] = dc->pc;
4757 gen_opc_npc[lj] = dc->npc;
4758 gen_opc_instr_start[lj] = 1;
4759 gen_opc_icount[lj] = num_insns;
4760 }
4761 }
4762 if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
4763 gen_io_start();
4764 last_pc = dc->pc;
4765 disas_sparc_insn(dc);
4766 num_insns++;
4767
4768 if (dc->is_br)
4769 break;
4770 /* if the next PC is different, we abort now */
4771 if (dc->pc != (last_pc + 4))
4772 break;
4773 /* if we reach a page boundary, we stop generation so that the
4774 PC of a TT_TFAULT exception is always in the right page */
4775 if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
4776 break;
4777 /* if single step mode, we generate only one instruction and
4778 generate an exception */
4779 if (env->singlestep_enabled) {
4780 tcg_gen_movi_tl(cpu_pc, dc->pc);
4781 tcg_gen_exit_tb(0);
4782 break;
4783 }
4784 } while ((gen_opc_ptr < gen_opc_end) &&
4785 (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
4786 num_insns < max_insns);
4787
4788 exit_gen_loop:
4789 tcg_temp_free(cpu_addr);
4790 tcg_temp_free(cpu_val);
4791 tcg_temp_free(cpu_dst);
4792 tcg_temp_free(cpu_tmp64);
4793 tcg_temp_free(cpu_tmp32);
4794 tcg_temp_free(cpu_tmp0);
4795 if (tb->cflags & CF_LAST_IO)
4796 gen_io_end();
4797 if (!dc->is_br) {
4798 if (dc->pc != DYNAMIC_PC &&
4799 (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
4800 /* static PC and NPC: we can use direct chaining */
4801 gen_goto_tb(dc, 0, dc->pc, dc->npc);
4802 } else {
4803 if (dc->pc != DYNAMIC_PC)
4804 tcg_gen_movi_tl(cpu_pc, dc->pc);
4805 save_npc(dc, cpu_cond);
4806 tcg_gen_exit_tb(0);
4807 }
4808 }
4809 gen_icount_end(tb, num_insns);
4810 *gen_opc_ptr = INDEX_op_end;
4811 if (spc) {
4812 j = gen_opc_ptr - gen_opc_buf;
4813 lj++;
4814 while (lj <= j)
4815 gen_opc_instr_start[lj++] = 0;
4816 #if 0
4817 if (loglevel > 0) {
4818 page_dump(logfile);
4819 }
4820 #endif
4821 gen_opc_jump_pc[0] = dc->jump_pc[0];
4822 gen_opc_jump_pc[1] = dc->jump_pc[1];
4823 } else {
4824 tb->size = last_pc + 4 - pc_start;
4825 tb->icount = num_insns;
4826 }
4827 #ifdef DEBUG_DISAS
4828 if (loglevel & CPU_LOG_TB_IN_ASM) {
4829 fprintf(logfile, "--------------\n");
4830 fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
4831 target_disas(logfile, pc_start, last_pc + 4 - pc_start, 0);
4832 fprintf(logfile, "\n");
4833 }
4834 #endif
4835 }
4836
4837 void gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
4838 {
4839 gen_intermediate_code_internal(tb, 0, env);
4840 }
4841
4842 void gen_intermediate_code_pc(CPUSPARCState * env, TranslationBlock * tb)
4843 {
4844 gen_intermediate_code_internal(tb, 1, env);
4845 }
4846
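/* Create the TCG globals backing the fixed CPU state (pc/npc, condition
   code sources, psr/fsr, %y, the global registers and the 64-bit-only
   control registers) and register the helpers; this runs once per
   process. */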
4847 void gen_intermediate_code_init(CPUSPARCState *env)
4848 {
4849 unsigned int i;
4850 static int inited;
4851 static const char * const gregnames[8] = {
4852 NULL, // g0 not used
4853 "g1",
4854 "g2",
4855 "g3",
4856 "g4",
4857 "g5",
4858 "g6",
4859 "g7",
4860 };
4861
4862 /* init various static tables */
4863 if (!inited) {
4864 inited = 1;
4865
4866 cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
4867 cpu_regwptr = tcg_global_mem_new(TCG_TYPE_PTR, TCG_AREG0,
4868 offsetof(CPUState, regwptr),
4869 "regwptr");
4870 #ifdef TARGET_SPARC64
4871 cpu_xcc = tcg_global_mem_new(TCG_TYPE_I32,
4872 TCG_AREG0, offsetof(CPUState, xcc),
4873 "xcc");
4874 cpu_asi = tcg_global_mem_new(TCG_TYPE_I32,
4875 TCG_AREG0, offsetof(CPUState, asi),
4876 "asi");
4877 cpu_fprs = tcg_global_mem_new(TCG_TYPE_I32,
4878 TCG_AREG0, offsetof(CPUState, fprs),
4879 "fprs");
4880 cpu_gsr = tcg_global_mem_new(TCG_TYPE_TL,
4881 TCG_AREG0, offsetof(CPUState, gsr),
4882 "gsr");
4883 cpu_tick_cmpr = tcg_global_mem_new(TCG_TYPE_TL,
4884 TCG_AREG0,
4885 offsetof(CPUState, tick_cmpr),
4886 "tick_cmpr");
4887 cpu_stick_cmpr = tcg_global_mem_new(TCG_TYPE_TL,
4888 TCG_AREG0,
4889 offsetof(CPUState, stick_cmpr),
4890 "stick_cmpr");
4891 cpu_hstick_cmpr = tcg_global_mem_new(TCG_TYPE_TL,
4892 TCG_AREG0,
4893 offsetof(CPUState, hstick_cmpr),
4894 "hstick_cmpr");
4895 cpu_hintp = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
4896 offsetof(CPUState, hintp),
4897 "hintp");
4898 cpu_htba = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
4899 offsetof(CPUState, htba),
4900 "htba");
4901 cpu_hver = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
4902 offsetof(CPUState, hver),
4903 "hver");
4904 cpu_ssr = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
4905 offsetof(CPUState, ssr), "ssr");
4906 cpu_ver = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
4907 offsetof(CPUState, version), "ver");
4908 #else
4909 cpu_wim = tcg_global_mem_new(TCG_TYPE_I32,
4910 TCG_AREG0, offsetof(CPUState, wim),
4911 "wim");
4912 #endif
4913 cpu_cond = tcg_global_mem_new(TCG_TYPE_TL,
4914 TCG_AREG0, offsetof(CPUState, cond),
4915 "cond");
4916 cpu_cc_src = tcg_global_mem_new(TCG_TYPE_TL,
4917 TCG_AREG0, offsetof(CPUState, cc_src),
4918 "cc_src");
4919 cpu_cc_src2 = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
4920 offsetof(CPUState, cc_src2),
4921 "cc_src2");
4922 cpu_cc_dst = tcg_global_mem_new(TCG_TYPE_TL,
4923 TCG_AREG0, offsetof(CPUState, cc_dst),
4924 "cc_dst");
4925 cpu_psr = tcg_global_mem_new(TCG_TYPE_I32,
4926 TCG_AREG0, offsetof(CPUState, psr),
4927 "psr");
4928 cpu_fsr = tcg_global_mem_new(TCG_TYPE_TL,
4929 TCG_AREG0, offsetof(CPUState, fsr),
4930 "fsr");
4931 cpu_pc = tcg_global_mem_new(TCG_TYPE_TL,
4932 TCG_AREG0, offsetof(CPUState, pc),
4933 "pc");
4934 cpu_npc = tcg_global_mem_new(TCG_TYPE_TL,
4935 TCG_AREG0, offsetof(CPUState, npc),
4936 "npc");
4937 cpu_y = tcg_global_mem_new(TCG_TYPE_TL,
4938 TCG_AREG0, offsetof(CPUState, y), "y");
4939 #ifndef CONFIG_USER_ONLY
4940 cpu_tbr = tcg_global_mem_new(TCG_TYPE_TL,
4941 TCG_AREG0, offsetof(CPUState, tbr),
4942 "tbr");
4943 #endif
4944 for (i = 1; i < 8; i++)
4945 cpu_gregs[i] = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
4946 offsetof(CPUState, gregs[i]),
4947 gregnames[i]);
4948 /* register helpers */
4949
4950 #undef DEF_HELPER
4951 #define DEF_HELPER(ret, name, params) tcg_register_helper(name, #name);
4952 #include "helper.h"
4953 }
4954 }
4955
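/* Recover the guest pc/npc for a fault taken inside a TB: npc may have
   been recorded directly, be flagged as dynamic (already up to date in the
   CPU state), or be a conditional-branch target chosen via puc. */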
4956 void gen_pc_load(CPUState *env, TranslationBlock *tb,
4957 unsigned long searched_pc, int pc_pos, void *puc)
4958 {
4959 target_ulong npc;
4960 env->pc = gen_opc_pc[pc_pos];
4961 npc = gen_opc_npc[pc_pos];
4965 4962 if (npc == DYNAMIC_PC) {
4966 4963 /* dynamic NPC: already stored */
4967 4964 } else if (npc == JUMP_PC) {
4965 target_ulong t2 = (target_ulong)(unsigned long)puc;
4966 /* jump PC: use T2 and the jump targets of the translation */
4967 if (t2)
4968 env->npc = gen_opc_jump_pc[0];
4969 else
4970 env->npc = gen_opc_jump_pc[1];
4971 } else {
4972 env->npc = npc;
4973 }
4974 }