/* $Id: system.h,v 1.54 1999/09/01 08:06:12 davem Exp $ */
#ifndef __SPARC64_SYSTEM_H
#define __SPARC64_SYSTEM_H

#include <asm/ptrace.h>
#include <asm/processor.h>
#include <asm/asm_offsets.h>
#include <asm/visasm.h>

#ifndef __ASSEMBLY__

/*
 * Sparc (general) CPU types
 */
enum sparc_cpu {
  sun4        = 0x00,
  sun4c       = 0x01,
  sun4m       = 0x02,
  sun4d       = 0x03,
  sun4e       = 0x04,
  sun4u       = 0x05, /* V8 ploos ploos */
  sun_unknown = 0x06,
  ap1000      = 0x07, /* almost a sun4m */
};

#define sparc_cpu_model sun4u

/* This cannot ever be a sun4c nor sun4 :) That's just history. */
#define ARCH_SUN4C_SUN4 0

extern unsigned long empty_bad_page;
extern unsigned long empty_zero_page;
#define setipl(__new_ipl) \
	__asm__ __volatile__("wrpr	%0, %%pil" : : "r" (__new_ipl) : "memory")

#define __cli() \
	__asm__ __volatile__("wrpr	15, %%pil" : : : "memory")

#define __sti() \
	__asm__ __volatile__("wrpr	0, %%pil" : : : "memory")

#define getipl() \
({ unsigned long retval; __asm__ __volatile__("rdpr	%%pil, %0" : "=r" (retval)); retval; })

#define swap_pil(__new_pil) \
({	unsigned long retval; \
	__asm__ __volatile__("rdpr	%%pil, %0\n\t" \
			     "wrpr	%1, %%pil" \
			     : "=&r" (retval) \
			     : "r" (__new_pil) \
			     : "memory"); \
	retval; \
})

#define read_pil_and_cli() \
({	unsigned long retval; \
	__asm__ __volatile__("rdpr	%%pil, %0\n\t" \
			     "wrpr	15, %%pil" \
			     : "=r" (retval) \
			     : : "memory"); \
	retval; \
})

#define __save_flags(flags)	((flags) = getipl())
#define __save_and_cli(flags)	((flags) = read_pil_and_cli())
#define __restore_flags(flags)	setipl((flags))

#ifndef __SMP__
#define cli()			__cli()
#define sti()			__sti()
#define save_flags(x)		__save_flags(x)
#define restore_flags(x)	__restore_flags(x)
#define save_and_cli(x)		__save_and_cli(x)
#else
extern void __global_cli(void);
extern void __global_sti(void);
extern unsigned long __global_save_flags(void);
extern void __global_restore_flags(unsigned long flags);

#define cli()			__global_cli()
#define sti()			__global_sti()
#define save_flags(x)		((x) = __global_save_flags())
#define restore_flags(flags)	__global_restore_flags(flags)
#define save_and_cli(flags)	do { save_flags(flags); cli(); } while(0)
#endif
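/*
 * Usage sketch (illustrative, not part of the original header): code
 * that touches data shared with an interrupt handler raises the PIL
 * with save_and_cli() and drops back to the saved level afterwards
 * with restore_flags().  'pending' is a hypothetical shared counter:
 *
 *	unsigned long flags;
 *
 *	save_and_cli(flags);
 *	pending++;		(no local interrupt can intervene here)
 *	restore_flags(flags);
 */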
#define mb()		__asm__ __volatile__ ("stbar" : : : "memory")

#define nop()		__asm__ __volatile__ ("nop")

#define membar(type)	__asm__ __volatile__ ("membar " type : : : "memory");
#define rmb()		membar("#LoadLoad | #LoadStore")
#define wmb()		membar("#StoreLoad | #StoreStore")
#define set_mb(__var, __value) \
	do { __var = __value; membar("#StoreLoad | #StoreStore"); } while(0)
#define set_rmb(__var, __value) \
	do { __var = __value; membar("#StoreLoad"); } while(0)
#define set_wmb(__var, __value) \
	do { __var = __value; membar("#StoreStore"); } while(0)

#define flushi(addr)	__asm__ __volatile__ ("flush %0" : : "r" (addr) : "memory")

#define flushw_all()	__asm__ __volatile__("flushw")

/* Performance counter register access. */
#define read_pcr(__p)	__asm__ __volatile__("rd %%pcr, %0" : "=r" (__p))
#define write_pcr(__p)	__asm__ __volatile__("wr %0, 0x0, %%pcr" : : "r" (__p));
#define read_pic(__p)	__asm__ __volatile__("rd %%pic, %0" : "=r" (__p))
#define reset_pic()	__asm__ __volatile__("wr %g0, 0x0, %pic");

extern void synchronize_user_stack(void);
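/*
 * Usage sketch (illustrative, not part of the original header): the
 * barrier macros above order memory accesses as seen by other CPUs.
 * A writer publishes the hypothetical globals 'data' and 'flag' as:
 *
 *	data = compute();
 *	wmb();			(data is globally visible before flag)
 *	flag = 1;
 *
 * and the reader pairs that with rmb():
 *
 *	while (flag == 0)
 *		;
 *	rmb();			(flag is read before data is read)
 *	use(data);
 */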
extern __inline__ void flushw_user(void)
{
	__asm__ __volatile__("
		rdpr		%%otherwin, %%g1
		brz,pt		%%g1, 1f
		 mov		%%o7, %%g3
		call		__flushw_user
		 clr		%%g2
1:"	: : : "g1", "g2", "g3");
}

#define flush_user_windows flushw_user
#define flush_register_windows flushw_all
#define prepare_to_switch flushw_all

/* See what happens when you design the chip correctly?
 *
 * We tell gcc we clobber all non-fixed-usage registers except
 * for l0/l1.  It will use one for 'next' and the other to hold
 * the output value of 'last'.  'next' is not referenced again
 * past the invocation of switch_to in the scheduler, so we need
 * not preserve its value.  Hairy, but it lets us remove 2 loads
 * and 2 stores in this critical code path.  -DaveM
 */
#define switch_to(prev, next, last) \
do {	if (current->thread.flags & SPARC_FLAG_PERFCTR) { \
		unsigned long __tmp; \
		read_pcr(__tmp); \
		current->thread.pcr_reg = __tmp; \
		read_pic(__tmp); \
		current->thread.kernel_cntd0 += (unsigned int)(__tmp); \
		current->thread.kernel_cntd1 += ((__tmp) >> 32); \
	} \
	save_and_clear_fpu(); \
	/* If you are tempted to conditionalize the following */ \
	/* so that ASI is only written if it changes, think again. */ \
	__asm__ __volatile__("wr %%g0, %0, %%asi" \
			     : : "r" (next->thread.current_ds.seg)); \
	__asm__ __volatile__( \
	"mov	%%g6, %%g5\n\t" \
	"wrpr	%%g0, 0x95, %%pstate\n\t" \
	"stx	%%i6, [%%sp + 2047 + 0x70]\n\t" \
	"stx	%%i7, [%%sp + 2047 + 0x78]\n\t" \
	"rdpr	%%wstate, %%o5\n\t" \
	"stx	%%o6, [%%g6 + %3]\n\t" \
	"stb	%%o5, [%%g6 + %2]\n\t" \
	"rdpr	%%cwp, %%o5\n\t" \
	"stb	%%o5, [%%g6 + %5]\n\t" \
	"mov	%1, %%g6\n\t" \
	"ldub	[%1 + %5], %%g1\n\t" \
	"wrpr	%%g1, %%cwp\n\t" \
	"ldx	[%%g6 + %3], %%o6\n\t" \
	"ldub	[%%g6 + %2], %%o5\n\t" \
	"ldub	[%%g6 + %4], %%o7\n\t" \
	"mov	%%g6, %%l2\n\t" \
	"wrpr	%%o5, 0x0, %%wstate\n\t" \
	"ldx	[%%sp + 2047 + 0x70], %%i6\n\t" \
	"ldx	[%%sp + 2047 + 0x78], %%i7\n\t" \
	"wrpr	%%g0, 0x94, %%pstate\n\t" \
	"mov	%%l2, %%g6\n\t" \
	"wrpr	%%g0, 0x96, %%pstate\n\t" \
	"andcc	%%o7, %6, %%g0\n\t" \
	"bne,pn	%%icc, ret_from_syscall\n\t" \
	" mov	%%g5, %0\n\t" \
	: "=&r" (last) \
	: "r" (next), \
	  "i" ((const unsigned long)(&((struct task_struct *)0)->thread.wstate)), \
	  "i" ((const unsigned long)(&((struct task_struct *)0)->thread.ksp)), \
	  "i" ((const unsigned long)(&((struct task_struct *)0)->thread.flags)), \
	  "i" ((const unsigned long)(&((struct task_struct *)0)->thread.cwp)), \
	  "i" (SPARC_FLAG_NEWCHILD) \
	: "cc", "g1", "g2", "g3", "g5", "g7", \
	  "l2", "l3", "l4", "l5", "l6", "l7", \
	  "i0", "i1", "i2", "i3", "i4", "i5", \
	  "o0", "o1", "o2", "o3", "o4", "o5", "o7"); \
	/* If you fuck with this, update ret_from_syscall code too. */ \
	if (current->thread.flags & SPARC_FLAG_PERFCTR) { \
		write_pcr(current->thread.pcr_reg); \
		reset_pic(); \
	} \
} while(0)
extern __inline__ unsigned long xchg32(__volatile__ unsigned int *m, unsigned int val)
{
	__asm__ __volatile__("
	mov		%0, %%g5
1:	lduw		[%2], %%g7
	cas		[%2], %%g7, %0
	cmp		%%g7, %0
	bne,a,pn	%%icc, 1b
	 mov		%%g5, %0
	membar		#StoreLoad | #StoreStore
"	: "=&r" (val)
	: "0" (val), "r" (m)
	: "g5", "g7", "cc", "memory");
	return val;
}
extern __inline__ unsigned long xchg64(__volatile__ unsigned long *m, unsigned long val)
{
	__asm__ __volatile__("
	mov		%0, %%g5
1:	ldx		[%2], %%g7
	casx		[%2], %%g7, %0
	cmp		%%g7, %0
	bne,a,pn	%%xcc, 1b
	 mov		%%g5, %0
	membar		#StoreLoad | #StoreStore
"	: "=&r" (val)
	: "0" (val), "r" (m)
	: "g5", "g7", "cc", "memory");
	return val;
}

#define xchg(ptr,x) ((__typeof__(*(ptr)))__xchg((unsigned long)(x),(ptr),sizeof(*(ptr))))
#define tas(ptr) (xchg((ptr),1))

extern void __xchg_called_with_bad_pointer(void);
static __inline__ unsigned long __xchg(unsigned long x, __volatile__ void * ptr,
				       int size)
{
	switch (size) {
	case 4:
		return xchg32(ptr, x);
	case 8:
		return xchg64(ptr, x);
	}
	__xchg_called_with_bad_pointer();
	return x;
}
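/*
 * Usage sketch (illustrative, not part of the original header): xchg()
 * dispatches on sizeof(*ptr) to xchg32() or xchg64() and atomically
 * swaps a new value into *ptr, returning the old contents; tas() swaps
 * in 1.  'lock' is a hypothetical word used as a crude spin lock:
 *
 *	static unsigned int lock = 0;
 *
 *	while (tas(&lock) != 0)
 *		;			(spin until the old value was 0)
 *	... critical section ...
 *	lock = 0;			(a real unlock needs a barrier first)
 */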
extern void die_if_kernel(char *str, struct pt_regs *regs) __attribute__ ((noreturn));

#endif /* !(__ASSEMBLY__) */

#endif /* !(__SPARC64_SYSTEM_H) */