usermode/library/atomic_ops/ia64.h

/*
 * Copyright (c) 2003 Hewlett-Packard Development Company, L.P.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */

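/* These are the standard libatomic_ops generalization headers (the      */
/* following summary is an editorial note): aligned_atomic_load_store.h  */
/* supplies plain AO_load/AO_store for naturally aligned AO_t values,    */
/* all_acquire_release_volatile.h derives acquire/release load and store */
/* variants from volatile accesses (which the compiler is expected to    */
/* map to ld.acq/st.rel on IA64), and test_and_set_t_is_char.h declares  */
/* the test-and-set location type AO_TS_t to be a char.                  */
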
#include "./aligned_atomic_load_store.h"

#include "./all_acquire_release_volatile.h"

#include "./test_and_set_t_is_char.h"

#ifdef _ILP32
  /* 32-bit HP/UX code. */
  /* This requires pointer "swizzling".  Pointers need to be expanded   */
  /* to 64 bits using the addp4 instruction before use.  This makes it  */
  /* hard to share code, but we try anyway.                             */
# define AO_LEN "4"
  /* We assume that addr always appears in argument position 1 in asm   */
  /* code.  If it is clobbered due to swizzling, we also need it in     */
  /* second position.  Any later arguments are referenced symbolically, */
  /* so that we don't have to worry about their position.  This         */
  /* requires gcc 3.1, but you shouldn't be using anything older than   */
  /* that on IA64 anyway.                                               */
  /* The AO_MASK macro is a workaround for the fact that HP/UX gcc      */
  /* appears to otherwise store 64-bit pointers in ar.ccv, i.e. it      */
  /* doesn't appear to clear high bits in a pointer value we pass into  */
  /* assembly code, even if it is supposedly of type AO_t.              */
  /* (The combined expansion of these macros is illustrated after       */
  /* AO_fetch_and_add1_acquire below.)                                  */
# define AO_IN_ADDR "1"(addr)
# define AO_OUT_ADDR , "=r"(addr)
# define AO_SWIZZLE "addp4 %1=0,%1;;\n"
# define AO_MASK(ptr) __asm__("zxt4 %1=%1": "=r"(ptr) : "0"(ptr));
#else
# define AO_LEN "8"
# define AO_IN_ADDR "r"(addr)
# define AO_OUT_ADDR
# define AO_SWIZZLE
# define AO_MASK(ptr)
#endif

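/* Full memory barrier: the IA64 "mf" instruction orders all earlier     */
/* loads and stores before all later ones.                               */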
AO_INLINE void
AO_nop_full(void)
{
  __asm__ __volatile__("mf" : : : "memory");
}
#define AO_HAVE_nop_full

AO_INLINE AO_t
AO_fetch_and_add1_acquire (volatile AO_t *addr)
{
  AO_t result;

  __asm__ __volatile__ (AO_SWIZZLE
                        "fetchadd" AO_LEN ".acq %0=[%1],1":
                        "=r" (result) AO_OUT_ADDR: AO_IN_ADDR :"memory");
  return result;
}
#define AO_HAVE_fetch_and_add1_acquire
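
/* Illustration only (the function name below is invented for this       */
/* sketch and is not part of the interface): under _ILP32, substituting  */
/* AO_SWIZZLE, AO_LEN, AO_OUT_ADDR and AO_IN_ADDR into the template      */
/* above yields the equivalent hand-written form below.  addr appears as */
/* both input and output because the addp4 swizzle clobbers it.          */
#if 0
AO_INLINE AO_t
AO_fetch_and_add1_acquire_ilp32_expansion (volatile AO_t *addr)
{
  AO_t result;

  __asm__ __volatile__ ("addp4 %1=0,%1;;\n"
                        "fetchadd4.acq %0=[%1],1"
                        : "=r" (result), "=r" (addr)
                        : "1" (addr) : "memory");
  return result;
}
#endif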

AO_INLINE AO_t
AO_fetch_and_add1_release (volatile AO_t *addr)
{
  AO_t result;

  __asm__ __volatile__ (AO_SWIZZLE
                        "fetchadd" AO_LEN ".rel %0=[%1],1":
                        "=r" (result) AO_OUT_ADDR: AO_IN_ADDR :"memory");
  return result;
}

#define AO_HAVE_fetch_and_add1_release

AO_INLINE AO_t
AO_fetch_and_sub1_acquire (volatile AO_t *addr)
{
  AO_t result;

  __asm__ __volatile__ (AO_SWIZZLE
                        "fetchadd" AO_LEN ".acq %0=[%1],-1":
                        "=r" (result) AO_OUT_ADDR: AO_IN_ADDR :"memory");
  return result;
}

#define AO_HAVE_fetch_and_sub1_acquire

AO_INLINE AO_t
AO_fetch_and_sub1_release (volatile AO_t *addr)
{
  AO_t result;

  __asm__ __volatile__ (AO_SWIZZLE
                        "fetchadd" AO_LEN ".rel %0=[%1],-1":
                        "=r" (result) AO_OUT_ADDR: AO_IN_ADDR :"memory");
  return result;
}

#define AO_HAVE_fetch_and_sub1_release
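
/* Usage sketch (illustrative only; the names below are not part of this */
/* header): a simple reference count built on the primitives above.      */
/* fetchadd returns the *previous* value, so a return of 1 from the      */
/* decrement means this thread dropped the last reference.               */
#if 0
static void
example_ref_get (volatile AO_t *refcount)
{
  AO_fetch_and_add1_acquire (refcount);
}

static int
example_ref_put (volatile AO_t *refcount)
{
  if (AO_fetch_and_sub1_release (refcount) == 1)
    {
      AO_nop_full ();   /* order the teardown after all prior releases */
      return 1;         /* caller may reclaim the object now           */
    }
  return 0;
}
#endif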

#ifndef _ILP32
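/* In the LP64 case unsigned int is narrower than AO_t, so the 32-bit    */
/* (fetchadd4) variants are spelled out explicitly here; in the ILP32    */
/* case they are instead derived from the AO_t operations by             */
/* ao_t_is_int.h, included at the end of this file.                      */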

AO_INLINE unsigned int
AO_int_fetch_and_add1_acquire (volatile unsigned int *addr)
{
  unsigned int result;

  __asm__ __volatile__ ("fetchadd4.acq %0=[%1],1":
                        "=r" (result): AO_IN_ADDR :"memory");
  return result;
}
#define AO_HAVE_int_fetch_and_add1_acquire

AO_INLINE unsigned int
AO_int_fetch_and_add1_release (volatile unsigned int *addr)
{
  unsigned int result;

  __asm__ __volatile__ ("fetchadd4.rel %0=[%1],1":
                        "=r" (result): AO_IN_ADDR :"memory");
  return result;
}

#define AO_HAVE_int_fetch_and_add1_release

AO_INLINE unsigned int
AO_int_fetch_and_sub1_acquire (volatile unsigned int *addr)
{
  unsigned int result;

  __asm__ __volatile__ ("fetchadd4.acq %0=[%1],-1":
                        "=r" (result): AO_IN_ADDR :"memory");
  return result;
}

#define AO_HAVE_int_fetch_and_sub1_acquire

AO_INLINE unsigned int
AO_int_fetch_and_sub1_release (volatile unsigned int *addr)
{
  unsigned int result;

  __asm__ __volatile__ ("fetchadd4.rel %0=[%1],-1":
                        "=r" (result): AO_IN_ADDR :"memory");
  return result;
}

#define AO_HAVE_int_fetch_and_sub1_release

#endif /* !_ILP32 */

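/* IA64 cmpxchg compares the current memory contents against the value   */
/* in the ar.ccv application register, so each compare-and-swap below    */
/* first moves the expected old value into ar.ccv.  In the ILP32 case    */
/* AO_MASK (zxt4) clears the high 32 bits of old before it is used.      */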
AO_INLINE int
AO_compare_and_swap_acquire(volatile AO_t *addr,
                             AO_t old, AO_t new_val)
{
  AO_t oldval;
  AO_MASK(old);
  __asm__ __volatile__(AO_SWIZZLE
                       "mov ar.ccv=%[old] ;; cmpxchg" AO_LEN
                       ".acq %0=[%1],%[new_val],ar.ccv"
                       : "=r"(oldval) AO_OUT_ADDR
                       : AO_IN_ADDR, [new_val]"r"(new_val), [old]"r"(old)
                       : "memory");
  return (oldval == old);
}

#define AO_HAVE_compare_and_swap_acquire

AO_INLINE int
AO_compare_and_swap_release(volatile AO_t *addr,
                             AO_t old, AO_t new_val)
{
  AO_t oldval;
  AO_MASK(old);
  __asm__ __volatile__(AO_SWIZZLE
                       "mov ar.ccv=%[old] ;; cmpxchg" AO_LEN
                       ".rel %0=[%1],%[new_val],ar.ccv"
                       : "=r"(oldval) AO_OUT_ADDR
                       : AO_IN_ADDR, [new_val]"r"(new_val), [old]"r"(old)
                       : "memory");
  return (oldval == old);
}

#define AO_HAVE_compare_and_swap_release
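
/* Usage sketch (illustrative only; the names below are not part of this */
/* header): a minimal spin lock built on the compare-and-swap primitives */
/* above.  The CAS returns nonzero exactly when it installed new_val.    */
#if 0
static void
example_spin_lock (volatile AO_t *lock)
{
  /* Spin until the lock word is atomically changed from 0 to 1; acquire */
  /* semantics keep the critical section from moving above this point.   */
  while (!AO_compare_and_swap_acquire (lock, 0, 1))
    ;
}

static void
example_spin_unlock (volatile AO_t *lock)
{
  /* Release semantics publish the critical section's stores before the  */
  /* lock is observed to be free again.                                  */
  (void) AO_compare_and_swap_release (lock, 1, 0);
}
#endif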
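/* The (AO_t)old casts below zero-extend the expected value to the full  */
/* width of ar.ccv, matching the zero-extended byte or halfword that     */
/* cmpxchg1/cmpxchg2 read for the comparison.                            */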
AO_INLINE int
AO_char_compare_and_swap_acquire(volatile unsigned char *addr,
                                 unsigned char old, unsigned char new_val)
{
  unsigned char oldval;
  __asm__ __volatile__(AO_SWIZZLE
               "mov ar.ccv=%[old] ;; cmpxchg1.acq %0=[%1],%[new_val],ar.ccv"
               : "=r"(oldval) AO_OUT_ADDR
               : AO_IN_ADDR, [new_val]"r"(new_val), [old]"r"((AO_t)old)
               : "memory");
  return (oldval == old);
}

#define AO_HAVE_char_compare_and_swap_acquire

AO_INLINE int
AO_char_compare_and_swap_release(volatile unsigned char *addr,
                                 unsigned char old, unsigned char new_val)
{
  unsigned char oldval;
  __asm__ __volatile__(AO_SWIZZLE
                "mov ar.ccv=%[old] ;; cmpxchg1.rel %0=[%1],%[new_val],ar.ccv"
                : "=r"(oldval) AO_OUT_ADDR
                : AO_IN_ADDR, [new_val]"r"(new_val), [old]"r"((AO_t)old)
                : "memory");
  return (oldval == old);
}

#define AO_HAVE_char_compare_and_swap_release

AO_INLINE int
AO_short_compare_and_swap_acquire(volatile unsigned short *addr,
                                  unsigned short old, unsigned short new_val)
{
  unsigned short oldval;
  __asm__ __volatile__(AO_SWIZZLE
                "mov ar.ccv=%[old] ;; cmpxchg2.acq %0=[%1],%[new_val],ar.ccv"
                : "=r"(oldval) AO_OUT_ADDR
                : AO_IN_ADDR, [new_val]"r"(new_val), [old]"r"((AO_t)old)
                : "memory");
  return (oldval == old);
}

#define AO_HAVE_short_compare_and_swap_acquire

AO_INLINE int
AO_short_compare_and_swap_release(volatile unsigned short *addr,
                                  unsigned short old, unsigned short new_val)
{
  unsigned short oldval;
  __asm__ __volatile__(AO_SWIZZLE
                "mov ar.ccv=%[old] ;; cmpxchg2.rel %0=[%1],%[new_val],ar.ccv"
                : "=r"(oldval) AO_OUT_ADDR
                : AO_IN_ADDR, [new_val]"r"(new_val), [old]"r"((AO_t)old)
                : "memory");
  return (oldval == old);
}

#define AO_HAVE_short_compare_and_swap_release

#ifndef _ILP32

AO_INLINE int
AO_int_compare_and_swap_acquire(volatile unsigned int *addr,
                                unsigned int old, unsigned int new_val)
{
  unsigned int oldval;
  __asm__ __volatile__("mov ar.ccv=%3 ;; cmpxchg4.acq %0=[%1],%2,ar.ccv"
                       : "=r"(oldval)
                       : AO_IN_ADDR, "r"(new_val), "r"((AO_t)old) : "memory");
  return (oldval == old);
}

#define AO_HAVE_int_compare_and_swap_acquire

AO_INLINE int
AO_int_compare_and_swap_release(volatile unsigned int *addr,
                                unsigned int old, unsigned int new_val)
{
  unsigned int oldval;
  __asm__ __volatile__("mov ar.ccv=%3 ;; cmpxchg4.rel %0=[%1],%2,ar.ccv"
                       : "=r"(oldval)
                       : AO_IN_ADDR, "r"(new_val), "r"((AO_t)old) : "memory");
  return (oldval == old);
}

#define AO_HAVE_int_compare_and_swap_release

#endif /* !_ILP32 */

/* FIXME: Add compare_and_swap_double as soon as there is widely        */
/* available hardware that implements it.                               */

/* FIXME: Add compare_double_and_swap_double for the _ILP32 case.       */

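/* In the ILP32 case AO_t and unsigned int have the same size, so the    */
/* int operations can be derived from the AO_t operations defined above. */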
#ifdef _ILP32
# include "./ao_t_is_int.h"
#endif
