* arch_x86.h: trivial definitions for the x86 architecture.
*
* Copyright (c) 2009 Paul E. McKenney, IBM Corporation.
- * Copyright (c) 2009 Mathieu Desnoyers <mathieu.desnoyers@polymtl.ca>
+ * Copyright (c) 2009 Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
-*
+ *
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
extern "C" {
#endif
-#define CACHE_LINE_SIZE 128
+#define CAA_CACHE_LINE_SIZE 128
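
CAA_CACHE_LINE_SIZE is 128 rather than 64, presumably to cover the
adjacent-cache-line prefetcher found on some Intel CPUs. A minimal
sketch of its typical use, padding per-thread data to avoid false
sharing (the struct and field names here are hypothetical, not part
of this header):

	/* Give each per-thread counter its own cache line so that
	 * concurrent updates from different CPUs do not bounce the
	 * same line back and forth. */
	struct thread_counter {
		unsigned long count;
	} __attribute__((aligned(CAA_CACHE_LINE_SIZE)));
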
#ifdef CONFIG_RCU_HAVE_FENCE
-#define mb() asm volatile("mfence":::"memory")
-#define rmb() asm volatile("lfence":::"memory")
-#define wmb() asm volatile("sfence"::: "memory")
+#define cmm_mb() asm volatile("mfence":::"memory")
+#define cmm_rmb() asm volatile("lfence":::"memory")
+#define cmm_wmb() asm volatile("sfence"::: "memory")
#else
/*
- * Some non-Intel clones support out of order store. wmb() ceases to be a
+ * Some non-Intel clones support out-of-order stores. cmm_wmb() ceases to be a
* nop for these.
*/
-#define mb() asm volatile("lock; addl $0,0(%%esp)":::"memory")
-#define rmb() asm volatile("lock; addl $0,0(%%esp)":::"memory")
-#define wmb() asm volatile("lock; addl $0,0(%%esp)"::: "memory")
+#define cmm_mb() asm volatile("lock; addl $0,0(%%esp)":::"memory")
+#define cmm_rmb() asm volatile("lock; addl $0,0(%%esp)":::"memory")
+#define cmm_wmb() asm volatile("lock; addl $0,0(%%esp)"::: "memory")
#endif
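
Whichever branch of the #ifdef is compiled in, the barriers pair up in
the classic message-passing pattern. A hedged sketch, with the
variables and functions purely illustrative:

	#include <assert.h>

	static int data;
	static int flag;

	static void writer(void)
	{
		data = 42;
		cmm_wmb();	/* order the data store before the flag store */
		flag = 1;
	}

	static void reader(void)
	{
		if (flag) {
			cmm_rmb();	/* order the flag load before the data load */
			assert(data == 42);
		}
	}
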
-#define cpu_relax() asm volatile("rep; nop" : : : "memory");
+#define caa_cpu_relax() asm volatile("rep; nop" : : : "memory")
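
"rep; nop" encodes the x86 PAUSE instruction, which hints to the CPU
that the caller is spinning; the "memory" clobber also forces the
compiler to re-read shared variables on each iteration. A minimal
busy-wait sketch (flag is the same illustrative variable as above):

	while (!flag)
		caa_cpu_relax();	/* be polite to the sibling hyperthread */
	cmm_rmb();			/* order later reads after seeing the flag */
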
#define rdtscll(val) \
do { \
typedef unsigned long long cycles_t;
-static inline cycles_t get_cycles(void)
+static inline cycles_t caa_get_cycles(void)
{
cycles_t ret = 0;
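
Assuming the truncated body of caa_get_cycles() fills ret via the
rdtscll() macro above, a hedged sketch of cycle-level timing, where
do_work() stands in for a hypothetical workload:

	#include <stdio.h>

	cycles_t start, end;

	start = caa_get_cycles();
	do_work();
	end = caa_get_cycles();
	/* TSC deltas are only meaningful on one core with a
	 * constant-rate TSC; treat this as a rough measurement. */
	printf("elapsed: %llu cycles\n", (unsigned long long)(end - start));
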