/*
* Architectures with cache coherency must _not_ define cmm_mc/cmm_rmc/cmm_wmc.
*
- * For them, cmm_mc/cmm_rmc/cmm_wmc are implemented with a simple compiler barrier;
- * in addition, we provide defaults for cmm_mb (using GCC builtins) as well as
- * cmm_rmb and cmm_wmb (defaulting to cmm_mb).
+ * For them, cmm_mc/cmm_rmc/cmm_wmc are implemented with a simple
+ * compiler barrier; in addition, we provide defaults for cmm_mb (using
+ * GCC builtins) as well as cmm_rmb and cmm_wmb (defaulting to cmm_mb).
*/
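/*
 * Illustrative sketch of those defaults (guarded out; an assumption
 * about what the generic definitions look like, the exact GCC builtin
 * may differ):
 */
#if 0
#define cmm_mc()	cmm_barrier()		/* simple compiler barrier */
#define cmm_rmc()	cmm_barrier()
#define cmm_wmc()	cmm_barrier()
#define cmm_mb()	__sync_synchronize()	/* GCC builtin full memory barrier */
#define cmm_rmb()	cmm_mb()		/* default to cmm_mb() */
#define cmm_wmb()	cmm_mb()		/* default to cmm_mb() */
#endif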
#ifndef cmm_mb
/*
* Architectures without cache coherency need something like the following:
*
- * #define cmm_mc() arch_cache_flush()
+ * #define cmm_mc() arch_cache_flush()
* #define cmm_rmc() arch_cache_flush_read()
* #define cmm_wmc() arch_cache_flush_write()
*
- * Of these, only cmm_mc is mandatory. cmm_rmc and cmm_wmc default to cmm_mc.
- * cmm_mb/cmm_rmb/cmm_wmb use these definitions by default:
+ * Of these, only cmm_mc is mandatory. cmm_rmc and cmm_wmc default to
+ * cmm_mc. cmm_mb/cmm_rmb/cmm_wmb use these definitions by default:
*
- * #define cmm_mb() cmm_mc()
+ * #define cmm_mb() cmm_mc()
* #define cmm_rmb() cmm_rmc()
* #define cmm_wmb() cmm_wmc()
*/
#include <urcu/arch.h>
/*
- * Identify a shared load. A cmm_smp_rmc() or cmm_smp_mc() should come before the load.
+ * Identify a shared load. A cmm_smp_rmc() or cmm_smp_mc() should come
+ * before the load.
*/
#define _CMM_LOAD_SHARED(p) CMM_ACCESS_ONCE(p)
})
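/*
 * Illustrative sketch (hypothetical helper, not from this file): a
 * caller pairs the read-side cache primitive with the annotated load,
 * as the comment above prescribes; the CMM_LOAD_SHARED() wrapper whose
 * tail appears above bundles essentially this sequence.
 */
static inline unsigned long read_shared_flag(unsigned long *p)
{
	cmm_smp_rmc();			/* flush/order before the shared load */
	return _CMM_LOAD_SHARED(*p);
}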
/*
- * Identify a shared store. A cmm_smp_wmc() or cmm_smp_mc() should follow the store.
+ * Identify a shared store. A cmm_smp_wmc() or cmm_smp_mc() should
+ * follow the store.
*/
#define _CMM_STORE_SHARED(x, v) ({ CMM_ACCESS_ONCE(x) = (v); })
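/*
 * Matching store-side sketch (hypothetical helper, not from this
 * file): the shared store is followed by the write-side cache
 * primitive, per the comment above; CMM_STORE_SHARED() below bundles
 * the same sequence.
 */
static inline void write_shared_flag(unsigned long *p, unsigned long v)
{
	_CMM_STORE_SHARED(*p, v);	/* volatile store to the shared location */
	cmm_smp_wmc();			/* flush/order after the shared store */
}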
/*
- * Store v into x, where x is located in shared memory. Performs the required
- * cache flush after writing. Returns v.
+ * Store v into x, where x is located in shared memory. Performs the
+ * required cache flush after writing. Returns v.
*/
#define CMM_STORE_SHARED(x, v) \
({ \
void _uatomic_link_error()
{
#ifdef ILLEGAL_INSTR
- /* generate an illegal instruction. Cannot catch this with linker tricks
- * when optimizations are disabled. */
+ /*
+ * generate an illegal instruction. Cannot catch this with
+ * linker tricks when optimizations are disabled.
+ */
__asm__ __volatile__(ILLEGAL_INSTR);
#else
__builtin_trap ();
}
#endif
}
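/*
 * Sketch of the "linker trick" the comment refers to (demo_cmpxchg is
 * a hypothetical caller, not from this file): with optimizations on,
 * the call in the unreachable default: branch is eliminated, so the
 * undefined _uatomic_link_error symbol is never referenced and an
 * unsupported size shows up as a link error.  With -O0 the dead call
 * survives, so the fallback above traps at run time instead.
 */
static inline unsigned long demo_cmpxchg(unsigned long *addr,
					 unsigned long old,
					 unsigned long _new, int len)
{
	switch (len) {
	case sizeof(unsigned long):
		return __sync_val_compare_and_swap(addr, old, _new);
	default:
		_uatomic_link_error();	/* optimized out for supported sizes */
		return 0;
	}
}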
- /* generate an illegal instruction. Cannot catch this with linker tricks
- * when optimizations are disabled. */
+ /*
+ * generate an illegal instruction. Cannot catch this with
+ * linker tricks when optimizations are disabled.
+ */
__asm__ __volatile__(ILLEGAL_INSTR);
return 0;
}
}
#endif
}
- /* generate an illegal instruction. Cannot catch this with linker tricks
- * when optimizations are disabled. */
+ /*
+ * generate an illegal instruction. Cannot catch this with
+ * linker tricks when optimizations are disabled.
+ */
__asm__ __volatile__(ILLEGAL_INSTR);
return 0;
}
}
#endif
}
- /* generate an illegal instruction. Cannot catch this with linker tricks
- * when optimizations are disabled. */
+ /*
+ * generate an illegal instruction. Cannot catch this with
+ * linker tricks when optimizations are disabled.
+ */
__asm__ __volatile__(ILLEGAL_INSTR);
return 0;
}
}
#endif
}
- /* generate an illegal instruction. Cannot catch this with linker tricks
- * when optimizations are disabled. */
+ /*
+ * generate an illegal instruction. Cannot catch this with
+ * linker tricks when optimizations are disabled.
+ */
__asm__ __volatile__("ud2");
return 0;
}
}
#endif
}
- /* generate an illegal instruction. Cannot catch this with linker tricks
- * when optimizations are disabled. */
+ /*
+ * generate an illegal instruction. Cannot catch this with
+ * linker tricks when optimizations are disabled.
+ */
__asm__ __volatile__("ud2");
return 0;
}
}
#endif
}
- /* generate an illegal instruction. Cannot catch this with linker tricks
- * when optimizations are disabled. */
+ /*
+ * generate an illegal instruction. Cannot catch this with
+ * linker tricks when optimizations are disabled.
+ */
__asm__ __volatile__("ud2");
return 0;
}
}
#endif
}
- /* generate an illegal instruction. Cannot catch this with linker tricks
- * when optimizations are disabled. */
+ /*
+ * generate an illegal instruction. Cannot catch this with
+ * linker tricks when optimizations are disabled.
+ */
__asm__ __volatile__("ud2");
return;
}
}
#endif
}
- /* generate an illegal instruction. Cannot catch this with linker tricks
- * when optimizations are disabled. */
+ /*
+ * generate an illegal instruction. Cannot catch this with
+ * linker tricks when optimizations are disabled.
+ */
__asm__ __volatile__("ud2");
return;
}
}
#endif
}
- /* generate an illegal instruction. Cannot catch this with linker tricks
- * when optimizations are disabled. */
+ /*
+ * generate an illegal instruction. Cannot catch this with
+ * linker tricks when optimizations are disabled.
+ */
__asm__ __volatile__("ud2");
return;
}
}
#endif
}
- /* generate an illegal instruction. Cannot catch this with linker tricks
- * when optimizations are disabled. */
+ /*
+ * generate an illegal instruction. Cannot catch this with
+ * linker tricks when optimizations are disabled.
+ */
__asm__ __volatile__("ud2");
return;
}