for (; (pos) != NULL; (pos) = (pos)->next)
//#define DEBUG
+//#define DEBUG_COUNTERS
#ifdef __linux__
#include <syscall.h>
fprintf(stderr, "[debug rcuja %lu %s()@%s:%u] " fmt, \
(unsigned long) gettid(), __func__, \
__FILE__, __LINE__, ## args)
+
#else
#define dbg_printf(fmt, args...) \
do { \
} while (0)
#endif
#ifdef DEBUG_COUNTERS
/*
 * Return nonzero when the rcuja debug counters (nodes allocated/freed,
 * fallback statistics) should be maintained.
 *
 * Kept as a constant-value inline function rather than an #ifdef at
 * each call site, so callers write a plain `if (ja_debug_counters())`
 * and the compiler eliminates the dead branch when counters are off.
 *
 * Fix: both variants were missing the `int` return type; implicit int
 * is invalid since C99 and rejected by modern compilers.
 */
static inline
int ja_debug_counters(void)
{
	return 1;
}
#else
/* Counters disabled: the guarded branch compiles out at call sites. */
static inline
int ja_debug_counters(void)
{
	return 0;
}
#endif
+
#endif /* _URCU_RCUJA_INTERNAL_H */
return NULL;
}
memset(p, 0, len);
- uatomic_inc(&ja->nr_nodes_allocated);
+ if (ja_debug_counters())
+ uatomic_inc(&ja->nr_nodes_allocated);
return p;
}
void free_cds_ja_node(struct cds_ja *ja, struct cds_ja_inode *node)
{
free(node);
- if (node)
+ if (ja_debug_counters() && node)
uatomic_inc(&ja->nr_nodes_freed);
}
dbg_printf("Using fallback for %u children, node type index: %u, mode %s\n",
new_shadow_node->nr_child, old_type_index, mode == JA_RECOMPACT_ADD_NEXT ? "add_next" :
(mode == JA_RECOMPACT_DEL ? "del" : "add_same"));
- uatomic_inc(&ja->node_fallback_count_distribution[new_shadow_node->nr_child]);
+ if (ja_debug_counters())
+ uatomic_inc(&ja->node_fallback_count_distribution[new_shadow_node->nr_child]);
}
/* Return pointer to new recompacted node through old_node_flag_ptr */
} else {
new_type_index++;
dbg_printf("Add fallback to type %d\n", new_type_index);
- uatomic_inc(&ja->nr_fallback);
+ if (ja_debug_counters())
+ uatomic_inc(&ja->nr_fallback);
fallback = 1;
goto retry;
}
unsigned long na, nf, nr_fallback;
int ret = 0;
+ if (!ja_debug_counters())
+ return 0;
+
fallback_ratio = (double) uatomic_read(&ja->nr_fallback);
fallback_ratio /= (double) uatomic_read(&ja->nr_nodes_allocated);
nr_fallback = uatomic_read(&ja->nr_fallback);