+static inline guint64 ltt_time_to_uint64(LttTime t1)
+{
+ return (((guint64)t1.tv_sec*LTT_TIME_UINT_SHIFT_CONST) << LTT_TIME_UINT_SHIFT)
+ + (guint64)t1.tv_nsec;
+}
+
+
/* Largest flattened time value, (2^62)-1, beyond which the uint64 ->
 * LttTime conversion loses precision.
 * NOTE(review): the name says TV_SEC, but ltt_time_from_uint64 compares
 * it against the whole flattened uint64 value — verify which was meant. */
#define MAX_TV_SEC_TO_UINT64 0x3FFFFFFFFFFFFFFFULL
+
/* The likely branch is the one with sec != 0, because most events in a
 * block will be over 1s from the block start. (see tracefile.c)
 */
+static inline LttTime ltt_time_from_uint64(guint64 t1)
+{
+ /* We lose precision if tv_sec is > than (2^62)-1
+ * */
+#ifdef EXTRA_CHECK
+ g_assert(t1 <= MAX_TV_SEC_TO_UINT64);
+ if(t1 > MAX_TV_SEC_TO_UINT64)
+ g_warning("Conversion from uint64 to non precise LttTime");
+#endif //EXTRA_CHECK
+ LttTime res;
+ //if(unlikely(t1 >= NANOSECONDS_PER_SECOND)) {
+ if(likely(t1>>LTT_TIME_UINT_SHIFT >= LTT_TIME_UINT_SHIFT_CONST)) {
+ //res.tv_sec = t1/NANOSECONDS_PER_SECOND;
+ res.tv_sec = (t1>>LTT_TIME_UINT_SHIFT)
+ /LTT_TIME_UINT_SHIFT_CONST; // acceleration
+ res.tv_nsec = (t1 - res.tv_sec*NANOSECONDS_PER_SECOND);
+ } else {
+ res.tv_sec = 0;
+ res.tv_nsec = (guint32)t1;
+ }
+ return res;
+}
+