*/
 DEFINE_PER_CPU(unsigned long, last_jiffy);
 
+/*
+ * Spin for at least @loops ticks of the CPU's time reference.
+ *
+ * On __USE_RTC() systems the RTCL register is sampled; elsewhere the
+ * timebase (get_tbl) is used.  The loop relies on unsigned wraparound
+ * of the timebase, and on explicit modular correction for RTCL.
+ * HMT_low()/HMT_medium() adjust SMT thread priority so a sibling
+ * hardware thread gets more cycles while we busy-wait.
+ */
+void __delay(unsigned long loops)
+{
+       unsigned long start;
+       int diff;
+
+       if (__USE_RTC()) {
+               start = get_rtcl();
+               do {
+                       /* the RTCL register wraps at 1000000000 */
+                       diff = get_rtcl() - start;
+                       if (diff < 0)
+                               diff += 1000000000;
+               } while (diff < loops);
+       } else {
+               start = get_tbl();
+               /* unsigned subtraction handles timebase wrap correctly */
+               while (get_tbl() - start < loops)
+                       HMT_low();      /* drop SMT priority while spinning */
+               HMT_medium();           /* restore normal SMT priority */
+       }
+}
+EXPORT_SYMBOL(__delay);
+
+/*
+ * Busy-wait for at least @usecs microseconds by converting to
+ * timebase ticks via tb_ticks_per_usec and spinning in __delay().
+ */
+void udelay(unsigned long usecs)
+{
+       __delay(tb_ticks_per_usec * usecs);
+}
+EXPORT_SYMBOL(udelay);
+
 static __inline__ void timer_check_rtc(void)
 {
         /*
 
  * Anton Blanchard.
  */
 
-extern unsigned long tb_ticks_per_usec;
-
-#ifdef CONFIG_PPC64
-/* define these here to prevent circular dependencies */
-/* these instructions control the thread priority on multi-threaded cpus */
-#define __HMT_low()    asm volatile("or 1,1,1")
-#define __HMT_medium() asm volatile("or 2,2,2")
-#else
-#define __HMT_low()
-#define __HMT_medium()
-#endif
-
-#define __barrier()    asm volatile("" ::: "memory")
-
-static inline unsigned long __get_tb(void)
-{
-       unsigned long rval;
-
-       asm volatile("mftb %0" : "=r" (rval));
-       return rval;
-}
-
-static inline void __delay(unsigned long loops)
-{
-       unsigned long start = __get_tb();
-
-       while((__get_tb() - start) < loops)
-               __HMT_low();
-       __HMT_medium();
-       __barrier();
-}
-
-static inline void udelay(unsigned long usecs)
-{
-       unsigned long loops = tb_ticks_per_usec * usecs;
-
-       __delay(loops);
-}
+extern void __delay(unsigned long loops);
+extern void udelay(unsigned long usecs);
 
 #endif /* _ASM_POWERPC_DELAY_H */