patch-2.4.6 linux/include/asm-mips/unaligned.h

diff -u --recursive --new-file v2.4.5/linux/include/asm-mips/unaligned.h linux/include/asm-mips/unaligned.h
@@ -1,10 +1,10 @@
-/* $Id$
- *
+/*
  * This file is subject to the terms and conditions of the GNU General Public
  * License.  See the file "COPYING" in the main directory of this archive
  * for more details.
  *
- * Copyright (C) 1996, 1999 by Ralf Baechle
+ * Copyright (C) 1996, 1999, 2000 by Ralf Baechle
+ * Copyright (C) 1999, 2000 Silicon Graphics, Inc.
  */
 #ifndef _ASM_UNALIGNED_H
 #define _ASM_UNALIGNED_H
@@ -13,133 +13,147 @@
 extern void __put_unaligned_bad_length(void);
 
 /*
- * Load quad unaligned.
+ * Load double unaligned.
+ *
+ * This could have been implemented in plain C like IA64 but egcs 1.0.3a
+ * inflates this to 23 instructions ...
  */
-extern __inline__ unsigned long ldq_u(const unsigned long long * __addr)
+extern inline unsigned long long __ldq_u(const unsigned long long * __addr)
 {
 	unsigned long long __res;
 
-	__asm__("uld\t%0,(%1)"
-		:"=&r" (__res)
-		:"r" (__addr));
+	__asm__("ulw\t%0, %1\n\t"
+		"ulw\t%D0, 4+%1"
+		: "=&r" (__res)
+		: "m" (*__addr));
 
 	return __res;
 }
 
 /*
- * Load long unaligned.
+ * Load word unaligned.
  */
-extern __inline__ unsigned long ldl_u(const unsigned int * __addr)
+extern inline unsigned long __ldl_u(const unsigned int * __addr)
 {
 	unsigned long __res;
 
-	__asm__("ulw\t%0,(%1)"
-		:"=&r" (__res)
-		:"r" (__addr));
+	__asm__("ulw\t%0,%1"
+		: "=&r" (__res)
+		: "m" (*__addr));
 
 	return __res;
 }
 
 /*
- * Load word unaligned.
+ * Load halfword unaligned.
  */
-extern __inline__ unsigned long ldw_u(const unsigned short * __addr)
+extern inline unsigned long __ldw_u(const unsigned short * __addr)
 {
 	unsigned long __res;
 
-	__asm__("ulh\t%0,(%1)"
-		:"=&r" (__res)
-		:"r" (__addr));
+	__asm__("ulh\t%0,%1"
+		: "=&r" (__res)
+		: "m" (*__addr));
 
 	return __res;
 }
 
 /*
- * Store quad ununaligned.
+ * Store doubleword unaligned.
  */
-extern __inline__ void stq_u(unsigned long __val, unsigned long long * __addr)
+extern inline void __stq_u(unsigned long __val, unsigned long long * __addr)
 {
-	__asm__ __volatile__(
-		"usd\t%0,(%1)"
-		: /* No results */
-		:"r" (__val),
-		 "r" (__addr));
+	__asm__("usw\t%1, %0\n\t"
+		"usw\t%D1, 4+%0"
+		: "=m" (*__addr)
+		: "r" (__val));
 }
 
 /*
  * Store long ununaligned.
  */
-extern __inline__ void stl_u(unsigned long __val, unsigned int * __addr)
+extern inline void __stl_u(unsigned long __val, unsigned int * __addr)
 {
-	__asm__ __volatile__(
-		"usw\t%0,(%1)"
-		: /* No results */
-		:"r" (__val),
-		 "r" (__addr));
+	__asm__("usw\t%1, %0"
+		: "=m" (*__addr)
+		: "r" (__val));
 }
 
 /*
  * Store word ununaligned.
  */
-extern __inline__ void stw_u(unsigned long __val, unsigned short * __addr)
+extern inline void __stw_u(unsigned long __val, unsigned short * __addr)
 {
-	__asm__ __volatile__(
-		"ush\t%0,(%1)"
-		: /* No results */
-		:"r" (__val),
-		 "r" (__addr));
-}
-
-extern inline unsigned long __get_unaligned(const void *ptr, size_t size)
-{
-	unsigned long val;
-	switch (size) {
-	case 1:
-		val = *(const unsigned char *)ptr;
-		break;
-	case 2:
-		val = ldw_u((const unsigned short *)ptr);
-		break;
-	case 4:
-		val = ldl_u((const unsigned int *)ptr);
-		break;
-	case 8:
-		val = ldq_u((const unsigned long long *)ptr);
-		break;
-	default:
-		__get_unaligned_bad_length();
-		break;
-	}
-	return val;
-}
-
-extern inline void __put_unaligned(unsigned long val, void *ptr, size_t size)
-{
-	switch (size) {
-	case 1:
-		*(unsigned char *)ptr = (val);
-		break;
-	case 2:
-		stw_u(val, (unsigned short *)ptr);
-		break;
-	case 4:
-		stl_u(val, (unsigned int *)ptr);
-		break;
-	case 8:
-		stq_u(val, (unsigned long long *)ptr);
-		break;
-	default:
-		__put_unaligned_bad_length();
-		break;
-	}
-}
-
-/* 
- * The main single-value unaligned transfer routines.
- */
-#define get_unaligned(ptr) \
-	((__typeof__(*(ptr)))__get_unaligned((ptr), sizeof(*(ptr))))
-#define put_unaligned(x,ptr) \
-	__put_unaligned((unsigned long)(x), (ptr), sizeof(*(ptr)))
+	__asm__("ush\t%1, %0"
+		: "=m" (*__addr)
+		: "r" (__val));
+}
+
+/*
+ * get_unaligned - get value from possibly mis-aligned location
+ * @ptr: pointer to value
+ *
+ * This macro should be used for accessing values larger in size than 
+ * single bytes at locations that are expected to be improperly aligned, 
+ * e.g. retrieving a u16 value from a location not u16-aligned.
+ *
+ * Note that unaligned accesses can be very expensive on some architectures.
+ */
+#define get_unaligned(ptr)						\
+({									\
+	__typeof__(*(ptr)) __val;					\
+									\
+	switch (sizeof(*(ptr))) {					\
+	case 1:								\
+		__val = *(const unsigned char *)ptr;			\
+		break;							\
+	case 2:								\
+		__val = __ldw_u((const unsigned short *)ptr);		\
+		break;							\
+	case 4:								\
+		__val = __ldl_u((const unsigned int *)ptr);		\
+		break;							\
+	case 8:								\
+		__val = __ldq_u((const unsigned long long *)ptr);	\
+		break;							\
+	default:							\
+		__get_unaligned_bad_length();				\
+		break;							\
+	}								\
+									\
+	__val;								\
+})
+
+/*
+ * put_unaligned - put value to a possibly mis-aligned location
+ * @val: value to place
+ * @ptr: pointer to location
+ *
+ * This macro should be used for placing values larger in size than 
+ * single bytes at locations that are expected to be improperly aligned, 
+ * e.g. writing a u16 value to a location not u16-aligned.
+ *
+ * Note that unaligned accesses can be very expensive on some architectures.
+ */
+#define put_unaligned(val,ptr)						\
+do {									\
+	switch (sizeof(*(ptr))) {					\
+	case 1:								\
+		*(unsigned char *)(ptr) = (val);			\
+		break;							\
+	case 2:								\
+		__stw_u(val, (unsigned short *)(ptr));			\
+		break;							\
+	case 4:								\
+		__stl_u(val, (unsigned int *)(ptr));			\
+		break;							\
+	case 8:								\
+		__stq_u(val, (unsigned long long *)(ptr));		\
+		break;							\
+	default:							\
+		__put_unaligned_bad_length();				\
+		break;							\
+	}								\
+} while(0)
 
 #endif /* _ASM_UNALIGNED_H */
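
For illustration, a minimal usage sketch of the two macros defined above. The function and buffer names are hypothetical and not part of the patch; a real caller would simply include <asm/unaligned.h> and let sizeof(*(ptr)) select the access width.

#include <asm/unaligned.h>

/* Illustration only: read and update a 32-bit value that sits at an
 * odd, i.e. unaligned, offset inside a raw byte buffer. */
static unsigned int example_bump(unsigned char *buf)
{
	unsigned int *p = (unsigned int *) (buf + 1);	/* misaligned pointer */
	unsigned int val;

	val = get_unaligned(p);		/* sizeof(*p) == 4, expands to __ldl_u() */
	put_unaligned(val + 1, p);	/* likewise expands to __stl_u() */

	return val;
}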

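The comment above __ldq_u notes that a plain C implementation, as used on IA64, was possible but compiled poorly with egcs 1.0.3a. As a rough sketch of what such a plain C variant typically looks like on ports that take that route, here is the packed-struct idiom; the names __una_u64 and plain_c_ldq_u are illustrative assumptions, not code from this patch.

/* Illustration only: the generic packed-struct idiom some ports use
 * instead of hand-written ulw/uld assembler.  The compiler is then
 * free to emit byte loads or lwl/lwr pairs for the packed access. */
struct __una_u64 { unsigned long long x; } __attribute__((packed));

static inline unsigned long long plain_c_ldq_u(const void *addr)
{
	const struct __una_u64 *ptr = (const struct __una_u64 *) addr;

	return ptr->x;
}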