author     Ayke van Laethem <[email protected]>  2024-08-23 16:08:43 +0200
committer  Ron Evans <[email protected]>          2024-09-05 10:06:30 +0200
commit     4f1b69827d34d350f0148869c49bf118cd8613ee (patch)
tree       cb9bff8f0657c57786cba362ac0171aa3fa64983 /src
parent     73f519b589bf45d8b9a121063bd264357f632c06 (diff)
reflect: support big-endian systems
The reflect package needs to know the endianness of the system in a few places. Before this patch, it assumed little-endian systems, but with GOARCH=mips we now have a big-endian target that also needs to be supported. This patch fixes the reflect package to work on big-endian systems.

I've also updated the tests for MIPS: instead of running the little-endian tests, they now run the big-endian tests. The two are very similar except for endianness, so this should be fine. To be sure we won't accidentally break little-endian support, I've kept a single MIPS little-endian test (the CGo test, which doesn't yet work on big-endian systems anyway).
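To make the endianness issue concrete, here is a small standalone Go program (not part of the commit) showing that the same bytes in memory decode to different integer values depending on the byte order used to interpret them:

package main

import (
	"encoding/binary"
	"fmt"
)

func main() {
	// The same four bytes decode to different uint32 values depending on
	// the byte order used to interpret them.
	b := []byte{0x12, 0x34, 0x56, 0x78}
	fmt.Printf("little-endian: %#x\n", binary.LittleEndian.Uint32(b)) // 0x78563412
	fmt.Printf("big-endian:    %#x\n", binary.BigEndian.Uint32(b))    // 0x12345678
}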
Diffstat (limited to 'src')
-rw-r--r--  src/reflect/endian-big.go     36
-rw-r--r--  src/reflect/endian-little.go  35
-rw-r--r--  src/reflect/value.go          31
3 files changed, 75 insertions(+), 27 deletions(-)
diff --git a/src/reflect/endian-big.go b/src/reflect/endian-big.go
new file mode 100644
index 000000000..94951e200
--- /dev/null
+++ b/src/reflect/endian-big.go
@@ -0,0 +1,36 @@
+//go:build mips
+
+package reflect
+
+import "unsafe"
+
+// loadValue loads a value that may or may not be word-aligned. The number of
+// bytes given in size are loaded. The biggest possible size it can load is that
+// of an uintptr.
+func loadValue(ptr unsafe.Pointer, size uintptr) uintptr {
+ loadedValue := uintptr(0)
+ for i := uintptr(0); i < size; i++ {
+ loadedValue <<= 8
+ loadedValue |= uintptr(*(*byte)(ptr))
+ ptr = unsafe.Add(ptr, 1)
+ }
+ return loadedValue
+}
+
+// storeValue is the inverse of loadValue. It stores a value to a pointer that
+// doesn't need to be aligned.
+func storeValue(ptr unsafe.Pointer, size, value uintptr) {
+ // This could perhaps be optimized using bits.ReverseBytes32 if needed.
+ value <<= (unsafe.Sizeof(uintptr(0)) - size) * 8
+ for i := uintptr(0); i < size; i++ {
+ *(*byte)(ptr) = byte(value >> ((unsafe.Sizeof(uintptr(0)) - 1) * 8))
+ ptr = unsafe.Add(ptr, 1)
+ value <<= 8
+ }
+}
+
+// maskAndShift cuts out a part of a uintptr. Note that the offset may not be 0.
+func maskAndShift(value, offset, size uintptr) uintptr {
+ mask := ^uintptr(0) >> ((unsafe.Sizeof(uintptr(0)) - size) * 8)
+ return (uintptr(value) >> ((unsafe.Sizeof(uintptr(0)) - offset - size) * 8)) & mask
+}
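For illustration (not part of the commit), the following standalone program copies the big-endian loadValue helper and shows that it reconstructs a value from its most-significant byte first, regardless of the host's native byte order. The function is renamed here because the real one is unexported inside package reflect:

package main

import (
	"fmt"
	"unsafe"
)

// loadValueBE is a standalone copy of the big-endian loadValue above,
// for illustration only.
func loadValueBE(ptr unsafe.Pointer, size uintptr) uintptr {
	loadedValue := uintptr(0)
	for i := uintptr(0); i < size; i++ {
		loadedValue <<= 8
		loadedValue |= uintptr(*(*byte)(ptr))
		ptr = unsafe.Add(ptr, 1)
	}
	return loadedValue
}

func main() {
	// On a big-endian system a uint16 with value 0x1234 is stored as the
	// bytes {0x12, 0x34}; loading those two bytes must reproduce 0x1234.
	buf := [2]byte{0x12, 0x34}
	fmt.Printf("%#x\n", loadValueBE(unsafe.Pointer(&buf[0]), 2)) // 0x1234
}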
diff --git a/src/reflect/endian-little.go b/src/reflect/endian-little.go
new file mode 100644
index 000000000..7d7e30059
--- /dev/null
+++ b/src/reflect/endian-little.go
@@ -0,0 +1,35 @@
+//go:build !mips
+
+package reflect
+
+import "unsafe"
+
+// loadValue loads a value that may or may not be word-aligned. The number of
+// bytes given in size are loaded. The biggest possible size it can load is that
+// of an uintptr.
+func loadValue(ptr unsafe.Pointer, size uintptr) uintptr {
+ loadedValue := uintptr(0)
+ shift := uintptr(0)
+ for i := uintptr(0); i < size; i++ {
+ loadedValue |= uintptr(*(*byte)(ptr)) << shift
+ shift += 8
+ ptr = unsafe.Add(ptr, 1)
+ }
+ return loadedValue
+}
+
+// storeValue is the inverse of loadValue. It stores a value to a pointer that
+// doesn't need to be aligned.
+func storeValue(ptr unsafe.Pointer, size, value uintptr) {
+ for i := uintptr(0); i < size; i++ {
+ *(*byte)(ptr) = byte(value)
+ ptr = unsafe.Add(ptr, 1)
+ value >>= 8
+ }
+}
+
+// maskAndShift cuts out a part of a uintptr. Note that the offset may not be 0.
+func maskAndShift(value, offset, size uintptr) uintptr {
+ mask := ^uintptr(0) >> ((unsafe.Sizeof(uintptr(0)) - size) * 8)
+ return (uintptr(value) >> (offset * 8)) & mask
+}
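Similarly, a standalone sketch (again not part of the commit) of the little-endian helpers shows that storeValue writes the least-significant byte first and that loadValue reverses it:

package main

import (
	"fmt"
	"unsafe"
)

// Standalone copies of the little-endian helpers above, for illustration;
// the real functions are unexported inside package reflect.
func loadValueLE(ptr unsafe.Pointer, size uintptr) uintptr {
	loadedValue := uintptr(0)
	shift := uintptr(0)
	for i := uintptr(0); i < size; i++ {
		loadedValue |= uintptr(*(*byte)(ptr)) << shift
		shift += 8
		ptr = unsafe.Add(ptr, 1)
	}
	return loadedValue
}

func storeValueLE(ptr unsafe.Pointer, size, value uintptr) {
	for i := uintptr(0); i < size; i++ {
		*(*byte)(ptr) = byte(value)
		ptr = unsafe.Add(ptr, 1)
		value >>= 8
	}
}

func main() {
	// A three-byte value round-trips through storeValue and loadValue.
	var buf [3]byte
	storeValueLE(unsafe.Pointer(&buf[0]), 3, 0xABCDEF)
	fmt.Printf("bytes: % x\n", buf)                                     // [ef cd ab] (LSB first)
	fmt.Printf("load:  %#x\n", loadValueLE(unsafe.Pointer(&buf[0]), 3)) // 0xabcdef
}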
diff --git a/src/reflect/value.go b/src/reflect/value.go
index 9e602f69d..15a900f9e 100644
--- a/src/reflect/value.go
+++ b/src/reflect/value.go
@@ -887,26 +887,6 @@ func (v Value) Index(i int) Value {
}
}
-// loadValue loads a value that may or may not be word-aligned. The number of
-// bytes given in size are loaded. The biggest possible size it can load is that
-// of an uintptr.
-func loadValue(ptr unsafe.Pointer, size uintptr) uintptr {
- loadedValue := uintptr(0)
- shift := uintptr(0)
- for i := uintptr(0); i < size; i++ {
- loadedValue |= uintptr(*(*byte)(ptr)) << shift
- shift += 8
- ptr = unsafe.Add(ptr, 1)
- }
- return loadedValue
-}
-
-// maskAndShift cuts out a part of a uintptr. Note that the offset may not be 0.
-func maskAndShift(value, offset, size uintptr) uintptr {
- mask := ^uintptr(0) >> ((unsafe.Sizeof(uintptr(0)) - size) * 8)
- return (uintptr(value) >> (offset * 8)) & mask
-}
-
func (v Value) NumMethod() int {
return v.typecode.NumMethod()
}
@@ -1088,9 +1068,7 @@ func (v Value) Set(x Value) {
if v.typecode.Kind() == Interface && x.typecode.Kind() != Interface {
// move the value of x back into the interface, if possible
if x.isIndirect() && x.typecode.Size() <= unsafe.Sizeof(uintptr(0)) {
- var value uintptr
- memcpy(unsafe.Pointer(&value), x.value, x.typecode.Size())
- x.value = unsafe.Pointer(value)
+ x.value = unsafe.Pointer(loadValue(x.value, x.typecode.Size()))
}
intf := composeInterface(unsafe.Pointer(x.typecode), x.value)
@@ -1101,12 +1079,11 @@ func (v Value) Set(x Value) {
}
size := v.typecode.Size()
- xptr := x.value
if size <= unsafe.Sizeof(uintptr(0)) && !x.isIndirect() {
- value := x.value
- xptr = unsafe.Pointer(&value)
+ storeValue(v.value, size, uintptr(x.value))
+ } else {
+ memcpy(v.value, x.value, size)
}
- memcpy(v.value, xptr, size)
}
func (v Value) SetZero() {
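The Set change above matters for values small enough to be packed directly into the interface word: on a big-endian system the meaningful bytes sit at the high end of that word, so copying the word verbatim would misplace them, which is why the new code goes through storeValue instead of memcpy. As a rough analogue of the scenario being fixed, this standalone program (using the standard library's reflect package, not TinyGo internals) sets a value that is smaller than a pointer word:

package main

import (
	"fmt"
	"reflect"
)

func main() {
	// Setting a value no larger than one pointer word exercises the kind of
	// small-value path changed above: TinyGo packs such values into the
	// interface word and must now write them out byte by byte.
	var x uint16
	v := reflect.ValueOf(&x).Elem()
	v.Set(reflect.ValueOf(uint16(0x1234)))
	fmt.Printf("%#x\n", x) // 0x1234
}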