//
// This is a thin wrapper around sync/atomic to help with alignment issues:
// on 386, 64-bit words accessed atomically must be 64-bit aligned, and that
// alignment is the caller's responsibility.
//

//go:build 386
// +build 386

package platform

import (
	orig "sync/atomic"
	"unsafe"
)

// AlignedInt64 wraps an int64 meant for atomic access. Per the sync/atomic
// documentation, on 386 the caller must arrange 64-bit alignment of 64-bit
// words; the first word in an allocated struct, array, or slice can be relied
// upon to be 64-bit aligned, so place this wrapper as the first field of any
// enclosing struct.
type AlignedInt64 struct{ data int64 }

// AlignedUint64 wraps a uint64 meant for atomic access. The same placement
// rules as for AlignedInt64 apply.
type AlignedUint64 struct{ data uint64 }

// NewAlignedInt64 returns an AlignedInt64 initialized to v.
func NewAlignedInt64(v int64) AlignedInt64 {
	return AlignedInt64{data: v}
}

// NewAlignedUint64 returns an AlignedUint64 initialized to v.
func NewAlignedUint64(v uint64) AlignedUint64 {
	return AlignedUint64{data: v}
}

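// Usage sketch (illustrative only; counter, hits, and inc are hypothetical
// names, not part of this package): a consumer keeps the aligned wrapper as
// the first field of an allocated struct, which sync/atomic documents as
// 64-bit aligned, and then goes through the wrappers below:
//
//	type counter struct {
//		hits platform.AlignedUint64 // first field: relied-upon 64-bit alignment
//		name string
//	}
//
//	func (c *counter) inc() uint64 {
//		return platform.AddUint64(&c.hits, 1)
//	}
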
// Swap wrappers; the 64-bit variants take the aligned wrapper types.

func SwapInt32(addr *int32, new int32) int32 {
	return orig.SwapInt32(addr, new)
}

func SwapInt64(addr *AlignedInt64, new int64) int64 {
	return orig.SwapInt64(&addr.data, new)
}

func SwapUint32(addr *uint32, new uint32) uint32 {
	return orig.SwapUint32(addr, new)
}

func SwapUint64(addr *AlignedUint64, new uint64) uint64 {
	return orig.SwapUint64(&addr.data, new)
}

func SwapUintptr(addr *uintptr, new uintptr) uintptr {
	return orig.SwapUintptr(addr, new)
}

func SwapPointer(addr *unsafe.Pointer, new unsafe.Pointer) unsafe.Pointer {
	return orig.SwapPointer(addr, new)
}

// Compare-and-swap wrappers.

func CompareAndSwapInt32(addr *int32, old, new int32) bool {
	return orig.CompareAndSwapInt32(addr, old, new)
}

func CompareAndSwapInt64(addr *AlignedInt64, old, new int64) bool {
	return orig.CompareAndSwapInt64(&addr.data, old, new)
}

func CompareAndSwapUint32(addr *uint32, old, new uint32) bool {
	return orig.CompareAndSwapUint32(addr, old, new)
}

func CompareAndSwapUint64(addr *AlignedUint64, old, new uint64) bool {
	return orig.CompareAndSwapUint64(&addr.data, old, new)
}

func CompareAndSwapUintptr(addr *uintptr, old, new uintptr) bool {
	return orig.CompareAndSwapUintptr(addr, old, new)
}

func CompareAndSwapPointer(addr *unsafe.Pointer, old, new unsafe.Pointer) bool {
	return orig.CompareAndSwapPointer(addr, old, new)
}

// Add wrappers.

func AddInt32(addr *int32, delta int32) int32 {
	return orig.AddInt32(addr, delta)
}

func AddUint32(addr *uint32, delta uint32) uint32 {
	return orig.AddUint32(addr, delta)
}

func AddInt64(addr *AlignedInt64, delta int64) int64 {
	return orig.AddInt64(&addr.data, delta)
}

func AddUint64(addr *AlignedUint64, delta uint64) uint64 {
	return orig.AddUint64(&addr.data, delta)
}

func AddUintptr(addr *uintptr, delta uintptr) uintptr {
	return orig.AddUintptr(addr, delta)
}

// Load wrappers.

func LoadInt32(addr *int32) int32 {
	return orig.LoadInt32(addr)
}

func LoadInt64(addr *AlignedInt64) int64 {
	return orig.LoadInt64(&addr.data)
}

func LoadUint32(addr *uint32) uint32 {
	return orig.LoadUint32(addr)
}

func LoadUint64(addr *AlignedUint64) uint64 {
	return orig.LoadUint64(&addr.data)
}

func LoadUintptr(addr *uintptr) uintptr {
	return orig.LoadUintptr(addr)
}

func LoadPointer(addr *unsafe.Pointer) unsafe.Pointer {
	return orig.LoadPointer(addr)
}

// Store wrappers.

func StoreInt32(addr *int32, val int32) {
	orig.StoreInt32(addr, val)
}

func StoreInt64(addr *AlignedInt64, val int64) {
	orig.StoreInt64(&addr.data, val)
}

func StoreUint32(addr *uint32, val uint32) {
	orig.StoreUint32(addr, val)
}

func StoreUint64(addr *AlignedUint64, val uint64) {
	orig.StoreUint64(&addr.data, val)
}

func StoreUintptr(addr *uintptr, val uintptr) {
	orig.StoreUintptr(addr, val)
}

func StorePointer(addr *unsafe.Pointer, val unsafe.Pointer) {
	orig.StorePointer(addr, val)
}