//
// This file is a thin wrapper around sync/atomic that exists to deal with
// 64-bit alignment issues. It is built for 64-bit platforms (everything but
// 386, per the constraint below), where the required alignment already holds,
// so the wrapper is effectively a no-op. See the illustrative sketch after
// the constructors below.
//

//go:build !386
// +build !386

package platform

import (
	orig "sync/atomic"
	"unsafe"
)

// AlignedInt64 exists so that a 386-specific build can guarantee 8-byte
// alignment for atomically accessed 64-bit values; in this build it is a
// plain int64.
type AlignedInt64 int64

// AlignedUint64 is the unsigned counterpart of AlignedInt64.
type AlignedUint64 uint64

func NewAlignedInt64(v int64) AlignedInt64 {
	return AlignedInt64(v)
}

func NewAlignedUint64(v uint64) AlignedUint64 {
	return AlignedUint64(v)
}
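
// The sketch below is illustrative only and not part of the original wrapper:
// it shows how a caller might route a struct's 64-bit atomic updates through
// the aligned types and the wrappers in this file so that the same code stays
// correct on a 386 build, which presumably enforces the 8-byte alignment that
// sync/atomic requires there. exampleHitCounter, its fields, and its methods
// are hypothetical names.
type exampleHitCounter struct {
	enabled uint32        // 32-bit field that could otherwise leave a raw uint64 misaligned on 386
	hits    AlignedUint64 // aligned wrapper type; safe target for the 64-bit atomics below
}

// hit atomically increments the counter and returns the new value.
func (c *exampleHitCounter) hit() uint64 {
	return AddUint64(&c.hits, 1)
}

// snapshot atomically reads the current count.
func (c *exampleHitCounter) snapshot() uint64 {
	return LoadUint64(&c.hits)
}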

func SwapInt32(addr *int32, new int32) int32 {
	return orig.SwapInt32(addr, new)
}

func SwapInt64(addr *AlignedInt64, new int64) int64 {
	return orig.SwapInt64((*int64)(addr), new)
}

func SwapUint32(addr *uint32, new uint32) uint32 {
	return orig.SwapUint32(addr, new)
}

func SwapUint64(addr *AlignedUint64, new uint64) uint64 {
	return orig.SwapUint64((*uint64)(addr), new)
}

func SwapUintptr(addr *uintptr, new uintptr) uintptr {
	return orig.SwapUintptr(addr, new)
}

func SwapPointer(addr *unsafe.Pointer, new unsafe.Pointer) unsafe.Pointer {
	return orig.SwapPointer(addr, new)
}

func CompareAndSwapInt32(addr *int32, old, new int32) bool {
	return orig.CompareAndSwapInt32(addr, old, new)
}

func CompareAndSwapInt64(addr *AlignedInt64, old, new int64) bool {
	return orig.CompareAndSwapInt64((*int64)(addr), old, new)
}

func CompareAndSwapUint32(addr *uint32, old, new uint32) bool {
	return orig.CompareAndSwapUint32(addr, old, new)
}

func CompareAndSwapUint64(addr *AlignedUint64, old, new uint64) bool {
	return orig.CompareAndSwapUint64((*uint64)(addr), old, new)
}

func CompareAndSwapUintptr(addr *uintptr, old, new uintptr) bool {
	return orig.CompareAndSwapUintptr(addr, old, new)
}

func CompareAndSwapPointer(addr *unsafe.Pointer, old, new unsafe.Pointer) bool {
	return orig.CompareAndSwapPointer(addr, old, new)
}

func AddInt32(addr *int32, delta int32) int32 {
	return orig.AddInt32(addr, delta)
}

func AddUint32(addr *uint32, delta uint32) uint32 {
	return orig.AddUint32(addr, delta)
}

func AddInt64(addr *AlignedInt64, delta int64) int64 {
	return orig.AddInt64((*int64)(addr), delta)
}

func AddUint64(addr *AlignedUint64, delta uint64) uint64 {
	return orig.AddUint64((*uint64)(addr), delta)
}

func AddUintptr(addr *uintptr, delta uintptr) uintptr {
	return orig.AddUintptr(addr, delta)
}

func LoadInt32(addr *int32) int32 {
	return orig.LoadInt32(addr)
}

func LoadInt64(addr *AlignedInt64) int64 {
	return orig.LoadInt64((*int64)(addr))
}

func LoadUint32(addr *uint32) uint32 {
	return orig.LoadUint32(addr)
}

func LoadUint64(addr *AlignedUint64) uint64 {
	return orig.LoadUint64((*uint64)(addr))
}

func LoadUintptr(addr *uintptr) uintptr {
	return orig.LoadUintptr(addr)
}

func LoadPointer(addr *unsafe.Pointer) unsafe.Pointer {
	return orig.LoadPointer(addr)
}

func StoreInt32(addr *int32, val int32) {
	orig.StoreInt32(addr, val)
}

func StoreInt64(addr *AlignedInt64, val int64) {
	orig.StoreInt64((*int64)(addr), val)
}

func StoreUint32(addr *uint32, val uint32) {
	orig.StoreUint32(addr, val)
}

func StoreUint64(addr *AlignedUint64, val uint64) {
	orig.StoreUint64((*uint64)(addr), val)
}

func StoreUintptr(addr *uintptr, val uintptr) {
	orig.StoreUintptr(addr, val)
}

func StorePointer(addr *unsafe.Pointer, val unsafe.Pointer) {
	orig.StorePointer(addr, val)
}