// Copyright 2021 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package atomic

import "unsafe"

// Int32 is an atomically accessed int32 value.
//
// An Int32 must not be copied.
type Int32 struct {
	noCopy noCopy
	value  int32
}

// Load accesses and returns the value atomically.
//
//go:nosplit
func (i *Int32) Load() int32 {
	return Loadint32(&i.value)
}

// Store updates the value atomically.
//
//go:nosplit
func (i *Int32) Store(value int32) {
	Storeint32(&i.value, value)
}

// CompareAndSwap atomically compares i's value with old,
// and if they're equal, swaps i's value with new.
// It reports whether the swap ran.
//
//go:nosplit
func (i *Int32) CompareAndSwap(old, new int32) bool {
	return Casint32(&i.value, old, new)
}

// Swap replaces i's value with new, returning
// i's value before the replacement.
//
//go:nosplit
func (i *Int32) Swap(new int32) int32 {
	return Xchgint32(&i.value, new)
}

// Add adds delta to i atomically, returning
// the new updated value.
//
// This operation wraps around in the usual
// two's-complement way.
//
//go:nosplit
func (i *Int32) Add(delta int32) int32 {
	return Xaddint32(&i.value, delta)
}
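
// For illustration only: CompareAndSwap reports whether the swap ran, so the
// usual way to build read-modify-write operations beyond Add and Swap is a
// retry loop. A hypothetical helper that raises an Int32 to a new maximum
// might look like:
//
//	func storeMax(v *Int32, x int32) {
//		for {
//			old := v.Load()
//			if x <= old {
//				return // already at least x
//			}
//			if v.CompareAndSwap(old, x) {
//				return // we installed x
//			}
//			// v changed concurrently; reload and retry.
//		}
//	}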

// Int64 is an atomically accessed int64 value.
//
// 8-byte aligned on all platforms, unlike a regular int64.
//
// An Int64 must not be copied.
type Int64 struct {
	noCopy noCopy
	_      align64
	value  int64
}

// Load accesses and returns the value atomically.
//
//go:nosplit
func (i *Int64) Load() int64 {
	return Loadint64(&i.value)
}

// Store updates the value atomically.
//
//go:nosplit
func (i *Int64) Store(value int64) {
	Storeint64(&i.value, value)
}

// CompareAndSwap atomically compares i's value with old,
// and if they're equal, swaps i's value with new.
// It reports whether the swap ran.
//
//go:nosplit
func (i *Int64) CompareAndSwap(old, new int64) bool {
	return Casint64(&i.value, old, new)
}

// Swap replaces i's value with new, returning
// i's value before the replacement.
//
//go:nosplit
func (i *Int64) Swap(new int64) int64 {
	return Xchgint64(&i.value, new)
}

// Add adds delta to i atomically, returning
// the new updated value.
//
// This operation wraps around in the usual
// two's-complement way.
//
//go:nosplit
func (i *Int64) Add(delta int64) int64 {
	return Xaddint64(&i.value, delta)
}

// Uint8 is an atomically accessed uint8 value.
//
// A Uint8 must not be copied.
type Uint8 struct {
	noCopy noCopy
	value  uint8
}

// Load accesses and returns the value atomically.
//
//go:nosplit
func (u *Uint8) Load() uint8 {
	return Load8(&u.value)
}

// Store updates the value atomically.
//
//go:nosplit
func (u *Uint8) Store(value uint8) {
	Store8(&u.value, value)
}

// And takes value and performs a bit-wise
// "and" operation with the value of u, storing
// the result into u.
//
// The full process is performed atomically.
//
//go:nosplit
func (u *Uint8) And(value uint8) {
	And8(&u.value, value)
}

// Or takes value and performs a bit-wise
// "or" operation with the value of u, storing
// the result into u.
//
// The full process is performed atomically.
//
//go:nosplit
func (u *Uint8) Or(value uint8) {
	Or8(&u.value, value)
}

// Bool is an atomically accessed bool value.
//
// A Bool must not be copied.
type Bool struct {
	// Inherits noCopy from Uint8.
	u Uint8
}

// Load accesses and returns the value atomically.
//
//go:nosplit
func (b *Bool) Load() bool {
	return b.u.Load() != 0
}

// Store updates the value atomically.
//
//go:nosplit
func (b *Bool) Store(value bool) {
	s := uint8(0)
	if value {
		s = 1
	}
	b.u.Store(s)
}

// Uint32 is an atomically accessed uint32 value.
//
// A Uint32 must not be copied.
type Uint32 struct {
	noCopy noCopy
	value  uint32
}

// Load accesses and returns the value atomically.
//
//go:nosplit
func (u *Uint32) Load() uint32 {
	return Load(&u.value)
}

// LoadAcquire is a partially unsynchronized version
// of Load that relaxes ordering constraints. Other threads
// may observe operations that precede this operation to
// occur after it, but no operation that occurs after it
// on this thread can be observed to occur before it.
//
// WARNING: Use sparingly and with great care.
//
//go:nosplit
func (u *Uint32) LoadAcquire() uint32 {
	return LoadAcq(&u.value)
}

// Store updates the value atomically.
//
//go:nosplit
func (u *Uint32) Store(value uint32) {
	Store(&u.value, value)
}

// StoreRelease is a partially unsynchronized version
// of Store that relaxes ordering constraints. Other threads
// may observe operations that occur after this operation to
// precede it, but no operation that precedes it
// on this thread can be observed to occur after it.
//
// WARNING: Use sparingly and with great care.
//
//go:nosplit
func (u *Uint32) StoreRelease(value uint32) {
	StoreRel(&u.value, value)
}
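
// For illustration only: StoreRelease and LoadAcquire are intended to be used
// as a pair. A writer publishes data with a release store, and a reader that
// observes that store with an acquire load also observes everything the
// writer did before it. A hypothetical flag-publication pattern:
//
//	var ready Uint32
//	var payload int // written by the writer before ready is set
//
//	// writer
//	payload = 42
//	ready.StoreRelease(1)
//
//	// reader
//	if ready.LoadAcquire() == 1 {
//		_ = payload // guaranteed to observe the write of 42
//	}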

// CompareAndSwap atomically compares u's value with old,
// and if they're equal, swaps u's value with new.
// It reports whether the swap ran.
//
//go:nosplit
func (u *Uint32) CompareAndSwap(old, new uint32) bool {
	return Cas(&u.value, old, new)
}

// CompareAndSwapRelease is a partially unsynchronized version
// of Cas that relaxes ordering constraints. Other threads
// may observe operations that occur after this operation to
// precede it, but no operation that precedes it
// on this thread can be observed to occur after it.
// It reports whether the swap ran.
//
// WARNING: Use sparingly and with great care.
//
//go:nosplit
func (u *Uint32) CompareAndSwapRelease(old, new uint32) bool {
	return CasRel(&u.value, old, new)
}

// Swap replaces u's value with new, returning
// u's value before the replacement.
//
//go:nosplit
func (u *Uint32) Swap(value uint32) uint32 {
	return Xchg(&u.value, value)
}

// And takes value and performs a bit-wise
// "and" operation with the value of u, storing
// the result into u.
//
// The full process is performed atomically.
//
//go:nosplit
func (u *Uint32) And(value uint32) {
	And(&u.value, value)
}

// Or takes value and performs a bit-wise
// "or" operation with the value of u, storing
// the result into u.
//
// The full process is performed atomically.
//
//go:nosplit
func (u *Uint32) Or(value uint32) {
	Or(&u.value, value)
}

// Add adds delta to u atomically, returning
// the new updated value.
//
// This operation wraps around in the usual
// two's-complement way.
//
//go:nosplit
func (u *Uint32) Add(delta int32) uint32 {
	return Xadd(&u.value, delta)
}
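
// For illustration only: because Add takes a signed delta and wraps in the
// usual two's-complement way, it can decrement an unsigned value as well as
// increment it. For example:
//
//	var n Uint32
//	n.Add(1)  // n is now 1
//	n.Add(-1) // n is 0 again; one more Add(-1) would wrap to 1<<32 - 1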

// Uint64 is an atomically accessed uint64 value.
//
// 8-byte aligned on all platforms, unlike a regular uint64.
//
// A Uint64 must not be copied.
type Uint64 struct {
	noCopy noCopy
	_      align64
	value  uint64
}

// Load accesses and returns the value atomically.
//
//go:nosplit
func (u *Uint64) Load() uint64 {
	return Load64(&u.value)
}

// Store updates the value atomically.
//
//go:nosplit
func (u *Uint64) Store(value uint64) {
	Store64(&u.value, value)
}

// CompareAndSwap atomically compares u's value with old,
// and if they're equal, swaps u's value with new.
// It reports whether the swap ran.
//
//go:nosplit
func (u *Uint64) CompareAndSwap(old, new uint64) bool {
	return Cas64(&u.value, old, new)
}

// Swap replaces u's value with new, returning
// u's value before the replacement.
//
//go:nosplit
func (u *Uint64) Swap(value uint64) uint64 {
	return Xchg64(&u.value, value)
}

// Add adds delta to u atomically, returning
// the new updated value.
//
// This operation wraps around in the usual
// two's-complement way.
//
//go:nosplit
func (u *Uint64) Add(delta int64) uint64 {
	return Xadd64(&u.value, delta)
}

// Uintptr is an atomically accessed uintptr value.
//
// A Uintptr must not be copied.
type Uintptr struct {
	noCopy noCopy
	value  uintptr
}

// Load accesses and returns the value atomically.
//
//go:nosplit
func (u *Uintptr) Load() uintptr {
	return Loaduintptr(&u.value)
}

// LoadAcquire is a partially unsynchronized version
// of Load that relaxes ordering constraints. Other threads
// may observe operations that precede this operation to
// occur after it, but no operation that occurs after it
// on this thread can be observed to occur before it.
//
// WARNING: Use sparingly and with great care.
//
//go:nosplit
func (u *Uintptr) LoadAcquire() uintptr {
	return LoadAcquintptr(&u.value)
}

// Store updates the value atomically.
//
//go:nosplit
func (u *Uintptr) Store(value uintptr) {
	Storeuintptr(&u.value, value)
}

// StoreRelease is a partially unsynchronized version
// of Store that relaxes ordering constraints. Other threads
// may observe operations that occur after this operation to
// precede it, but no operation that precedes it
// on this thread can be observed to occur after it.
//
// WARNING: Use sparingly and with great care.
//
//go:nosplit
func (u *Uintptr) StoreRelease(value uintptr) {
	StoreReluintptr(&u.value, value)
}

// CompareAndSwap atomically compares u's value with old,
// and if they're equal, swaps u's value with new.
// It reports whether the swap ran.
//
//go:nosplit
func (u *Uintptr) CompareAndSwap(old, new uintptr) bool {
	return Casuintptr(&u.value, old, new)
}

// Swap replaces u's value with new, returning
// u's value before the replacement.
//
//go:nosplit
func (u *Uintptr) Swap(value uintptr) uintptr {
	return Xchguintptr(&u.value, value)
}

// Add adds delta to u atomically, returning
// the new updated value.
//
// This operation wraps around in the usual
// two's-complement way.
//
//go:nosplit
func (u *Uintptr) Add(delta uintptr) uintptr {
	return Xadduintptr(&u.value, delta)
}

// Float64 is an atomically accessed float64 value.
//
// 8-byte aligned on all platforms, unlike a regular float64.
//
// A Float64 must not be copied.
type Float64 struct {
	// Inherits noCopy and align64 from Uint64.
	u Uint64
}

// Load accesses and returns the value atomically.
//
//go:nosplit
func (f *Float64) Load() float64 {
	r := f.u.Load()
	return *(*float64)(unsafe.Pointer(&r))
}

// Store updates the value atomically.
//
//go:nosplit
func (f *Float64) Store(value float64) {
	f.u.Store(*(*uint64)(unsafe.Pointer(&value)))
}
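
// For illustration only: Float64 keeps the raw IEEE 754 bit pattern in a
// Uint64, so Load and Store behave like a math.Float64bits /
// math.Float64frombits round trip performed atomically:
//
//	var f Float64
//	f.Store(3.5)        // stores the bit pattern of 3.5
//	_ = f.Load() == 3.5 // true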

// UnsafePointer is an atomically accessed unsafe.Pointer value.
//
// Note that because of the atomicity guarantees, stores to values
// of this type never trigger a write barrier, and the relevant
// methods are suffixed with "NoWB" to indicate that explicitly.
// As a result, this type should be used carefully, and sparingly,
// mostly with values that do not live in the Go heap anyway.
//
// An UnsafePointer must not be copied.
type UnsafePointer struct {
	noCopy noCopy
	value  unsafe.Pointer
}

// Load accesses and returns the value atomically.
//
//go:nosplit
func (u *UnsafePointer) Load() unsafe.Pointer {
	return Loadp(unsafe.Pointer(&u.value))
}

// StoreNoWB updates the value atomically.
//
// WARNING: As the name implies this operation does *not*
// perform a write barrier on value, and so this operation may
// hide pointers from the GC. Use with care and sparingly.
// It is safe to use with values not found in the Go heap.
// Prefer Store instead.
//
//go:nosplit
func (u *UnsafePointer) StoreNoWB(value unsafe.Pointer) {
	StorepNoWB(unsafe.Pointer(&u.value), value)
}

// Store updates the value atomically.
func (u *UnsafePointer) Store(value unsafe.Pointer) {
	storePointer(&u.value, value)
}

// provided by runtime
//
//go:linkname storePointer
func storePointer(ptr *unsafe.Pointer, new unsafe.Pointer)

// CompareAndSwapNoWB atomically (with respect to other methods)
// compares u's value with old, and if they're equal,
// swaps u's value with new.
// It reports whether the swap ran.
//
// WARNING: As the name implies this operation does *not*
// perform a write barrier on value, and so this operation may
// hide pointers from the GC. Use with care and sparingly.
// It is safe to use with values not found in the Go heap.
// Prefer CompareAndSwap instead.
//
//go:nosplit
func (u *UnsafePointer) CompareAndSwapNoWB(old, new unsafe.Pointer) bool {
	return Casp1(&u.value, old, new)
}

// CompareAndSwap atomically compares u's value with old,
// and if they're equal, swaps u's value with new.
// It reports whether the swap ran.
func (u *UnsafePointer) CompareAndSwap(old, new unsafe.Pointer) bool {
	return casPointer(&u.value, old, new)
}

func casPointer(ptr *unsafe.Pointer, old, new unsafe.Pointer) bool

// Pointer is an atomic pointer of type *T.
type Pointer[T any] struct {
	u UnsafePointer
}

// Load accesses and returns the value atomically.
//
//go:nosplit
func (p *Pointer[T]) Load() *T {
	return (*T)(p.u.Load())
}

// StoreNoWB updates the value atomically.
//
// WARNING: As the name implies this operation does *not*
// perform a write barrier on value, and so this operation may
// hide pointers from the GC. Use with care and sparingly.
// It is safe to use with values not found in the Go heap.
// Prefer Store instead.
//
//go:nosplit
func (p *Pointer[T]) StoreNoWB(value *T) {
	p.u.StoreNoWB(unsafe.Pointer(value))
}

// Store updates the value atomically.
//
//go:nosplit
func (p *Pointer[T]) Store(value *T) {
	p.u.Store(unsafe.Pointer(value))
}

// CompareAndSwapNoWB atomically (with respect to other methods)
// compares u's value with old, and if they're equal,
// swaps u's value with new.
// It reports whether the swap ran.
//
// WARNING: As the name implies this operation does *not*
// perform a write barrier on value, and so this operation may
// hide pointers from the GC. Use with care and sparingly.
// It is safe to use with values not found in the Go heap.
// Prefer CompareAndSwap instead.
//
//go:nosplit
func (p *Pointer[T]) CompareAndSwapNoWB(old, new *T) bool {
	return p.u.CompareAndSwapNoWB(unsafe.Pointer(old), unsafe.Pointer(new))
}

// CompareAndSwap atomically (with respect to other methods)
// compares u's value with old, and if they're equal,
// swaps u's value with new.
// It reports whether the swap ran.
func (p *Pointer[T]) CompareAndSwap(old, new *T) bool {
	return p.u.CompareAndSwap(unsafe.Pointer(old), unsafe.Pointer(new))
}

// noCopy may be embedded into structs which must not be copied
// after the first use.
//
// See https://golang.org/issues/8005#issuecomment-190753527
// for details.
type noCopy struct{}

// Lock is a no-op used by -copylocks checker from `go vet`.
func (*noCopy) Lock()   {}
func (*noCopy) Unlock() {}

// align64 may be added to structs that must be 64-bit aligned.
// This struct is recognized by a special case in the compiler
// and will not work if copied to any other package.
type align64 struct{}
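
// For illustration only: Pointer[T] is the type-safe way to hold an atomic
// pointer, with Store and CompareAndSwap going through the runtime-provided
// storePointer and casPointer helpers above so the GC sees the stored value.
// A hypothetical lock-free list push might look like:
//
//	type node struct {
//		next *node
//	}
//
//	func push(head *Pointer[node], n *node) {
//		for {
//			old := head.Load()
//			n.next = old
//			if head.CompareAndSwap(old, n) {
//				return
//			}
//			// Lost a race with another pusher; retry against the new head.
//		}
//	}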