[chore] update go-sched pkg (#3357)
* update go-sched to v1.2.4, which removes some now-unused dependencies
* whoops, remove test output
This commit is contained in:
parent 82b9515a9d
commit f3e2d36d64
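For orientation, a dependency bump like this is normally produced with the standard Go module tooling; a plausible sequence (the exact invocation used for this commit is not recorded here) is:

	go get codeberg.org/gruf/go-sched@v1.2.4
	go mod tidy
	go mod vendor

`go mod tidy` is what drops the transitively unused go-atomics and go-bitutil modules, and `go mod vendor` refreshes vendor/ and vendor/modules.txt, which accounts for the bulk of the diff below.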
go.mod — 6 changed lines

@@ -19,8 +19,8 @@ require (
 	codeberg.org/gruf/go-mempool v0.0.0-20240507125005-cef10d64a760
 	codeberg.org/gruf/go-mimetypes v1.2.0
 	codeberg.org/gruf/go-mutexes v1.5.1
-	codeberg.org/gruf/go-runners v1.6.2
-	codeberg.org/gruf/go-sched v1.2.3
+	codeberg.org/gruf/go-runners v1.6.3
+	codeberg.org/gruf/go-sched v1.2.4
 	codeberg.org/gruf/go-storage v0.2.0
 	codeberg.org/gruf/go-structr v0.8.9
 	codeberg.org/superseriousbusiness/exif-terminator v0.9.0
@@ -85,8 +85,6 @@ require (
 )
 
 require (
-	codeberg.org/gruf/go-atomics v1.1.0 // indirect
-	codeberg.org/gruf/go-bitutil v1.1.0 // indirect
 	codeberg.org/gruf/go-fastpath/v2 v2.0.0 // indirect
 	codeberg.org/gruf/go-mangler v1.4.1 // indirect
 	codeberg.org/gruf/go-maps v1.0.3 // indirect
go.sum — 14 changed lines

@@ -30,11 +30,6 @@ cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0Zeo
 cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk=
 cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs=
 cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0=
-codeberg.org/gruf/go-atomics v1.1.0 h1:ni9QXYoRUFYQMXE3akWaUb1wMcPBDc05Md6Rgml7W58=
-codeberg.org/gruf/go-atomics v1.1.0/go.mod h1:a/4/y/LgvjxjQVnpoy1VVkOSzLS1W9i1g4SJ0nflAa4=
-codeberg.org/gruf/go-bitutil v1.0.0/go.mod h1:sb8IjlDnjVTz8zPK/8lmHesKxY0Yb3iqHWjUM/SkphA=
-codeberg.org/gruf/go-bitutil v1.1.0 h1:U1Q+A1mtnPk+npqYrlRBc9ar2C5hYiBd17l1Wrp2Bt8=
-codeberg.org/gruf/go-bitutil v1.1.0/go.mod h1:rGibFevYTQfYKcPv0Df5KpG8n5xC3AfD4d/UgYeoNy0=
 codeberg.org/gruf/go-bytes v1.0.2 h1:malqE42Ni+h1nnYWBUAJaDDtEzF4aeN4uPN8DfMNNvo=
 codeberg.org/gruf/go-bytes v1.0.2/go.mod h1:1v/ibfaosfXSZtRdW2rWaVrDXMc9E3bsi/M9Ekx39cg=
 codeberg.org/gruf/go-bytesize v1.0.3 h1:Tz8tCxhPLeyM5VryuBNjUHgKmLj4Bx9RbPaUSA3qg6g=
@@ -45,7 +40,6 @@ codeberg.org/gruf/go-cache/v3 v3.5.7 h1:5hut49a8Wp3hdwrCEJYj6pHY2aRR1hyTmkK4+wHV
 codeberg.org/gruf/go-cache/v3 v3.5.7/go.mod h1:Thahfuf3PgHSv2+1zHpvhRdX97tx1WXurVNGWpZucAM=
 codeberg.org/gruf/go-debug v1.3.0 h1:PIRxQiWUFKtGOGZFdZ3Y0pqyfI0Xr87j224IYe2snZs=
 codeberg.org/gruf/go-debug v1.3.0/go.mod h1:N+vSy9uJBQgpQcJUqjctvqFz7tBHJf+S/PIjLILzpLg=
-codeberg.org/gruf/go-errors/v2 v2.0.0/go.mod h1:ZRhbdhvgoUA3Yw6e56kd9Ox984RrvbEFC2pOXyHDJP4=
 codeberg.org/gruf/go-errors/v2 v2.3.2 h1:8ItWaOMfhDaqrJK1Pw8MO0Nu+o/tVcQtR5cJ58Vc4zo=
 codeberg.org/gruf/go-errors/v2 v2.3.2/go.mod h1:LfzD9nkAAJpEDbkUqOZQ2jdaQ8VrK0pnR36zLOMFq6Y=
 codeberg.org/gruf/go-fastcopy v1.1.3 h1:Jo9VTQjI6KYimlw25PPc7YLA3Xm+XMQhaHwKnM7xD1g=
@@ -72,10 +66,10 @@ codeberg.org/gruf/go-mimetypes v1.2.0 h1:3rZGXY/SkNYbamiddWXs2gETXIBkGIeWYnbWpp2
 codeberg.org/gruf/go-mimetypes v1.2.0/go.mod h1:YiUWRj/nAdJQc+UFRvcsL6xXZsbc6b6Ic739ycEO8Yg=
 codeberg.org/gruf/go-mutexes v1.5.1 h1:xICU0WXhWr6wf+Iror4eE3xT+xnXNPrO6o77D/G6QuY=
 codeberg.org/gruf/go-mutexes v1.5.1/go.mod h1:rPEqQ/y6CmGITaZ3GPTMQVsoZAOzbsAHyIaLsJcOqVE=
-codeberg.org/gruf/go-runners v1.6.2 h1:oQef9niahfHu/wch14xNxlRMP8i+ABXH1Cb9PzZ4oYo=
-codeberg.org/gruf/go-runners v1.6.2/go.mod h1:Tq5PrZ/m/rBXbLZz0u5if+yP3nG5Sf6S8O/GnyEePeQ=
-codeberg.org/gruf/go-sched v1.2.3 h1:H5ViDxxzOBR3uIyGBCf0eH8b1L8wMybOXcdtUUTXZHk=
-codeberg.org/gruf/go-sched v1.2.3/go.mod h1:vT9uB6KWFIIwnG9vcPY2a0alYNoqdL1mSzRM8I+PK7A=
+codeberg.org/gruf/go-runners v1.6.3 h1:To/AX7eTrWuXrTkA3RA01YTP5zha1VZ68LQ+0D4RY7E=
+codeberg.org/gruf/go-runners v1.6.3/go.mod h1:oXAaUmG2VxoKttpCqZGv5nQBeSvZSR2BzIk7h1yTRlU=
+codeberg.org/gruf/go-sched v1.2.4 h1:ddBB9o0D/2oU8NbQ0ldN5aWxogpXPRBATWi58+p++Hw=
+codeberg.org/gruf/go-sched v1.2.4/go.mod h1:wad6l+OcYGWMA2TzNLMmLObsrbBDxdJfEy5WvTgBjNk=
 codeberg.org/gruf/go-storage v0.2.0 h1:mKj3Lx6AavEkuXXtxqPhdq+akW9YwrnP16yQBF7K5ZI=
 codeberg.org/gruf/go-storage v0.2.0/go.mod h1:o3GzMDE5QNUaRnm/daUzFqvuAaC4utlgXDXYO79sWKU=
 codeberg.org/gruf/go-structr v0.8.9 h1:OyiSspWYCeJOm356fFPd+bDRumPrard2VAUXAPqZiJ0=
vendor/codeberg.org/gruf/go-atomics/LICENSE (generated, vendored) — deleted, 9 lines

@@ -1,9 +0,0 @@
-MIT License
-
-Copyright (c) 2022 gruf
-
-Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
vendor/codeberg.org/gruf/go-atomics/README.md (generated, vendored) — deleted, 3 lines

@@ -1,3 +0,0 @@
-# go-atomics
-
-This library provides a variety of types for atomic operations on common Go types.
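The deleted package's API is small; as a point of reference, here is a minimal usage sketch based on the deleted sources reproduced below (illustrative only — not a call site taken from GoToSocial):

	package main

	import (
		"fmt"
		"time"

		"codeberg.org/gruf/go-atomics"
	)

	func main() {
		// Time wraps an unsafe.Pointer to a time.Time and guards it with sync/atomic.
		ts := atomics.NewTime()
		ts.Store(time.Now())
		fmt.Println(ts.Load())

		// CAS compares against the current value (using time.Time.Equal) before swapping.
		ok := ts.CAS(ts.Load(), time.Time{})
		fmt.Println("swapped back to zero time:", ok)
	}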
vendor/codeberg.org/gruf/go-atomics/atomic.tpl (generated, vendored) — deleted, 57 lines

@@ -1,57 +0,0 @@
-package atomics
-
-import (
-	"sync/atomic"
-	"unsafe"
-)
-
-// {{ .Name }} provides user-friendly means of performing atomic operations on {{ .Type }} types.
-type {{ .Name }} struct{ ptr unsafe.Pointer }
-
-// New{{ .Name }} will return a new {{ .Name }} instance initialized with zero value.
-func New{{ .Name }}() *{{ .Name }} {
-	var v {{ .Type }}
-	return &{{ .Name }}{
-		ptr: unsafe.Pointer(&v),
-	}
-}
-
-// Store will atomically store {{ .Type }} value in address contained within v.
-func (v *{{ .Name }}) Store(val {{ .Type }}) {
-	atomic.StorePointer(&v.ptr, unsafe.Pointer(&val))
-}
-
-// Load will atomically load {{ .Type }} value at address contained within v.
-func (v *{{ .Name }}) Load() {{ .Type }} {
-	return *(*{{ .Type }})(atomic.LoadPointer(&v.ptr))
-}
-
-// CAS performs a compare-and-swap for a(n) {{ .Type }} value at address contained within v.
-func (v *{{ .Name }}) CAS(cmp, swp {{ .Type }}) bool {
-	for {
-		// Load current value at address
-		ptr := atomic.LoadPointer(&v.ptr)
-		cur := *(*{{ .Type }})(ptr)
-
-		// Perform comparison against current
-		if !({{ call .Compare "cur" "cmp" }}) {
-			return false
-		}
-
-		// Attempt to replace pointer
-		if atomic.CompareAndSwapPointer(
-			&v.ptr,
-			ptr,
-			unsafe.Pointer(&swp),
-		) {
-			return true
-		}
-	}
-}
-
-// Swap atomically stores new {{ .Type }} value into address contained within v, and returns previous value.
-func (v *{{ .Name }}) Swap(swp {{ .Type }}) {{ .Type }} {
-	ptr := unsafe.Pointer(&swp)
-	ptr = atomic.SwapPointer(&v.ptr, ptr)
-	return *(*{{ .Type }})(ptr)
-}
vendor/codeberg.org/gruf/go-atomics/atomic_test.tpl (generated, vendored) — deleted, 60 lines

@@ -1,60 +0,0 @@
-package atomics_test
-
-import (
-	"atomic"
-	"unsafe"
-	"testing"
-
-	"codeberg.org/gruf/go-atomics"
-)
-
-func Test{{ .Name }}StoreLoad(t *testing.T) {
-	for _, test := range {{ .Name }}Tests {
-		val := atomics.New{{ .Name }}()
-
-		val.Store(test.V1)
-
-		if !({{ call .Compare "val.Load()" "test.V1" }}) {
-			t.Fatalf("failed testing .Store and .Load: expect=%v actual=%v", val.Load(), test.V1)
-		}
-
-		val.Store(test.V2)
-
-		if !({{ call .Compare "val.Load()" "test.V2" }}) {
-			t.Fatalf("failed testing .Store and .Load: expect=%v actual=%v", val.Load(), test.V2)
-		}
-	}
-}
-
-func Test{{ .Name }}CAS(t *testing.T) {
-	for _, test := range {{ .Name }}Tests {
-		val := atomics.New{{ .Name }}()
-
-		val.Store(test.V1)
-
-		if val.CAS(test.V2, test.V1) {
-			t.Fatalf("failed testing negative .CAS: test=%+v state=%v", test, val.Load())
-		}
-
-		if !val.CAS(test.V1, test.V2) {
-			t.Fatalf("failed testing positive .CAS: test=%+v state=%v", test, val.Load())
-		}
-	}
-}
-
-func Test{{ .Name }}Swap(t *testing.T) {
-	for _, test := range {{ .Name }}Tests {
-		val := atomics.New{{ .Name }}()
-
-		val.Store(test.V1)
-
-		if !({{ call .Compare "val.Swap(test.V2)" "test.V1" }}) {
-			t.Fatal("failed testing .Swap")
-		}
-
-		if !({{ call .Compare "val.Swap(test.V1)" "test.V2" }}) {
-			t.Fatal("failed testing .Swap")
-		}
-	}
-}
vendor/codeberg.org/gruf/go-atomics/bool.go (generated, vendored) — deleted, 47 lines

@@ -1,47 +0,0 @@
-package atomics
-
-import "sync/atomic"
-
-// Bool provides user-friendly means of performing atomic operations on bool types.
-type Bool uint32
-
-// NewBool will return a new Bool instance initialized with zero value.
-func NewBool() *Bool {
-	return new(Bool)
-}
-
-// Store will atomically store bool value in address contained within i.
-func (b *Bool) Store(val bool) {
-	atomic.StoreUint32((*uint32)(b), fromBool(val))
-}
-
-// Load will atomically load bool value at address contained within i.
-func (b *Bool) Load() bool {
-	return toBool(atomic.LoadUint32((*uint32)(b)))
-}
-
-// CAS performs a compare-and-swap for a(n) bool value at address contained within i.
-func (b *Bool) CAS(cmp, swp bool) bool {
-	return atomic.CompareAndSwapUint32((*uint32)(b), fromBool(cmp), fromBool(swp))
-}
-
-// Swap atomically stores new bool value into address contained within i, and returns previous value.
-func (b *Bool) Swap(swp bool) bool {
-	return toBool(atomic.SwapUint32((*uint32)(b), fromBool(swp)))
-}
-
-// toBool converts uint32 value to bool.
-func toBool(u uint32) bool {
-	if u == 0 {
-		return false
-	}
-	return true
-}
-
-// fromBool converts from bool to uint32 value.
-func fromBool(b bool) uint32 {
-	if b {
-		return 1
-	}
-	return 0
-}
vendor/codeberg.org/gruf/go-atomics/bytes.go (generated, vendored) — deleted, 57 lines

@@ -1,57 +0,0 @@
-package atomics
-
-import (
-	"sync/atomic"
-	"unsafe"
-)
-
-// Bytes provides user-friendly means of performing atomic operations on []byte types.
-type Bytes struct{ ptr unsafe.Pointer }
-
-// NewBytes will return a new Bytes instance initialized with zero value.
-func NewBytes() *Bytes {
-	var v []byte
-	return &Bytes{
-		ptr: unsafe.Pointer(&v),
-	}
-}
-
-// Store will atomically store []byte value in address contained within v.
-func (v *Bytes) Store(val []byte) {
-	atomic.StorePointer(&v.ptr, unsafe.Pointer(&val))
-}
-
-// Load will atomically load []byte value at address contained within v.
-func (v *Bytes) Load() []byte {
-	return *(*[]byte)(atomic.LoadPointer(&v.ptr))
-}
-
-// CAS performs a compare-and-swap for a(n) []byte value at address contained within v.
-func (v *Bytes) CAS(cmp, swp []byte) bool {
-	for {
-		// Load current value at address
-		ptr := atomic.LoadPointer(&v.ptr)
-		cur := *(*[]byte)(ptr)
-
-		// Perform comparison against current
-		if !(string(cur) == string(cmp)) {
-			return false
-		}
-
-		// Attempt to replace pointer
-		if atomic.CompareAndSwapPointer(
-			&v.ptr,
-			ptr,
-			unsafe.Pointer(&swp),
-		) {
-			return true
-		}
-	}
-}
-
-// Swap atomically stores new []byte value into address contained within v, and returns previous value.
-func (v *Bytes) Swap(swp []byte) []byte {
-	ptr := unsafe.Pointer(&swp)
-	ptr = atomic.SwapPointer(&v.ptr, ptr)
-	return *(*[]byte)(ptr)
-}
vendor/codeberg.org/gruf/go-atomics/error.go (generated, vendored) — deleted, 57 lines

@@ -1,57 +0,0 @@
-package atomics
-
-import (
-	"sync/atomic"
-	"unsafe"
-)
-
-// Error provides user-friendly means of performing atomic operations on error types.
-type Error struct{ ptr unsafe.Pointer }
-
-// NewError will return a new Error instance initialized with zero value.
-func NewError() *Error {
-	var v error
-	return &Error{
-		ptr: unsafe.Pointer(&v),
-	}
-}
-
-// Store will atomically store error value in address contained within v.
-func (v *Error) Store(val error) {
-	atomic.StorePointer(&v.ptr, unsafe.Pointer(&val))
-}
-
-// Load will atomically load error value at address contained within v.
-func (v *Error) Load() error {
-	return *(*error)(atomic.LoadPointer(&v.ptr))
-}
-
-// CAS performs a compare-and-swap for a(n) error value at address contained within v.
-func (v *Error) CAS(cmp, swp error) bool {
-	for {
-		// Load current value at address
-		ptr := atomic.LoadPointer(&v.ptr)
-		cur := *(*error)(ptr)
-
-		// Perform comparison against current
-		if !(cur == cmp) {
-			return false
-		}
-
-		// Attempt to replace pointer
-		if atomic.CompareAndSwapPointer(
-			&v.ptr,
-			ptr,
-			unsafe.Pointer(&swp),
-		) {
-			return true
-		}
-	}
-}
-
-// Swap atomically stores new error value into address contained within v, and returns previous value.
-func (v *Error) Swap(swp error) error {
-	ptr := unsafe.Pointer(&swp)
-	ptr = atomic.SwapPointer(&v.ptr, ptr)
-	return *(*error)(ptr)
-}
vendor/codeberg.org/gruf/go-atomics/flags.go (generated, vendored) — deleted, 97 lines

@@ -1,97 +0,0 @@
-package atomics
-
-import (
-	"sync/atomic"
-
-	"codeberg.org/gruf/go-bitutil"
-)
-
-// Flags32 provides user-friendly means of performing atomic operations on bitutil.Flags32 types.
-type Flags32 bitutil.Flags32
-
-// NewFlags32 will return a new Flags32 instance initialized with zero value.
-func NewFlags32() *Flags32 {
-	return new(Flags32)
-}
-
-// Get will atomically load a(n) bitutil.Flags32 value contained within f, and check if bit value is set.
-func (f *Flags32) Get(bit uint8) bool {
-	return f.Load().Get(bit)
-}
-
-// Set performs a compare-and-swap for a(n) bitutil.Flags32 with bit value set, at address contained within f.
-func (f *Flags32) Set(bit uint8) bool {
-	cur := f.Load()
-	return f.CAS(cur, cur.Set(bit))
-}
-
-// Unset performs a compare-and-swap for a(n) bitutil.Flags32 with bit value unset, at address contained within f.
-func (f *Flags32) Unset(bit uint8) bool {
-	cur := f.Load()
-	return f.CAS(cur, cur.Unset(bit))
-}
-
-// Store will atomically store bitutil.Flags32 value in address contained within f.
-func (f *Flags32) Store(val bitutil.Flags32) {
-	atomic.StoreUint32((*uint32)(f), uint32(val))
-}
-
-// Load will atomically load bitutil.Flags32 value at address contained within f.
-func (f *Flags32) Load() bitutil.Flags32 {
-	return bitutil.Flags32(atomic.LoadUint32((*uint32)(f)))
-}
-
-// CAS performs a compare-and-swap for a(n) bitutil.Flags32 value at address contained within f.
-func (f *Flags32) CAS(cmp, swp bitutil.Flags32) bool {
-	return atomic.CompareAndSwapUint32((*uint32)(f), uint32(cmp), uint32(swp))
-}
-
-// Swap atomically stores new bitutil.Flags32 value into address contained within f, and returns previous value.
-func (f *Flags32) Swap(swp bitutil.Flags32) bitutil.Flags32 {
-	return bitutil.Flags32(atomic.SwapUint32((*uint32)(f), uint32(swp)))
-}
-
-// Flags64 provides user-friendly means of performing atomic operations on bitutil.Flags64 types.
-type Flags64 bitutil.Flags64
-
-// NewFlags64 will return a new Flags64 instance initialized with zero value.
-func NewFlags64() *Flags64 {
-	return new(Flags64)
-}
-
-// Get will atomically load a(n) bitutil.Flags64 value contained within f, and check if bit value is set.
-func (f *Flags64) Get(bit uint8) bool {
-	return f.Load().Get(bit)
-}
-
-// Set performs a compare-and-swap for a(n) bitutil.Flags64 with bit value set, at address contained within f.
-func (f *Flags64) Set(bit uint8) bool {
-	cur := f.Load()
-	return f.CAS(cur, cur.Set(bit))
-}
-
-// Unset performs a compare-and-swap for a(n) bitutil.Flags64 with bit value unset, at address contained within f.
-func (f *Flags64) Unset(bit uint8) bool {
-	cur := f.Load()
-	return f.CAS(cur, cur.Unset(bit))
-}
-
-// Store will atomically store bitutil.Flags64 value in address contained within f.
-func (f *Flags64) Store(val bitutil.Flags64) {
-	atomic.StoreUint64((*uint64)(f), uint64(val))
-}
-
-// Load will atomically load bitutil.Flags64 value at address contained within f.
-func (f *Flags64) Load() bitutil.Flags64 {
-	return bitutil.Flags64(atomic.LoadUint64((*uint64)(f)))
-}
-
-// CAS performs a compare-and-swap for a(n) bitutil.Flags64 value at address contained within f.
-func (f *Flags64) CAS(cmp, swp bitutil.Flags64) bool {
-	return atomic.CompareAndSwapUint64((*uint64)(f), uint64(cmp), uint64(swp))
-}
-
-// Swap atomically stores new bitutil.Flags64 value into address contained within f, and returns previous value.
-func (f *Flags64) Swap(swp bitutil.Flags64) bitutil.Flags64 {
-	return bitutil.Flags64(atomic.SwapUint64((*uint64)(f), uint64(swp)))
-}
vendor/codeberg.org/gruf/go-atomics/int.go (generated, vendored) — deleted, 69 lines

@@ -1,69 +0,0 @@
-package atomics
-
-import "sync/atomic"
-
-// Int32 provides user-friendly means of performing atomic operations on int32 types.
-type Int32 int32
-
-// NewInt32 will return a new Int32 instance initialized with zero value.
-func NewInt32() *Int32 {
-	return new(Int32)
-}
-
-// Add will atomically add int32 delta to value in address contained within i, returning new value.
-func (i *Int32) Add(delta int32) int32 {
-	return atomic.AddInt32((*int32)(i), delta)
-}
-
-// Store will atomically store int32 value in address contained within i.
-func (i *Int32) Store(val int32) {
-	atomic.StoreInt32((*int32)(i), val)
-}
-
-// Load will atomically load int32 value at address contained within i.
-func (i *Int32) Load() int32 {
-	return atomic.LoadInt32((*int32)(i))
-}
-
-// CAS performs a compare-and-swap for a(n) int32 value at address contained within i.
-func (i *Int32) CAS(cmp, swp int32) bool {
-	return atomic.CompareAndSwapInt32((*int32)(i), cmp, swp)
-}
-
-// Swap atomically stores new int32 value into address contained within i, and returns previous value.
-func (i *Int32) Swap(swp int32) int32 {
-	return atomic.SwapInt32((*int32)(i), swp)
-}
-
-// Int64 provides user-friendly means of performing atomic operations on int64 types.
-type Int64 int64
-
-// NewInt64 will return a new Int64 instance initialized with zero value.
-func NewInt64() *Int64 {
-	return new(Int64)
-}
-
-// Add will atomically add int64 delta to value in address contained within i, returning new value.
-func (i *Int64) Add(delta int64) int64 {
-	return atomic.AddInt64((*int64)(i), delta)
-}
-
-// Store will atomically store int64 value in address contained within i.
-func (i *Int64) Store(val int64) {
-	atomic.StoreInt64((*int64)(i), val)
-}
-
-// Load will atomically load int64 value at address contained within i.
-func (i *Int64) Load() int64 {
-	return atomic.LoadInt64((*int64)(i))
-}
-
-// CAS performs a compare-and-swap for a(n) int64 value at address contained within i.
-func (i *Int64) CAS(cmp, swp int64) bool {
-	return atomic.CompareAndSwapInt64((*int64)(i), cmp, swp)
-}
-
-// Swap atomically stores new int64 value into address contained within i, and returns previous value.
-func (i *Int64) Swap(swp int64) int64 {
-	return atomic.SwapInt64((*int64)(i), swp)
-}
vendor/codeberg.org/gruf/go-atomics/interface.go (generated, vendored) — deleted, 57 lines

@@ -1,57 +0,0 @@
-package atomics
-
-import (
-	"sync/atomic"
-	"unsafe"
-)
-
-// Interface provides user-friendly means of performing atomic operations on interface{} types.
-type Interface struct{ ptr unsafe.Pointer }
-
-// NewInterface will return a new Interface instance initialized with zero value.
-func NewInterface() *Interface {
-	var v interface{}
-	return &Interface{
-		ptr: unsafe.Pointer(&v),
-	}
-}
-
-// Store will atomically store interface{} value in address contained within v.
-func (v *Interface) Store(val interface{}) {
-	atomic.StorePointer(&v.ptr, unsafe.Pointer(&val))
-}
-
-// Load will atomically load interface{} value at address contained within v.
-func (v *Interface) Load() interface{} {
-	return *(*interface{})(atomic.LoadPointer(&v.ptr))
-}
-
-// CAS performs a compare-and-swap for a(n) interface{} value at address contained within v.
-func (v *Interface) CAS(cmp, swp interface{}) bool {
-	for {
-		// Load current value at address
-		ptr := atomic.LoadPointer(&v.ptr)
-		cur := *(*interface{})(ptr)
-
-		// Perform comparison against current
-		if !(cur == cmp) {
-			return false
-		}
-
-		// Attempt to replace pointer
-		if atomic.CompareAndSwapPointer(
-			&v.ptr,
-			ptr,
-			unsafe.Pointer(&swp),
-		) {
-			return true
-		}
-	}
-}
-
-// Swap atomically stores new interface{} value into address contained within v, and returns previous value.
-func (v *Interface) Swap(swp interface{}) interface{} {
-	ptr := unsafe.Pointer(&swp)
-	ptr = atomic.SwapPointer(&v.ptr, ptr)
-	return *(*interface{})(ptr)
-}
vendor/codeberg.org/gruf/go-atomics/state.go (generated, vendored) — deleted, 58 lines

@@ -1,58 +0,0 @@
-package atomics
-
-import "sync"
-
-// State provides user-friendly means of performing atomic-like
-// operations on a uint32 state, and allowing callbacks on successful
-// state change. This is a bit of a misnomer being where it is, as it
-// actually uses a mutex under-the-hood.
-type State struct {
-	mutex sync.Mutex
-	state uint32
-}
-
-// Store will update State value safely within mutex lock.
-func (st *State) Store(val uint32) {
-	st.mutex.Lock()
-	st.state = val
-	st.mutex.Unlock()
-}
-
-// Load will get value of State safely within mutex lock.
-func (st *State) Load() uint32 {
-	st.mutex.Lock()
-	state := st.state
-	st.mutex.Unlock()
-	return state
-}
-
-// WithLock performs fn within State mutex lock, useful if you want
-// to just use State's mutex for locking instead of creating another.
-func (st *State) WithLock(fn func()) {
-	st.mutex.Lock()
-	defer st.mutex.Unlock()
-	fn()
-}
-
-// Update performs fn within State mutex lock, with the current state
-// value provided as an argument, and return value used to update state.
-func (st *State) Update(fn func(state uint32) uint32) {
-	st.mutex.Lock()
-	defer st.mutex.Unlock()
-	st.state = fn(st.state)
-}
-
-// CAS performs a compare-and-swap on State, calling fn on success. Success value is also returned.
-func (st *State) CAS(cmp, swp uint32, fn func()) (ok bool) {
-	// Acquire lock
-	st.mutex.Lock()
-	defer st.mutex.Unlock()
-
-	// Perform CAS operation, fn() on success
-	if ok = (st.state == cmp); ok {
-		st.state = swp
-		fn()
-	}
-
-	return
-}
vendor/codeberg.org/gruf/go-atomics/string.go (generated, vendored) — deleted, 57 lines

@@ -1,57 +0,0 @@
-package atomics
-
-import (
-	"sync/atomic"
-	"unsafe"
-)
-
-// String provides user-friendly means of performing atomic operations on string types.
-type String struct{ ptr unsafe.Pointer }
-
-// NewString will return a new String instance initialized with zero value.
-func NewString() *String {
-	var v string
-	return &String{
-		ptr: unsafe.Pointer(&v),
-	}
-}
-
-// Store will atomically store string value in address contained within v.
-func (v *String) Store(val string) {
-	atomic.StorePointer(&v.ptr, unsafe.Pointer(&val))
-}
-
-// Load will atomically load string value at address contained within v.
-func (v *String) Load() string {
-	return *(*string)(atomic.LoadPointer(&v.ptr))
-}
-
-// CAS performs a compare-and-swap for a(n) string value at address contained within v.
-func (v *String) CAS(cmp, swp string) bool {
-	for {
-		// Load current value at address
-		ptr := atomic.LoadPointer(&v.ptr)
-		cur := *(*string)(ptr)
-
-		// Perform comparison against current
-		if !(cur == cmp) {
-			return false
-		}
-
-		// Attempt to replace pointer
-		if atomic.CompareAndSwapPointer(
-			&v.ptr,
-			ptr,
-			unsafe.Pointer(&swp),
-		) {
-			return true
-		}
-	}
-}
-
-// Swap atomically stores new string value into address contained within v, and returns previous value.
-func (v *String) Swap(swp string) string {
-	ptr := unsafe.Pointer(&swp)
-	ptr = atomic.SwapPointer(&v.ptr, ptr)
-	return *(*string)(ptr)
-}
vendor/codeberg.org/gruf/go-atomics/time.go (generated, vendored) — deleted, 58 lines

@@ -1,58 +0,0 @@
-package atomics
-
-import (
-	"sync/atomic"
-	"time"
-	"unsafe"
-)
-
-// Time provides user-friendly means of performing atomic operations on time.Time types.
-type Time struct{ ptr unsafe.Pointer }
-
-// NewTime will return a new Time instance initialized with zero value.
-func NewTime() *Time {
-	var v time.Time
-	return &Time{
-		ptr: unsafe.Pointer(&v),
-	}
-}
-
-// Store will atomically store time.Time value in address contained within v.
-func (v *Time) Store(val time.Time) {
-	atomic.StorePointer(&v.ptr, unsafe.Pointer(&val))
-}
-
-// Load will atomically load time.Time value at address contained within v.
-func (v *Time) Load() time.Time {
-	return *(*time.Time)(atomic.LoadPointer(&v.ptr))
-}
-
-// CAS performs a compare-and-swap for a(n) time.Time value at address contained within v.
-func (v *Time) CAS(cmp, swp time.Time) bool {
-	for {
-		// Load current value at address
-		ptr := atomic.LoadPointer(&v.ptr)
-		cur := *(*time.Time)(ptr)
-
-		// Perform comparison against current
-		if !(cur.Equal(cmp)) {
-			return false
-		}
-
-		// Attempt to replace pointer
-		if atomic.CompareAndSwapPointer(
-			&v.ptr,
-			ptr,
-			unsafe.Pointer(&swp),
-		) {
-			return true
-		}
-	}
-}
-
-// Swap atomically stores new time.Time value into address contained within v, and returns previous value.
-func (v *Time) Swap(swp time.Time) time.Time {
-	ptr := unsafe.Pointer(&swp)
-	ptr = atomic.SwapPointer(&v.ptr, ptr)
-	return *(*time.Time)(ptr)
-}
vendor/codeberg.org/gruf/go-atomics/uint.go (generated, vendored) — deleted, 69 lines

@@ -1,69 +0,0 @@
-package atomics
-
-import "sync/atomic"
-
-// Uint32 provides user-friendly means of performing atomic operations on uint32 types.
-type Uint32 uint32
-
-// NewUint32 will return a new Uint32 instance initialized with zero value.
-func NewUint32() *Uint32 {
-	return new(Uint32)
-}
-
-// Add will atomically add uint32 delta to value in address contained within i, returning new value.
-func (u *Uint32) Add(delta uint32) uint32 {
-	return atomic.AddUint32((*uint32)(u), delta)
-}
-
-// Store will atomically store uint32 value in address contained within i.
-func (u *Uint32) Store(val uint32) {
-	atomic.StoreUint32((*uint32)(u), val)
-}
-
-// Load will atomically load uint32 value at address contained within i.
-func (u *Uint32) Load() uint32 {
-	return atomic.LoadUint32((*uint32)(u))
-}
-
-// CAS performs a compare-and-swap for a(n) uint32 value at address contained within i.
-func (u *Uint32) CAS(cmp, swp uint32) bool {
-	return atomic.CompareAndSwapUint32((*uint32)(u), cmp, swp)
-}
-
-// Swap atomically stores new uint32 value into address contained within i, and returns previous value.
-func (u *Uint32) Swap(swp uint32) uint32 {
-	return atomic.SwapUint32((*uint32)(u), swp)
-}
-
-// Uint64 provides user-friendly means of performing atomic operations on uint64 types.
-type Uint64 uint64
-
-// NewUint64 will return a new Uint64 instance initialized with zero value.
-func NewUint64() *Uint64 {
-	return new(Uint64)
-}
-
-// Add will atomically add uint64 delta to value in address contained within i, returning new value.
-func (u *Uint64) Add(delta uint64) uint64 {
-	return atomic.AddUint64((*uint64)(u), delta)
-}
-
-// Store will atomically store uint64 value in address contained within i.
-func (u *Uint64) Store(val uint64) {
-	atomic.StoreUint64((*uint64)(u), val)
-}
-
-// Load will atomically load uint64 value at address contained within i.
-func (u *Uint64) Load() uint64 {
-	return atomic.LoadUint64((*uint64)(u))
-}
-
-// CAS performs a compare-and-swap for a(n) uint64 value at address contained within i.
-func (u *Uint64) CAS(cmp, swp uint64) bool {
-	return atomic.CompareAndSwapUint64((*uint64)(u), cmp, swp)
-}
-
-// Swap atomically stores new uint64 value into address contained within i, and returns previous value.
-func (u *Uint64) Swap(swp uint64) uint64 {
-	return atomic.SwapUint64((*uint64)(u), swp)
-}
vendor/codeberg.org/gruf/go-bitutil/LICENSE (generated, vendored) — deleted, 9 lines

@@ -1,9 +0,0 @@
-MIT License
-
-Copyright (c) 2022 gruf
-
-Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
vendor/codeberg.org/gruf/go-bitutil/README.md (generated, vendored) — deleted, 3 lines

@@ -1,3 +0,0 @@
-# go-bitutil
-
-This library provides helpful methods and types for performing typical bitwise operations on integers, e.g. packing/unpacking, bit flags.
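go-bitutil is likewise dropped from the vendor tree. Based on the sources below (and on the Flags32 wrapper in the deleted go-atomics/flags.go), a minimal illustrative sketch of its flag and packing helpers — not a GoToSocial call site — would look like:

	package main

	import (
		"fmt"

		"codeberg.org/gruf/go-bitutil"
	)

	func main() {
		// Bit flags: Set/Unset return a new value, Get tests a bit index.
		var f bitutil.Flags32
		f = f.Set(3)
		fmt.Println(f.Get(3), f.Get(4)) // true false

		// Packing two signed 8-bit integers into one uint16 and back.
		u := bitutil.PackInt8s(-5, 7)
		a, b := bitutil.UnpackInt8s(u)
		fmt.Println(a, b) // -5 7
	}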
vendor/codeberg.org/gruf/go-bitutil/abs.go (generated, vendored) — deleted, 29 lines

@@ -1,29 +0,0 @@
-package bitutil
-
-// Abs8 returns the absolute value of i (calculated without branching).
-func Abs8(i int8) int8 {
-	const bits = 8
-	u := uint64(i >> (bits - 1))
-	return (i ^ int8(u)) + int8(u&1)
-}
-
-// Abs16 returns the absolute value of i (calculated without branching).
-func Abs16(i int16) int16 {
-	const bits = 16
-	u := uint64(i >> (bits - 1))
-	return (i ^ int16(u)) + int16(u&1)
-}
-
-// Abs32 returns the absolute value of i (calculated without branching).
-func Abs32(i int32) int32 {
-	const bits = 32
-	u := uint64(i >> (bits - 1))
-	return (i ^ int32(u)) + int32(u&1)
-}
-
-// Abs64 returns the absolute value of i (calculated without branching).
-func Abs64(i int64) int64 {
-	const bits = 64
-	u := uint64(i >> (bits - 1))
-	return (i ^ int64(u)) + int64(u&1)
-}
vendor/codeberg.org/gruf/go-bitutil/flag.go (generated, vendored) — deleted, 3744 lines; file diff suppressed because it is too large
vendor/codeberg.org/gruf/go-bitutil/flag.tpl (generated, vendored) — deleted, 117 lines

@@ -1,117 +0,0 @@
-package bitutil
-
-import (
-	"strings"
-	"unsafe"
-)
-
-{{ range $idx, $size := . }}
-
-// Flags{{ $size.Size }} is a type-casted unsigned integer with helper
-// methods for easily managing up to {{ $size.Size }} bit-flags.
-type Flags{{ $size.Size }} uint{{ $size.Size }}
-
-// Get will fetch the flag bit value at index 'bit'.
-func (f Flags{{ $size.Size }}) Get(bit uint8) bool {
-	mask := Flags{{ $size.Size }}(1) << bit
-	return (f & mask != 0)
-}
-
-// Set will set the flag bit value at index 'bit'.
-func (f Flags{{ $size.Size }}) Set(bit uint8) Flags{{ $size.Size }} {
-	mask := Flags{{ $size.Size }}(1) << bit
-	return f | mask
-}
-
-// Unset will unset the flag bit value at index 'bit'.
-func (f Flags{{ $size.Size }}) Unset(bit uint8) Flags{{ $size.Size }} {
-	mask := Flags{{ $size.Size }}(1) << bit
-	return f & ^mask
-}
-
-{{ range $idx := $size.Bits }}
-
-// Get{{ $idx }} will fetch the flag bit value at index {{ $idx }}.
-func (f Flags{{ $size.Size }}) Get{{ $idx }}() bool {
-	const mask = Flags{{ $size.Size }}(1) << {{ $idx }}
-	return (f & mask != 0)
-}
-
-// Set{{ $idx }} will set the flag bit value at index {{ $idx }}.
-func (f Flags{{ $size.Size }}) Set{{ $idx }}() Flags{{ $size.Size }} {
-	const mask = Flags{{ $size.Size }}(1) << {{ $idx }}
-	return f | mask
-}
-
-// Unset{{ $idx }} will unset the flag bit value at index {{ $idx }}.
-func (f Flags{{ $size.Size }}) Unset{{ $idx }}() Flags{{ $size.Size }} {
-	const mask = Flags{{ $size.Size }}(1) << {{ $idx }}
-	return f & ^mask
-}
-
-{{ end }}
-
-// String returns a human readable representation of Flags{{ $size.Size }}.
-func (f Flags{{ $size.Size }}) String() string {
-	var (
-		i   int
-		val bool
-		buf []byte
-	)
-
-	// Make a prealloc est. based on longest-possible value
-	const prealloc = 1+(len("false ")*{{ $size.Size }})-1+1
-	buf = make([]byte, prealloc)
-
-	buf[i] = '{'
-	i++
-
-	{{ range $idx := .Bits }}
-	val = f.Get{{ $idx }}()
-	i += copy(buf[i:], bool2str(val))
-	buf[i] = ' '
-	i++
-	{{ end }}
-
-	buf[i-1] = '}'
-	buf = buf[:i]
-
-	return *(*string)(unsafe.Pointer(&buf))
-}
-
-// GoString returns a more verbose human readable representation of Flags{{ $size.Size }}.
-func (f Flags{{ $size.Size }})GoString() string {
-	var (
-		i   int
-		val bool
-		buf []byte
-	)
-
-	// Make a prealloc est. based on longest-possible value
-	const prealloc = len("bitutil.Flags{{ $size.Size }}{")+(len("{{ sub $size.Size 1 }}=false ")*{{ $size.Size }})-1+1
-	buf = make([]byte, prealloc)
-
-	i += copy(buf[i:], "bitutil.Flags{{ $size.Size }}{")
-
-	{{ range $idx := .Bits }}
-	val = f.Get{{ $idx }}()
-	i += copy(buf[i:], "{{ $idx }}=")
-	i += copy(buf[i:], bool2str(val))
-	buf[i] = ' '
-	i++
-	{{ end }}
-
-	buf[i-1] = '}'
-	buf = buf[:i]
-
-	return *(*string)(unsafe.Pointer(&buf))
-}
-
-{{ end }}
-
-func bool2str(b bool) string {
-	if b {
-		return "true"
-	}
-	return "false"
-}
vendor/codeberg.org/gruf/go-bitutil/flag_test.tpl (generated, vendored) — deleted, 98 lines

@@ -1,98 +0,0 @@
-package bitutil_test
-
-import (
-	"strings"
-	"testing"
-
-	"codeberg.org/gruf/go-bytes"
-)
-
-{{ range $idx, $size := . }}
-
-func TestFlags{{ $size.Size }}Get(t *testing.T) {
-	var mask, flags bitutil.Flags{{ $size.Size }}
-
-	{{ range $idx := $size.Bits }}
-
-	mask = bitutil.Flags{{ $size.Size }}(1) << {{ $idx }}
-
-	flags = 0
-
-	flags |= mask
-	if !flags.Get({{ $idx }}) {
-		t.Error("failed .Get() set Flags{{ $size.Size }} bit at index {{ $idx }}")
-	}
-
-	flags = ^bitutil.Flags{{ $size.Size }}(0)
-
-	flags &= ^mask
-	if flags.Get({{ $idx }}) {
-		t.Error("failed .Get() unset Flags{{ $size.Size }} bit at index {{ $idx }}")
-	}
-
-	flags = 0
-
-	flags |= mask
-	if !flags.Get{{ $idx }}() {
-		t.Error("failed .Get{{ $idx }}() set Flags{{ $size.Size }} bit at index {{ $idx }}")
-	}
-
-	flags = ^bitutil.Flags{{ $size.Size }}(0)
-
-	flags &= ^mask
-	if flags.Get{{ $idx }}() {
-		t.Error("failed .Get{{ $idx }}() unset Flags{{ $size.Size }} bit at index {{ $idx }}")
-	}
-
-	{{ end }}
-}
-
-func TestFlags{{ $size.Size }}Set(t *testing.T) {
-	var mask, flags bitutil.Flags{{ $size.Size }}
-
-	{{ range $idx := $size.Bits }}
-
-	mask = bitutil.Flags{{ $size.Size }}(1) << {{ $idx }}
-
-	flags = 0
-
-	flags = flags.Set({{ $idx }})
-	if flags & mask == 0 {
-		t.Error("failed .Set() Flags{{ $size.Size }} bit at index {{ $idx }}")
-	}
-
-	flags = 0
-
-	flags = flags.Set{{ $idx }}()
-	if flags & mask == 0 {
-		t.Error("failed .Set{{ $idx }}() Flags{{ $size.Size }} bit at index {{ $idx }}")
-	}
-
-	{{ end }}
-}
-
-func TestFlags{{ $size.Size }}Unset(t *testing.T) {
-	var mask, flags bitutil.Flags{{ $size.Size }}
-
-	{{ range $idx := $size.Bits }}
-
-	mask = bitutil.Flags{{ $size.Size }}(1) << {{ $idx }}
-
-	flags = ^bitutil.Flags{{ $size.Size }}(0)
-
-	flags = flags.Unset({{ $idx }})
-	if flags & mask != 0 {
-		t.Error("failed .Unset() Flags{{ $size.Size }} bit at index {{ $idx }}")
-	}
-
-	flags = ^bitutil.Flags{{ $size.Size }}(0)
-
-	flags = flags.Unset{{ $idx }}()
-	if flags & mask != 0 {
-		t.Error("failed .Unset{{ $idx }}() Flags{{ $size.Size }} bit at index {{ $idx }}")
-	}
-
-	{{ end }}
-}
-
-{{ end }}
vendor/codeberg.org/gruf/go-bitutil/pack.go (generated, vendored) — deleted, 85 lines

@@ -1,85 +0,0 @@
-package bitutil
-
-// PackInt8s will pack two signed 8bit integers into an unsigned 16bit integer.
-func PackInt8s(i1, i2 int8) uint16 {
-	const bits = 8
-	const mask = (1 << bits) - 1
-	return uint16(i1)<<bits | uint16(i2)&mask
-}
-
-// UnpackInt8s will unpack two signed 8bit integers from an unsigned 16bit integer.
-func UnpackInt8s(i uint16) (int8, int8) {
-	const bits = 8
-	const mask = (1 << bits) - 1
-	return int8(i >> bits), int8(i & mask)
-}
-
-// PackInt16s will pack two signed 16bit integers into an unsigned 32bit integer.
-func PackInt16s(i1, i2 int16) uint32 {
-	const bits = 16
-	const mask = (1 << bits) - 1
-	return uint32(i1)<<bits | uint32(i2)&mask
-}
-
-// UnpackInt16s will unpack two signed 16bit integers from an unsigned 32bit integer.
-func UnpackInt16s(i uint32) (int16, int16) {
-	const bits = 16
-	const mask = (1 << bits) - 1
-	return int16(i >> bits), int16(i & mask)
-}
-
-// PackInt32s will pack two signed 32bit integers into an unsigned 64bit integer.
-func PackInt32s(i1, i2 int32) uint64 {
-	const bits = 32
-	const mask = (1 << bits) - 1
-	return uint64(i1)<<bits | uint64(i2)&mask
-}
-
-// UnpackInt32s will unpack two signed 32bit integers from an unsigned 64bit integer.
-func UnpackInt32s(i uint64) (int32, int32) {
-	const bits = 32
-	const mask = (1 << bits) - 1
-	return int32(i >> bits), int32(i & mask)
-}
-
-// PackUint8s will pack two unsigned 8bit integers into an unsigned 16bit integer.
-func PackUint8s(u1, u2 uint8) uint16 {
-	const bits = 8
-	const mask = (1 << bits) - 1
-	return uint16(u1)<<bits | uint16(u2)&mask
-}
-
-// UnpackUint8s will unpack two unsigned 8bit integers from an unsigned 16bit integer.
-func UnpackUint8s(u uint16) (uint8, uint8) {
-	const bits = 8
-	const mask = (1 << bits) - 1
-	return uint8(u >> bits), uint8(u & mask)
-}
-
-// PackUint16s will pack two unsigned 16bit integers into an unsigned 32bit integer.
-func PackUint16s(u1, u2 uint16) uint32 {
-	const bits = 16
-	const mask = (1 << bits) - 1
-	return uint32(u1)<<bits | uint32(u2)&mask
-}
-
-// UnpackUint16s will unpack two unsigned 16bit integers from an unsigned 32bit integer.
-func UnpackUint16s(u uint32) (uint16, uint16) {
-	const bits = 16
-	const mask = (1 << bits) - 1
-	return uint16(u >> bits), uint16(u & mask)
-}
-
-// PackUint32s will pack two unsigned 32bit integers into an unsigned 64bit integer.
-func PackUint32s(u1, u2 uint32) uint64 {
-	const bits = 32
-	const mask = (1 << bits) - 1
-	return uint64(u1)<<bits | uint64(u2)&mask
-}
-
-// UnpackUint32s will unpack two unsigned 32bit integers from an unsigned 64bit integer.
-func UnpackUint32s(u uint64) (uint32, uint32) {
-	const bits = 32
-	const mask = (1 << bits) - 1
-	return uint32(u >> bits), uint32(u & mask)
-}
vendor/codeberg.org/gruf/go-bitutil/test.tpl (generated, vendored) — deleted, 60 lines

@@ -1,60 +0,0 @@
-package atomics_test
-
-import (
-	"atomic"
-	"unsafe"
-	"testing"
-
-	"codeberg.org/gruf/go-atomics"
-)
-
-func Test{{ .Name }}StoreLoad(t *testing.T) {
-	for _, test := range {{ .Name }}Tests {
-		val := atomics.New{{ .Name }}()
-
-		val.Store(test.V1)
-
-		if !({{ call .Compare "val.Load()" "test.V1" }}) {
-			t.Fatalf("failed testing .Store and .Load: expect=%v actual=%v", val.Load(), test.V1)
-		}
-
-		val.Store(test.V2)
-
-		if !({{ call .Compare "val.Load()" "test.V2" }}) {
-			t.Fatalf("failed testing .Store and .Load: expect=%v actual=%v", val.Load(), test.V2)
-		}
-	}
-}
-
-func Test{{ .Name }}CAS(t *testing.T) {
-	for _, test := range {{ .Name }}Tests {
-		val := atomics.New{{ .Name }}()
-
-		val.Store(test.V1)
-
-		if val.CAS(test.V2, test.V1) {
-			t.Fatalf("failed testing negative .CAS: test=%+v state=%v", test, val.Load())
-		}
-
-		if !val.CAS(test.V1, test.V2) {
-			t.Fatalf("failed testing positive .CAS: test=%+v state=%v", test, val.Load())
-		}
-	}
-}
-
-func Test{{ .Name }}Swap(t *testing.T) {
-	for _, test := range {{ .Name }}Tests {
-		val := atomics.New{{ .Name }}()
-
-		val.Store(test.V1)
-
-		if !({{ call .Compare "val.Swap(test.V2)" "test.V1" }}) {
-			t.Fatal("failed testing .Swap")
-		}
-
-		if !({{ call .Compare "val.Swap(test.V1)" "test.V2" }}) {
-			t.Fatal("failed testing .Swap")
-		}
-	}
-}
vendor/codeberg.org/gruf/go-sched/job.go (generated, vendored) — 32 changed lines

@@ -4,9 +4,9 @@
 	"reflect"
 	"strconv"
 	"strings"
+	"sync/atomic"
 	"time"
-
-	"codeberg.org/gruf/go-atomics"
+	"unsafe"
 )
 
 // Job encapsulates logic for a scheduled job to be run according
@@ -14,7 +14,7 @@
 // holding onto a next execution time safely in a concurrent environment.
 type Job struct {
 	id     uint64
-	next   atomics.Time
+	next   unsafe.Pointer // *time.Time
 	timing Timing
 	call   func(time.Time)
 	panic  func(interface{})
@@ -33,9 +33,6 @@ func NewJob(fn func(now time.Time)) *Job {
 		panic: func(i interface{}) { panic(i) },
 	}
 
-	// Init next time ptr
-	j.next.Store(zerotime)
-
 	return j
 }
 
@@ -99,14 +96,20 @@ func (job *Job) OnPanic(fn func(interface{})) *Job {
 
 // Next returns the next time this Job is expected to run.
 func (job *Job) Next() time.Time {
-	return job.next.Load()
+	return loadTime(&job.next)
}
 
 // Run will execute this Job and pass through given now time.
 func (job *Job) Run(now time.Time) {
 	defer func() {
-		if r := recover(); r != nil {
+		switch r := recover(); {
+		case r == nil:
+			// no panic
+		case job != nil &&
+			job.panic != nil:
 			job.panic(r)
+		default:
+			panic(r)
 		}
 	}()
 	job.call(now)
@@ -120,10 +123,21 @@
 	buf.WriteString(strconv.FormatUint(job.id, 10))
 	buf.WriteByte(' ')
 	buf.WriteString("next=")
-	buf.WriteString(job.next.Load().Format(time.StampMicro))
+	buf.WriteString(loadTime(&job.next).Format(time.StampMicro))
 	buf.WriteByte(' ')
 	buf.WriteString("timing=")
 	buf.WriteString(reflect.TypeOf(job.timing).String())
 	buf.WriteByte('}')
 	return buf.String()
 }
+
+func loadTime(p *unsafe.Pointer) time.Time {
+	if p := atomic.LoadPointer(p); p != nil {
+		return *(*time.Time)(p)
+	}
+	return zerotime
+}
+
+func storeTime(p *unsafe.Pointer, t time.Time) {
+	atomic.StorePointer(p, unsafe.Pointer(&t))
+}
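The change above swaps the go-atomics Time wrapper for a bare unsafe.Pointer guarded with sync/atomic. Here is a self-contained sketch of that pattern, with loadTime/storeTime copied from the diff and zerotime as a stand-in for go-sched's package-level zero time value (an assumption; the real definition is not shown in this hunk):

	package main

	import (
		"fmt"
		"sync/atomic"
		"time"
		"unsafe"
	)

	// zerotime stands in for go-sched's package-level zero time value.
	var zerotime = time.Time{}

	// loadTime atomically loads the *time.Time published behind p, or returns zerotime when nothing has been stored yet.
	func loadTime(p *unsafe.Pointer) time.Time {
		if p := atomic.LoadPointer(p); p != nil {
			return *(*time.Time)(p)
		}
		return zerotime
	}

	// storeTime atomically publishes a copy of t behind p.
	func storeTime(p *unsafe.Pointer, t time.Time) {
		atomic.StorePointer(p, unsafe.Pointer(&t))
	}

	func main() {
		var next unsafe.Pointer // plays the role of Job.next
		fmt.Println(loadTime(&next).IsZero()) // true: nothing stored yet
		storeTime(&next, time.Now())
		fmt.Println(loadTime(&next).IsZero()) // false
	}

Because loadTime treats a nil pointer as the zero time, the explicit `j.next.Store(zerotime)` initialisation removed from NewJob is no longer needed.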
vendor/codeberg.org/gruf/go-sched/scheduler.go (generated, vendored) — 4 changed lines

@@ -225,7 +225,7 @@ func (sch *Scheduler) handle(v interface{}) {
 
 		// Update the next call time
 		next := v.timing.Next(now)
-		v.next.Store(next)
+		storeTime(&v.next, next)
 
 		// Append this job to queued
 		sch.jobs = append(sch.jobs, v)
@@ -261,7 +261,7 @@ func (sch *Scheduler) schedule(now time.Time) {
 
 		// Update the next call time
 		next := job.timing.Next(now)
-		job.next.Store(next)
+		storeTime(&job.next, next)
 
 		if next.IsZero() {
 			// Zero time, this job is done and can be dropped
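For orientation, the Job API touched here is used roughly as follows. This is a sketch against the signatures visible in the hunks above only; the import alias and the zero Next() value for a not-yet-scheduled job are assumptions, not verified against the full v1.2.4 sources:

	package main

	import (
		"fmt"
		"time"

		sched "codeberg.org/gruf/go-sched"
	)

	func main() {
		job := sched.NewJob(func(now time.Time) {
			fmt.Println("job ran at", now)
		})

		// OnPanic installs a handler instead of re-panicking out of Run.
		job.OnPanic(func(v interface{}) { fmt.Println("recovered:", v) })

		fmt.Println(job.Next().IsZero()) // true until a scheduler stores the next run time
		job.Run(time.Now())              // executes the callback under the recover logic shown above
	}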
vendor/modules.txt (vendored) — 10 changed lines

@@ -1,9 +1,3 @@
-# codeberg.org/gruf/go-atomics v1.1.0
-## explicit; go 1.16
-codeberg.org/gruf/go-atomics
-# codeberg.org/gruf/go-bitutil v1.1.0
-## explicit; go 1.19
-codeberg.org/gruf/go-bitutil
 # codeberg.org/gruf/go-bytes v1.0.2
 ## explicit; go 1.14
 codeberg.org/gruf/go-bytes
@@ -59,10 +53,10 @@ codeberg.org/gruf/go-mimetypes
 # codeberg.org/gruf/go-mutexes v1.5.1
 ## explicit; go 1.22.2
 codeberg.org/gruf/go-mutexes
-# codeberg.org/gruf/go-runners v1.6.2
+# codeberg.org/gruf/go-runners v1.6.3
 ## explicit; go 1.19
 codeberg.org/gruf/go-runners
-# codeberg.org/gruf/go-sched v1.2.3
+# codeberg.org/gruf/go-sched v1.2.4
 ## explicit; go 1.19
 codeberg.org/gruf/go-sched
 # codeberg.org/gruf/go-storage v0.2.0