| | |
| | |
| | |
| |
|
#include "go_asm.h"
#include "textflag.h"
| |
|
// func Casint32(ptr *int32, old, new int32) bool
// Same frame layout as Cas ($0-17), so tail-call it.
TEXT ·Casint32(SB), NOSPLIT, $0-17
	B	·Cas(SB)
| |
|
// func Casint64(ptr *int64, old, new int64) bool
// Same frame layout as Cas64 ($0-25), so tail-call it.
TEXT ·Casint64(SB), NOSPLIT, $0-25
	B	·Cas64(SB)
| |
|
// func Casuintptr(ptr *uintptr, old, new uintptr) bool
// uintptr is 64-bit on arm64, so this is Cas64.
TEXT ·Casuintptr(SB), NOSPLIT, $0-25
	B	·Cas64(SB)
| |
|
// func CasRel(ptr *uint32, old, new uint32) bool
// Release-ordered CAS; implemented with the fully-ordered Cas.
TEXT ·CasRel(SB), NOSPLIT, $0-17
	B	·Cas(SB)
| |
|
// func Loadint32(ptr *int32) int32
TEXT ·Loadint32(SB), NOSPLIT, $0-12
	B	·Load(SB)
| |
|
// func Loadint64(ptr *int64) int64
TEXT ·Loadint64(SB), NOSPLIT, $0-16
	B	·Load64(SB)
| |
|
// func Loaduintptr(ptr *uintptr) uintptr
// uintptr is 64-bit on arm64.
TEXT ·Loaduintptr(SB), NOSPLIT, $0-16
	B	·Load64(SB)
| |
|
// func Loaduint(ptr *uint) uint
// uint is 64-bit on arm64.
TEXT ·Loaduint(SB), NOSPLIT, $0-16
	B	·Load64(SB)
| |
|
// func Storeint32(ptr *int32, new int32)
TEXT ·Storeint32(SB), NOSPLIT, $0-12
	B	·Store(SB)
| |
|
// func Storeint64(ptr *int64, new int64)
TEXT ·Storeint64(SB), NOSPLIT, $0-16
	B	·Store64(SB)
| |
|
// func Storeuintptr(ptr *uintptr, new uintptr)
// uintptr is 64-bit on arm64.
TEXT ·Storeuintptr(SB), NOSPLIT, $0-16
	B	·Store64(SB)
| |
|
// func Xaddint32(ptr *int32, delta int32) int32
TEXT ·Xaddint32(SB), NOSPLIT, $0-20
	B	·Xadd(SB)
| |
|
// func Xaddint64(ptr *int64, delta int64) int64
TEXT ·Xaddint64(SB), NOSPLIT, $0-24
	B	·Xadd64(SB)
| |
|
// func Xadduintptr(ptr *uintptr, delta uintptr) uintptr
// uintptr is 64-bit on arm64.
TEXT ·Xadduintptr(SB), NOSPLIT, $0-24
	B	·Xadd64(SB)
| |
|
// func Casp1(ptr *unsafe.Pointer, old, new unsafe.Pointer) bool
// Pointer CAS without a write barrier; pointers are 64-bit, so use Cas64.
TEXT ·Casp1(SB), NOSPLIT, $0-25
	B	·Cas64(SB)
| |
|
| | |
// func Load(ptr *uint32) uint32
// Acquire-load of a 32-bit value.
TEXT ·Load(SB), NOSPLIT, $0-12
	MOVD	ptr+0(FP), R0
	LDARW	(R0), R0		// load-acquire word
	MOVW	R0, ret+8(FP)
	RET
| |
|
| | |
// func Load8(ptr *uint8) uint8
// Acquire-load of an 8-bit value.
TEXT ·Load8(SB), NOSPLIT, $0-9
	MOVD	ptr+0(FP), R0
	LDARB	(R0), R0		// load-acquire byte
	MOVB	R0, ret+8(FP)
	RET
| |
|
| | |
// func Load64(ptr *uint64) uint64
// Acquire-load of a 64-bit value.
TEXT ·Load64(SB), NOSPLIT, $0-16
	MOVD	ptr+0(FP), R0
	LDAR	(R0), R0		// load-acquire doubleword
	MOVD	R0, ret+8(FP)
	RET
| |
|
| | |
// func Loadp(ptr unsafe.Pointer) unsafe.Pointer
// Acquire-load of a pointer-sized (64-bit) value.
TEXT ·Loadp(SB), NOSPLIT, $0-16
	MOVD	ptr+0(FP), R0
	LDAR	(R0), R0
	MOVD	R0, ret+8(FP)
	RET
| |
|
| | |
// func LoadAcq(ptr *uint32) uint32
// Load is already acquire-ordered on arm64.
TEXT ·LoadAcq(SB), NOSPLIT, $0-12
	B	·Load(SB)
| |
|
| | |
// func LoadAcq64(ptr *uint64) uint64
// Load64 is already acquire-ordered on arm64.
TEXT ·LoadAcq64(SB), NOSPLIT, $0-16
	B	·Load64(SB)
| |
|
| | |
// func LoadAcquintptr(ptr *uintptr) uintptr
// uintptr is 64-bit on arm64.
TEXT ·LoadAcquintptr(SB), NOSPLIT, $0-16
	B	·Load64(SB)
| |
|
// func StorepNoWB(ptr unsafe.Pointer, val unsafe.Pointer)
// Pointer store without a write barrier; pointers are 64-bit, so use Store64.
TEXT ·StorepNoWB(SB), NOSPLIT, $0-16
	B	·Store64(SB)
| |
|
// func StoreRel(ptr *uint32, val uint32)
// Store is already release-ordered on arm64.
TEXT ·StoreRel(SB), NOSPLIT, $0-12
	B	·Store(SB)
| |
|
// func StoreRel64(ptr *uint64, val uint64)
// Store64 is already release-ordered on arm64.
TEXT ·StoreRel64(SB), NOSPLIT, $0-16
	B	·Store64(SB)
| |
|
// func StoreReluintptr(ptr *uintptr, val uintptr)
// uintptr is 64-bit on arm64.
TEXT ·StoreReluintptr(SB), NOSPLIT, $0-16
	B	·Store64(SB)
| |
|
// func Store(ptr *uint32, val uint32)
// Release-store of a 32-bit value.
TEXT ·Store(SB), NOSPLIT, $0-12
	MOVD	ptr+0(FP), R0
	MOVW	val+8(FP), R1
	STLRW	R1, (R0)		// store-release word
	RET
| |
|
// func Store8(ptr *uint8, val uint8)
// Release-store of an 8-bit value.
TEXT ·Store8(SB), NOSPLIT, $0-9
	MOVD	ptr+0(FP), R0
	MOVB	val+8(FP), R1
	STLRB	R1, (R0)		// store-release byte
	RET
| |
|
// func Store64(ptr *uint64, val uint64)
// Release-store of a 64-bit value.
TEXT ·Store64(SB), NOSPLIT, $0-16
	MOVD	ptr+0(FP), R0
	MOVD	val+8(FP), R1
	STLR	R1, (R0)		// store-release doubleword
	RET
| |
|
| | |
| | |
| | |
| | |
| | |
// uint8 Xchg8(ptr *uint8, new uint8)
// Atomically:
//	old := *ptr;
//	*ptr = new;
//	return old;
TEXT ·Xchg8(SB), NOSPLIT, $0-17
	MOVD	ptr+0(FP), R0
	MOVB	new+8(FP), R1
#ifndef GOARM64_LSE
	// Without guaranteed LSE, fall back to LL/SC when the CPU lacks it.
	MOVBU	internal∕cpu·ARM64+const_offsetARM64HasATOMICS(SB), R4
	CBZ	R4, load_store_loop
#endif
	SWPALB	R1, (R0), R2		// atomic byte swap, acquire+release
	MOVB	R2, ret+16(FP)
	RET
#ifndef GOARM64_LSE
load_store_loop:
	LDAXRB	(R0), R2
	STLXRB	R1, (R0), R3
	CBNZ	R3, load_store_loop	// retry if store-exclusive lost the reservation
	MOVB	R2, ret+16(FP)
	RET
#endif
| |
|
// uint32 Xchg(ptr *uint32, new uint32)
// Atomically:
//	old := *ptr;
//	*ptr = new;
//	return old;
TEXT ·Xchg(SB), NOSPLIT, $0-20
	MOVD	ptr+0(FP), R0
	MOVW	new+8(FP), R1
#ifndef GOARM64_LSE
	// Without guaranteed LSE, fall back to LL/SC when the CPU lacks it.
	MOVBU	internal∕cpu·ARM64+const_offsetARM64HasATOMICS(SB), R4
	CBZ	R4, load_store_loop
#endif
	SWPALW	R1, (R0), R2		// atomic word swap, acquire+release
	MOVW	R2, ret+16(FP)
	RET
#ifndef GOARM64_LSE
load_store_loop:
	LDAXRW	(R0), R2
	STLXRW	R1, (R0), R3
	CBNZ	R3, load_store_loop	// retry if store-exclusive lost the reservation
	MOVW	R2, ret+16(FP)
	RET
#endif
| |
|
// uint64 Xchg64(ptr *uint64, new uint64)
// Atomically:
//	old := *ptr;
//	*ptr = new;
//	return old;
TEXT ·Xchg64(SB), NOSPLIT, $0-24
	MOVD	ptr+0(FP), R0
	MOVD	new+8(FP), R1
#ifndef GOARM64_LSE
	// Without guaranteed LSE, fall back to LL/SC when the CPU lacks it.
	MOVBU	internal∕cpu·ARM64+const_offsetARM64HasATOMICS(SB), R4
	CBZ	R4, load_store_loop
#endif
	SWPALD	R1, (R0), R2		// atomic doubleword swap, acquire+release
	MOVD	R2, ret+16(FP)
	RET
#ifndef GOARM64_LSE
load_store_loop:
	LDAXR	(R0), R2
	STLXR	R1, (R0), R3
	CBNZ	R3, load_store_loop	// retry if store-exclusive lost the reservation
	MOVD	R2, ret+16(FP)
	RET
#endif
| |
|
// func Cas(ptr *uint32, old, new uint32) bool
// Atomically:
//	if *ptr == old {
//		*ptr = new
//		return true
//	} else {
//		return false
//	}
TEXT ·Cas(SB), NOSPLIT, $0-17
	MOVD	ptr+0(FP), R0
	MOVW	old+8(FP), R1
	MOVW	new+12(FP), R2
#ifndef GOARM64_LSE
	// Without guaranteed LSE, fall back to LL/SC when the CPU lacks it.
	MOVBU	internal∕cpu·ARM64+const_offsetARM64HasATOMICS(SB), R4
	CBZ	R4, load_store_loop
#endif
	MOVD	R1, R3			// CASALW overwrites its compare operand with the observed value
	CASALW	R3, (R0), R2
	CMP	R1, R3
	CSET	EQ, R0			// success iff observed value matched old
	MOVB	R0, ret+16(FP)
	RET
#ifndef GOARM64_LSE
load_store_loop:
	LDAXRW	(R0), R3
	CMPW	R1, R3
	BNE	ok			// mismatch: EQ is clear, return false below
	STLXRW	R2, (R0), R3
	CBNZ	R3, load_store_loop	// retry if store-exclusive lost the reservation
ok:
	CSET	EQ, R0			// flags still hold the CMPW result
	MOVB	R0, ret+16(FP)
	RET
#endif
| |
|
// func Cas64(ptr *uint64, old, new uint64) bool
// Atomically:
//	if *ptr == old {
//		*ptr = new
//		return true
//	} else {
//		return false
//	}
TEXT ·Cas64(SB), NOSPLIT, $0-25
	MOVD	ptr+0(FP), R0
	MOVD	old+8(FP), R1
	MOVD	new+16(FP), R2
#ifndef GOARM64_LSE
	// Without guaranteed LSE, fall back to LL/SC when the CPU lacks it.
	MOVBU	internal∕cpu·ARM64+const_offsetARM64HasATOMICS(SB), R4
	CBZ	R4, load_store_loop
#endif
	MOVD	R1, R3			// CASALD overwrites its compare operand with the observed value
	CASALD	R3, (R0), R2
	CMP	R1, R3
	CSET	EQ, R0			// success iff observed value matched old
	MOVB	R0, ret+24(FP)
	RET
#ifndef GOARM64_LSE
load_store_loop:
	LDAXR	(R0), R3
	CMP	R1, R3
	BNE	ok			// mismatch: EQ is clear, return false below
	STLXR	R2, (R0), R3
	CBNZ	R3, load_store_loop	// retry if store-exclusive lost the reservation
ok:
	CSET	EQ, R0			// flags still hold the CMP result
	MOVB	R0, ret+24(FP)
	RET
#endif
| |
|
// uint32 xadd(uint32 volatile *ptr, int32 delta)
// Atomically:
//	*val += delta;
//	return *val;
TEXT ·Xadd(SB), NOSPLIT, $0-20
	MOVD	ptr+0(FP), R0
	MOVW	delta+8(FP), R1
#ifndef GOARM64_LSE
	// Without guaranteed LSE, fall back to LL/SC when the CPU lacks it.
	MOVBU	internal∕cpu·ARM64+const_offsetARM64HasATOMICS(SB), R4
	CBZ	R4, load_store_loop
#endif
	LDADDALW	R1, (R0), R2	// R2 = old value
	ADD	R1, R2			// compute and return the new value
	MOVW	R2, ret+16(FP)
	RET
#ifndef GOARM64_LSE
load_store_loop:
	LDAXRW	(R0), R2
	ADDW	R2, R1, R2
	STLXRW	R2, (R0), R3
	CBNZ	R3, load_store_loop	// retry if store-exclusive lost the reservation
	MOVW	R2, ret+16(FP)
	RET
#endif
| |
|
// uint64 Xadd64(uint64 volatile *ptr, int64 delta)
// Atomically:
//	*val += delta;
//	return *val;
TEXT ·Xadd64(SB), NOSPLIT, $0-24
	MOVD	ptr+0(FP), R0
	MOVD	delta+8(FP), R1
#ifndef GOARM64_LSE
	// Without guaranteed LSE, fall back to LL/SC when the CPU lacks it.
	MOVBU	internal∕cpu·ARM64+const_offsetARM64HasATOMICS(SB), R4
	CBZ	R4, load_store_loop
#endif
	LDADDALD	R1, (R0), R2	// R2 = old value
	ADD	R1, R2			// compute and return the new value
	MOVD	R2, ret+16(FP)
	RET
#ifndef GOARM64_LSE
load_store_loop:
	LDAXR	(R0), R2
	ADD	R2, R1, R2
	STLXR	R2, (R0), R3
	CBNZ	R3, load_store_loop	// retry if store-exclusive lost the reservation
	MOVD	R2, ret+16(FP)
	RET
#endif
| |
|
// func Xchgint32(ptr *int32, new int32) int32
TEXT ·Xchgint32(SB), NOSPLIT, $0-20
	B	·Xchg(SB)
| |
|
// func Xchgint64(ptr *int64, new int64) int64
TEXT ·Xchgint64(SB), NOSPLIT, $0-24
	B	·Xchg64(SB)
| |
|
// func Xchguintptr(ptr *uintptr, new uintptr) uintptr
// uintptr is 64-bit on arm64.
TEXT ·Xchguintptr(SB), NOSPLIT, $0-24
	B	·Xchg64(SB)
| |
|
// func And8(ptr *uint8, val uint8)
// Atomically: *ptr &= val (no return value).
TEXT ·And8(SB), NOSPLIT, $0-9
	MOVD	ptr+0(FP), R0
	MOVB	val+8(FP), R1
#ifndef GOARM64_LSE
	// Without guaranteed LSE, fall back to LL/SC when the CPU lacks it.
	MOVBU	internal∕cpu·ARM64+const_offsetARM64HasATOMICS(SB), R4
	CBZ	R4, load_store_loop
#endif
	MVN	R1, R2			// LDCLR clears set bits, so AND val == clear ^val
	LDCLRALB	R2, (R0), R3
	RET
#ifndef GOARM64_LSE
load_store_loop:
	LDAXRB	(R0), R2
	AND	R1, R2
	STLXRB	R2, (R0), R3
	CBNZ	R3, load_store_loop	// retry if store-exclusive lost the reservation
	RET
#endif
| |
|
// func Or8(ptr *uint8, val uint8)
// Atomically: *ptr |= val (no return value).
TEXT ·Or8(SB), NOSPLIT, $0-9
	MOVD	ptr+0(FP), R0
	MOVB	val+8(FP), R1
#ifndef GOARM64_LSE
	// Without guaranteed LSE, fall back to LL/SC when the CPU lacks it.
	MOVBU	internal∕cpu·ARM64+const_offsetARM64HasATOMICS(SB), R4
	CBZ	R4, load_store_loop
#endif
	LDORALB	R1, (R0), R2		// atomic byte OR, acquire+release
	RET
#ifndef GOARM64_LSE
load_store_loop:
	LDAXRB	(R0), R2
	ORR	R1, R2
	STLXRB	R2, (R0), R3
	CBNZ	R3, load_store_loop	// retry if store-exclusive lost the reservation
	RET
#endif
| |
|
// func And(addr *uint32, v uint32)
// Atomically: *addr &= v (no return value).
TEXT ·And(SB), NOSPLIT, $0-12
	MOVD	ptr+0(FP), R0
	MOVW	val+8(FP), R1
#ifndef GOARM64_LSE
	// Without guaranteed LSE, fall back to LL/SC when the CPU lacks it.
	MOVBU	internal∕cpu·ARM64+const_offsetARM64HasATOMICS(SB), R4
	CBZ	R4, load_store_loop
#endif
	MVN	R1, R2			// LDCLR clears set bits, so AND val == clear ^val
	LDCLRALW	R2, (R0), R3
	RET
#ifndef GOARM64_LSE
load_store_loop:
	LDAXRW	(R0), R2
	AND	R1, R2
	STLXRW	R2, (R0), R3
	CBNZ	R3, load_store_loop	// retry if store-exclusive lost the reservation
	RET
#endif
| |
|
// func Or(addr *uint32, v uint32)
// Atomically: *addr |= v (no return value).
TEXT ·Or(SB), NOSPLIT, $0-12
	MOVD	ptr+0(FP), R0
	MOVW	val+8(FP), R1
#ifndef GOARM64_LSE
	// Without guaranteed LSE, fall back to LL/SC when the CPU lacks it.
	MOVBU	internal∕cpu·ARM64+const_offsetARM64HasATOMICS(SB), R4
	CBZ	R4, load_store_loop
#endif
	LDORALW	R1, (R0), R2		// atomic word OR, acquire+release
	RET
#ifndef GOARM64_LSE
load_store_loop:
	LDAXRW	(R0), R2
	ORR	R1, R2
	STLXRW	R2, (R0), R3
	CBNZ	R3, load_store_loop	// retry if store-exclusive lost the reservation
	RET
#endif
| |
|
// func Or32(addr *uint32, v uint32) old uint32
// Atomically: *addr |= v; returns the old value.
TEXT ·Or32(SB), NOSPLIT, $0-20
	MOVD	ptr+0(FP), R0
	MOVW	val+8(FP), R1
#ifndef GOARM64_LSE
	// Without guaranteed LSE, fall back to LL/SC when the CPU lacks it.
	MOVBU	internal∕cpu·ARM64+const_offsetARM64HasATOMICS(SB), R4
	CBZ	R4, load_store_loop
#endif
	LDORALW	R1, (R0), R2		// R2 = old value
	MOVD	R2, ret+16(FP)
	RET
#ifndef GOARM64_LSE
load_store_loop:
	LDAXRW	(R0), R2
	ORR	R1, R2, R3
	STLXRW	R3, (R0), R4
	CBNZ	R4, load_store_loop	// retry if store-exclusive lost the reservation
	MOVD	R2, ret+16(FP)		// return the old value
	RET
#endif
| |
|
// func And32(addr *uint32, v uint32) old uint32
// Atomically: *addr &= v; returns the old value.
TEXT ·And32(SB), NOSPLIT, $0-20
	MOVD	ptr+0(FP), R0
	MOVW	val+8(FP), R1
#ifndef GOARM64_LSE
	// Without guaranteed LSE, fall back to LL/SC when the CPU lacks it.
	MOVBU	internal∕cpu·ARM64+const_offsetARM64HasATOMICS(SB), R4
	CBZ	R4, load_store_loop
#endif
	MVN	R1, R2			// LDCLR clears set bits, so AND val == clear ^val
	LDCLRALW	R2, (R0), R3	// R3 = old value
	MOVD	R3, ret+16(FP)
	RET
#ifndef GOARM64_LSE
load_store_loop:
	LDAXRW	(R0), R2
	AND	R1, R2, R3
	STLXRW	R3, (R0), R4
	CBNZ	R4, load_store_loop	// retry if store-exclusive lost the reservation
	MOVD	R2, ret+16(FP)		// return the old value
	RET
#endif
| |
|
// func Or64(addr *uint64, v uint64) old uint64
// Atomically: *addr |= v; returns the old value.
TEXT ·Or64(SB), NOSPLIT, $0-24
	MOVD	ptr+0(FP), R0
	MOVD	val+8(FP), R1
#ifndef GOARM64_LSE
	// Without guaranteed LSE, fall back to LL/SC when the CPU lacks it.
	MOVBU	internal∕cpu·ARM64+const_offsetARM64HasATOMICS(SB), R4
	CBZ	R4, load_store_loop
#endif
	LDORALD	R1, (R0), R2		// R2 = old value
	MOVD	R2, ret+16(FP)
	RET
#ifndef GOARM64_LSE
load_store_loop:
	LDAXR	(R0), R2
	ORR	R1, R2, R3
	STLXR	R3, (R0), R4
	CBNZ	R4, load_store_loop	// retry if store-exclusive lost the reservation
	MOVD	R2, ret+16(FP)		// return the old value
	RET
#endif
| |
|
// func And64(addr *uint64, v uint64) old uint64
// Atomically: *addr &= v; returns the old value.
TEXT ·And64(SB), NOSPLIT, $0-24
	MOVD	ptr+0(FP), R0
	MOVD	val+8(FP), R1
#ifndef GOARM64_LSE
	// Without guaranteed LSE, fall back to LL/SC when the CPU lacks it.
	MOVBU	internal∕cpu·ARM64+const_offsetARM64HasATOMICS(SB), R4
	CBZ	R4, load_store_loop
#endif
	MVN	R1, R2			// LDCLR clears set bits, so AND val == clear ^val
	LDCLRALD	R2, (R0), R3	// R3 = old value
	MOVD	R3, ret+16(FP)
	RET
#ifndef GOARM64_LSE
load_store_loop:
	LDAXR	(R0), R2
	AND	R1, R2, R3
	STLXR	R3, (R0), R4
	CBNZ	R4, load_store_loop	// retry if store-exclusive lost the reservation
	MOVD	R2, ret+16(FP)		// return the old value
	RET
#endif
| |
|
// func Anduintptr(addr *uintptr, v uintptr) old uintptr
// uintptr is 64-bit on arm64.
TEXT ·Anduintptr(SB), NOSPLIT, $0-24
	B	·And64(SB)
| |
|
// func Oruintptr(addr *uintptr, v uintptr) old uintptr
// uintptr is 64-bit on arm64.
TEXT ·Oruintptr(SB), NOSPLIT, $0-24
	B	·Or64(SB)
| |
|