//===--- FastEntryPoints.s - Swift Language Assembly Entry Points ABI -----===//
//
// This source file is part of the Swift.org open source project
//
// Copyright (c) 2014 - 2015 Apple Inc. and the Swift project authors
// Licensed under Apache License v2.0 with Runtime Library Exception
//
// See http://swift.org/LICENSE.txt for license information
// See http://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
//
//===----------------------------------------------------------------------===//
//
// Swift Language Assembly Entry Points ABI
//
//===----------------------------------------------------------------------===//

#include "swift/Runtime/FastEntryPoints.h"

#ifdef __x86_64__
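// Note: these macros use the Apple assembler's .macro syntax, in which
// $0, $1, ... name the macro arguments; here $0 is the symbol being defined.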
.macro BEGIN_FUNC
  .text
  .globl $0
  .align 4
$0:
  .cfi_startproc
.endmacro

.macro STATIC_FUNC
  .text
  .private_extern $0
  .align 4
$0:
.endmacro

.macro END_FUNC
  .cfi_endproc
.endmacro

#if SWIFT_HAVE_FAST_ENTRY_POINTS
// The custom Swift runtime ABI for x86_64 is as follows.
//
// Like normal function calls:
// 1) Arguments to the runtime are in the same registers as normal functions
// 2) Return values are in the same registers as normal functions
// 3) Non-integer registers are NOT preserved: floating point, MMX, SSE, AVX...
// 4) The direction flag is clear (the forward direction)
// 5) %r11 may be trashed by the callee
// 6) Condition flags may be trashed upon completion
//
// Unlike normal function calls:
// 1) The stack need not be aligned
// 2) No stack frame is needed in the caller
// 3) All integer registers other than %r11 and optional return registers are
//    preserved. In other words, if the entry point returns void, then %rax
//    and %rdx are preserved.
//
// This ABI has many advantages. In particular, it helps avoid many register
// spills and even makes unusual tail calls possible:
//
// _convertStringToNSSwiftString:
//   mov   %rdi, %rcx                    // back up %rdi without spilling
//   mov   $5, %rdi
//   call  swift_alloc
//   lea   NSSwiftStringClass(%rip), %r8
//   mov   %r8, (%rax)                   // vtable/isa
//   mov   $1, 8(%rax)                   // ref count
//   mov   %rcx, 16(%rax)                // owner
//   mov   %rsi, 24(%rax)                // base -- NOTE: it didn't need to be spilled
//   mov   %rdx, 32(%rax)                // len  -- NOTE: it didn't need to be spilled
//   mov   %rcx, %rdi                    // prepare to retain the Swift string
//   jmp   swift_retain                  // swift_retain returns void, therefore
//                                       // it preserves %rax, and therefore we
//                                       // can tail call
//                                       // NOTE: %rdi and %rax are NOT the same here
//
// MISC NOTES AND BUGS
//
// 11565357 ld should rewrite JMPs into fall-through NOPs when possible

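// SaveRegisters/RestoreRegisters bracket calls from these entry points into
// ordinary C code: they spill every integer register that the custom ABI
// obliges an entry point to preserve, and establish a frame so the stack can
// be realigned to 16 bytes for the standard calling convention.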
.macro SaveRegisters
  .cfi_startproc
  push  %rbp
  .cfi_def_cfa_offset 16
  .cfi_offset %rbp, -16
  mov   %rsp, %rbp
  .cfi_def_cfa_register %rbp
  // potentially align the stack
  and   $-16, %rsp
  push  %rax
  push  %rcx
  push  %rdx
  push  %rsi
  push  %rdi
  push  %r8
  push  %r9
  push  %r10
.endmacro

.macro RestoreRegisters
  pop   %r10
  pop   %r9
  pop   %r8
  pop   %rdi
  pop   %rsi
  pop   %rdx
  pop   %rcx
  pop   %rax
  // the stack may have been aligned, therefore LEAVE instead of POP %rbp
  leave
  .cfi_def_cfa rsp, 8
  .cfi_same_value rbp
.endmacro

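// swift_retain forwards to swift_retainAndReturnThree with the object pointer
// duplicated into %rsi, so the object also comes back in %rax. Per the FIXME
// below, the entry point is meant to return void eventually.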
// XXX FIXME -- We need to change this to return "void"
BEGIN_FUNC _swift_retain
  mov   %rdi, %rsi
  jmp   _swift_retainAndReturnThree
END_FUNC

#endif // SWIFT_HAVE_FAST_ENTRY_POINTS

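// Fast path walkthrough: a null object falls through to the plain return at
// label 1. RC_ATOMIC_BIT in the refcount word selects the atomic path at
// label 3, which uses a locked add; otherwise the refcount is bumped with a
// plain add. Either way, a carry (refcount overflow) traps at label 2 via
// int3. The extra (%rsi, %rdx, %rcx) arguments pass through untouched, with
// %rsi returned in %rax.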
// XXX FIXME -- hack until we have tinycc
// func swift_retainAndReturnThree(obj, (rsi, rdx, rcx)) -> (rax, rdx, rcx)
BEGIN_FUNC _swift_retainAndReturnThree
  test  %rdi, %rdi
  jz    1f
  testb $RC_ATOMIC_BIT, RC_OFFSET(%rdi)
  jnz   3f
  addl  $RC_INTERVAL, RC_OFFSET(%rdi)
  jc    2f
1:
  mov   %rsi, %rax
  ret
2:
  int3
3:
  lock
  addl  $RC_INTERVAL, RC_OFFSET(%rdi)
  jc    2b
  mov   %rsi, %rax
  ret
END_FUNC

#if SWIFT_HAVE_FAST_ENTRY_POINTS
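// Release walkthrough: a null object returns at label 1. The non-atomic path
// decrements the refcount in place: if the whole word goes to zero the object
// is dead (label 4), and a borrow (underflow) traps at label 3. The atomic
// path at label 2 has no locked subtract to lean on, so it does a locked xadd
// of -RC_INTERVAL and recomputes the new value in %r11d; if neither a count
// nor the deallocating bit remains, it too falls to label 4. There, a locked
// cmpxchg swaps a zero refcount word for RC_DEALLOCATING_BIT so that exactly
// one thread calls __swift_release_slow.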
BEGIN_FUNC _swift_release
  test  %rdi, %rdi
  jz    1f
  testb $RC_ATOMIC_BIT, RC_OFFSET(%rdi)
  jnz   2f
  subl  $RC_INTERVAL, RC_OFFSET(%rdi)
  jz    4f
  jc    3f
1:
  ret
2:
  // workaround lack of "xsub" instruction via xadd then sub
  movl  $-RC_INTERVAL, %r11d
  lock
  xaddl %r11d, RC_OFFSET(%rdi)
  sub   $RC_INTERVAL, %r11d
  jc    3f
  andl  $(RC_MASK + RC_DEALLOCATING_BIT), %r11d
  jz    4f
  ret
3:
  int3
4:
  SaveRegisters
  xor   %eax, %eax
  movl  $RC_DEALLOCATING_BIT, %edx
  lock cmpxchgl %edx, RC_OFFSET(%rdi)
  jne   5f
  call  __swift_release_slow
5:
  RestoreRegisters
  ret
END_FUNC

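// swift_dealloc pushes the freed object onto a per-thread free list kept in
// %gs-relative thread-local storage: the old list head is stored into the
// object's first word, and the object becomes the new head. %rsi indexes the
// list (presumably by allocation size class) relative to __swiftAllocOffset,
// and is restored before returning.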
BEGIN_FUNC _swift_dealloc
  add   __swiftAllocOffset(%rip), %rsi
  mov   %gs:(,%rsi,8), %r11
  mov   %r11, (%rdi)
  mov   %rdi, %gs:(,%rsi,8)
  sub   __swiftAllocOffset(%rip), %rsi
  ret
END_FUNC

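// ALLOC_FUNC stamps out an allocation entry point named $0. The fast path at
// label 1 pops the head of the matching %gs-relative free list and returns
// it. On a miss (label 2) it calls __swift_refillThreadAllocCache and, for
// the normal variant, unconditionally retries. The "try" variant
// ($2 == SWIFT_TRYALLOC) must tolerate a refill failure, so it retries only
// if the refill actually left something in the cache. ($1 appears unused in
// this revision.)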
.macro ALLOC_FUNC
BEGIN_FUNC $0
1:
  add   __swiftAllocOffset(%rip), %rdi
  mov   %gs:(,%rdi,8), %rax
  test  %rax, %rax
  je    2f
  mov   (%rax), %r11
  mov   %r11, %gs:(,%rdi,8)
  sub   __swiftAllocOffset(%rip), %rdi
  ret
2:
  sub   __swiftAllocOffset(%rip), %rdi
  SaveRegisters
  .if $2 == 0
  xor   %esi, %esi
  .else
  mov   $$$2, %esi
  .endif
  call  __swift_refillThreadAllocCache
  RestoreRegisters
  .if $2 == SWIFT_TRYALLOC
  mov   __swiftAllocOffset(%rip), %r11
  shl   $$3, %r11
  cmpq  $$0, %gs:(%r11,%rdi,8)
  jnz   1b
  ret
  .else
  jmp   1b
  .endif
END_FUNC
.endmacro

ALLOC_FUNC _swift_alloc, SWIFT_TSD_RAW_ALLOC_BASE, 0
ALLOC_FUNC _swift_tryAlloc, SWIFT_TSD_RAW_ALLOC_BASE, SWIFT_TRYALLOC

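// swift_tryRetain speculatively bumps the refcount with a locked xadd. If the
// old value had RC_DEALLOCATING_BIT set, the object is already dying: the
// increment is backed out at label 1 and null is returned. Otherwise the
// object pointer itself is returned; a carry (overflow) traps at label 3.
//
// A caller would branch on the result -- a sketch (not from the original
// source; the label name is hypothetical):
//   call  _swift_tryRetain
//   test  %rax, %rax
//   jz    retain_failed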
BEGIN_FUNC _swift_tryRetain
  test  %rdi, %rdi
  jz    2f
  movl  $RC_INTERVAL, %eax
  lock xaddl %eax, RC_OFFSET(%rdi)
  jc    3f
  testb $RC_DEALLOCATING_BIT, %al
  jnz   1f
  movq  %rdi, %rax
  ret
1:
  lock subl $RC_INTERVAL, RC_OFFSET(%rdi)
2:
  xor   %eax, %eax
  ret
3:
  int3
END_FUNC

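// The weak entry points mirror retain/release on the weak reference count at
// WRC_OFFSET. swift_weakRetain traps at label 2 on overflow. swift_weakRelease
// traps on underflow and, when the weak count hits zero, frees the memory via
// _swift_slowDealloc (label 3), passing 0 in %esi.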
BEGIN_FUNC _swift_weakRetain
  test  %rdi, %rdi
  jz    1f
  lock addl $WRC_INTERVAL, WRC_OFFSET(%rdi)
  jc    2f
1:
  ret
2:
  int3
END_FUNC

BEGIN_FUNC _swift_weakRelease
  test  %rdi, %rdi
  jz    1f
  lock subl $WRC_INTERVAL, WRC_OFFSET(%rdi)
  jz    3f
  jc    2f
1:
  ret
2:
  int3
3:
  SaveRegisters
  xor   %esi, %esi
  call  _swift_slowDealloc
  RestoreRegisters
  ret
END_FUNC

#endif // SWIFT_HAVE_FAST_ENTRY_POINTS
#endif // __x86_64__

.subsections_via_symbols