Files
swift-mirror/test/AutoDiff/e2e_optimizations.swift
Anton Korobeynikov a5e83817b2 Reapply "Implement several peephole optimizations to unblock further optimizations of autodiff code" with correctness fix (#62012)
* Implement several peephole optimizations to unblock further optimizations of autodiff code

1. Simplify differentiable_function_extract of differentiable_function.
Before:
%x = differentiable_function(%orig, %jvp, %vjp)
%y = differentiable_function_extract [original] %x
After:
%y = %orig

2. Push conversion instructions inside of differentiable_function.
This unblocks inlining and specialization.
Before:
%x = differentiable_function(%orig, %jvp, %vjp)
%y = convert_escape_to_noescape %x
After:
%orig' = convert_escape_to_noescape %orig
%jvp' = convert_escape_to_noescape %jvp
%vjp' = convert_escape_to_noescape %vjp
%y = differentiable_function(%orig', %jvp', %vjp')

3. Another peephole is needed for reordering function conversion instructions to enable full inlining:
(convert_escape_to_noescape (convert_function (thin_to_thick_function x)))
=>
(convert_escape_to_noescape (thin_to_thick_function (convert_function x)))

Co-authored-by: Dan Zheng <danielzheng@google.com>
2022-11-16 23:21:27 -08:00

120 lines
4.3 KiB
Swift

// RUN: %target-swift-frontend -emit-sil -O %s | %FileCheck %s
// REQUIRES: swift_in_compiler
import _Differentiation
/// Generic identity sink: forces the optimizer to keep its argument alive
/// without otherwise perturbing the code under test.
/// `@_silgen_name("blackHole")` pins a stable SIL symbol so the CHECK lines
/// below can match the specialized call (`@$s9blackHoleSf_Tg5`), and
/// `@inline(never)` guarantees exactly one residual `apply` remains.
@_silgen_name("blackHole")
@inline(never)
@discardableResult
func blackHole<T>(_ x: T) -> T { x }
/// Straight-line differentiable function exercising each basic Float
/// arithmetic operator (*, +, -, /) exactly once, so the corresponding
/// derivative calls must all be peephole-simplified and inlined away.
func float(_ x0: Float) -> Float {
let x1 = x0 * x0
let x2 = x1 + x1
let x3 = x2 - x1
let x4 = x3 / x2
return x4
}
/// Entry point for the straight-line case. `@_silgen_name` pins the SIL
/// symbol matched by the CHECK-LABEL below; the CHECK-NOT lines assert that
/// every `apply` from `gradient(at:of:)` is fully inlined, leaving only the
/// deliberate call to `blackHole`.
@_silgen_name("test_gradient_float")
func test_gradient_float() {
blackHole(gradient(at: 10, of: float))
}
// Check that `apply`s are fully inlined.
// CHECK-LABEL: sil hidden @test_gradient_float : $@convention(thin) () -> ()
// CHECK-NOT: apply
// CHECK: [[GRADIENT_RESULT:%.*]] = struct $Float ({{.*}} : $Builtin.FPIEEE32)
// CHECK: [[FN_REF:%.*]] = function_ref @$s9blackHoleSf_Tg5 : $@convention(thin) (Float) -> Float
// CHECK-NEXT: apply [[FN_REF:%.*]]([[GRADIENT_RESULT]])
// CHECK-NOT: apply
// CHECK-LABEL: } // end sil function 'test_gradient_float'
/// Same arithmetic coverage as `float`, but written with an explicit `var`
/// and repeated reassignment — this exercises the mutation path of the
/// differentiation transform (in-place accumulators instead of SSA lets).
func float_mutation(_ x: Float) -> Float {
var result = x * x
result = result + result
result = result - x
result = result / x
return result
}
/// Entry point for the mutation case; the CHECK lines below assert full
/// inlining of the gradient computation, with only the intentional
/// `blackHole` apply surviving.
@_silgen_name("test_gradient_float_mutation")
func test_gradient_float_mutation() {
blackHole(gradient(at: 10, of: float_mutation))
}
// Check that `apply`s are fully inlined.
// CHECK-LABEL: sil hidden @test_gradient_float_mutation : $@convention(thin) () -> ()
// CHECK-NOT: apply
// CHECK: [[GRADIENT_RESULT:%.*]] = struct $Float ({{.*}} : $Builtin.FPIEEE32)
// CHECK: [[FN_REF:%.*]] = function_ref @$s9blackHoleSf_Tg5 : $@convention(thin) (Float) -> Float
// CHECK-NEXT: apply [[FN_REF:%.*]]([[GRADIENT_RESULT]])
// CHECK-NOT: apply
// CHECK-LABEL: } // end sil function 'test_gradient_float_mutation'
/// Branching variant: the if/else forces the differentiation transform to
/// produce a pullback with control flow (branch-tracing), which must still
/// be fully inlined by the optimizer.
func float_conditional(_ x: Float, _ bool: Bool) -> Float {
var result = x * x
if bool {
result = result + result
result = result - x
} else {
result = result / x
}
return result
}
/// Entry point for the conditional case. The closure fixes the Bool to
/// `true` so `gradient` sees a unary `(Float) -> Float`; the CHECK lines
/// below assert the whole computation still inlines to a constant struct.
@_silgen_name("test_gradient_float_conditional")
func test_gradient_float_conditional() {
blackHole(gradient(at: 10, of: { float_conditional($0, true) }))
}
// Check that `apply`s are fully inlined.
// CHECK-LABEL: sil hidden @test_gradient_float_conditional : $@convention(thin) () -> ()
// CHECK-NOT: apply
// CHECK: [[GRADIENT_RESULT:%.*]] = struct $Float ({{.*}} : $Builtin.FPIEEE32)
// CHECK: [[FN_REF:%.*]] = function_ref @$s9blackHoleSf_Tg5 : $@convention(thin) (Float) -> Float
// CHECK-NEXT: apply [[FN_REF:%.*]]([[GRADIENT_RESULT]])
// CHECK-NOT: apply
// CHECK-LABEL: } // end sil function 'test_gradient_float_conditional'
/// Loop variant: differentiating through a loop produces a pullback with a
/// loop, which (per the CHECK comments below) is currently NOT inlined.
/// Note the numeric result is always 0 (`result` starts at 0 and is only
/// multiplied) — the test checks SIL structure, not values.
func float_loop(_ x: Float, count: Int) -> Float {
var result: Float = 0
for _ in 0..<count {
result = result * x
}
return result
}
/// Entry point for the loop case. The closure binds `count: 10` to get a
/// unary function; its mangled name (`...yyFS2fcfU_TJrSpSr`) is matched by
/// the CHECK at the VJP function_ref below, so the closure's shape here is
/// load-bearing for the test.
@_silgen_name("test_gradient_float_loop")
func test_gradient_float_loop() {
blackHole(gradient(at: 10, of: { float_loop($0, count: 10) }))
}
// Check whether `apply`s are inlined.
// Currently, the VJP is inlined but the pullback is not.
// CHECK-LABEL: sil hidden @test_gradient_float_loop : $@convention(thin) () -> ()
// CHECK: [[PB_FN_REF:%.*]] = function_ref @{{.*}}24test_gradient_float_loopyyFS2fcfU_TJrSpSr : $@convention(thin) (Float) -> (Float, @owned @callee_guaranteed (Float) -> Float)
// CHECK: [[GRADIENT_RESULT:%.*]] = apply [[PB_FN_REF]]
// CHECK: [[EXTRACT:%.*]] = tuple_extract [[GRADIENT_RESULT]]
// CHECK: [[GRADIENT_RESULT2:%.*]] = apply [[EXTRACT]]
// CHECK: [[FN_REF:%.*]] = function_ref @$s9blackHoleSf_Tg5 : $@convention(thin) (Float) -> Float
// CHECK-NEXT: apply [[FN_REF:%.*]]([[GRADIENT_RESULT2]])
// CHECK-NOT: apply
// CHECK-LABEL: } // end sil function 'test_gradient_float_loop'
/// Array-summing loop: differentiates subscript accumulation over [Float].
/// `withoutDerivative(at:)` wraps `array.indices` so the index range itself
/// is excluded from differentiation (only the element reads contribute).
/// Per the CHECK comments below, the VJP for this case is currently not
/// inlined.
func array_loop(_ array: [Float]) -> Float {
var result: Float = 0
for i in withoutDerivative(at: array.indices) {
result += array[i]
}
return result
}
/// Entry point for the array case; the CHECK lines below only verify that
/// the (non-inlined) VJP symbol is referenced and applied — they do not
/// assert full inlining, unlike the scalar tests above.
@_silgen_name("test_gradient_array_loop")
func test_gradient_array_loop() {
blackHole(gradient(at: [3, 4, 5], of: array_loop))
}
// Check whether `apply`s are inlined.
// Currently, the VJP is not inlined.
// CHECK-LABEL: sil hidden @test_gradient_array_loop : $@convention(thin) () -> ()
// CHECK: [[VJP_FN_REF:%.*]] = function_ref @{{.*}}10array_loopySfSaySfGFTJrSpSr : $@convention(thin) (@guaranteed Array<Float>) -> (Float, @owned @callee_guaranteed (Float) -> @owned Array<Float>.DifferentiableView)
// CHECK: [[VJP_RESULT:%.*]] = apply [[VJP_FN_REF]]
// CHECK-LABEL: } // end sil function 'test_gradient_array_loop'