[Backtracing] Add a caching wrapper for MemoryReader.

Currently we read many small chunks of memory from the process we're
backtracing. When that's the local process, this isn't much of a
problem, but for a remote process, especially on Linux where every read
has to go through the memory server, it is likely to be slow.

Fix by adding a caching layer.

rdar://117681625
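
For illustration only, here's a minimal sketch of what such a caching layer
could look like, assuming a reader-style type whose fetch(from:into:) fills
an UnsafeMutableRawBufferPointer as in the diff below. The CachingMemoryReader
and MemoryReaderLike names, the fixed 4 KiB page size and the unbounded
dictionary cache are assumptions made for the sketch, not necessarily what
this commit implements.

import Swift

// Hypothetical stand-in for the real reader protocol; the actual protocol
// in the Backtracing module has more requirements than this.
protocol MemoryReaderLike {
  func fetch(from addr: UInt64,
             into buffer: UnsafeMutableRawBufferPointer) throws
}

// Sketch of a caching wrapper: reads are satisfied page-by-page, and each
// page is fetched from the underlying reader at most once.
final class CachingMemoryReader<Reader: MemoryReaderLike>: MemoryReaderLike {
  private let underlying: Reader
  private let pageSize: UInt64 = 4096
  private var cache: [UInt64: [UInt8]] = [:]   // page base address -> bytes

  init(wrapping reader: Reader) {
    self.underlying = reader
  }

  // Fetch (or reuse) the page containing `base`; a real implementation would
  // also bound the cache size and cope with partially readable pages.
  private func page(at base: UInt64) throws -> [UInt8] {
    if let cached = cache[base] {
      return cached
    }
    var page = [UInt8](repeating: 0, count: Int(pageSize))
    try page.withUnsafeMutableBytes { raw in
      try underlying.fetch(from: base, into: raw)
    }
    cache[base] = page
    return page
  }

  func fetch(from addr: UInt64,
             into buffer: UnsafeMutableRawBufferPointer) throws {
    var remaining = buffer.count
    var srcAddr = addr
    var dstOffset = 0
    while remaining > 0 {
      let base = srcAddr & ~(pageSize - 1)
      let offset = Int(srcAddr - base)
      let chunk = min(remaining, Int(pageSize) - offset)
      let page = try self.page(at: base)
      page.withUnsafeBytes { src in
        // Copy the relevant slice of the cached page into the caller's buffer.
        let dst = UnsafeMutableRawBufferPointer(
          rebasing: buffer[dstOffset ..< dstOffset + chunk])
        dst.copyBytes(from: src[offset ..< offset + chunk])
      }
      srcAddr += UInt64(chunk)
      dstOffset += chunk
      remaining -= chunk
    }
  }
}

The point of rounding every read up to a whole page is that one remote round
trip can then service many subsequent small reads, which is exactly the
access pattern described above.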
Alastair Houghton
2023-10-30 12:19:01 +00:00
parent 330085e9b8
commit 1fc2ca35e8
10 changed files with 162 additions and 66 deletions

ArrayImageSource.swift

@@ -16,8 +16,6 @@
import Swift
@_implementationOnly import OS.Libc
enum ArrayImageSourceError: Error {
case outOfBoundsRead(UInt64, UInt64)
}
@@ -35,16 +33,16 @@ struct ArrayImageSource<T>: ImageSource {
return Bounds(base: 0, size: Size(array.count * MemoryLayout<T>.stride))
}
public func fetch<U>(from addr: Address,
into buffer: UnsafeMutableBufferPointer<U>) throws {
public func fetch(from addr: Address,
into buffer: UnsafeMutableRawBufferPointer) throws {
try array.withUnsafeBytes{
let size = Size($0.count)
let requested = Size(buffer.count * MemoryLayout<U>.stride)
let requested = Size(buffer.count)
if addr > size || requested > size - addr {
throw ArrayImageSourceError.outOfBoundsRead(addr, requested)
}
memcpy(buffer.baseAddress!, $0.baseAddress! + Int(addr), Int(requested))
buffer.copyBytes(from: $0[Int(addr)...])
}
}
}
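
To make the shape of the new API concrete, here is a self-contained sketch of
the same pattern together with a caller: the source hands back raw bytes and
the caller reinterprets them. MiniArraySource, its error enum and the demo()
function are illustrative names invented for this example, not types from the
commit.

import Swift

// Self-contained mini version of the new, non-generic fetch shape: the
// caller supplies a raw buffer and the source copies bytes into it.
enum MiniArraySourceError: Error {
  case outOfBoundsRead(UInt64, UInt64)
}

struct MiniArraySource<T> {
  var array: [T]

  func fetch(from addr: UInt64,
             into buffer: UnsafeMutableRawBufferPointer) throws {
    try array.withUnsafeBytes {
      let size = UInt64($0.count)
      let requested = UInt64(buffer.count)
      if addr > size || requested > size - addr {
        throw MiniArraySourceError.outOfBoundsRead(addr, requested)
      }
      buffer.copyBytes(from: $0[Int(addr) ..< Int(addr + requested)])
    }
  }
}

// Usage: read one UInt32 at byte offset 4 by handing fetch a raw view of
// the destination value.
func demo() throws -> UInt32 {
  let source = MiniArraySource(array: [UInt32](repeating: 0xdeadbeef, count: 4))
  var word: UInt32 = 0
  try withUnsafeMutableBytes(of: &word) { raw in
    try source.fetch(from: 4, into: raw)
  }
  return word
}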