content
stringlengths
1
103k
path
stringlengths
8
216
filename
stringlengths
2
179
language
stringclasses
15 values
size_bytes
int64
2
189k
quality_score
float64
0.5
0.95
complexity
float64
0
1
documentation_ratio
float64
0
1
repository
stringclasses
5 values
stars
int64
0
1k
created_date
stringdate
2023-07-10 19:21:08
2025-07-09 19:11:45
license
stringclasses
4 values
is_test
bool
2 classes
file_hash
stringlengths
32
32
//===--- ByteSwapping.swift - Utilities for byte swapping -----------------===//\n//\n// This source file is part of the Swift.org open source project\n//\n// Copyright (c) 2023 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n//\n// Defines a ByteSwappable protocol that types can implement to indicate that\n// they are able to perform byte swap operations.\n//\n// Mostly the types that implement this should be defined in C.\n//\n//===----------------------------------------------------------------------===//\n\nimport Swift\n\nprotocol ByteSwappable {\n var byteSwapped: Self { get }\n var bigEndian: Self { get }\n var littleEndian: Self { get }\n\n init(bigEndian: Self)\n init(littleEndian: Self)\n}\n\nextension ByteSwappable {\n init(bigEndian value: Self) {\n#if _endian(big)\n self = value\n#else\n self = value.byteSwapped\n#endif\n }\n\n init(littleEndian value: Self) {\n#if _endian(little)\n self = value\n#else\n self = value.byteSwapped\n#endif\n }\n\n var littleEndian: Self {\n#if _endian(little)\n return self\n#else\n return self.byteSwapped\n#endif\n }\n\n var bigEndian: Self {\n#if _endian(big)\n return self\n#else\n return self.byteSwapped\n#endif\n }\n}\n\nextension Array where Self.Element: ByteSwappable {\n mutating func swapBytes() {\n for n in 0..<self.count {\n self[n] = self[n].byteSwapped\n }\n }\n}\n\nextension UnsafeMutableBufferPointer where Self.Element: ByteSwappable {\n func swapBytes() {\n for n in 0..<self.count {\n self[n] = self[n].byteSwapped\n }\n }\n}\n\nextension Array where Self.Element: FixedWidthInteger {\n mutating func swapBytes() {\n for n in 0..<self.count {\n self[n] = self[n].byteSwapped\n }\n }\n}\n\nextension 
UnsafeMutableBufferPointer where Self.Element: FixedWidthInteger {\n func swapBytes() {\n for n in 0..<self.count {\n self[n] = self[n].byteSwapped\n }\n }\n}\n
dataset_sample\swift\swift\cpp_apple_swift_stdlib_public_RuntimeModule_ByteSwapping.swift
cpp_apple_swift_stdlib_public_RuntimeModule_ByteSwapping.swift
Swift
2,131
0.95
0.115789
0.357143
react-lib
912
2024-07-22T17:05:02.874877
MIT
false
2a2845451fded3d45f653c0112fb3369
//===--- CachingMemoryReader.swift ----------------------------*- swift -*-===//\n//\n// This source file is part of the Swift.org open source project\n//\n// Copyright (c) 2023 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n//\n// Wraps a MemoryReader in a layer that caches memory pages.\n//\n//===----------------------------------------------------------------------===//\n\nimport Swift\n\n// The size of the pages in the page cache (must be a power of 2)\nfileprivate let pageSize = 4096\nfileprivate let pageMask = pageSize - 1\n\n// The largest chunk we will try to cache data for\nfileprivate let maxCachedSize = pageSize * 8\n\n@_spi(MemoryReaders)\npublic class CachingMemoryReader<Reader: MemoryReader>: MemoryReader {\n private var reader: Reader\n private var cache: [Address:UnsafeRawBufferPointer]\n\n public init(for reader: Reader) {\n self.reader = reader\n self.cache = [:]\n }\n\n deinit {\n for (_, page) in cache {\n page.deallocate()\n }\n }\n\n func getPage(at address: Address) throws -> UnsafeRawBufferPointer {\n precondition((address & Address(pageMask)) == 0)\n\n if let page = cache[address] {\n return page\n }\n\n let page = UnsafeMutableRawBufferPointer.allocate(byteCount: pageSize,\n alignment: pageSize)\n try reader.fetch(from: address, into: page)\n\n let result = UnsafeRawBufferPointer(page)\n\n cache[address] = result\n\n return result\n }\n\n public func fetch(from address: Address,\n into buffer: UnsafeMutableRawBufferPointer) throws {\n guard buffer.count <= maxCachedSize else {\n try reader.fetch(from: address, into: buffer)\n return\n }\n\n var pageAddress = address & ~Address(pageMask)\n var done = 0\n var offset = Int(address - pageAddress)\n 
var remaining = buffer.count\n\n while remaining > 0 {\n let page = try getPage(at: pageAddress)\n let maxBytes = pageSize - offset\n let chunk = min(remaining, maxBytes)\n\n buffer[done..<done+chunk].copyBytes(from: page[offset..<offset+chunk])\n\n offset = 0\n done += chunk\n remaining -= chunk\n pageAddress += Address(pageSize)\n }\n }\n}\n\n#if os(Linux)\n@_spi(MemoryReaders)\npublic typealias MemserverMemoryReader\n = CachingMemoryReader<UncachedMemserverMemoryReader>\n\nextension CachingMemoryReader where Reader == UncachedMemserverMemoryReader {\n convenience public init(fd: CInt) {\n self.init(for: UncachedMemserverMemoryReader(fd: fd))\n }\n}\n#endif\n\n#if os(Linux) || os(macOS)\n@_spi(MemoryReaders)\npublic typealias RemoteMemoryReader = CachingMemoryReader<UncachedRemoteMemoryReader>\n\nextension CachingMemoryReader where Reader == UncachedRemoteMemoryReader {\n #if os(macOS)\n convenience public init(task: Any) {\n self.init(for: UncachedRemoteMemoryReader(task: task))\n }\n #elseif os(Linux)\n convenience public init(pid: Any) {\n self.init(for: UncachedRemoteMemoryReader(pid: pid))\n }\n #endif\n}\n\n@_spi(MemoryReaders)\npublic typealias LocalMemoryReader = CachingMemoryReader<UncachedLocalMemoryReader>\n\nextension CachingMemoryReader where Reader == UncachedLocalMemoryReader {\n convenience public init() {\n self.init(for: UncachedLocalMemoryReader())\n }\n}\n#endif // os(Linux) || os(macOS)\n
dataset_sample\swift\swift\cpp_apple_swift_stdlib_public_RuntimeModule_CachingMemoryReader.swift
cpp_apple_swift_stdlib_public_RuntimeModule_CachingMemoryReader.swift
Swift
3,535
0.95
0.154472
0.24
react-lib
840
2024-06-01T20:01:24.849218
GPL-3.0
false
78dbef37444fa2d15ad6093838e7edea
//===--- CompactBacktrace.swift -------------------------------*- swift -*-===//\n//\n// This source file is part of the Swift.org open source project\n//\n// Copyright (c) 2024 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n//\n// Definitions for Compact Backtrace Format\n//\n//===----------------------------------------------------------------------===//\n\nimport Swift\n\nenum CompactBacktraceFormat {\n /// Tells us what size of machine words were used when generating the\n /// backtrace.\n enum WordSize: UInt8 {\n case sixteenBit = 0\n case thirtyTwoBit = 1\n case sixtyFourBit = 2\n }\n\n // Instruction encodings\n struct Instruction: RawRepresentable {\n typealias RawValue = UInt8\n\n private(set) var rawValue: UInt8\n\n init?(rawValue: Self.RawValue) {\n self.rawValue = rawValue\n }\n\n static let end = Instruction(rawValue: 0b00000000)!\n static let trunc = Instruction(rawValue: 0b00000001)!\n\n static let pc_first = Instruction(rawValue: 0b00010000)!\n static let pc_last = Instruction(rawValue: 0b00011111)!\n static let ra_first = Instruction(rawValue: 0b00100000)!\n static let ra_last = Instruction(rawValue: 0b00101111)!\n static let async_first = Instruction(rawValue: 0b00110000)!\n static let async_last = Instruction(rawValue: 0b00111111)!\n\n static let omit_first = Instruction(rawValue: 0b01000000)!\n static let omit_last = Instruction(rawValue: 0b01111111)!\n\n static let rep_first = Instruction(rawValue: 0b10000000)!\n static let rep_last = Instruction(rawValue: 0b10001111)!\n\n private static func addressInstr(\n _ code: UInt8, _ absolute: Bool, _ count: Int\n ) -> Instruction {\n return Instruction(rawValue: code\n | (absolute ? 
0b00001000 : 0)\n | UInt8(count - 1))!\n }\n\n static func pc(absolute: Bool, count: Int) -> Instruction {\n return addressInstr(0b00010000, absolute, count)\n }\n static func ra(absolute: Bool, count: Int) -> Instruction {\n return addressInstr(0b00100000, absolute, count)\n }\n static func `async`(absolute: Bool, count: Int) -> Instruction {\n return addressInstr(0b00110000, absolute, count)\n }\n\n static func omit(external: Bool, count: Int) -> Instruction {\n return Instruction(rawValue: 0b01000000\n | (external ? 0b00100000 : 0)\n | UInt8(count - 1))!\n }\n\n static func rep(external: Bool, count: Int) -> Instruction {\n return Instruction(rawValue: 0b10000000\n | (external ? 0b00001000 : 0)\n | UInt8(count - 1))!\n }\n }\n\n // Represents a decoded instruction\n enum DecodedInstruction {\n case end\n case trunc\n case pc(absolute: Bool, count: Int)\n case ra(absolute: Bool, count: Int)\n case `async`(absolute: Bool, count: Int)\n case omit(external: Bool, count: Int)\n case rep(external: Bool, count: Int)\n }\n\n\n /// Accumulates bytes until the end of a Compact Backtrace Format\n /// sequence is detected.\n public struct Accumulator<S: Sequence<UInt8>>: Sequence {\n public typealias Element = UInt8\n typealias Source = S\n\n private var source: S\n\n public init(_ source: S) {\n self.source = source\n }\n\n public func makeIterator() -> Iterator {\n return Iterator(source.makeIterator())\n }\n\n public struct Iterator: IteratorProtocol {\n var iterator: Source.Iterator?\n\n enum State {\n case infoByte\n case instruction\n case argumentData(Int)\n }\n\n var state: State\n\n init(_ iterator: Source.Iterator?) {\n self.iterator = iterator\n self.state = .infoByte\n }\n\n private mutating func finished() {\n iterator = nil\n }\n\n private mutating func fail() {\n iterator = nil\n }\n\n public mutating func next() -> UInt8? 
{\n if iterator == nil {\n return nil\n }\n\n switch state {\n case .infoByte:\n guard let infoByte = iterator!.next() else {\n fail()\n return nil\n }\n let version = infoByte >> 2\n guard let _ = WordSize(rawValue: infoByte & 0x3) else {\n fail()\n return nil\n }\n guard version == 0 else {\n fail()\n return nil\n }\n\n state = .instruction\n\n return infoByte\n\n case .instruction:\n guard let instr = iterator!.next() else {\n finished()\n return nil\n }\n\n guard let decoded = Instruction(rawValue: instr)?.decoded() else {\n fail()\n return nil\n }\n\n switch decoded {\n case .end, .trunc:\n finished()\n return instr\n case let .pc(_, count), let .ra(_, count), let .async(_, count):\n state = .argumentData(count)\n return instr\n case let .omit(external, count), let .rep(external, count):\n if external {\n state = .argumentData(count)\n }\n return instr\n }\n\n case let .argumentData(count):\n guard let byte = iterator!.next() else {\n fail()\n return nil\n }\n\n let newCount = count - 1\n if newCount == 0 {\n state = .instruction\n } else {\n state = .argumentData(newCount)\n }\n\n return byte\n }\n }\n }\n }\n\n /// Adapts a Sequence containing Compact Backtrace Format data into a\n /// Sequence of `Backtrace.Frame`s.\n struct Decoder<S: Sequence<UInt8>>: Sequence {\n typealias Frame = Backtrace.Frame\n typealias Address = Backtrace.Address\n typealias Storage = S\n\n private var storage: Storage\n\n init(_ storage: S) {\n self.storage = storage\n }\n\n public func makeIterator() -> Iterator {\n var iterator = storage.makeIterator()\n guard let infoByte = iterator.next() else {\n return Iterator(nil, .sixtyFourBit)\n }\n let version = infoByte >> 2\n guard let size = WordSize(rawValue: infoByte & 0x3) else {\n return Iterator(nil, .sixtyFourBit)\n }\n guard version == 0 else {\n return Iterator(nil, .sixtyFourBit)\n }\n return Iterator(iterator, size)\n }\n\n struct Iterator: IteratorProtocol {\n var iterator: Storage.Iterator?\n let wordSize: WordSize\n let 
wordMask: UInt64\n var lastAddress: UInt64\n var lastFrame: Backtrace.Frame?\n var repeatCount: Int = 0\n\n init(_ iterator: Storage.Iterator?, _ size: WordSize) {\n self.iterator = iterator\n self.wordSize = size\n\n switch size {\n case .sixteenBit:\n self.wordMask = 0xff00\n case .thirtyTwoBit:\n self.wordMask = 0xffffff00\n case .sixtyFourBit:\n self.wordMask = 0xffffffffffffff00\n }\n\n self.lastAddress = 0\n }\n\n private mutating func decodeAddress(\n _ absolute: Bool, _ count: Int\n ) -> Address? {\n var word: UInt64\n guard let firstByte = iterator!.next() else {\n return nil\n }\n if (firstByte & 0x80) != 0 {\n word = wordMask | UInt64(firstByte)\n } else {\n word = UInt64(firstByte)\n }\n for _ in 1..<count {\n guard let byte = iterator!.next() else {\n return nil\n }\n word = (word << 8) | UInt64(byte)\n }\n\n if absolute {\n lastAddress = word\n } else {\n lastAddress = lastAddress &+ word\n }\n\n switch wordSize {\n case .sixteenBit:\n return Address(UInt16(truncatingIfNeeded: lastAddress))\n case .thirtyTwoBit:\n return Address(UInt32(truncatingIfNeeded: lastAddress))\n case .sixtyFourBit:\n return Address(UInt64(truncatingIfNeeded: lastAddress))\n }\n }\n\n private mutating func decodeWord(\n _ count: Int\n ) -> Int? {\n var word: Int = 0\n for _ in 0..<count {\n guard let byte = iterator!.next() else {\n return nil\n }\n word = (word << 8) | Int(byte)\n }\n return word\n }\n\n private mutating func finished() {\n iterator = nil\n }\n\n private mutating func fail() {\n iterator = nil\n }\n\n // Note: If we hit an error while decoding, we will return .truncated.\n\n public mutating func next() -> Backtrace.Frame? 
{\n if repeatCount > 0 {\n repeatCount -= 1\n return lastFrame\n }\n\n if iterator == nil {\n return nil\n }\n\n guard let instr = iterator!.next() else {\n finished()\n return .truncated\n }\n\n guard let decoded = Instruction(rawValue: instr)?.decoded() else {\n fail()\n return .truncated\n }\n\n let result: Backtrace.Frame\n switch decoded {\n case .end:\n finished()\n return nil\n case .trunc:\n finished()\n return .truncated\n case let .pc(absolute, count):\n guard let addr = decodeAddress(absolute, count) else {\n finished()\n return .truncated\n }\n result = .programCounter(addr)\n case let .ra(absolute, count):\n guard let addr = decodeAddress(absolute, count) else {\n finished()\n return .truncated\n }\n result = .returnAddress(addr)\n case let .async(absolute, count):\n guard let addr = decodeAddress(absolute, count) else {\n finished()\n return .truncated\n }\n result = .asyncResumePoint(addr)\n case let .omit(external, count):\n if !external {\n result = .omittedFrames(count)\n } else {\n guard let word = decodeWord(count) else {\n finished()\n return .truncated\n }\n result = .omittedFrames(word)\n }\n case let .rep(external, count):\n if lastFrame == nil {\n finished()\n return .truncated\n }\n if !external {\n repeatCount = count - 1\n } else {\n guard let word = decodeWord(count) else {\n finished()\n return .truncated\n }\n repeatCount = word - 1\n }\n result = lastFrame!\n }\n\n lastFrame = result\n\n return result\n }\n }\n\n }\n\n /// Adapts a Sequence of RichFrames into a sequence containing Compact\n /// Backtrace Format data.\n struct Encoder<A: FixedWidthInteger, S: Sequence<RichFrame<A>>>: Sequence {\n typealias Element = UInt8\n typealias Frame = Backtrace.Frame\n typealias SourceFrame = RichFrame<A>\n typealias Address = A\n typealias Source = S\n\n private var source: Source\n\n init(_ source: Source) {\n self.source = source\n }\n\n public func makeIterator() -> Iterator {\n return Iterator(source.makeIterator())\n }\n\n struct 
Iterator: IteratorProtocol {\n var iterator: Source.Iterator\n var lastAddress: Address = 0\n\n enum State {\n case start\n case ready\n case emittingBytes(Int, SourceFrame?)\n case stashedFrame(SourceFrame)\n case done\n }\n var bytes = EightByteBuffer()\n var state: State = .start\n var lastFrame: SourceFrame? = nil\n\n init(_ iterator: Source.Iterator) {\n self.iterator = iterator\n }\n\n /// Set up to emit the bytes of `address`, returning the number of bytes\n /// we will need to emit\n private mutating func emitNext(\n address: Address\n ) -> (absolute: Bool, count: Int) {\n let delta = address &- lastAddress\n\n let absCount: Int\n if address & (1 << (Address.bitWidth - 1)) != 0 {\n let ones = ((~address).leadingZeroBitCount - 1) >> 3\n absCount = (Address.bitWidth >> 3) - ones\n } else {\n let zeroes = (address.leadingZeroBitCount - 1) >> 3\n absCount = (Address.bitWidth >> 3) - zeroes\n }\n\n let deltaCount: Int\n if delta & (1 << (Address.bitWidth - 1)) != 0 {\n let ones = ((~delta).leadingZeroBitCount - 1) >> 3\n deltaCount = (Address.bitWidth >> 3) - ones\n } else {\n let zeroes = (delta.leadingZeroBitCount - 1) >> 3\n deltaCount = (Address.bitWidth >> 3) - zeroes\n }\n\n lastAddress = address\n\n if absCount < deltaCount {\n bytes = EightByteBuffer(address)\n state = .emittingBytes(8 - absCount, nil)\n return (absolute: true, count: absCount)\n } else {\n bytes = EightByteBuffer(delta)\n state = .emittingBytes(8 - deltaCount, nil)\n return (absolute: false, count: deltaCount)\n }\n }\n\n /// Set up to emit the bytes of `count`, returning the number of bytes\n /// we will need to emit\n private mutating func emitNext(\n externalCount count: Int\n ) -> Int {\n let ucount = UInt64(count)\n let zeroes = ucount.leadingZeroBitCount >> 3\n let byteCount = 8 - zeroes\n bytes = EightByteBuffer(ucount)\n state = .emittingBytes(zeroes, nil)\n return byteCount\n }\n\n private mutating func emitNext(\n frame: SourceFrame?,\n externalCount count: Int? 
= nil\n ) -> Int {\n if let count {\n let ucount = UInt64(count)\n let zeroes = ucount.leadingZeroBitCount >> 3\n let byteCount = 8 - zeroes\n bytes = EightByteBuffer(ucount)\n state = .emittingBytes(zeroes, frame)\n return byteCount\n } else if let frame {\n state = .stashedFrame(frame)\n } else {\n state = .ready\n }\n return 0\n }\n\n private mutating func emit(frame: SourceFrame) -> UInt8 {\n lastFrame = frame\n\n switch frame {\n case let .programCounter(addr):\n let (absolute, count) = emitNext(address: addr)\n return Instruction.pc(absolute: absolute,\n count: count).rawValue\n case let .returnAddress(addr):\n let (absolute, count) = emitNext(address: addr)\n return Instruction.ra(absolute: absolute,\n count: count).rawValue\n case let .asyncResumePoint(addr):\n let (absolute, count) = emitNext(address: addr)\n return Instruction.async(absolute: absolute,\n count: count).rawValue\n case let .omittedFrames(count):\n if count <= 0x1f {\n return Instruction.omit(external: false,\n count: count).rawValue\n }\n let countCount = emitNext(externalCount: count)\n return Instruction.omit(external: true,\n count: countCount).rawValue\n case .truncated:\n self.state = .done\n return Instruction.trunc.rawValue\n }\n }\n\n public mutating func next() -> UInt8? 
{\n switch state {\n case .done:\n return nil\n\n case .start:\n // The first thing we emit is the info byte\n let size: WordSize\n switch Address.bitWidth {\n case 16:\n size = .sixteenBit\n case 32:\n size = .thirtyTwoBit\n case 64:\n size = .sixtyFourBit\n default:\n state = .done\n return nil\n }\n\n state = .ready\n\n let version: UInt8 = 0\n let infoByte = (version << 2) | size.rawValue\n return infoByte\n\n case let .emittingBytes(ndx, frame):\n\n let byte = bytes[ndx]\n if ndx + 1 == 8 {\n if let frame {\n state = .stashedFrame(frame)\n } else {\n state = .ready\n }\n } else {\n state = .emittingBytes(ndx + 1, frame)\n }\n return byte\n\n case .ready:\n\n // Grab a rich frame and encode it\n guard let frame = iterator.next() else {\n state = .done\n return nil\n }\n\n if let lastFrame, lastFrame == frame {\n var count = 1\n var nextFrame: SourceFrame? = nil\n while let frame = iterator.next() {\n if frame != lastFrame {\n nextFrame = frame\n break\n } else {\n count += 1\n }\n }\n\n if count <= 8 {\n _ = emitNext(frame: nextFrame)\n return Instruction.rep(external: false,\n count: count).rawValue\n } else {\n let countCount = emitNext(frame: nextFrame,\n externalCount: count)\n return Instruction.rep(external: true,\n count: countCount).rawValue\n }\n }\n\n return emit(frame: frame)\n\n case let .stashedFrame(frame):\n\n state = .ready\n\n return emit(frame: frame)\n }\n }\n }\n }\n}\n\nextension CompactBacktraceFormat.Instruction: Comparable {\n public static func < (lhs: Self, rhs: Self) -> Bool {\n return lhs.rawValue < rhs.rawValue\n }\n public static func == (lhs: Self, rhs: Self) -> Bool {\n return lhs.rawValue == rhs.rawValue\n }\n}\n\nextension CompactBacktraceFormat.Instruction {\n func decoded() -> CompactBacktraceFormat.DecodedInstruction? {\n switch self {\n case .end:\n return .end\n case .trunc:\n return .trunc\n case .pc_first ... 
.pc_last:\n let count = Int((self.rawValue & 0x7) + 1)\n let absolute = (self.rawValue & 0x8) != 0\n return .pc(absolute: absolute, count: count)\n case .ra_first ... .ra_last:\n let count = Int((self.rawValue & 0x7) + 1)\n let absolute = (self.rawValue & 0x8) != 0\n return .ra(absolute: absolute, count: count)\n case .async_first ... .async_last:\n let count = Int((self.rawValue & 0x7) + 1)\n let absolute = (self.rawValue & 0x8) != 0\n return .async(absolute: absolute, count: count)\n case .omit_first ... .omit_last:\n let count = Int((self.rawValue & 0x1f) + 1)\n let external = (self.rawValue & 0x20) != 0\n return .omit(external: external, count: count)\n case .rep_first ... .rep_last:\n let count = Int((self.rawValue & 0x7) + 1)\n let external = (self.rawValue & 0x8) != 0\n return .rep(external: external, count: count)\n default:\n return nil\n }\n }\n}\n
dataset_sample\swift\swift\cpp_apple_swift_stdlib_public_RuntimeModule_CompactBacktrace.swift
cpp_apple_swift_stdlib_public_RuntimeModule_CompactBacktrace.swift
Swift
19,440
0.95
0.056061
0.05614
awesome-app
825
2024-06-11T05:06:20.391661
BSD-3-Clause
false
680c082c9fa471c841841aac73f7fdc5
//===--- CompactImageMap.swift -------------------------------*- swift -*-===//\n//\n// This source file is part of the Swift.org open source project\n//\n// Copyright (c) 2024 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n//\n// Definitions for Compact ImageMap Format\n//\n//===----------------------------------------------------------------------===//\n\nimport Swift\n\nprivate let slash = UInt8(ascii: "/")\nprivate let backslash = UInt8(ascii: "\\")\n\n@_spi(Internal)\npublic enum CompactImageMapFormat {\n\n /// The list of fixed prefixes used to encode paths.\n static let fixedPathPrefixes = [\n // Traditional UNIX\n (0, "/lib"),\n (1, "/usr/lib"),\n (2, "/usr/local/lib"),\n (3, "/opt/lib"),\n\n // NeXT/Darwin\n (4, "/System/Library/Frameworks"),\n (5, "/System/Library/PrivateFrameworks"),\n (6, "/System/iOSSupport"),\n (7, "/Library/Frameworks"),\n (8, "/System/Applications"),\n (9, "/Applications"),\n\n // Windows\n (10, "C:\\Windows\\System32"),\n (11, "C:\\Program Files")\n ]\n\n /// Tells us what size of machine words were used when generating the\n /// image map.\n enum WordSize: UInt8 {\n case sixteenBit = 0\n case thirtyTwoBit = 1\n case sixtyFourBit = 2\n }\n\n /// Run a closure for each prefix of the specified string\n static func forEachPrefix(of str: String.UTF8View.SubSequence,\n body: (String) -> ()) {\n let base = str.startIndex\n let end = str.endIndex\n var pos = base\n\n while pos < end {\n let ch = str[pos]\n\n if pos > base && (ch == slash || ch == backslash) {\n let range = base..<pos\n let prefix = String(str[range])!\n body(prefix)\n }\n\n pos = str.index(after: pos)\n }\n }\n\n /// Decodes a Sequence containing Compact ImageMap Format data into 
am\n /// ImageMap.\n @_spi(Internal)\n public struct Decoder<S: Sequence<UInt8>> {\n var sequence: S\n var iterator: S.Iterator\n var imageCount: Int = 0\n var wordSize: WordSize = .sixtyFourBit\n var wordMask: UInt64 = 0\n var pathPrefixes = Dictionary(uniqueKeysWithValues: fixedPathPrefixes)\n var nextCode = 32\n\n public init(_ sequence: S) {\n self.sequence = sequence\n self.iterator = sequence.makeIterator()\n }\n\n mutating func decodeCount() -> Int? {\n var value: Int = 0\n while true {\n guard let byte = iterator.next() else {\n return nil\n }\n\n value = (value << 7) | Int(byte & 0x7f)\n\n if (byte & 0x80) == 0 {\n break\n }\n }\n return value\n }\n\n mutating func decodeString() -> String? {\n guard let utf8Length = iterator.next() else {\n return nil\n }\n\n var bytes: [UInt8] = []\n bytes.reserveCapacity(Int(utf8Length))\n\n for _ in 0..<utf8Length {\n guard let byte = iterator.next() else {\n return nil\n }\n\n bytes.append(byte)\n }\n\n return String(decoding: bytes, as: UTF8.self)\n }\n\n mutating func decodeAddress(_ count: Int) -> UInt64? {\n var word: UInt64\n guard let firstByte = iterator.next() else {\n return nil\n }\n\n // Sign extend\n if (firstByte & 0x80) != 0 {\n word = wordMask | UInt64(firstByte)\n } else {\n word = UInt64(firstByte)\n }\n\n for _ in 1..<count {\n guard let byte = iterator.next() else {\n return nil\n }\n word = (word << 8) | UInt64(byte)\n }\n\n return word\n }\n\n mutating func decodePath() -> String? {\n var byte: UInt8\n\n guard let b = iterator.next() else {\n return nil\n }\n\n byte = b\n\n // `end` here means no string at all\n if byte == 0x00 {\n return nil\n }\n\n var resultBytes: [UInt8] = []\n var stringBase: Int? 
= nil\n\n while true {\n if byte == 0x00 {\n // `end`\n #if DEBUG_COMPACT_IMAGE_MAP\n print("end")\n #endif\n return String(decoding: resultBytes, as: UTF8.self)\n } else if byte < 0x40 {\n // `str`\n let count = Int(byte)\n resultBytes.reserveCapacity(resultBytes.count + count)\n let base = resultBytes.count\n if stringBase == nil {\n stringBase = base\n }\n for n in 0..<count {\n guard let char = iterator.next() else {\n return nil\n }\n if base + n > stringBase! && (char == slash\n || char == backslash) {\n let prefix = String(decoding: resultBytes[stringBase!..<base+n],\n as: UTF8.self)\n #if DEBUG_COMPACT_IMAGE_MAP\n print("define \(nextCode) = \(prefix)")\n #endif\n pathPrefixes[nextCode] = prefix\n nextCode += 1\n }\n resultBytes.append(char)\n\n #if DEBUG_COMPACT_IMAGE_MAP\n var hex = String(char, radix: 16)\n if hex.count == 1 {\n hex = "0" + hex\n }\n #endif\n }\n\n #if DEBUG_COMPACT_IMAGE_MAP\n let theString = String(decoding: resultBytes[base...], as: UTF8.self)\n print("str '\(theString)'")\n #endif\n } else if byte < 0x80 {\n // `framewk`\n let count = Int((byte & 0x3f) + 1)\n\n guard let version = iterator.next() else {\n return nil\n }\n\n var nameBytes: [UInt8] = []\n nameBytes.reserveCapacity(count)\n\n for _ in 0..<count {\n guard let char = iterator.next() else {\n return nil\n }\n nameBytes.append(char)\n }\n\n #if DEBUG_COMPACT_IMAGE_MAP\n let name = String(decoding: nameBytes, as: UTF8.self)\n let versionChar = String(Unicode.Scalar(version))\n print("framewk version='\(versionChar)' name='\(name)'")\n #endif\n\n resultBytes.append(slash)\n resultBytes.append(contentsOf: nameBytes)\n resultBytes.append(contentsOf: ".framework/Versions/".utf8)\n resultBytes.append(version)\n resultBytes.append(slash)\n resultBytes.append(contentsOf: nameBytes)\n\n return String(decoding: resultBytes, as: UTF8.self)\n } else {\n // `expand`\n var code: Int\n if (byte & 0x40) == 0 {\n code = Int(byte & 0x3f)\n } else {\n let byteCount = Int(byte & 0x3f) + 1\n 
code = 0\n for _ in 0..<byteCount {\n guard let byte = iterator.next() else {\n return nil\n }\n code = (code << 8) | Int(byte)\n }\n code += 64\n }\n\n #if DEBUG_COMPACT_IMAGE_MAP\n print("expand \(code) = \(String(describing: pathPrefixes[code]))")\n #endif\n\n guard let prefix = pathPrefixes[code] else {\n return nil\n }\n\n resultBytes.append(contentsOf: prefix.utf8)\n }\n\n guard let b = iterator.next() else {\n return nil\n }\n\n byte = b\n }\n }\n\n mutating func decode() -> (String, [ImageMap.Image], ImageMap.WordSize)? {\n // Check the version and decode the size\n guard let infoByte = iterator.next() else {\n return nil\n }\n let version = infoByte >> 2\n guard let size = WordSize(rawValue: infoByte & 0x3) else {\n return nil\n }\n wordSize = size\n guard version == 0 else {\n return nil\n }\n\n // Set up the word mask\n switch wordSize {\n case .sixteenBit:\n wordMask = 0xff00\n case .thirtyTwoBit:\n wordMask = 0xffffff00\n case .sixtyFourBit:\n wordMask = 0xffffffffffffff00\n }\n\n // Now decode the platform\n guard let platform = decodeString() else {\n return nil\n }\n\n // Next is the image count\n guard let count = decodeCount() else {\n return nil\n }\n\n imageCount = count\n\n // Now decode all of the images\n var images: [ImageMap.Image] = []\n var lastAddress: UInt64 = 0\n\n images.reserveCapacity(count)\n\n for _ in 0..<count {\n // Decode the header byte\n guard let header = iterator.next() else {\n return nil\n }\n\n let relative = (header & 0x80) != 0\n let acount = Int(((header >> 3) & 0x7) + 1)\n let ecount = Int((header & 0x7) + 1)\n\n #if DEBUG_COMPACT_IMAGE_MAP\n print("r = \(relative), acount = \(acount), ecount = \(ecount)")\n #endif\n\n // Now the base and end of text addresses\n guard let address = decodeAddress(acount) else {\n return nil\n }\n let baseAddress: UInt64\n if relative {\n baseAddress = lastAddress &+ address\n } else {\n baseAddress = address\n }\n\n lastAddress = baseAddress\n\n guard let eotOffset = 
decodeAddress(ecount) else {\n return nil\n }\n let endOfText = baseAddress &+ eotOffset\n\n #if DEBUG_COMPACT_IMAGE_MAP\n print("address = \(hex(address)), eotOffset = \(hex(eotOffset))")\n print("baseAddress = \(hex(baseAddress)), endOfText = \(hex(endOfText))")\n #endif\n\n // Next, get the build ID byte count\n guard let buildIdBytes = decodeCount() else {\n return nil\n }\n\n #if DEBUG_COMPACT_IMAGE_MAP\n print("buildIdBytes = \(buildIdBytes)")\n #endif\n\n // Read the build ID\n var buildId: [UInt8]? = nil\n\n if buildIdBytes > 0 {\n buildId = []\n buildId!.reserveCapacity(buildIdBytes)\n\n for _ in 0..<buildIdBytes {\n guard let byte = iterator.next() else {\n return nil\n }\n buildId!.append(byte)\n }\n }\n\n #if DEBUG_COMPACT_IMAGE_MAP\n print("buildId = \(buildId)")\n #endif\n\n // Decode the path\n let path = decodePath()\n let name: String?\n\n // Extract the name from the path\n if let path = path {\n if let lastSlashNdx = path.utf8.lastIndex(\n where: { $0 == slash || $0 == backslash }\n ) {\n let nameNdx = path.index(after: lastSlashNdx)\n\n name = String(path[nameNdx...])\n } else {\n name = path\n }\n } else {\n name = nil\n }\n\n let image = ImageMap.Image(\n name: name,\n path: path,\n uniqueID: buildId,\n baseAddress: baseAddress,\n endOfText: endOfText\n )\n\n images.append(image)\n }\n\n let wsMap: ImageMap.WordSize\n switch wordSize {\n case .sixteenBit:\n wsMap = .sixteenBit\n case .thirtyTwoBit:\n wsMap = .thirtyTwoBit\n case .sixtyFourBit:\n wsMap = .sixtyFourBit\n }\n\n return (platform, images, wsMap)\n }\n }\n\n /// Encodes an ImageMap as a Sequence<UInt8>\n @_spi(Internal)\n public struct Encoder: Sequence {\n public typealias Element = UInt8\n\n private var source: ImageMap\n\n public init(_ source: ImageMap) {\n self.source = source\n }\n\n public func makeIterator() -> Iterator {\n return Iterator(source)\n }\n\n public struct Iterator: IteratorProtocol {\n enum State {\n case start\n case platform(Int)\n case count(Int)\n case 
image\n case baseAddress(Int)\n case endOfText(Int)\n case uniqueID(Int)\n case uniqueIDBytes(Int)\n case path\n case pathCode(Int)\n case pathString\n case pathStringChunk(Int)\n case version\n case framework\n case done\n }\n\n var abytes = EightByteBuffer()\n var ebytes = EightByteBuffer()\n var acount: Int = 0\n var ecount: Int = 0\n var version: UInt8 = 0\n var lastAddress: UInt64 = 0\n var ndx: Int = 0\n var state: State = .start\n var source: ImageMap\n var pathPrefixes = fixedPathPrefixes\n var nextCode = 32\n var remainingPath: String.UTF8View.SubSequence?\n\n func signExtend(_ value: UInt64) -> UInt64 {\n let mask: UInt64\n let topBit: UInt64\n switch source.wordSize {\n case .sixteenBit:\n topBit = 0x8000\n mask = 0xffffffffffff0000\n case .thirtyTwoBit:\n topBit = 0x80000000\n mask = 0xffffffff00000000\n case .sixtyFourBit:\n return value\n }\n\n if (value & topBit) != 0 {\n return value | mask\n }\n return value\n }\n\n init(_ source: ImageMap) {\n self.source = source\n }\n\n public mutating func next() -> UInt8? 
{\n switch state {\n case .done:\n return nil\n\n case .start:\n // The first thing we emit is the info byte\n let size: WordSize\n switch source.wordSize {\n case .sixteenBit:\n size = .sixteenBit\n case .thirtyTwoBit:\n size = .thirtyTwoBit\n case .sixtyFourBit:\n size = .sixtyFourBit\n }\n\n state = .platform(-1)\n\n let version: UInt8 = 0\n let infoByte = (version << 2) | size.rawValue\n return infoByte\n\n case let .platform(ndx):\n let length = UInt8(source.platform.utf8.count)\n let byte: UInt8\n\n if ndx == -1 {\n // The length byte comes first\n byte = length\n } else {\n byte = source.platform.utf8[\n source.platform.utf8.index(\n source.platform.utf8.startIndex,\n offsetBy: ndx\n )\n ]\n }\n\n // If we're done, move to the .count state\n if ndx + 1 == length {\n let count = source.images.count\n let bits = Int.bitWidth - count.leadingZeroBitCount\n state = .count(7 * (bits / 7))\n } else {\n state = .platform(ndx + 1)\n }\n\n return byte\n\n case let .count(ndx):\n let count = source.images.count\n let byte = UInt8(truncatingIfNeeded:(count >> ndx) & 0x7f)\n if ndx == 0 {\n state = .image\n return byte\n } else {\n state = .count(ndx - 7)\n return 0x80 | byte\n }\n\n case .image:\n if ndx == source.images.count {\n state = .done\n return nil\n }\n\n let baseAddress = signExtend(source.images[ndx].baseAddress)\n let delta = baseAddress &- lastAddress\n\n let endOfText = signExtend(source.images[ndx].endOfText)\n let endOfTextOffset = endOfText - baseAddress\n\n let eotCount: Int\n if endOfTextOffset & (1 << 63) != 0 {\n let ones = ((~endOfTextOffset).leadingZeroBitCount - 1) >> 3\n eotCount = 8 - ones\n } else {\n let zeroes = (endOfTextOffset.leadingZeroBitCount - 1) >> 3\n eotCount = 8 - zeroes\n }\n\n ebytes = EightByteBuffer(endOfTextOffset)\n ecount = eotCount\n\n let absCount: Int\n if baseAddress & (1 << 63) != 0 {\n let ones = ((~baseAddress).leadingZeroBitCount - 1) >> 3\n absCount = 8 - ones\n } else {\n let zeroes = 
(baseAddress.leadingZeroBitCount - 1) >> 3\n absCount = 8 - zeroes\n }\n\n let deltaCount: Int\n if delta & (1 << 63) != 0 {\n let ones = ((~delta).leadingZeroBitCount - 1) >> 3\n deltaCount = 8 - ones\n } else {\n let zeroes = (delta.leadingZeroBitCount - 1) >> 3\n deltaCount = 8 - zeroes\n }\n\n lastAddress = baseAddress\n\n let relativeFlag: UInt8\n if absCount <= deltaCount {\n abytes = EightByteBuffer(baseAddress)\n acount = absCount\n relativeFlag = 0\n } else {\n abytes = EightByteBuffer(delta)\n acount = deltaCount\n relativeFlag = 0x80\n }\n\n state = .baseAddress(8 - acount)\n return relativeFlag\n | UInt8(truncatingIfNeeded: (acount - 1) << 3)\n | UInt8(truncatingIfNeeded: ecount - 1)\n\n case let .baseAddress(ndx):\n let byte = abytes[ndx]\n if ndx + 1 == 8 {\n state = .endOfText(8 - ecount)\n } else {\n state = .baseAddress(ndx + 1)\n }\n return byte\n\n case let .endOfText(ndx):\n let byte = ebytes[ndx]\n if ndx + 1 == 8 {\n let count = source.images[self.ndx].uniqueID?.count ?? 
0\n let bits = Int.bitWidth - count.leadingZeroBitCount\n state = .uniqueID(7 * (bits / 7))\n } else {\n state = .endOfText(ndx + 1)\n }\n return byte\n\n case let .uniqueID(cndx):\n guard let count = source.images[self.ndx].uniqueID?.count else {\n state = .path\n if let path = source.images[self.ndx].path {\n remainingPath = path.utf8[...]\n } else {\n remainingPath = nil\n }\n return 0\n }\n let byte = UInt8(truncatingIfNeeded: (count >> cndx) & 0x7f)\n if cndx == 0 {\n state = .uniqueIDBytes(0)\n return byte\n } else {\n state = .uniqueID(cndx - 7)\n return 0x80 | byte\n }\n\n case let .uniqueIDBytes(byteNdx):\n let uniqueID = source.images[self.ndx].uniqueID!\n let byte = uniqueID[byteNdx]\n if byteNdx + 1 == uniqueID.count {\n state = .path\n if let path = source.images[self.ndx].path {\n remainingPath = path.utf8[...]\n } else {\n remainingPath = nil\n }\n } else {\n state = .uniqueIDBytes(byteNdx + 1)\n }\n return byte\n\n case .path:\n guard let remainingPath = remainingPath,\n remainingPath.count > 0 else {\n ndx += 1\n state = .image\n return 0x00\n }\n\n // Find the longest prefix match\n var longestMatchLen = 0\n var matchedPrefix: Int? 
= nil\n for (ndx, (_, prefix)) in pathPrefixes.enumerated() {\n let prefixUTF8 = prefix.utf8\n if prefixUTF8.count > remainingPath.count {\n continue\n }\n if prefixUTF8.count > longestMatchLen\n && remainingPath.starts(with: prefixUTF8) {\n longestMatchLen = prefixUTF8.count\n matchedPrefix = ndx\n }\n }\n\n if let ndx = matchedPrefix {\n let (code, prefix) = pathPrefixes[ndx]\n self.remainingPath = remainingPath.dropFirst(prefix.utf8.count)\n if code <= 0x3f {\n return 0x80 | UInt8(exactly: code)!\n }\n\n let theCode = UInt64(exactly: code - 0x40)!\n abytes = EightByteBuffer(theCode)\n\n let codeBytes = Swift.max(\n (64 - theCode.leadingZeroBitCount) >> 3, 1\n )\n\n state = .pathCode(8 - codeBytes)\n\n return 0xc0 | UInt8(exactly: codeBytes - 1)!\n }\n\n // Check for /<name>.framework/Versions/<version>/<name>\n if let name = source.images[ndx].name, !name.isEmpty {\n let nameCount = name.utf8.count\n let expectedLen = 1 // '/'\n + nameCount // <name>\n + 20 // .framework/Versions/\n + 1 // <version>\n + 1 // '/'\n + nameCount // <name>\n if remainingPath.count == expectedLen {\n let framework = "/\(name).framework/Versions/"\n if remainingPath.starts(with: framework.utf8) {\n var verNdx = remainingPath.startIndex\n remainingPath.formIndex(&verNdx, offsetBy: framework.utf8.count)\n\n version = remainingPath[verNdx]\n\n let slashNdx = remainingPath.index(after: verNdx)\n if remainingPath[slashNdx] == slash {\n let nameNdx = remainingPath.index(after: slashNdx)\n if remainingPath[nameNdx...].elementsEqual(name.utf8) {\n self.remainingPath = remainingPath[nameNdx...]\n\n state = .version\n return 0x40 | UInt8(exactly: nameCount - 1)!\n }\n }\n }\n }\n }\n\n // Add any new prefixes\n forEachPrefix(of: remainingPath) { prefix in\n #if DEBUG_COMPACT_IMAGE_MAP\n print("defining \(nextCode) as \"\(prefix)\"")\n #endif\n pathPrefixes.append((nextCode, prefix))\n nextCode += 1\n }\n\n fallthrough\n\n case .pathString:\n if remainingPath!.count == 0 {\n ndx += 1\n state = 
.image\n return 0x00\n }\n\n let chunkLength = Swift.min(remainingPath!.count, 0x3f)\n state = .pathStringChunk(chunkLength)\n return UInt8(truncatingIfNeeded: chunkLength)\n\n case let .pathStringChunk(length):\n let byte = remainingPath!.first!\n remainingPath = remainingPath!.dropFirst()\n if length == 1 {\n state = .pathString\n } else {\n state = .pathStringChunk(length - 1)\n }\n return byte\n\n case .version:\n state = .framework\n return version\n\n case .framework:\n let byte = remainingPath!.first!\n remainingPath = remainingPath!.dropFirst()\n if remainingPath!.count == 0 {\n ndx += 1\n state = .image\n }\n return byte\n\n case let .pathCode(ndx):\n let byte = abytes[ndx]\n if ndx + 1 == 8 {\n state = .path\n } else {\n state = .pathCode(ndx + 1)\n }\n return byte\n }\n }\n }\n }\n\n}\n
dataset_sample\swift\swift\cpp_apple_swift_stdlib_public_RuntimeModule_CompactImageMap.swift
cpp_apple_swift_stdlib_public_RuntimeModule_CompactImageMap.swift
Swift
22,921
0.95
0.093168
0.102639
vue-tools
254
2023-09-24T22:15:09.215376
BSD-3-Clause
false
e6858fbe13e52e56d9df784b8ff4a11f
//===--- Compression.swift - Data compression for ELF images --------------===//\n//\n// This source file is part of the Swift.org open source project\n//\n// Copyright (c) 2022 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n//\n// Defines the compressed image sources, which are used to access compressed\n// data from ELF images.\n//\n// There are three different compression formats we might have to interact\n// with, namely zlib (deflate), zstd and LZMA. We don't implement the\n// decompression algorithms here, but rather we will try to use zlib,\n// zstd and liblzma respectively.\n//\n// We support two different modes; one is where the compression libraries\n// have been statically linked with us, in which case we can just call them;\n// the other is where we will try to load them with `dlopen()`. We do this\n// so as to avoid a hard dependency on them in the runtime.\n//\n//===----------------------------------------------------------------------===//\n\nimport Swift\n\n#if os(macOS) || os(iOS) || os(tvOS) || os(watchOS)\ninternal import Darwin\n#elseif os(Windows)\ninternal import ucrt\n#elseif canImport(Glibc)\ninternal import Glibc\n#elseif canImport(Musl)\ninternal import Musl\n#endif\ninternal import BacktracingImpl.CompressionLibs\ninternal import BacktracingImpl.ImageFormats.Elf\n\nenum CompressedImageSourceError: Error {\n case unboundedImageSource\n case outOfRangeFetch(UInt64, Int)\n case badCompressedData\n case unsupportedFormat\n case libraryNotFound(String)\n case outputOverrun\n}\n\nlet zlib_stream_init = swift.runtime.zlib_stream_init\nlet lzma_stream_init = swift.runtime.lzma_stream_init\n\n// .. 
CompressedStream .........................................................\n\nprotocol CompressedStream {\n typealias InputSource = () throws -> UnsafeRawBufferPointer\n typealias OutputSink = (_ used: UInt, _ done: Bool) throws\n -> UnsafeMutableRawBufferPointer?\n\n func decompress(input: InputSource, output: OutputSink) throws -> UInt\n}\n\n// .. Compression library bindings .............................................\n\n#if os(macOS) || os(iOS) || os(tvOS) || os(watchOS)\nprivate var lzmaHandle = dlopen("liblzma.dylib", RTLD_LAZY)\nprivate var zlibHandle = dlopen("libz.dylib", RTLD_LAZY)\nprivate var zstdHandle = dlopen("libzstd.dylib", RTLD_LAZY)\n#elseif os(Linux)\nprivate var lzmaHandle = dlopen("liblzma.so.5", RTLD_LAZY)\nprivate var zlibHandle = dlopen("libz.so.1", RTLD_LAZY)\nprivate var zstdHandle = dlopen("libzstd.so.1", RTLD_LAZY)\n#elseif os(Windows)\n// ###TODO\n#endif\n\nprivate func symbol<T>(_ handle: UnsafeMutableRawPointer?, _ name: String) -> T? {\n guard let handle = handle, let result = dlsym(handle, name) else {\n return nil\n }\n return unsafeBitCast(result, to: T.self)\n}\n\nprivate enum Sym {\n static let lzma_stream_decoder: (\n @convention(c) (UnsafeMutablePointer<lzma_stream>,\n UInt64, UInt32) -> lzma_ret)?\n = symbol(lzmaHandle, "lzma_stream_decoder")\n\n static let lzma_code: (@convention(c) (UnsafeMutablePointer<lzma_stream>,\n lzma_action) -> lzma_ret)?\n = symbol(lzmaHandle, "lzma_code")\n\n static let lzma_end: (@convention(c) (UnsafeMutablePointer<lzma_stream>) -> ())?\n = symbol(lzmaHandle, "lzma_end")\n\n static let inflateInit_: (@convention(c) (z_streamp,\n UnsafePointer<CChar>, CInt) -> CInt)?\n = symbol(zlibHandle, "inflateInit_")\n\n static func inflateInit(_ stream: z_streamp) -> CInt {\n return inflateInit_!(stream, ZLIB_VERSION, CInt(MemoryLayout<z_stream>.size))\n }\n\n static let inflate: (@convention(c) (z_streamp, CInt) -> CInt)?\n = symbol(zlibHandle, "inflate")\n\n static let inflateEnd: (@convention(c) 
(z_streamp) -> CInt)?\n = symbol(zlibHandle, "inflateEnd")\n\n static let ZSTD_createDStream: (\n @convention(c) () -> UnsafeMutableRawPointer?)?\n = symbol(zstdHandle, "ZSTD_createDStream")\n\n static let ZSTD_freeDStream: (\n @convention(c) (UnsafeMutableRawPointer) -> UInt)?\n = symbol(zstdHandle, "ZSTD_freeDStream")\n\n static let ZSTD_decompressStream: (\n @convention(c) (UnsafeMutableRawPointer,\n UnsafeMutablePointer<ZSTD_outBuffer>,\n UnsafeMutablePointer<ZSTD_inBuffer>) -> UInt)?\n = symbol(zstdHandle, "ZSTD_decompressStream")\n\n static let ZSTD_isError: (@convention(c) (UInt) -> UInt)?\n = symbol(zstdHandle, "ZSTD_isError")\n}\n\n// .. zlib (deflate) ...........................................................\n\nenum ZLibError: Error {\n case decodeError(CInt)\n}\n\nstruct ZLibStream: CompressedStream {\n init() {}\n\n func decompress(input: InputSource, output: OutputSink) throws -> UInt {\n\n if zlibHandle == nil {\n throw CompressedImageSourceError.libraryNotFound("libz")\n }\n\n var stream = zlib_stream_init()\n\n let ret = Sym.inflateInit(&stream)\n if ret != Z_OK {\n throw ZLibError.decodeError(ret)\n }\n defer {\n _ = Sym.inflateEnd!(&stream)\n }\n\n var outputBufferSize = UInt(0)\n while true {\n if stream.avail_in == 0 {\n let buffer = try input()\n\n // Not really mutable; this is just an issue with z_const\n stream.next_in = UnsafeMutablePointer(\n mutating: buffer.baseAddress?.assumingMemoryBound(to: UInt8.self)\n )\n stream.avail_in = CUnsignedInt(buffer.count)\n }\n\n if stream.avail_out == 0 {\n guard let buffer = try output(outputBufferSize, false) else {\n throw CompressedImageSourceError.outputOverrun\n }\n\n stream.next_out = buffer.baseAddress?.assumingMemoryBound(to: UInt8.self)\n stream.avail_out = CUnsignedInt(buffer.count)\n outputBufferSize = UInt(buffer.count)\n }\n\n let ret = Sym.inflate!(&stream, Z_NO_FLUSH)\n\n if ret == Z_STREAM_END {\n _ = try output(outputBufferSize - UInt(stream.avail_out), true)\n return 
stream.total_out\n }\n\n if ret != Z_OK {\n throw ZLibError.decodeError(ret)\n }\n }\n }\n}\n\n// .. zstd .....................................................................\n\nenum ZStdError: Error {\n case unableToCreateStream\n case decodeError(UInt)\n}\n\nstruct ZStdStream: CompressedStream {\n init() {}\n\n func decompress(input: InputSource, output: OutputSink) throws -> UInt {\n\n if zstdHandle == nil {\n throw CompressedImageSourceError.libraryNotFound("libzstd")\n }\n\n guard let stream = Sym.ZSTD_createDStream!() else {\n throw ZStdError.unableToCreateStream\n }\n defer {\n _ = Sym.ZSTD_freeDStream!(stream)\n }\n\n var inBuffer = ZSTD_inBuffer(src: nil, size: 0, pos: 0)\n var outBuffer = ZSTD_outBuffer(dst: nil, size: 0, pos: 0)\n var totalOutput = UInt(0)\n\n while true {\n if inBuffer.size == inBuffer.pos {\n let buffer = try input()\n\n inBuffer.src = buffer.baseAddress\n inBuffer.size = buffer.count\n inBuffer.pos = 0\n }\n\n if outBuffer.size == outBuffer.pos {\n let byteCount = UInt(outBuffer.pos)\n\n totalOutput += byteCount\n\n guard let buffer = try output(byteCount, false) else {\n throw CompressedImageSourceError.outputOverrun\n }\n\n outBuffer.dst = buffer.baseAddress\n outBuffer.size = buffer.count\n outBuffer.pos = 0\n }\n\n let ret = Sym.ZSTD_decompressStream!(stream, &outBuffer, &inBuffer)\n\n if ret == 0 {\n _ = try output(UInt(outBuffer.pos), true)\n return totalOutput\n }\n\n if Sym.ZSTD_isError!(ret) != 0 {\n throw ZStdError.decodeError(ret)\n }\n }\n }\n}\n\n\n// .. 
LZMA .....................................................................\n\nenum LZMAError: Error {\n case decodeError(lzma_ret)\n}\n\nstruct LZMAStream: CompressedStream {\n private var memlimit: UInt64\n private var flags: UInt32\n\n init(memlimit: UInt64 = ~UInt64(0), flags: UInt32 = 0) {\n self.memlimit = memlimit\n self.flags = flags\n }\n\n func decompress(input: InputSource, output: OutputSink) throws -> UInt {\n\n if lzmaHandle == nil {\n throw CompressedImageSourceError.libraryNotFound("liblzma")\n }\n\n var stream = lzma_stream_init()\n\n let ret = Sym.lzma_stream_decoder!(&stream, memlimit, flags)\n if ret != LZMA_OK {\n throw LZMAError.decodeError(ret)\n }\n defer {\n Sym.lzma_end!(&stream)\n }\n\n var outputBufferSize = UInt(0)\n while true {\n if stream.avail_in == 0 {\n let buffer = try input()\n stream.next_in = buffer.baseAddress?.assumingMemoryBound(to: UInt8.self)\n stream.avail_in = buffer.count\n }\n\n if stream.avail_out == 0 {\n guard let buffer = try output(outputBufferSize, false) else {\n throw CompressedImageSourceError.outputOverrun\n }\n\n stream.next_out = buffer.baseAddress?.assumingMemoryBound(to: UInt8.self)\n stream.avail_out = buffer.count\n outputBufferSize = UInt(buffer.count)\n }\n\n let ret = Sym.lzma_code!(&stream, LZMA_RUN)\n\n if ret == LZMA_STREAM_END {\n _ = try output(outputBufferSize - UInt(stream.avail_out), true)\n return UInt(stream.total_out)\n }\n\n if ret != LZMA_OK {\n throw LZMAError.decodeError(ret)\n }\n }\n }\n}\n\n// .. Image Sources ............................................................\n\nfileprivate func decompress<S: CompressedStream>(\n stream: S,\n source: ImageSource,\n offset: Int,\n output: inout ImageSource\n) throws {\n let totalBytes = try stream.decompress(\n input: {\n () throws -> UnsafeRawBufferPointer in\n\n return UnsafeRawBufferPointer(rebasing: source.bytes[offset...])\n },\n output: {\n (used: UInt, done: Bool) throws -> UnsafeMutableRawBufferPointer? 
in\n\n if used == 0 {\n return output.unusedBytes\n } else {\n return nil\n }\n }\n )\n output.used(bytes: Int(totalBytes))\n}\n\nfileprivate func decompressChunked<S: CompressedStream>(\n stream: S,\n source: ImageSource,\n offset: Int,\n output: inout ImageSource\n) throws {\n let bufSize = 65536\n let outputBuffer = UnsafeMutableRawBufferPointer.allocate(byteCount: bufSize,\n alignment: 16)\n defer {\n outputBuffer.deallocate()\n }\n\n let _ = try stream.decompress(\n input: {\n () throws -> UnsafeRawBufferPointer in\n\n return UnsafeRawBufferPointer(rebasing: source.bytes[offset...])\n },\n output: {\n (used: UInt, done: Bool) throws -> UnsafeMutableRawBufferPointer? in\n\n output.append(\n bytes: UnsafeRawBufferPointer(rebasing: outputBuffer[..<Int(used)])\n )\n if !done {\n return outputBuffer\n } else {\n return nil\n }\n }\n )\n}\n\nextension ImageSource {\n @_specialize(kind: full, where Traits == Elf32Traits)\n @_specialize(kind: full, where Traits == Elf64Traits)\n init<Traits: ElfTraits>(elfCompressedImageSource source: ImageSource,\n traits: Traits.Type) throws {\n if source.bytes.count < MemoryLayout<Traits.Chdr>.size {\n throw CompressedImageSourceError.badCompressedData\n }\n\n let rawChdr = try source.fetch(from: 0, as: Traits.Chdr.self)\n let chdr: Traits.Chdr\n switch rawChdr.ch_type {\n case .ELFCOMPRESS_ZLIB.byteSwapped, .ELFCOMPRESS_ZSTD.byteSwapped:\n chdr = rawChdr.byteSwapped\n default:\n chdr = rawChdr\n }\n\n let uncompressedSize = UInt(chdr.ch_size)\n\n self.init(capacity: Int(uncompressedSize), isMappedImage: false, path: nil)\n\n switch chdr.ch_type {\n case .ELFCOMPRESS_ZLIB:\n try decompress(stream: ZLibStream(),\n source: source, offset: MemoryLayout<Traits.Chdr>.stride,\n output: &self)\n case .ELFCOMPRESS_ZSTD:\n try decompress(stream: ZStdStream(),\n source: source, offset: MemoryLayout<Traits.Chdr>.stride,\n output: &self)\n default:\n throw CompressedImageSourceError.unsupportedFormat\n }\n }\n\n init(gnuCompressedImageSource 
source: ImageSource) throws {\n if source.bytes.count < 12 {\n throw CompressedImageSourceError.badCompressedData\n }\n\n let magic = try source.fetch(from: 0, as: UInt32.self)\n let rawUncompressedSize = try source.fetch(from: 4, as: UInt64.self)\n let uncompressedSize: UInt64\n switch magic {\n case 0x42494c5a: // BILZ\n uncompressedSize = rawUncompressedSize.byteSwapped\n case 0x5a4c4942: // ZLIB\n uncompressedSize = rawUncompressedSize\n default:\n throw CompressedImageSourceError.badCompressedData\n }\n\n self.init(capacity: Int(uncompressedSize), isMappedImage: false, path: nil)\n\n try decompress(stream: ZLibStream(),\n source: source, offset: 12,\n output: &self)\n }\n\n init(lzmaCompressedImageSource source: ImageSource) throws {\n self.init(isMappedImage: false, path: nil)\n\n try decompressChunked(stream: LZMAStream(),\n source: source, offset: 0,\n output: &self)\n }\n}\n
dataset_sample\swift\swift\cpp_apple_swift_stdlib_public_RuntimeModule_Compression.swift
cpp_apple_swift_stdlib_public_RuntimeModule_Compression.swift
Swift
13,265
0.95
0.116331
0.118785
vue-tools
32
2023-08-06T03:14:50.709827
BSD-3-Clause
false
eaba862f82dc504780d667e41c633fed
//===--- Context.swift - Unwind context structure -------------------------===//\n//\n// This source file is part of the Swift.org open source project\n//\n// Copyright (c) 2022 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n//\n// Defines the Context protocol and some concrete implementations for various\n// different types of CPU.\n//\n// Context holds register values during unwinding.\n//\n//===----------------------------------------------------------------------===//\n\nimport Swift\n\n#if os(macOS) || os(iOS) || os(tvOS) || os(watchOS)\ninternal import Darwin\n#elseif os(Windows)\ninternal import ucrt\n#elseif canImport(Glibc)\ninternal import Glibc\n#elseif canImport(Musl)\ninternal import Musl\n#endif\n\n#if os(macOS) || os(iOS) || os(watchOS) || os(tvOS)\ninternal import BacktracingImpl.OS.Darwin\n#endif\n\ninternal import BacktracingImpl.FixedLayout\n\ntypealias x86_64_gprs = swift.runtime.backtrace.x86_64_gprs\ntypealias i386_gprs = swift.runtime.backtrace.i386_gprs\ntypealias arm64_gprs = swift.runtime.backtrace.arm64_gprs\ntypealias arm_gprs = swift.runtime.backtrace.arm_gprs\n\n@_spi(Contexts) public enum ContextError: Error {\n case unableToFormTLSAddress\n}\n\n@_spi(Contexts) public protocol Context: CustomStringConvertible {\n /// Represents a machine address for this type of machine\n associatedtype Address: FixedWidthInteger\n\n /// Represents a size for this type of machine\n associatedtype Size: FixedWidthInteger\n\n /// The type of a general purpose register on this machine\n associatedtype GPRValue: FixedWidthInteger\n\n /// An enumerated type defining the registers for the machine (this comes\n /// from the architecture specific DWARF 
specification).\n associatedtype Register: RawRepresentable where Register.RawValue == Int\n\n /// The architecture tag for this context (e.g. arm64, x86_64)\n var architecture: String { get }\n\n /// The program counter; this is likely a return address\n var programCounter: GPRValue { get set }\n\n /// The stack pointer\n var stackPointer: GPRValue { get set }\n\n /// The frame pointer\n var framePointer: GPRValue { get set }\n\n /// The CFA as defined by the relevant architecture specific DWARF\n /// specification. For the architectures we have currently, it turns out\n /// that this is the stack pointer, but it might in general be some other\n /// thing.\n var callFrameAddress: GPRValue { get set }\n\n /// The number of register slots to reserve in the unwinder (this corresponds\n /// to the DWARF register numbers, which is why some of these reserve a lot\n /// of slots).\n static var registerCount: Int { get }\n\n /// Given a thread local address, form a genuine machine address\n func formTLSAddress(threadLocal: Address) throws -> Address\n\n /// Get the value of the specified general purpose register, or nil if unknown\n func getRegister(_ register: Register) -> GPRValue?\n\n /// Set the value of the specified general purpose register (or mark it as\n /// unknown if nil is passed)\n mutating func setRegister(_ register: Register, to value: GPRValue?)\n\n /// Set all of the registers in bulk\n mutating func setRegisters(_ registers: [GPRValue?])\n\n /// Strip any pointer authentication that might apply from an address.\n static func stripPtrAuth(address: Address) -> Address\n\n /// Test if an address is appropriately aligned for the stack.\n static func isAlignedForStack(framePointer: Address) -> Bool\n}\n\nextension Context {\n public func formTLSAddress(threadLocal: Address) throws -> Address {\n throw ContextError.unableToFormTLSAddress\n }\n\n public mutating func setRegisters(_ registers: [GPRValue?]) {\n for (ndx, value) in registers.enumerated() {\n if 
let reg = Register(rawValue: ndx) {\n setRegister(reg, to: value)\n }\n }\n }\n\n public static func stripPtrAuth(address: Address) -> Address {\n return address\n }\n}\n\n// .. Extensions to the GPR structures .........................................\n\n// We need these because the arrays in the _gprs structs (which are defined\n// in C so that the layout is fixed) get imported as tuples.\n\nextension x86_64_gprs {\n func getR(_ ndx: Int) -> UInt64 {\n return withUnsafePointer(to: _r) {\n $0.withMemoryRebound(to: UInt64.self, capacity: 16) {\n $0[ndx]\n }\n }\n }\n\n mutating func setR(_ ndx: Int, to value: UInt64) {\n withUnsafeMutablePointer(to: &_r) {\n $0.withMemoryRebound(to: UInt64.self, capacity: 16) {\n $0[ndx] = value\n }\n }\n valid |= 1 << ndx\n }\n}\n\nextension i386_gprs {\n func getR(_ ndx: Int) -> UInt32 {\n return withUnsafePointer(to: _r) {\n $0.withMemoryRebound(to: UInt32.self, capacity: 8) {\n $0[ndx]\n }\n }\n }\n\n mutating func setR(_ ndx: Int, to value: UInt32) {\n withUnsafeMutablePointer(to: &_r) {\n $0.withMemoryRebound(to: UInt32.self, capacity: 8) {\n $0[ndx] = value\n }\n }\n valid |= 1 << ndx\n }\n}\n\nextension arm64_gprs {\n func getX(_ ndx: Int) -> UInt64 {\n return withUnsafePointer(to: _x) {\n $0.withMemoryRebound(to: UInt64.self, capacity: 32) {\n $0[ndx]\n }\n }\n }\n\n mutating func setX(_ ndx: Int, to value: UInt64) {\n withUnsafeMutablePointer(to: &_x) {\n $0.withMemoryRebound(to: UInt64.self, capacity: 32) {\n $0[ndx] = value\n }\n }\n valid |= 1 << ndx\n }\n}\n\nextension arm_gprs {\n func getR(_ ndx: Int) -> UInt32 {\n return withUnsafePointer(to: _r) {\n $0.withMemoryRebound(to: UInt32.self, capacity: 16) {\n $0[ndx]\n }\n }\n }\n\n mutating func setR(_ ndx: Int, to value: UInt32) {\n withUnsafeMutablePointer(to: &_r) {\n $0.withMemoryRebound(to: UInt32.self, capacity: 16) {\n $0[ndx] = value\n }\n }\n valid |= 1 << ndx\n }\n}\n\n// .. 
x86-64 ...................................................................\n\n@_spi(Contexts) public struct X86_64Context: Context {\n public typealias Address = UInt64\n public typealias Size = UInt64\n public typealias GPRValue = UInt64\n public typealias Register = X86_64Register\n\n var gprs = x86_64_gprs()\n\n public var architecture: String { "x86_64" }\n\n public var programCounter: Address {\n get { return gprs.rip }\n set {\n gprs.rip = newValue\n gprs.valid |= 1 << 20\n }\n }\n public var framePointer: Address {\n get { return gprs.getR(X86_64Register.rbp.rawValue) }\n set {\n gprs.setR(X86_64Register.rbp.rawValue, to: newValue)\n }\n }\n public var stackPointer: Address {\n get { return gprs.getR(X86_64Register.rsp.rawValue) }\n set {\n gprs.setR(X86_64Register.rsp.rawValue, to: newValue)\n }\n }\n\n public var callFrameAddress: GPRValue {\n get { return stackPointer }\n set { stackPointer = newValue }\n }\n\n public static var registerCount: Int { return 56 }\n\n #if os(macOS) && arch(x86_64)\n init?(from thread: thread_t) {\n var state = darwin_x86_64_thread_state()\n let kr = thread_get_state(thread,\n X86_THREAD_STATE64,\n &state)\n if kr != KERN_SUCCESS {\n return nil\n }\n\n self.init(from: state)\n }\n\n init(with mctx: darwin_x86_64_mcontext) {\n self.init(from: mctx.ss)\n }\n\n init(from state: darwin_x86_64_thread_state) {\n gprs.setR(X86_64Register.rax.rawValue, to: state.rax)\n gprs.setR(X86_64Register.rbx.rawValue, to: state.rbx)\n gprs.setR(X86_64Register.rcx.rawValue, to: state.rcx)\n gprs.setR(X86_64Register.rdx.rawValue, to: state.rdx)\n gprs.setR(X86_64Register.rdi.rawValue, to: state.rdi)\n gprs.setR(X86_64Register.rsi.rawValue, to: state.rsi)\n gprs.setR(X86_64Register.rbp.rawValue, to: state.rbp)\n gprs.setR(X86_64Register.rsp.rawValue, to: state.rsp)\n gprs.setR(X86_64Register.r8.rawValue, to: state.r8)\n gprs.setR(X86_64Register.r9.rawValue, to: state.r9)\n gprs.setR(X86_64Register.r10.rawValue, to: state.r10)\n 
gprs.setR(X86_64Register.r11.rawValue, to: state.r11)\n gprs.setR(X86_64Register.r12.rawValue, to: state.r12)\n gprs.setR(X86_64Register.r13.rawValue, to: state.r13)\n gprs.setR(X86_64Register.r14.rawValue, to: state.r14)\n gprs.setR(X86_64Register.r15.rawValue, to: state.r15)\n gprs.rip = state.rip\n gprs.rflags = state.rflags\n gprs.cs = UInt16(state.cs)\n gprs.fs = UInt16(state.fs)\n gprs.gs = UInt16(state.gs)\n gprs.valid = 0x1fffff\n }\n\n public static func fromHostThread(_ thread: Any) -> HostContext? {\n return X86_64Context(from: thread as! thread_t)\n }\n\n public static func fromHostMContext(_ mcontext: Any) -> HostContext {\n return X86_64Context(with: mcontext as! darwin_x86_64_mcontext)\n }\n #elseif os(Linux) && arch(x86_64)\n init(with mctx: mcontext_t) {\n gprs.setR(X86_64Register.rax.rawValue, to: UInt64(bitPattern: mctx.gregs.13))\n gprs.setR(X86_64Register.rbx.rawValue, to: UInt64(bitPattern: mctx.gregs.12))\n gprs.setR(X86_64Register.rcx.rawValue, to: UInt64(bitPattern: mctx.gregs.14))\n gprs.setR(X86_64Register.rdx.rawValue, to: UInt64(bitPattern: mctx.gregs.11))\n gprs.setR(X86_64Register.rdi.rawValue, to: UInt64(bitPattern: mctx.gregs.9))\n gprs.setR(X86_64Register.rsi.rawValue, to: UInt64(bitPattern: mctx.gregs.8))\n gprs.setR(X86_64Register.rbp.rawValue, to: UInt64(bitPattern: mctx.gregs.10))\n gprs.setR(X86_64Register.rsp.rawValue, to: UInt64(bitPattern: mctx.gregs.15))\n gprs.setR(X86_64Register.r8.rawValue, to: UInt64(bitPattern: mctx.gregs.0))\n gprs.setR(X86_64Register.r9.rawValue, to: UInt64(bitPattern: mctx.gregs.1))\n gprs.setR(X86_64Register.r10.rawValue, to: UInt64(bitPattern: mctx.gregs.2))\n gprs.setR(X86_64Register.r11.rawValue, to: UInt64(bitPattern: mctx.gregs.3))\n gprs.setR(X86_64Register.r12.rawValue, to: UInt64(bitPattern: mctx.gregs.4))\n gprs.setR(X86_64Register.r13.rawValue, to: UInt64(bitPattern: mctx.gregs.5))\n gprs.setR(X86_64Register.r14.rawValue, to: UInt64(bitPattern: mctx.gregs.6))\n 
gprs.setR(X86_64Register.r15.rawValue, to: UInt64(bitPattern: mctx.gregs.7))\n gprs.rip = UInt64(bitPattern: mctx.gregs.16)\n gprs.rflags = UInt64(bitPattern: mctx.gregs.17)\n gprs.cs = UInt16(mctx.gregs.18 & 0xffff)\n gprs.fs = UInt16((mctx.gregs.18 >> 16) & 0xffff)\n gprs.gs = UInt16((mctx.gregs.18 >> 32) & 0xffff)\n gprs.valid = 0x1fffff\n }\n\n public static func fromHostMContext(_ mcontext: Any) -> HostContext {\n return X86_64Context(with: mcontext as! mcontext_t)\n }\n #endif\n\n #if os(Windows) || !SWIFT_ASM_AVAILABLE\n struct NotImplemented: Error {}\n public static func withCurrentContext<T>(fn: (X86_64Context) throws -> T) throws -> T {\n throw NotImplemented()\n }\n #elseif arch(x86_64)\n @usableFromInline\n @_silgen_name("_swift_get_cpu_context")\n static func _swift_get_cpu_context() -> X86_64Context\n\n @_transparent\n public static func withCurrentContext<T>(fn: (X86_64Context) throws -> T) rethrows -> T {\n return try fn(_swift_get_cpu_context())\n }\n #endif\n\n private func validNdx(_ register: Register) -> Int? {\n switch register {\n case .rax ... .r15:\n return register.rawValue\n case .rflags:\n return 16\n case .cs:\n return 17\n case .fs:\n return 18\n case .gs:\n return 19\n default:\n return nil\n }\n }\n\n private func isValid(_ register: Register) -> Bool {\n guard let ndx = validNdx(register) else {\n return false\n }\n return (gprs.valid & (UInt64(1) << ndx)) != 0\n }\n\n private mutating func setValid(_ register: Register) {\n guard let ndx = validNdx(register) else {\n return\n }\n gprs.valid |= UInt64(1) << ndx\n }\n\n private mutating func clearValid(_ register: Register) {\n guard let ndx = validNdx(register) else {\n return\n }\n gprs.valid &= ~(UInt64(1) << ndx)\n }\n\n public func getRegister(_ register: Register) -> GPRValue? {\n if !isValid(register) {\n return nil\n }\n\n switch register {\n case .rax ... 
.r15:\n return gprs.getR(register.rawValue)\n case .rflags: return gprs.rflags\n case .cs: return UInt64(gprs.cs)\n case .fs: return UInt64(gprs.fs)\n case .gs: return UInt64(gprs.gs)\n default:\n return nil\n }\n }\n\n public mutating func setRegister(_ register: Register, to value: GPRValue?) {\n if let value = value {\n switch register {\n case .rax ... .r15:\n gprs.setR(register.rawValue, to: value)\n case .rflags:\n gprs.rflags = value\n setValid(register)\n case .cs:\n gprs.cs = UInt16(value)\n setValid(register)\n case .fs:\n gprs.fs = UInt16(value)\n setValid(register)\n case .gs:\n gprs.gs = UInt16(value)\n setValid(register)\n default:\n return\n }\n } else {\n clearValid(register)\n }\n }\n\n public var description: String {\n return """\n rax: \(hex(gprs.getR(0))) rbx: \(hex(gprs.getR(3))) rcx: \(hex(gprs.getR(2)))\n rdx: \(hex(gprs.getR(1))) rsi: \(hex(gprs.getR(4))) rdi: \(hex(gprs.getR(5)))\n rbp: \(hex(gprs.getR(6))) rsp: \(hex(gprs.getR(7))) r8: \(hex(gprs.getR(8)))\n r9: \(hex(gprs.getR(9))) r10: \(hex(gprs.getR(10))) r11: \(hex(gprs.getR(11)))\n r12: \(hex(gprs.getR(12))) r13: \(hex(gprs.getR(13))) r14: \(hex(gprs.getR(14)))\n r15: \(hex(gprs.getR(15)))\n\n cs: \(hex(gprs.cs)) fs: \(hex(gprs.fs)) gs: \(hex(gprs.gs))\n\n rip: \(hex(gprs.rip)) rflags: \(hex(gprs.rflags))\n """\n }\n\n public static func isAlignedForStack(framePointer: Address) -> Bool {\n return (framePointer & 0xf) == 0\n }\n\n #if os(macOS) || os(iOS) || os(watchOS) || os(tvOS)\n internal static var coreSymbolicationArchitecture: CSArchitecture {\n return kCSArchitectureX86_64\n }\n #endif\n}\n\n// .. 
i386 .....................................................................\n\n@_spi(Contexts) public struct I386Context: Context {\n public typealias Address = UInt32\n public typealias Size = UInt32\n public typealias GPRValue = UInt32\n public typealias Register = I386Register\n\n var gprs = i386_gprs()\n\n public var architecture: String { "i386" }\n\n public var programCounter: GPRValue {\n get { return gprs.eip }\n set {\n gprs.eip = newValue\n gprs.valid |= 1 << 15\n }\n }\n\n public var framePointer: GPRValue {\n get { return gprs.getR(I386Register.ebp.rawValue) }\n set { gprs.setR(I386Register.ebp.rawValue, to: newValue) }\n }\n\n public var stackPointer: GPRValue {\n get { return gprs.getR(I386Register.esp.rawValue) }\n set { gprs.setR(I386Register.esp.rawValue, to: newValue) }\n }\n\n public var callFrameAddress: GPRValue {\n get { return stackPointer }\n set { stackPointer = newValue }\n }\n\n public static var registerCount: Int { return 50 }\n\n #if os(Linux) && arch(i386)\n init(with mctx: mcontext_t) {\n gprs.setR(I386Register.eax.rawValue, to: UInt32(bitPattern: mctx.gregs.11))\n gprs.setR(I386Register.ecx.rawValue, to: UInt32(bitPattern: mctx.gregs.10))\n gprs.setR(I386Register.edx.rawValue, to: UInt32(bitPattern: mctx.gregs.9))\n gprs.setR(I386Register.ebx.rawValue, to: UInt32(bitPattern: mctx.gregs.8))\n gprs.setR(I386Register.esp.rawValue, to: UInt32(bitPattern: mctx.gregs.7))\n gprs.setR(I386Register.ebp.rawValue, to: UInt32(bitPattern: mctx.gregs.6))\n gprs.setR(I386Register.ebp.rawValue, to: UInt32(bitPattern: mctx.gregs.5))\n gprs.setR(I386Register.ebp.rawValue, to: UInt32(bitPattern: mctx.gregs.4))\n gprs.eip = UInt32(bitPattern: mctx.gregs.14)\n gprs.eflags = UInt32(bitPattern: mctx.gregs.16)\n gprs.segreg.0 = UInt16(bitPattern: mctx.gregs.2 & 0xffff) // es\n gprs.segreg.1 = UInt16(bitPattern: mctx.gregs.15 & 0xffff) // cs\n gprs.segreg.2 = UInt16(bitPattern: mctx.gregs.18 & 0xffff) // ss\n gprs.segreg.3 = UInt16(bitPattern: mctx.gregs.3 & 
0xffff) // ds\n gprs.segreg.4 = UInt16(bitPattern: mctx.gregs.1 & 0xffff) // fs\n gprs.segreg.5 = UInt16(bitPattern: mctx.gregs.0 & 0xffff) // gs\n gprs.valid = 0x7fff\n }\n\n public static func fromHostMContext(_ mcontext: Any) -> HostContext {\n return I386Context(with: mcontext as! mcontext_t)\n }\n #endif\n\n #if os(Windows) || !SWIFT_ASM_AVAILABLE\n struct NotImplemented: Error {}\n public static func withCurrentContext<T>(fn: (I386Context) throws -> T) throws -> T {\n throw NotImplemented()\n }\n #elseif arch(i386)\n @usableFromInline\n @_silgen_name("_swift_get_cpu_context")\n static func _swift_get_cpu_context() -> I386Context\n\n @_transparent\n public static func withCurrentContext<T>(fn: (I386Context) throws -> T) rethrows -> T {\n return try fn(_swift_get_cpu_context())\n }\n #endif\n\n private func validNdx(_ register: Register) -> Int? {\n switch register {\n case .eax ... .edi:\n return register.rawValue\n case .eflags:\n return 8\n case .es, .cs, .ss, .ds, .fs, .gs:\n return 9 + register.rawValue - Register.es.rawValue\n case .ra:\n return 15\n default:\n return nil\n }\n }\n\n private func isValid(_ register: Register) -> Bool {\n guard let ndx = validNdx(register) else {\n return false\n }\n return (gprs.valid & (UInt32(1) << ndx)) != 0\n }\n\n private mutating func setValid(_ register: Register) {\n guard let ndx = validNdx(register) else {\n return\n }\n gprs.valid |= UInt32(1) << ndx\n }\n\n private mutating func clearValid(_ register: Register) {\n guard let ndx = validNdx(register) else {\n return\n }\n gprs.valid &= ~(UInt32(1) << ndx)\n }\n\n public func getRegister(_ register: Register) -> GPRValue? {\n if !isValid(register) {\n return nil\n }\n switch register {\n case .eax ... .edi:\n return gprs.getR(register.rawValue)\n case .eflags: return gprs.eflags\n case .es ... 
.gs:\n return withUnsafeBytes(of: gprs.segreg) { ptr in\n return ptr.withMemoryRebound(to: GPRValue.self) { regs in\n return regs[register.rawValue - Register.es.rawValue]\n }\n }\n case .ra: return gprs.eip\n default:\n return nil\n }\n }\n\n public mutating func setRegister(_ register: Register, to value: GPRValue?) {\n if let value = value {\n switch register {\n case .eax ... .edi:\n gprs.setR(register.rawValue, to: value)\n case .eflags:\n gprs.eflags = value\n setValid(register)\n case .es ... .gs:\n withUnsafeMutableBytes(of: &gprs.segreg) { ptr in\n ptr.withMemoryRebound(to: GPRValue.self) { regs in\n regs[register.rawValue - Register.es.rawValue] = value\n }\n }\n setValid(register)\n case .ra:\n gprs.eip = value\n setValid(register)\n default:\n return\n }\n } else {\n clearValid(register)\n }\n }\n\n public var description: String {\n return """\n eax: \(hex(gprs.getR(0))) ebx: \(hex(gprs.getR(3))) ecx: \(hex(gprs.getR(1))) edx: \(hex(gprs.getR(2)))\n esi: \(hex(gprs.getR(6))) edi: \(hex(gprs.getR(7))) ebp: \(hex(gprs.getR(5))) esp: \(hex(gprs.getR(4)))\n\n es: \(hex(gprs.segreg.0)) cs: \(hex(gprs.segreg.1)) ss: \(hex(gprs.segreg.2)) ds: \(hex(gprs.segreg.3)) fs: \(hex(gprs.segreg.4)) gs: \(hex(gprs.segreg.5))\n\n eip: \(hex(gprs.eip)) eflags: \(hex(gprs.eflags))\n """\n }\n\n public static func isAlignedForStack(framePointer: Address) -> Bool {\n return (framePointer & 0xf) == 8\n }\n\n #if os(macOS) || os(iOS) || os(watchOS) || os(tvOS)\n internal static var coreSymbolicationArchitecture: CSArchitecture {\n return kCSArchitectureI386\n }\n #endif\n}\n\n// .. 
ARM64 ....................................................................\n\n@_spi(Contexts) public struct ARM64Context: Context {\n public typealias Address = UInt64\n public typealias Size = UInt64\n public typealias GPRValue = UInt64\n public typealias Register = ARM64Register\n\n var gprs = arm64_gprs()\n\n public var architecture: String { "arm64" }\n\n public var programCounter: GPRValue {\n get { return gprs.pc }\n set {\n gprs.pc = newValue\n gprs.valid |= 1 << 32\n }\n }\n\n public var stackPointer: GPRValue {\n get { return gprs.getX(ARM64Register.sp.rawValue) }\n set {\n gprs.setX(ARM64Register.sp.rawValue, to: newValue)\n }\n }\n\n public var framePointer: GPRValue {\n get { return gprs.getX(ARM64Register.x29.rawValue) }\n set {\n gprs.setX(ARM64Register.x29.rawValue, to: newValue)\n }\n }\n\n public var callFrameAddress: GPRValue {\n get { return stackPointer }\n set { stackPointer = newValue }\n }\n\n public static var registerCount: Int { return 40 }\n\n #if os(macOS) && arch(arm64)\n init?(from thread: thread_t) {\n var state = darwin_arm64_thread_state()\n let kr = thread_get_state(thread,\n ARM_THREAD_STATE64,\n &state)\n if kr != KERN_SUCCESS {\n return nil\n }\n\n self.init(from: state)\n }\n\n init(with mctx: darwin_arm64_mcontext) {\n self.init(from: mctx.ss)\n }\n\n init(from state: darwin_arm64_thread_state) {\n withUnsafeMutablePointer(to: &gprs._x) {\n $0.withMemoryRebound(to: UInt64.self, capacity: 32){ to in\n withUnsafePointer(to: state._x) {\n $0.withMemoryRebound(to: UInt64.self, capacity: 29){ from in\n for n in 0..<29 {\n to[n] = from[n]\n }\n }\n }\n\n to[29] = state.fp\n to[30] = state.lr\n to[31] = state.sp\n }\n }\n gprs.pc = state.pc\n gprs.valid = 0x1ffffffff\n }\n\n public static func fromHostThread(_ thread: Any) -> HostContext? {\n return ARM64Context(from: thread as! thread_t)\n }\n\n public static func fromHostMContext(_ mcontext: Any) -> HostContext {\n return ARM64Context(with: mcontext as! 
darwin_arm64_mcontext)\n }\n #elseif os(Linux) && arch(arm64)\n init(with mctx: mcontext_t) {\n withUnsafeMutablePointer(to: &gprs._x) {\n $0.withMemoryRebound(to: UInt64.self, capacity: 32){ to in\n withUnsafePointer(to: mctx.regs) {\n $0.withMemoryRebound(to: UInt64.self, capacity: 31) { from in\n for n in 0..<31 {\n to[n] = from[n]\n }\n }\n }\n\n to[31] = UInt64(mctx.sp)\n }\n }\n gprs.pc = UInt64(mctx.pc)\n gprs.valid = 0x1ffffffff\n }\n\n public static func fromHostMContext(_ mcontext: Any) -> HostContext {\n return ARM64Context(with: mcontext as! mcontext_t)\n }\n #endif\n\n #if os(Windows) || !SWIFT_ASM_AVAILABLE\n struct NotImplemented: Error {}\n public static func withCurrentContext<T>(fn: (ARM64Context) throws -> T) throws -> T {\n throw NotImplemented()\n }\n #elseif arch(arm64) || arch(arm64_32)\n @usableFromInline\n @_silgen_name("_swift_get_cpu_context")\n static func _swift_get_cpu_context() -> ARM64Context\n\n @_transparent\n public static func withCurrentContext<T>(fn: (ARM64Context) throws -> T) rethrows -> T {\n return try fn(_swift_get_cpu_context())\n }\n #endif\n\n private func isValid(_ register: Register) -> Bool {\n if register.rawValue < 33 {\n return (gprs.valid & (UInt64(1) << register.rawValue)) != 0\n }\n return false\n }\n\n private mutating func setValid(_ register: Register) {\n if register.rawValue < 33 {\n gprs.valid |= UInt64(1) << register.rawValue\n }\n }\n\n private mutating func clearValid(_ register: Register) {\n if register.rawValue < 33 {\n gprs.valid &= ~(UInt64(1) << register.rawValue)\n }\n }\n\n public func getRegister(_ reg: Register) -> GPRValue? {\n if !isValid(reg) {\n return nil\n }\n switch reg {\n case .x0 ... .sp:\n return gprs.getX(reg.rawValue)\n case .pc:\n return gprs.pc\n default:\n return nil\n }\n }\n\n public mutating func setRegister(_ reg: Register, to value: GPRValue?) {\n if let value = value {\n switch reg {\n case .x0 ... 
.sp:\n gprs.setX(reg.rawValue, to: value)\n case .pc:\n gprs.pc = value\n setValid(reg)\n default:\n break\n }\n } else {\n clearValid(reg)\n }\n }\n\n public var description: String {\n return """\n x0: \(hex(gprs.getX(0))) x1: \(hex(gprs.getX(1)))\n x2: \(hex(gprs.getX(2))) x3: \(hex(gprs.getX(3)))\n x4: \(hex(gprs.getX(4))) x5: \(hex(gprs.getX(5)))\n x6: \(hex(gprs.getX(6))) x7: \(hex(gprs.getX(7)))\n x8: \(hex(gprs.getX(8))) x9: \(hex(gprs.getX(9)))\n x10: \(hex(gprs.getX(10))) x11: \(hex(gprs.getX(11)))\n x12: \(hex(gprs.getX(12))) x13: \(hex(gprs.getX(13)))\n x14: \(hex(gprs.getX(14))) x15: \(hex(gprs.getX(15)))\n x16: \(hex(gprs.getX(16))) x17: \(hex(gprs.getX(17)))\n x18: \(hex(gprs.getX(18))) x19: \(hex(gprs.getX(19)))\n x20: \(hex(gprs.getX(20))) x21: \(hex(gprs.getX(21)))\n x22: \(hex(gprs.getX(22))) x23: \(hex(gprs.getX(23)))\n x24: \(hex(gprs.getX(24))) x25: \(hex(gprs.getX(25)))\n x26: \(hex(gprs.getX(26))) x27: \(hex(gprs.getX(27)))\n x28: \(hex(gprs.getX(28)))\n\n fp: \(hex(gprs.getX(29))) (aka x29)\n lr: \(hex(gprs.getX(30))) (aka x30)\n sp: \(hex(gprs.getX(31))) (aka x31)\n\n pc: \(hex(gprs.pc))\n """\n }\n\n public static func isAlignedForStack(framePointer: Address) -> Bool {\n return (framePointer & 1) == 0\n }\n\n #if os(macOS) || os(iOS) || os(watchOS) || os(tvOS)\n public static func stripPtrAuth(address: Address) -> Address {\n // Is there a better way to do this? It'd be easy if we just wanted to\n // strip for the *host*, but we might conceivably want this under other\n // circumstances too.\n return address & 0x00007fffffffffff\n }\n\n internal static var coreSymbolicationArchitecture: CSArchitecture {\n return kCSArchitectureArm64\n }\n #endif\n}\n\n// .. 
32-bit ARM ...............................................................\n\n@_spi(Contexts) public struct ARMContext: Context {\n public typealias Address = UInt32\n public typealias Size = UInt32\n public typealias GPRValue = UInt32\n public typealias Register = ARMRegister\n\n var gprs = arm_gprs()\n\n public var architecture: String { "arm" }\n\n public var programCounter: GPRValue {\n get { return gprs.getR(ARMRegister.r15.rawValue) }\n set { gprs.setR(ARMRegister.r15.rawValue, to: newValue) }\n }\n\n public var stackPointer: GPRValue {\n get { return gprs.getR(ARMRegister.r13.rawValue) }\n set { gprs.setR(ARMRegister.r13.rawValue, to: newValue) }\n }\n\n public var framePointer: GPRValue {\n get { return gprs.getR(ARMRegister.r11.rawValue) }\n set { gprs.setR(ARMRegister.r11.rawValue, to: newValue) }\n }\n\n public var callFrameAddress: GPRValue {\n get { return stackPointer }\n set { stackPointer = newValue }\n }\n\n public static var registerCount: Int { return 16 }\n\n #if os(Linux) && arch(arm)\n init(with mctx: mcontext_t) {\n withUnsafeMutablePointer(to: &gprs._r) {\n $0.withMemoryRebound(to: UInt32.self, capacity: 16) {\n withUnsafePointer(to: &mctx.arm_r0) {\n $0.withMemoryRebound(to: UInt32.self, capacity: 16) {\n for n in 0..<16 {\n to[n] = from[n]\n }\n }\n }\n }\n }\n gprs.valid = 0xffff\n }\n\n public static func fromHostMContext(_ mcontext: Any) -> HostContext {\n return ARMContext(with: mcontext as! 
mcontext_t)\n }\n #endif\n\n #if os(Windows) || !SWIFT_ASM_AVAILABLE\n struct NotImplemented: Error {}\n public static func withCurrentContext<T>(fn: (ARMContext) throws -> T) throws -> T {\n throw NotImplemented()\n }\n #elseif arch(arm)\n @usableFromInline\n @_silgen_name("_swift_get_cpu_context")\n static func _swift_get_cpu_context() -> ARMContext\n\n @_transparent\n public static func withCurrentContext<T>(fn: (ARMContext) throws -> T) rethrows -> T {\n return try fn(_swift_get_cpu_context())\n }\n #endif\n\n private func isValid(_ register: Register) -> Bool {\n if register.rawValue < 16 {\n return (gprs.valid & (UInt32(1) << register.rawValue)) != 0\n }\n return false\n }\n\n private mutating func setValid(_ register: Register) {\n if register.rawValue < 16 {\n gprs.valid |= UInt32(1) << register.rawValue\n }\n }\n\n private mutating func clearValid(_ register: Register) {\n if register.rawValue < 16 {\n gprs.valid &= ~(UInt32(1) << register.rawValue)\n }\n }\n\n public func getRegister(_ reg: Register) -> GPRValue? {\n if !isValid(reg) {\n return nil\n }\n switch reg {\n case .r0 ... .r15:\n return gprs.getR(reg.rawValue)\n default:\n return nil\n }\n }\n\n public mutating func setRegister(_ reg: Register, to value: GPRValue?) {\n if let value = value {\n switch reg {\n case .r0 ... 
.r15:\n gprs.setR(reg.rawValue, to: value)\n default:\n break\n }\n } else {\n clearValid(reg)\n }\n }\n\n public var description: String {\n return """\n r0: \(hex(gprs.getR(0))) r1: \(hex(gprs.getR(1)))\n r2: \(hex(gprs.getR(2))) r3: \(hex(gprs.getR(3)))\n r4: \(hex(gprs.getR(4))) r5: \(hex(gprs.getR(5)))\n r6: \(hex(gprs.getR(6))) r7: \(hex(gprs.getR(7)))\n r8: \(hex(gprs.getR(8))) r9: \(hex(gprs.getR(9)))\n r10: \(hex(gprs.getR(10)))\n\n fp: \(hex(gprs.getR(11))) (aka r11)\n ip: \(hex(gprs.getR(12))) (aka r12)\n sp: \(hex(gprs.getR(13))) (aka r13)\n lr: \(hex(gprs.getR(14))) (aka r14)\n pc: \(hex(gprs.getR(15))) (aka r15)\n """\n }\n\n public static func isAlignedForStack(framePointer: Address) -> Bool {\n return (framePointer & 1) == 0\n }\n\n #if os(macOS) || os(iOS) || os(watchOS) || os(tvOS)\n internal static var coreSymbolicationArchitecture: CSArchitecture {\n return kCSArchitectureArmV7K\n }\n #endif\n}\n\n// .. Darwin specifics .........................................................\n\n#if (os(macOS) || os(iOS) || os(watchOS) || os(tvOS))\nprivate func thread_get_state<T>(_ thread: thread_t,\n _ flavor: CInt,\n _ result: inout T) -> kern_return_t {\n var count: mach_msg_type_number_t\n = mach_msg_type_number_t(MemoryLayout<T>.stride\n / MemoryLayout<natural_t>.stride)\n\n return withUnsafeMutablePointer(to: &result) { ptr in\n ptr.withMemoryRebound(to: natural_t.self,\n capacity: Int(count)) { intPtr in\n return thread_get_state(thread,\n thread_state_flavor_t(flavor),\n intPtr,\n &count)\n }\n }\n}\n#endif\n\n// .. 
HostContext ..............................................................\n\n/// HostContext is an alias for the appropriate context for the machine on which\n/// the code was compiled.\n#if arch(x86_64)\n@_spi(Contexts) public typealias HostContext = X86_64Context\n#elseif arch(i386)\n@_spi(Contexts) public typealias HostContext = I386Context\n#elseif arch(arm64) || arch(arm64_32)\n@_spi(Contexts) public typealias HostContext = ARM64Context\n#elseif arch(arm)\n@_spi(Contexts) public typealias HostContext = ARMContext\n#endif\n
dataset_sample\swift\swift\cpp_apple_swift_stdlib_public_RuntimeModule_Context.swift
cpp_apple_swift_stdlib_public_RuntimeModule_Context.swift
Swift
31,128
0.95
0.064579
0.112885
python-kit
764
2024-02-08T04:15:25.799062
BSD-3-Clause
false
268f4d8930a0f3a69d3aa10c8306535e
//===--- CoreSymbolication.swift - Shims for CoreSymbolication ------------===//\n//\n// This source file is part of the Swift.org open source project\n//\n// Copyright (c) 2022 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n//\n// CoreSymbolication is a private framework, which makes it tricky to link\n// with from here and also means there are no headers on customer builds.\n//\n//===----------------------------------------------------------------------===//\n\n#if os(iOS) || os(macOS) || os(tvOS) || os(watchOS)\n\nimport Swift\n\ninternal import Darwin\ninternal import BacktracingImpl.OS.Darwin\n\n// .. Dynamic binding ..........................................................\n\nprivate let coreFoundationPath =\n "/System/Library/Frameworks/CoreFoundation.framework/CoreFoundation"\n\nprivate let coreFoundationHandle = dlopen(coreFoundationPath, RTLD_LAZY)!\n\nprivate let coreSymbolicationPath =\n "/System/Library/PrivateFrameworks/CoreSymbolication.framework/CoreSymbolication"\nprivate let coreSymbolicationHandle = dlopen(coreSymbolicationPath, RTLD_LAZY)!\n\nprivate let crashReporterSupportPath =\n "/System/Library/PrivateFrameworks/CrashReporterSupport.framework/CrashReporterSupport"\n\nprivate let crashReporterSupportHandle\n = dlopen(crashReporterSupportPath, RTLD_LAZY)!\n\nprivate func symbol<T>(_ handle: UnsafeMutableRawPointer, _ name: String) -> T {\n guard let result = dlsym(handle, name) else {\n fatalError("Unable to look up \(name) in CoreSymbolication")\n }\n return unsafeBitCast(result, to: T.self)\n}\n\n// Define UniChar\ntypealias UniChar = UInt16\n\nprivate enum Sym {\n // CRCopySanitizedPath\n static let CRCopySanitizedPath: @convention(c) (CFString, 
CFIndex) -> CFString =\n symbol(crashReporterSupportHandle, "CRCopySanitizedPath")\n\n // Base functionality\n static let CSRetain: @convention(c) (CSTypeRef) -> CSTypeRef =\n symbol(coreSymbolicationHandle, "CSRetain")\n static let CSRelease: @convention(c) (CSTypeRef) -> () =\n symbol(coreSymbolicationHandle, "CSRelease")\n static let CSEqual: @convention(c) (CSTypeRef, CSTypeRef) -> CBool =\n symbol(coreSymbolicationHandle, "CSEqual")\n static let CSIsNull: @convention(c) (CSTypeRef) -> CBool =\n symbol(coreSymbolicationHandle, "CSIsNull")\n\n // CSSymbolicator\n static let CSSymbolicatorCreateWithBinaryImageList:\n @convention(c) (UnsafeMutablePointer<CSBinaryImageInformation>,\n UInt32, UInt32, CSNotificationBlock?) -> CSSymbolicatorRef =\n symbol(coreSymbolicationHandle, "CSSymbolicatorCreateWithBinaryImageList")\n\n static let CSSymbolicatorGetSymbolOwnerWithAddressAtTime:\n @convention(c) (CSSymbolicatorRef, vm_address_t,\n CSMachineTime) -> CSSymbolOwnerRef =\n symbol(coreSymbolicationHandle, "CSSymbolicatorGetSymbolOwnerWithAddressAtTime")\n static let CSSymbolicatorForeachSymbolOwnerAtTime:\n @convention(c) (CSSymbolicatorRef, CSMachineTime, @convention(block) (CSSymbolOwnerRef) -> Void) -> UInt =\n symbol(coreSymbolicationHandle, "CSSymbolicatorForeachSymbolOwnerAtTime")\n\n // CSSymbolOwner\n static let CSSymbolOwnerGetName:\n @convention(c) (CSSymbolOwnerRef) -> UnsafePointer<CChar>? 
=\n symbol(coreSymbolicationHandle, "CSSymbolOwnerGetName")\n static let CSSymbolOwnerGetSymbolWithAddress:\n @convention(c) (CSSymbolOwnerRef, vm_address_t) -> CSSymbolRef =\n symbol(coreSymbolicationHandle, "CSSymbolOwnerGetSymbolWithAddress")\n static let CSSymbolOwnerGetSourceInfoWithAddress:\n @convention(c) (CSSymbolOwnerRef, vm_address_t) -> CSSourceInfoRef =\n symbol(coreSymbolicationHandle, "CSSymbolOwnerGetSourceInfoWithAddress")\n static let CSSymbolOwnerForEachStackFrameAtAddress:\n @convention(c) (CSSymbolOwnerRef, vm_address_t, CSStackFrameIterator) -> UInt =\n symbol(coreSymbolicationHandle, "CSSymbolOwnerForEachStackFrameAtAddress")\n static let CSSymbolOwnerGetBaseAddress:\n @convention(c) (CSSymbolOwnerRef) -> vm_address_t =\n symbol(coreSymbolicationHandle, "CSSymbolOwnerGetBaseAddress")\n\n // CSSymbol\n static let CSSymbolGetRange:\n @convention(c) (CSSymbolRef) -> CSRange =\n symbol(coreSymbolicationHandle, "CSSymbolGetRange")\n static let CSSymbolGetName:\n @convention(c) (CSSymbolRef) -> UnsafePointer<CChar>? =\n symbol(coreSymbolicationHandle, "CSSymbolGetName")\n static let CSSymbolGetMangledName:\n @convention(c) (CSSymbolRef) -> UnsafePointer<CChar>? =\n symbol(coreSymbolicationHandle, "CSSymbolGetMangledName")\n\n // CSSourceInfo\n static let CSSourceInfoGetPath:\n @convention(c) (CSSourceInfoRef) -> UnsafePointer<CChar>? =\n symbol(coreSymbolicationHandle, "CSSourceInfoGetPath")\n static let CSSourceInfoGetLineNumber:\n @convention(c) (CSSourceInfoRef) -> UInt32 =\n symbol(coreSymbolicationHandle, "CSSourceInfoGetLineNumber")\n static let CSSourceInfoGetColumn:\n @convention(c) (CSSourceInfoRef) -> UInt32 =\n symbol(coreSymbolicationHandle, "CSSourceInfoGetColumn")\n\n // CFString\n static let CFStringCreateWithBytes:\n @convention(c) (CFAllocator?, UnsafeRawPointer?, CFIndex,\n CFStringEncoding, Bool) -> CFString? 
=\n symbol(coreFoundationHandle, "CFStringCreateWithBytes")\n static let CFStringGetLength:\n @convention(c) (CFString) -> CFIndex =\n symbol(coreFoundationHandle, "CFStringGetLength")\n static let CFStringGetCStringPtr:\n @convention(c) (CFString, CFStringEncoding) -> UnsafePointer<CChar>? =\n symbol(coreFoundationHandle, "CFStringGetCStringPtr")\n static let CFStringGetBytes:\n @convention(c) (CFString, CFRange, CFStringEncoding, UInt8, Bool,\n UnsafeMutableRawPointer?, CFIndex,\n UnsafeMutablePointer<CFIndex>?) -> CFIndex =\n symbol(coreFoundationHandle, "CFStringGetBytes")\n static let CFStringGetCharactersPtr:\n @convention(c) (CFString) -> UnsafePointer<UniChar>? =\n symbol(coreFoundationHandle, "CFStringGetCharactersPtr")\n}\n\n// .. Core Foundation miscellany ...............................................\n\ninternal func CFRangeMake(_ location: CFIndex, _ length: CFIndex) -> CFRange {\n return CFRange(location: location, length: length)\n}\n\ninternal func CFStringCreateWithBytes(_ allocator: CFAllocator?,\n _ bytes: UnsafeRawPointer?,\n _ length: CFIndex,\n _ encoding: CFStringEncoding,\n _ isExternalRepresentation: Bool)\n -> CFString? {\n return Sym.CFStringCreateWithBytes(allocator,\n bytes,\n length,\n encoding,\n isExternalRepresentation)\n}\n\ninternal func CFStringGetLength(_ s: CFString) -> CFIndex {\n return Sym.CFStringGetLength(s)\n}\n\ninternal func CFStringGetCStringPtr(_ s: CFString,\n _ encoding: CFStringEncoding)\n -> UnsafePointer<CChar>? {\n return Sym.CFStringGetCStringPtr(s, encoding)\n}\n\ninternal func CFStringGetCharactersPtr(_ s: CFString)\n -> UnsafePointer<UniChar>? 
{\n return Sym.CFStringGetCharactersPtr(s);\n}\n\ninternal func CFStringGetBytes(_ s: CFString,\n _ range: CFRange,\n _ encoding: CFStringEncoding,\n _ lossByte: UInt8,\n _ isExternalRepresentation: Bool,\n _ buffer: UnsafeMutableRawPointer?,\n _ maxBufLen: CFIndex,\n _ usedBufLen: UnsafeMutablePointer<CFIndex>?)\n -> CFIndex {\n return Sym.CFStringGetBytes(s, range, encoding, lossByte,\n isExternalRepresentation, buffer, maxBufLen,\n usedBufLen)\n}\n\n// .. Crash Reporter support ...................................................\n\n// We can't import swiftFoundation here, so there's no automatic bridging for\n// CFString. As a result, we need to do the dance manually.\n\nprivate func toCFString(_ s: String) -> CFString! {\n var s = s\n return s.withUTF8 {\n return CFStringCreateWithBytes(nil,\n $0.baseAddress,\n $0.count,\n CFStringBuiltInEncodings.UTF8.rawValue,\n false)\n }\n}\n\nprivate func fromCFString(_ cf: CFString) -> String {\n let length = CFStringGetLength(cf)\n if length == 0 {\n return ""\n }\n\n if let ptr = CFStringGetCStringPtr(cf,\n CFStringBuiltInEncodings.ASCII.rawValue) {\n let buffer = UnsafeRawBufferPointer(start: ptr, count: length)\n return String(decoding: buffer, as: UTF8.self)\n } else if let ptr = CFStringGetCharactersPtr(cf) {\n let buffer = UnsafeBufferPointer(start: ptr, count: length)\n return String(decoding: buffer, as: UTF16.self)\n } else if let ptr = CFStringGetCStringPtr(cf,\n CFStringBuiltInEncodings.UTF8.rawValue) {\n let buffer = UnsafeRawBufferPointer(start: ptr, count: length)\n return String(decoding: buffer, as: UTF8.self)\n } else {\n var byteLen = CFIndex(0)\n\n _ = CFStringGetBytes(cf,\n CFRangeMake(0, length),\n CFStringBuiltInEncodings.UTF8.rawValue,\n 0,\n false,\n nil,\n 0,\n &byteLen)\n\n let buffer = UnsafeMutableBufferPointer<UInt8>.allocate(capacity: byteLen)\n defer {\n buffer.deallocate()\n }\n\n _ = CFStringGetBytes(cf, CFRangeMake(0, length),\n CFStringBuiltInEncodings.UTF8.rawValue,\n 0, false, 
buffer.baseAddress, buffer.count, nil)\n\n return String(decoding: buffer, as: UTF8.self)\n }\n}\n\nfunc CRCopySanitizedPath(_ path: String, _ options: Int) -> String {\n return fromCFString(Sym.CRCopySanitizedPath(toCFString(path), CFIndex(options)))\n}\n\n// .. Base functionality .......................................................\n\nfunc CSRetain(_ obj: CSTypeRef) -> CSTypeRef {\n return Sym.CSRetain(obj)\n}\n\nfunc CSRelease(_ obj: CSTypeRef) {\n Sym.CSRelease(obj)\n}\n\nfunc CSEqual(_ a: CSTypeRef, _ b: CSTypeRef) -> Bool {\n return Sym.CSEqual(a, b)\n}\n\nfunc CSIsNull(_ obj: CSTypeRef) -> Bool {\n return Sym.CSIsNull(obj)\n}\n\n// .. CSSymbolicator ...........................................................\n\nlet kCSSymbolicatorDisallowDaemonCommunication = UInt32(0x00000800)\n\nstruct BinaryRelocationInformation {\n var base: vm_address_t\n var extent: vm_address_t\n var name: String\n}\n\nstruct BinaryImageInformation {\n var base: vm_address_t\n var extent: vm_address_t\n var uuid: CFUUIDBytes\n var arch: CSArchitecture\n var path: String\n var relocations: [BinaryRelocationInformation]\n var flags: UInt32\n}\n\nfunc CSSymbolicatorCreateWithBinaryImageList(\n _ imageInfo: [BinaryImageInformation],\n _ flags: UInt32,\n _ notificationBlock: CSNotificationBlock?) 
-> CSSymbolicatorRef {\n\n // Convert the Swifty types above to suitable input for the C API\n var pathBuf: [UInt8] = []\n let imageList = UnsafeMutableBufferPointer<CSBinaryImageInformation>.allocate(capacity: imageInfo.count)\n defer {\n imageList.deallocate()\n }\n\n var totalRelocations = 0\n for image in imageInfo {\n totalRelocations += image.relocations.count\n\n pathBuf.insert(contentsOf: image.path.utf8, at: pathBuf.count)\n pathBuf.append(0)\n }\n\n let relocationList = UnsafeMutableBufferPointer<CSBinaryRelocationInformation>.allocate(capacity: totalRelocations)\n defer {\n relocationList.deallocate()\n }\n\n return pathBuf.withUnsafeBufferPointer {\n $0.withMemoryRebound(to: CChar.self) { pathData in\n var pathPtr = pathData.baseAddress!\n var relocationPtr = relocationList.baseAddress!\n\n for (n, image) in imageInfo.enumerated() {\n imageList[n].base = image.base\n imageList[n].extent = image.extent\n imageList[n].uuid = image.uuid\n imageList[n].arch = image.arch\n imageList[n].path = pathPtr\n imageList[n].relocations = relocationPtr\n imageList[n].relocationCount = UInt32(image.relocations.count)\n imageList[n].flags = image.flags\n\n pathPtr += strlen(pathPtr) + 1\n\n for relocation in image.relocations {\n relocationPtr.pointee.base = relocation.base\n relocationPtr.pointee.extent = relocation.extent\n withUnsafeMutablePointer(to: &relocationPtr.pointee.name) {\n $0.withMemoryRebound(to: CChar.self, capacity: 17) { buf in\n var utf8Iterator = relocation.name.utf8.makeIterator()\n var ndx = 0\n while let ch = utf8Iterator.next(), ndx < 16 {\n buf[ndx] = CChar(bitPattern: ch)\n ndx += 1\n }\n buf[ndx] = 0\n }\n }\n\n relocationPtr += 1\n }\n }\n\n return Sym.CSSymbolicatorCreateWithBinaryImageList(\n imageList.baseAddress!,\n UInt32(imageList.count),\n flags,\n notificationBlock\n )\n }\n }\n}\n\nfunc CSSymbolicatorGetSymbolOwnerWithAddressAtTime(\n _ symbolicator: CSSymbolicatorRef,\n _ addr: vm_address_t,\n _ time: CSMachineTime\n) -> 
CSSymbolOwnerRef {\n return Sym.CSSymbolicatorGetSymbolOwnerWithAddressAtTime(symbolicator,\n addr, time)\n}\n\nfunc CSSymbolicatorForeachSymbolOwnerAtTime(\n _ symbolicator: CSSymbolicatorRef,\n _ time: CSMachineTime,\n _ symbolIterator: (CSSymbolOwnerRef) -> Void\n ) -> UInt {\n return Sym.CSSymbolicatorForeachSymbolOwnerAtTime(symbolicator, time,\n symbolIterator)\n}\n\n// .. CSSymbolOwner ............................................................\n\nfunc CSSymbolOwnerGetName(_ sym: CSTypeRef) -> String? {\n Sym.CSSymbolOwnerGetName(sym)\n .map(String.init(cString:))\n}\n\nfunc CSSymbolOwnerGetSymbolWithAddress(\n _ owner: CSSymbolOwnerRef,\n _ address: vm_address_t\n) -> CSSymbolRef {\n return Sym.CSSymbolOwnerGetSymbolWithAddress(owner, address)\n}\n\nfunc CSSymbolOwnerGetSourceInfoWithAddress(\n _ owner: CSSymbolOwnerRef,\n _ address: vm_address_t\n) -> CSSourceInfoRef {\n return Sym.CSSymbolOwnerGetSourceInfoWithAddress(owner, address)\n}\n\nfunc CSSymbolOwnerForEachStackFrameAtAddress(\n _ owner: CSSymbolOwnerRef,\n _ address: vm_address_t,\n _ iterator: CSStackFrameIterator\n) -> UInt {\n return Sym.CSSymbolOwnerForEachStackFrameAtAddress(owner, address, iterator)\n}\n\nfunc CSSymbolOwnerGetBaseAddress(\n _ owner: CSSymbolOwnerRef\n) -> vm_address_t {\n return Sym.CSSymbolOwnerGetBaseAddress(owner)\n}\n\n// .. CSSymbol .................................................................\n\nfunc CSSymbolGetRange(_ symbol: CSSymbolRef) -> CSRange {\n return Sym.CSSymbolGetRange(symbol)\n}\n\nfunc CSSymbolGetName(_ symbol: CSSymbolRef) -> String? {\n return Sym.CSSymbolGetName(symbol).map{ String(cString: $0) }\n}\n\nfunc CSSymbolGetMangledName(_ symbol: CSSymbolRef) -> String? {\n return Sym.CSSymbolGetMangledName(symbol).map{ String(cString: $0) }\n}\n\n// .. CSSourceInfo .............................................................\n\nfunc CSSourceInfoGetPath(_ sourceInfo: CSSourceInfoRef) -> String? 
{\n return Sym.CSSourceInfoGetPath(sourceInfo).map{ String(cString: $0) }\n}\n\nfunc CSSourceInfoGetLineNumber(_ sourceInfo: CSSourceInfoRef) -> UInt32 {\n return Sym.CSSourceInfoGetLineNumber(sourceInfo)\n}\n\nfunc CSSourceInfoGetColumn(_ sourceInfo: CSSourceInfoRef) -> UInt32 {\n return Sym.CSSourceInfoGetColumn(sourceInfo)\n}\n\n#endif // os(Darwin)\n
dataset_sample\swift\swift\cpp_apple_swift_stdlib_public_RuntimeModule_CoreSymbolication.swift
cpp_apple_swift_stdlib_public_RuntimeModule_CoreSymbolication.swift
Swift
15,923
0.95
0.031746
0.100543
python-kit
835
2025-04-21T10:39:49.955713
GPL-3.0
false
154ae001473fc9ad9dce49bf2cfbb2d9
//===--- Dwarf.swift - DWARF support for Swift ----------------------------===//\n//\n// This source file is part of the Swift.org open source project\n//\n// Copyright (c) 2023 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n//\n// Defines various DWARF structures and provides types for working with\n// DWARF data on disk and in memory.\n//\n//===----------------------------------------------------------------------===//\n\nimport Swift\n\ninternal import BacktracingImpl.ImageFormats.Dwarf\n\n// .. Use *our* Dwarf definitions ..............................................\n\n// To avoid confusion with other similar sets of definitions, we've put ours\n// into a C++ wrapper, which means we need aliases here.\n\ntypealias Dwarf_Byte = swift.runtime.Dwarf_Byte\ntypealias Dwarf_Half = swift.runtime.Dwarf_Half\ntypealias Dwarf_Word = swift.runtime.Dwarf_Word\ntypealias Dwarf_Xword = swift.runtime.Dwarf_Xword\ntypealias Dwarf_Sbyte = swift.runtime.Dwarf_Sbyte\ntypealias Dwarf_Sword = swift.runtime.Dwarf_Sword\ntypealias Dwarf_Sxword = swift.runtime.Dwarf_Sxword\n\ntypealias Dwarf32_Offset = swift.runtime.Dwarf32_Offset\ntypealias Dwarf32_Size = swift.runtime.Dwarf32_Size\ntypealias Dwarf32_Length = swift.runtime.Dwarf32_Length\n\ntypealias Dwarf64_Offset = swift.runtime.Dwarf64_Offset\ntypealias Dwarf64_Size = swift.runtime.Dwarf64_Size\ntypealias Dwarf64_Length = swift.runtime.Dwarf64_Length\n\ntypealias Dwarf_UnitType = swift.runtime.Dwarf_UnitType\ntypealias Dwarf_Tag = swift.runtime.Dwarf_Tag\ntypealias Dwarf_ChildDetermination = swift.runtime.Dwarf_ChildDetermination\ntypealias Dwarf_Attribute = swift.runtime.Dwarf_Attribute\ntypealias Dwarf_Form = 
swift.runtime.Dwarf_Form\n\nlet DW_OP_addr = swift.runtime.DW_OP_addr\nlet DW_OP_deref = swift.runtime.DW_OP_deref\nlet DW_OP_const1u = swift.runtime.DW_OP_const1u\nlet DW_OP_const1s = swift.runtime.DW_OP_const1s\nlet DW_OP_const2u = swift.runtime.DW_OP_const2u\nlet DW_OP_const2s = swift.runtime.DW_OP_const2s\nlet DW_OP_const4u = swift.runtime.DW_OP_const4u\nlet DW_OP_const4s = swift.runtime.DW_OP_const4s\nlet DW_OP_const8u = swift.runtime.DW_OP_const8u\nlet DW_OP_const8s = swift.runtime.DW_OP_const8s\nlet DW_OP_constu = swift.runtime.DW_OP_constu\nlet DW_OP_consts = swift.runtime.DW_OP_consts\nlet DW_OP_dup = swift.runtime.DW_OP_dup\nlet DW_OP_drop = swift.runtime.DW_OP_drop\nlet DW_OP_over = swift.runtime.DW_OP_over\nlet DW_OP_pick = swift.runtime.DW_OP_pick\nlet DW_OP_swap = swift.runtime.DW_OP_swap\nlet DW_OP_rot = swift.runtime.DW_OP_rot\nlet DW_OP_xderef = swift.runtime.DW_OP_xderef\nlet DW_OP_abs = swift.runtime.DW_OP_abs\nlet DW_OP_and = swift.runtime.DW_OP_and\nlet DW_OP_div = swift.runtime.DW_OP_div\nlet DW_OP_minus = swift.runtime.DW_OP_minus\nlet DW_OP_mod = swift.runtime.DW_OP_mod\nlet DW_OP_mul = swift.runtime.DW_OP_mul\nlet DW_OP_neg = swift.runtime.DW_OP_neg\nlet DW_OP_not = swift.runtime.DW_OP_not\nlet DW_OP_or = swift.runtime.DW_OP_or\nlet DW_OP_plus = swift.runtime.DW_OP_plus\nlet DW_OP_plus_uconst = swift.runtime.DW_OP_plus_uconst\nlet DW_OP_shl = swift.runtime.DW_OP_shl\nlet DW_OP_shr = swift.runtime.DW_OP_shr\nlet DW_OP_shra = swift.runtime.DW_OP_shra\nlet DW_OP_xor = swift.runtime.DW_OP_xor\nlet DW_OP_bra = swift.runtime.DW_OP_bra\nlet DW_OP_eq = swift.runtime.DW_OP_eq\nlet DW_OP_ge = swift.runtime.DW_OP_ge\nlet DW_OP_gt = swift.runtime.DW_OP_gt\nlet DW_OP_le = swift.runtime.DW_OP_le\nlet DW_OP_lt = swift.runtime.DW_OP_lt\nlet DW_OP_ne = swift.runtime.DW_OP_ne\nlet DW_OP_skip = swift.runtime.DW_OP_skip\nlet DW_OP_lit0 = swift.runtime.DW_OP_lit0\nlet DW_OP_lit1 = swift.runtime.DW_OP_lit1\nlet DW_OP_lit2 = swift.runtime.DW_OP_lit2\nlet 
DW_OP_lit3 = swift.runtime.DW_OP_lit3\nlet DW_OP_lit4 = swift.runtime.DW_OP_lit4\nlet DW_OP_lit5 = swift.runtime.DW_OP_lit5\nlet DW_OP_lit6 = swift.runtime.DW_OP_lit6\nlet DW_OP_lit7 = swift.runtime.DW_OP_lit7\nlet DW_OP_lit8 = swift.runtime.DW_OP_lit8\nlet DW_OP_lit9 = swift.runtime.DW_OP_lit9\nlet DW_OP_lit10 = swift.runtime.DW_OP_lit10\nlet DW_OP_lit11 = swift.runtime.DW_OP_lit11\nlet DW_OP_lit12 = swift.runtime.DW_OP_lit12\nlet DW_OP_lit13 = swift.runtime.DW_OP_lit13\nlet DW_OP_lit14 = swift.runtime.DW_OP_lit14\nlet DW_OP_lit15 = swift.runtime.DW_OP_lit15\nlet DW_OP_lit16 = swift.runtime.DW_OP_lit16\nlet DW_OP_lit17 = swift.runtime.DW_OP_lit17\nlet DW_OP_lit18 = swift.runtime.DW_OP_lit18\nlet DW_OP_lit19 = swift.runtime.DW_OP_lit19\nlet DW_OP_lit20 = swift.runtime.DW_OP_lit20\nlet DW_OP_lit21 = swift.runtime.DW_OP_lit21\nlet DW_OP_lit22 = swift.runtime.DW_OP_lit22\nlet DW_OP_lit23 = swift.runtime.DW_OP_lit23\nlet DW_OP_lit24 = swift.runtime.DW_OP_lit24\nlet DW_OP_lit25 = swift.runtime.DW_OP_lit25\nlet DW_OP_lit26 = swift.runtime.DW_OP_lit26\nlet DW_OP_lit27 = swift.runtime.DW_OP_lit27\nlet DW_OP_lit28 = swift.runtime.DW_OP_lit28\nlet DW_OP_lit29 = swift.runtime.DW_OP_lit29\nlet DW_OP_lit30 = swift.runtime.DW_OP_lit30\nlet DW_OP_lit31 = swift.runtime.DW_OP_lit31\n\nlet DW_OP_reg0 = swift.runtime.DW_OP_reg0\nlet DW_OP_reg1 = swift.runtime.DW_OP_reg1\nlet DW_OP_reg2 = swift.runtime.DW_OP_reg2\nlet DW_OP_reg3 = swift.runtime.DW_OP_reg3\nlet DW_OP_reg4 = swift.runtime.DW_OP_reg4\nlet DW_OP_reg5 = swift.runtime.DW_OP_reg5\nlet DW_OP_reg6 = swift.runtime.DW_OP_reg6\nlet DW_OP_reg7 = swift.runtime.DW_OP_reg7\nlet DW_OP_reg8 = swift.runtime.DW_OP_reg8\nlet DW_OP_reg9 = swift.runtime.DW_OP_reg9\nlet DW_OP_reg10 = swift.runtime.DW_OP_reg10\nlet DW_OP_reg11 = swift.runtime.DW_OP_reg11\nlet DW_OP_reg12 = swift.runtime.DW_OP_reg12\nlet DW_OP_reg13 = swift.runtime.DW_OP_reg13\nlet DW_OP_reg14 = swift.runtime.DW_OP_reg14\nlet DW_OP_reg15 = swift.runtime.DW_OP_reg15\nlet 
DW_OP_reg16 = swift.runtime.DW_OP_reg16\nlet DW_OP_reg17 = swift.runtime.DW_OP_reg17\nlet DW_OP_reg18 = swift.runtime.DW_OP_reg18\nlet DW_OP_reg19 = swift.runtime.DW_OP_reg19\nlet DW_OP_reg20 = swift.runtime.DW_OP_reg20\nlet DW_OP_reg21 = swift.runtime.DW_OP_reg21\nlet DW_OP_reg22 = swift.runtime.DW_OP_reg22\nlet DW_OP_reg23 = swift.runtime.DW_OP_reg23\nlet DW_OP_reg24 = swift.runtime.DW_OP_reg24\nlet DW_OP_reg25 = swift.runtime.DW_OP_reg25\nlet DW_OP_reg26 = swift.runtime.DW_OP_reg26\nlet DW_OP_reg27 = swift.runtime.DW_OP_reg27\nlet DW_OP_reg28 = swift.runtime.DW_OP_reg28\nlet DW_OP_reg29 = swift.runtime.DW_OP_reg29\nlet DW_OP_reg30 = swift.runtime.DW_OP_reg30\nlet DW_OP_reg31 = swift.runtime.DW_OP_reg31\n\nlet DW_OP_breg0 = swift.runtime.DW_OP_breg0\nlet DW_OP_breg1 = swift.runtime.DW_OP_breg1\nlet DW_OP_breg2 = swift.runtime.DW_OP_breg2\nlet DW_OP_breg3 = swift.runtime.DW_OP_breg3\nlet DW_OP_breg4 = swift.runtime.DW_OP_breg4\nlet DW_OP_breg5 = swift.runtime.DW_OP_breg5\nlet DW_OP_breg6 = swift.runtime.DW_OP_breg6\nlet DW_OP_breg7 = swift.runtime.DW_OP_breg7\nlet DW_OP_breg8 = swift.runtime.DW_OP_breg8\nlet DW_OP_breg9 = swift.runtime.DW_OP_breg9\nlet DW_OP_breg10 = swift.runtime.DW_OP_breg10\nlet DW_OP_breg11 = swift.runtime.DW_OP_breg11\nlet DW_OP_breg12 = swift.runtime.DW_OP_breg12\nlet DW_OP_breg13 = swift.runtime.DW_OP_breg13\nlet DW_OP_breg14 = swift.runtime.DW_OP_breg14\nlet DW_OP_breg15 = swift.runtime.DW_OP_breg15\nlet DW_OP_breg16 = swift.runtime.DW_OP_breg16\nlet DW_OP_breg17 = swift.runtime.DW_OP_breg17\nlet DW_OP_breg18 = swift.runtime.DW_OP_breg18\nlet DW_OP_breg19 = swift.runtime.DW_OP_breg19\nlet DW_OP_breg20 = swift.runtime.DW_OP_breg20\nlet DW_OP_breg21 = swift.runtime.DW_OP_breg21\nlet DW_OP_breg22 = swift.runtime.DW_OP_breg22\nlet DW_OP_breg23 = swift.runtime.DW_OP_breg23\nlet DW_OP_breg24 = swift.runtime.DW_OP_breg24\nlet DW_OP_breg25 = swift.runtime.DW_OP_breg25\nlet DW_OP_breg26 = swift.runtime.DW_OP_breg26\nlet DW_OP_breg27 = 
swift.runtime.DW_OP_breg27\nlet DW_OP_breg28 = swift.runtime.DW_OP_breg28\nlet DW_OP_breg29 = swift.runtime.DW_OP_breg29\nlet DW_OP_breg30 = swift.runtime.DW_OP_breg30\nlet DW_OP_breg31 = swift.runtime.DW_OP_breg31\nlet DW_OP_regx = swift.runtime.DW_OP_regx\nlet DW_OP_fbreg = swift.runtime.DW_OP_fbreg\nlet DW_OP_bregx = swift.runtime.DW_OP_bregx\nlet DW_OP_piece = swift.runtime.DW_OP_piece\nlet DW_OP_deref_size = swift.runtime.DW_OP_deref_size\nlet DW_OP_xderef_size = swift.runtime.DW_OP_xderef_size\nlet DW_OP_nop = swift.runtime.DW_OP_nop\nlet DW_OP_push_object_address = swift.runtime.DW_OP_push_object_address\nlet DW_OP_call2 = swift.runtime.DW_OP_call2\nlet DW_OP_call4 = swift.runtime.DW_OP_call4\nlet DW_OP_call_ref = swift.runtime.DW_OP_call_ref\nlet DW_OP_form_tls_address = swift.runtime.DW_OP_form_tls_address\nlet DW_OP_call_frame_cfa = swift.runtime.DW_OP_call_frame_cfa\nlet DW_OP_bit_piece = swift.runtime.DW_OP_bit_piece\nlet DW_OP_implicit_value = swift.runtime.DW_OP_implicit_value\nlet DW_OP_stack_value = swift.runtime.DW_OP_stack_value\nlet DW_OP_implicit_pointer = swift.runtime.DW_OP_implicit_pointer\nlet DW_OP_addrx = swift.runtime.DW_OP_addrx\nlet DW_OP_constx = swift.runtime.DW_OP_constx\nlet DW_OP_entry_value = swift.runtime.DW_OP_entry_value\nlet DW_OP_const_type = swift.runtime.DW_OP_const_type\nlet DW_OP_regval_type = swift.runtime.DW_OP_regval_type\nlet DW_OP_deref_type = swift.runtime.DW_OP_deref_type\nlet DW_OP_xderef_type = swift.runtime.DW_OP_xderef_type\nlet DW_OP_convert = swift.runtime.DW_OP_convert\nlet DW_OP_reinterpret = swift.runtime.DW_OP_reinterpret\nlet DW_OP_lo_user = swift.runtime.DW_OP_lo_user\nlet DW_OP_hi_user = swift.runtime.DW_OP_hi_user\n\ntypealias Dwarf_LNS_Opcode = swift.runtime.Dwarf_LNS_Opcode\ntypealias Dwarf_LNE_Opcode = swift.runtime.Dwarf_LNE_Opcode\ntypealias Dwarf_Lhdr_Format = swift.runtime.Dwarf_Lhdr_Format\n\ntypealias DWARF32_Lhdr = swift.runtime.DWARF32_Lhdr\ntypealias DWARF64_Lhdr = 
swift.runtime.DWARF64_Lhdr\n\nlet DW_CFA_advance_loc = swift.runtime.DW_CFA_advance_loc\nlet DW_CFA_offset = swift.runtime.DW_CFA_offset\nlet DW_CFA_restore = swift.runtime.DW_CFA_restore\nlet DW_CFA_nop = swift.runtime.DW_CFA_nop\nlet DW_CFA_set_loc = swift.runtime.DW_CFA_set_loc\nlet DW_CFA_advance_loc1 = swift.runtime.DW_CFA_advance_loc1\nlet DW_CFA_advance_loc2 = swift.runtime.DW_CFA_advance_loc2\nlet DW_CFA_advance_loc4 = swift.runtime.DW_CFA_advance_loc4\nlet DW_CFA_offset_extended = swift.runtime.DW_CFA_offset_extended\nlet DW_CFA_restore_extended = swift.runtime.DW_CFA_restore_extended\nlet DW_CFA_undefined = swift.runtime.DW_CFA_undefined\nlet DW_CFA_same_value = swift.runtime.DW_CFA_same_value\nlet DW_CFA_register = swift.runtime.DW_CFA_register\nlet DW_CFA_remember_state = swift.runtime.DW_CFA_remember_state\nlet DW_CFA_restore_state = swift.runtime.DW_CFA_restore_state\nlet DW_CFA_def_cfa = swift.runtime.DW_CFA_def_cfa\nlet DW_CFA_def_cfa_register = swift.runtime.DW_CFA_def_cfa_register\nlet DW_CFA_def_cfa_offset = swift.runtime.DW_CFA_def_cfa_offset\nlet DW_CFA_def_cfa_expression = swift.runtime.DW_CFA_def_cfa_expression\nlet DW_CFA_expression = swift.runtime.DW_CFA_expression\nlet DW_CFA_offset_extended_sf = swift.runtime.DW_CFA_offset_extended_sf\nlet DW_CFA_def_cfa_sf = swift.runtime.DW_CFA_def_cfa_sf\nlet DW_CFA_def_cfa_offset_sf = swift.runtime.DW_CFA_def_cfa_offset_sf\nlet DW_CFA_val_offset = swift.runtime.DW_CFA_val_offset\nlet DW_CFA_val_offset_sf = swift.runtime.DW_CFA_val_offset_sf\nlet DW_CFA_val_expression = swift.runtime.DW_CFA_val_expression\nlet DW_CFA_lo_user = swift.runtime.DW_CFA_lo_user\nlet DW_CFA_hi_user = swift.runtime.DW_CFA_hi_user\n\ntypealias Dwarf_RLE_Entry = swift.runtime.Dwarf_RLE_Entry\ntypealias Dwarf32_CIEHdr = swift.runtime.Dwarf32_CIEHdr\ntypealias Dwarf64_CIEHdr = swift.runtime.Dwarf64_CIEHdr\ntypealias Dwarf32_FDEHdr = swift.runtime.Dwarf32_FDEHdr\n\ntypealias EHFrameHdr = swift.runtime.EHFrameHdr\ntypealias 
EHFrameEncoding = swift.runtime.EHFrameEncoding\n\nlet DW_EH_PE_omit = swift.runtime.DW_EH_PE_omit\nlet DW_EH_PE_uleb128 = swift.runtime.DW_EH_PE_uleb128\nlet DW_EH_PE_udata2 = swift.runtime.DW_EH_PE_udata2\nlet DW_EH_PE_udata4 = swift.runtime.DW_EH_PE_udata4\nlet DW_EH_PE_udata8 = swift.runtime.DW_EH_PE_udata8\nlet DW_EH_PE_sleb128 = swift.runtime.DW_EH_PE_sleb128\nlet DW_EH_PE_sdata2 = swift.runtime.DW_EH_PE_sdata2\nlet DW_EH_PE_sdata4 = swift.runtime.DW_EH_PE_sdata4\nlet DW_EH_PE_sdata8 = swift.runtime.DW_EH_PE_sdata8\nlet DW_EH_PE_absptr = swift.runtime.DW_EH_PE_absptr\nlet DW_EH_PE_pcrel = swift.runtime.DW_EH_PE_pcrel\nlet DW_EH_PE_datarel = swift.runtime.DW_EH_PE_datarel\n\n// .. Dwarf specific errors ....................................................\n\nprivate enum DwarfError: Error {\n case noDebugInformation\n case unsupportedVersion(Int)\n case unknownEHValueEncoding\n case unknownEHOffsetEncoding\n case badAttribute(UInt64)\n case badForm(UInt64)\n case badTag(UInt64)\n case badLength(UInt32)\n case badAddressSize(Int)\n case badLineContentType(UInt64)\n case badString\n case missingAbbrev(UInt64)\n case doubleIndirectForm\n case unknownForm(Dwarf_Form)\n case missingBaseOffset\n case missingAddrSection\n case missingStrSection\n case missingLineStrSection\n case missingStrOffsetsSection\n case missingAddrBase\n case missingStrOffsetsBase\n case missingLocListsBase\n case unspecifiedAddressSize\n}\n\n// .. 
Dwarf utilities for ImageSource ..........................................\n\nextension ImageSource {\n\n func fetchULEB128(from a: Address) throws -> (Address, UInt64) {\n var addr = a\n var shift = 0\n var value: UInt64 = 0\n while true {\n let byte = try fetch(from: addr, as: UInt8.self)\n addr += 1\n value |= UInt64(byte & 0x7f) << shift\n if (byte & 0x80) == 0 {\n break\n }\n shift += 7\n }\n\n return (addr, value)\n }\n\n func fetchSLEB128(from a: Address) throws -> (Address, Int64) {\n var addr = a\n var shift = 0\n var sign: UInt8 = 0\n var value: Int64 = 0\n\n while true {\n let byte = try fetch(from: addr, as: UInt8.self)\n addr += 1\n value |= Int64(byte & 0x7f) << shift\n shift += 7\n sign = byte & 0x40\n if (byte & 0x80) == 0 {\n break\n }\n }\n\n if shift < 64 && sign != 0 {\n value |= -(1 << shift)\n }\n\n return (addr, value)\n }\n\n func fetchEHValue(from a: Address, with encoding: EHFrameEncoding,\n pc: Address = 0, data: Address = 0, shouldSwap: Bool = false) throws\n -> (Address, UInt64)? 
{

    // Byte-swap a fetched integer when the image's endianness differs from
    // the host's (`shouldSwap` is captured from the enclosing scope).
    func maybeSwap<T: FixedWidthInteger>(_ x: T) -> T {
      if shouldSwap {
        return x.byteSwapped
      }
      return x
    }

    // The low nibble of the encoding selects the value format; the high
    // nibble (handled below) selects how the value is relocated.
    let valueEnc = EHFrameEncoding(encoding & 0x0f)
    var value: UInt64 = 0
    var addr = a

    switch valueEnc {
    case DW_EH_PE_omit:
      // No value is stored at all.
      return nil
    case DW_EH_PE_uleb128:
      (addr, value) = try fetchULEB128(from: addr)
    case DW_EH_PE_udata2:
      let u2 = maybeSwap(try fetch(from: addr, as: UInt16.self))
      value = UInt64(u2)
      addr += 2
    case DW_EH_PE_udata4:
      let u4 = maybeSwap(try fetch(from: addr, as: UInt32.self))
      value = UInt64(u4)
      addr += 4
    case DW_EH_PE_udata8:
      let u8 = maybeSwap(try fetch(from: addr, as: UInt64.self))
      value = u8
      addr += 8
    case DW_EH_PE_sleb128:
      let (newAddr, newValue) = try fetchSLEB128(from: addr)
      value = UInt64(bitPattern: newValue)
      addr = newAddr
    case DW_EH_PE_sdata2:
      // Signed forms are sign-extended to 64 bits, then carried as a
      // bit pattern in an unsigned value.
      let s2 = maybeSwap(try fetch(from: addr, as: Int16.self))
      value = UInt64(bitPattern: Int64(s2))
      addr += 2
    case DW_EH_PE_sdata4:
      let s4 = maybeSwap(try fetch(from: addr, as: Int32.self))
      value = UInt64(bitPattern: Int64(s4))
      addr += 4
    case DW_EH_PE_sdata8:
      let s8 = maybeSwap(try fetch(from: addr, as: Int64.self))
      value = UInt64(bitPattern: s8)
      addr += 8
    default:
      throw DwarfError.unknownEHValueEncoding
    }

    // The high nibble says what the value is relative to.
    let offsetEnc = EHFrameEncoding(encoding & 0xf0)

    switch offsetEnc {
    case DW_EH_PE_absptr:
      return (addr, value)
    case DW_EH_PE_pcrel:
      // PC-relative; wrapping add because the "offset" may be a
      // sign-extended negative value in disguise.
      return (addr, UInt64(pc) &+ value)
    case DW_EH_PE_datarel:
      // Relative to the `data` base address; wrapping add as above.
      return (addr, UInt64(data) &+ value)
    default:
      throw DwarfError.unknownEHOffsetEncoding
    }
  }

  /// Read a DWARF "initial length" field at `addr`.
  ///
  /// A 32-bit value below 0xfffffff0 means 32-bit DWARF; 0xffffffff means
  /// the real 64-bit length follows; anything in between is reserved and
  /// rejected with `DwarfError.badLength`.
  func fetchDwarfLength(from addr: Address) throws
    -> (length: UInt64, isDwarf64: Bool) {

    let len32 = try fetch(from: addr, as: UInt32.self)
    if len32 < 0xfffffff0 {
      return (length: UInt64(len32), isDwarf64: false)
    } else if len32 < 0xffffffff {
      // 0xfffffff0...0xfffffffe are reserved by the DWARF spec.
      throw DwarfError.badLength(len32)
    } else {
      let len64 = try fetch(from: addr + 4, as: UInt64.self)
      return (length: len64, isDwarf64: true)
    }
  }
}

// .. Dwarf utilities for ImageSourceCursor .....................................

extension ImageSourceCursor {

  /// Read a ULEB128 value at the current position, advancing past it.
  mutating func readULEB128() throws -> UInt64 {
    let (next, result) = try source.fetchULEB128(from: pos)
    pos = next
    return result
  }

  /// Read an SLEB128 value at the current position, advancing past it.
  mutating func readSLEB128() throws -> Int64 {
    let (next, result) = try source.fetchSLEB128(from: pos)
    pos = next
    return result
  }

  /// Read a DW_EH_PE-encoded value at the current position, advancing past
  /// it.  Returns `nil` for `DW_EH_PE_omit` (position is left unchanged in
  /// that case).
  mutating func readEHValue(
    with encoding: EHFrameEncoding,
    pc: Address = 0,
    data: Address = 0,
    shouldSwap: Bool = false
  ) throws -> UInt64? {
    guard let (next, result)
      = try source.fetchEHValue(from: pos,
                                with: encoding,
                                pc: pc,
                                data: data,
                                shouldSwap: shouldSwap) else {
      return nil
    }

    pos = next
    return result
  }

  /// Read a DWARF initial length field, advancing past it (4 bytes for
  /// 32-bit DWARF, 12 bytes for the 64-bit escape form).
  mutating func readDwarfLength() throws -> (length: UInt64, isDwarf64: Bool) {
    let result = try source.fetchDwarfLength(from: pos)
    pos += result.isDwarf64 ? 12 : 4
    return result
  }

}

// .. DwarfReader ...............................................................

/// Identifies a DWARF section within an image, independent of the
/// object-file format's actual section naming scheme.
enum DwarfSection {
  case debugAbbrev
  case debugAddr
  case debugARanges
  case debugFrame
  case debugInfo
  case debugLine
  case debugLineStr
  case debugLoc
  case debugLocLists
  case debugMacInfo
  case debugMacro
  case debugNames
  case debugPubNames
  case debugPubTypes
  case debugRanges
  case debugRngLists
  case debugStr
  case debugStrOffsets
  case debugSup
  case debugTypes
  case debugCuIndex
  case debugTuIndex
}

/// Something that can hand out DWARF sections (e.g. an ELF image).
protocol DwarfSource {

  func getDwarfSection(_ section: DwarfSection) -> ImageSource?

}

/// Parses DWARF debug information (versions 3 through 5) from a source
/// image's sections: compilation-unit headers, abbreviations and line
/// number program headers.
struct DwarfReader<S: DwarfSource & AnyObject> {

  typealias Source = S
  typealias Address = UInt64
  typealias Size = UInt64
  /// A half-open region [base, base+size) within a section.
  struct Bounds {
    var base: Address
    var size: Size
    var end: Address { return base + size }
  }

  // `unowned` — the image is expected to outlive this reader.
  unowned var source: Source

  /// One decoded abbreviation table entry.
  struct AbbrevInfo {
    var tag: Dwarf_Tag
    var hasChildren: Bool
    // (attribute, form, implicit constant for DW_FORM_implicit_const)
    var attributes: [(Dwarf_Attribute, Dwarf_Form, Int64?)]
  }

  var infoSection: ImageSource
  var abbrevSection: ImageSource
  var lineSection: ImageSource?
  var addrSection: ImageSource?
  var strSection: ImageSource?
  var lineStrSection: ImageSource?
  var strOffsetsSection: ImageSource?
  var rangesSection: ImageSource?
  // True when the image's byte order differs from the host's.
  var shouldSwap: Bool

  typealias DwarfAbbrev = UInt64

  /// A decoded compilation (or type/skeleton) unit header plus the
  /// attributes of its root DIE.
  struct Unit {
    var baseOffset: Address
    var version: Int
    var isDwarf64: Bool
    var unitType: Dwarf_UnitType
    var addressSize: Int
    var abbrevOffset: Address
    var dieBounds: Bounds

    var lowPC: Address?

    // Section-base offsets captured from the root DIE's attributes.
    var lineBase: UInt64?
    var addrBase: UInt64?
    var strOffsetsBase: UInt64?
    var loclistsBase: UInt64?

    var abbrevs: [DwarfAbbrev: AbbrevInfo]

    var tag: Dwarf_Tag
    var attributes: [Dwarf_Attribute:DwarfValue] = [:]
  }

  var units: [Unit] = []

  var lineNumberInfo: [DwarfLineNumberInfo] = []

  /// Header of a .debug_rnglists-style range list table.
  struct RangeListInfo {
    var length: UInt64
    var isDwarf64: Bool
    var version: Int
    var addressSize: Int
    var segmentSelectorSize: Int
    var offsetEntryCount: Int
    var offsetEntryBase: Address
  }

  var rangeListInfo: RangeListInfo?

  /// Build a reader over `source`'s DWARF sections.
  ///
  /// Requires .debug_abbrev and .debug_info to be present (throws
  /// `DwarfError.noDebugInformation` otherwise); all other sections are
  /// optional.  Eagerly parses line-number headers and unit headers.
  @_specialize(kind: full, where S == Elf32Image)
  @_specialize(kind: full, where S == Elf64Image)
  init(source: Source, shouldSwap: Bool = false) throws {
    // ###TODO: This should be optional, because we can have just line number
    // information. We should test that, too.
    guard let abbrevSection = source.getDwarfSection(.debugAbbrev),
          let infoSection = source.getDwarfSection(.debugInfo) else {
      throw DwarfError.noDebugInformation
    }

    self.infoSection = infoSection
    self.abbrevSection = abbrevSection

    addrSection = source.getDwarfSection(.debugAddr)
    strSection = source.getDwarfSection(.debugStr)
    lineSection = source.getDwarfSection(.debugLine)
    lineStrSection = source.getDwarfSection(.debugLineStr)
    strOffsetsSection = source.getDwarfSection(.debugStrOffsets)
    rangesSection = source.getDwarfSection(.debugRanges)

    self.source = source
    self.shouldSwap = shouldSwap
    self.lineNumberInfo = try readLineNumberInfo()
    self.units = try readUnits()

    // On DWARF 4 and earlier, we need to fix up a couple of things in the
    // line number info; these are explicitly included in DWARF 5 so that
    // we can strip everything except line number information.
    for n in 0..<lineNumberInfo.count {
      if lineNumberInfo[n].version >= 5 {
        continue
      }

      // Match this line number program to its unit via DW_AT_stmt_list
      // (captured into unit.lineBase), then substitute the unit's name and
      // compilation directory for the pre-v5 placeholders.
      for unit in self.units {
        if let lineBase = unit.lineBase,
           lineNumberInfo[n].baseOffset == lineBase {
          var filename = "<unknown>"
          if let nameVal = unit.attributes[.DW_AT_name],
             case let .string(theName) = nameVal {
            filename = theName
          }
          var dirname = "."
          if let dirVal = unit.attributes[.DW_AT_comp_dir],
             case let .string(theDir) = dirVal {
            dirname = theDir
          }

          lineNumberInfo[n].directories[0] = dirname
          lineNumberInfo[n].files[0] = DwarfFileInfo(
            path: filename,
            directoryIndex: 0,
            timestamp: nil,
            size: nil,
            md5sum: nil
          )
          lineNumberInfo[n].addressSize = unit.addressSize
          break
        }
      }
    }
  }

  // Byte-swap a fetched integer when the image's endianness differs from
  // the host's.
  private func maybeSwap<T: FixedWidthInteger>(_ x: T) -> T {
    if shouldSwap {
      return x.byteSwapped
    } else {
      return x
    }
  }

  /// Walk .debug_info, decoding every unit header and the attributes of
  /// each unit's root DIE.  Supports DWARF 3–5, 32- and 64-bit forms.
  private func readUnits() throws -> [Unit] {
    let end = infoSection.bytes.count
    var units: [Unit] = []
    var cursor = ImageSourceCursor(source: infoSection)

    while cursor.pos < end {
      // See 7.5.1.1 Full and Partial Compilation Unit Headers
      let base = cursor.pos

      // .1 unit_length
      let (length, dwarf64) = try cursor.readDwarfLength()
      let next = cursor.pos + length

      // .2 version
      let version = Int(maybeSwap(try cursor.read(as: Dwarf_Half.self)))

      if version < 3 || version > 5 {
        throw DwarfError.unsupportedVersion(version)
      }

      var unitType: Dwarf_UnitType = .DW_UT_unknown
      let addressSize: Int
      let abbrevOffset: Address
      let dieBounds: Bounds

      // The header layout differs by DWARF width (offset size) and, within
      // each width, by version: v5 inserts unit_type and moves
      // debug_abbrev_offset after address_size.
      if dwarf64 {
        if version >= 3 && version <= 4 {
          // .3 debug_abbrev_offset
          abbrevOffset = Address(maybeSwap(try cursor.read(as: Dwarf_Xword.self)))

          // .4 address_size
          addressSize = Int(try cursor.read(as: Dwarf_Byte.self))
        } else if version == 5 {
          // .3 unit_type
          unitType = try cursor.read(as: Dwarf_UnitType.self)

          // .4 address_size
          addressSize = Int(try cursor.read(as: Dwarf_Byte.self))

          // .5 debug_abbrev_offset
          abbrevOffset = Address(maybeSwap(try cursor.read(as: Dwarf_Xword.self)))
        } else {
          throw DwarfError.unsupportedVersion(version)
        }

        dieBounds = Bounds(base: cursor.pos, size: next - cursor.pos)
      } else {
        if version >= 3 && version <= 4 {
          // .3 debug_abbrev_offset
          abbrevOffset = Address(maybeSwap(try cursor.read(as: Dwarf_Word.self)))

          // .4 address_size
          addressSize = Int(try cursor.read(as: Dwarf_Byte.self))
        } else if version == 5 {
          // .3 unit_type
          unitType = try cursor.read(as: Dwarf_UnitType.self)

          // .4 address_size
          addressSize = Int(try cursor.read(as: Dwarf_Byte.self))

          // .5 debug_abbrev_offset
          abbrevOffset = Address(maybeSwap(try cursor.read(as: Dwarf_Word.self)))
        } else {
          throw DwarfError.unsupportedVersion(version)
        }

        dieBounds = Bounds(base: cursor.pos, size: next - cursor.pos)
      }

      // Skeleton/split and type units carry extra header fields, which we
      // skip over without using.
      if unitType == .DW_UT_skeleton || unitType == .DW_UT_split_compile {
        // .6 dwo_id
        let _ = try cursor.read(as: UInt64.self)
      } else if unitType == .DW_UT_type || unitType == .DW_UT_split_type {
        // .6 type_signature
        let _ = try cursor.read(as: UInt64.self)

        // .7 type_offset
        if dwarf64 {
          let _ = try cursor.read(as: UInt64.self)
        } else {
          let _ = try cursor.read(as: UInt32.self)
        }
      }

      let abbrevs = try readAbbrevs(at: abbrevOffset)

      // The root DIE starts with its abbreviation code.
      let abbrev = try cursor.readULEB128()

      guard let abbrevInfo = abbrevs[abbrev] else {
        throw DwarfError.missingAbbrev(abbrev)
      }
      let tag = abbrevInfo.tag

      var unit = Unit(baseOffset: base,
                      version: Int(version),
                      isDwarf64: dwarf64,
                      unitType: unitType,
                      addressSize: Int(addressSize),
                      abbrevOffset: abbrevOffset,
                      dieBounds: dieBounds,
                      abbrevs: abbrevs,
                      tag: tag)

      let attrPos = cursor.pos
      // First pass: decode without following indirections, just to capture
      // the various *_base attributes.
      let firstPass = try readDieAttributes(
        at: &cursor,
        unit: unit,
        abbrevInfo: abbrevInfo,
        shouldFetchIndirect: false
      )

      if let value = firstPass[.DW_AT_addr_base],
         case let .sectionOffset(offset) = value {
        unit.addrBase = offset
      }
      if let value = firstPass[.DW_AT_str_offsets_base],
         case let .sectionOffset(offset) = value {
        unit.strOffsetsBase = offset
      }
      if let value = firstPass[.DW_AT_loclists_base],
         case let .sectionOffset(offset) = value {
        unit.loclistsBase = offset
      }
      if let value = firstPass[.DW_AT_stmt_list],
         case let .sectionOffset(offset) = value {
        unit.lineBase = offset
      }
      if let value = firstPass[.DW_AT_low_pc],
         case let .address(lowPC) = value {
        unit.lowPC = lowPC
      }

      // Re-read the attributes, with indirect fetching enabled;
      // we can't do this in one step because attributes might be using
      // indirections based on the base attributes, and those can come
      // after the data needed to decode them.
      cursor.pos = attrPos

      let attributes = try readDieAttributes(
        at: &cursor,
        unit: unit,
        abbrevInfo: abbrevInfo,
        shouldFetchIndirect: true
      )

      unit.attributes = attributes

      units.append(unit)

      // Jump to the next unit regardless of where attribute decoding
      // finished.
      cursor.pos = next
    }

    return units
  }

  /// Walk .debug_line, decoding every line number program header.
  ///
  /// Returns an empty array when there is no .debug_line section.
  /// Headers with unsupported versions are skipped, not fatal.
  private func readLineNumberInfo() throws -> [DwarfLineNumberInfo] {
    guard let lineSection = lineSection else {
      return []
    }

    let end = lineSection.bytes.count
    var result: [DwarfLineNumberInfo] = []
    var cursor = ImageSourceCursor(source: lineSection, offset: 0)

    while cursor.pos < end {
      // 6.2.4 The Line Number Program Header

      // .1 unit_length
      let baseOffset = cursor.pos
      let (length, dwarf64) = try cursor.readDwarfLength()
      if length == 0 {
        break
      }

      let nextOffset = cursor.pos + length

      // .2 version
      let version = Int(maybeSwap(try cursor.read(as: Dwarf_Half.self)))

      if version < 3 || version > 5 {
        // Unsupported version: skip this program entirely.
        cursor.pos = nextOffset
        continue
      }

      var addressSize: Int? = nil
      var segmentSelectorSize: Int? = nil

      if version == 5 {
        // .3 address_size
        addressSize = Int(try cursor.read(as: Dwarf_Byte.self))

        // .4 segment_selector_size
        segmentSelectorSize = Int(try cursor.read(as: Dwarf_Byte.self))
      }

      // .5 header_length
      let headerLength: UInt64
      if dwarf64 {
        headerLength = maybeSwap(try cursor.read(as: Dwarf_Xword.self))
      } else {
        headerLength = UInt64(maybeSwap(try cursor.read(as: Dwarf_Word.self)))
      }

      // .6 minimum_instruction_length
      let minimumInstructionLength = UInt(try cursor.read(as: Dwarf_Byte.self))

      // .7 maximum_operations_per_instruction
      let maximumOpsPerInstruction = UInt(try cursor.read(as: Dwarf_Byte.self))

      // .8 default_is_stmt
      let defaultIsStmt = try cursor.read(as: Dwarf_Byte.self) != 0

      // .9 line_base
      let lineBase = try cursor.read(as: Dwarf_Sbyte.self)

      // .10 line_range
      let lineRange = try cursor.read(as: Dwarf_Byte.self)

      // .11 opcode_base
      let opcodeBase = try cursor.read(as: Dwarf_Byte.self)

      // .12 standard_opcode_lengths
      // Index 0 is a dummy so the array lines up with 1-based opcodes.
      var standardOpcodeLengths: [UInt64] = [0]
      for _ in 1..<Int(opcodeBase) {
        let length = try cursor.readULEB128()
        standardOpcodeLengths.append(length)
      }

      var dirNames: [String] = []
      var fileInfo: [DwarfFileInfo] = []

      if version == 3 || version == 4 {
        // .11 include_directories

        // Prior to version 5, the compilation directory is not included; put
        // a placeholder here for now, which we'll fix later.
        dirNames.append(".")

        // NUL-terminated string list, ended by an empty string.
        while true {
          guard let path = try cursor.readString() else {
            throw DwarfError.badString
          }

          if path == "" {
            break
          }

          dirNames.append(path)
        }

        // .12 file_names

        // Prior to version 5, the compilation unit's filename is not included;
        // put a placeholder here for now, which we'll fix up later.
        fileInfo.append(DwarfFileInfo(
          path: "<unknown>",
          directoryIndex: 0,
          timestamp: nil,
          size: nil,
          md5sum: nil))

        while true {
          guard let path = try cursor.readString() else {
            throw DwarfError.badString
          }

          if path == "" {
            break
          }

          let dirIndex = try cursor.readULEB128()
          let timestamp = try cursor.readULEB128()
          let size = try cursor.readULEB128()

          // A zero timestamp/size means "not recorded".
          fileInfo.append(DwarfFileInfo(
            path: path,
            directoryIndex: Int(dirIndex),
            timestamp: timestamp != 0 ? Int(timestamp) : nil,
            size: size != 0 ? size : nil,
            md5sum: nil))
        }
      } else if version == 5 {
        // .13/.14 directory_entry_format
        var dirEntryFormat: [(Dwarf_Lhdr_Format, Dwarf_Form)] = []
        let dirEntryFormatCount = Int(try cursor.read(as: Dwarf_Byte.self))
        for _ in 0..<dirEntryFormatCount {
          let rawType = try cursor.readULEB128()
          let rawForm = try cursor.readULEB128()

          guard let halfType = Dwarf_Half(exactly: rawType),
                let type = Dwarf_Lhdr_Format(rawValue: halfType) else {
            throw DwarfError.badLineContentType(rawType)
          }
          guard let byteForm = Dwarf_Byte(exactly: rawForm),
                let form = Dwarf_Form(rawValue: byteForm) else {
            throw DwarfError.badForm(rawForm)
          }

          dirEntryFormat.append((type, form))
        }

        // .15 directories_count
        let dirCount = Int(try cursor.readULEB128())

        // .16 directories
        for _ in 0..<dirCount {
          var attributes: [Dwarf_Lhdr_Format: DwarfValue] = [:]
          for (type, form) in dirEntryFormat {
            // No unit here; v5 line headers default to a 4-byte address
            // when no address_size was read — TODO confirm this fallback
            // is only ever hit for forms that don't read addresses.
            attributes[type] = try read(form: form,
                                        at: &cursor,
                                        addressSize: addressSize ?? 4,
                                        isDwarf64: dwarf64,
                                        unit: nil,
                                        shouldFetchIndirect: true)
          }

          if let pathVal = attributes[.DW_LNCT_path],
             case let .string(path) = pathVal {
            dirNames.append(path)
          } else {
            dirNames.append("<unknown>")
          }
        }

        // .17/.18 file_name_entry_format
        var fileEntryFormat: [(Dwarf_Lhdr_Format, Dwarf_Form)] = []
        let fileEntryFormatCount = Int(try cursor.read(as: Dwarf_Byte.self))
        for _ in 0..<fileEntryFormatCount {
          let rawType = try cursor.readULEB128()
          let rawForm = try cursor.readULEB128()

          guard let halfType = Dwarf_Half(exactly: rawType),
                let type = Dwarf_Lhdr_Format(rawValue: halfType) else {
            throw DwarfError.badLineContentType(rawType)
          }
          guard let byteForm = Dwarf_Byte(exactly: rawForm),
                let form = Dwarf_Form(rawValue: byteForm) else {
            throw DwarfError.badForm(rawForm)
          }

          fileEntryFormat.append((type, form))
        }

        // .19 file_names_count
        let fileCount = Int(try cursor.readULEB128())

        // .20 file_names
        for _ in 0..<fileCount {
          var attributes: [Dwarf_Lhdr_Format: DwarfValue] = [:]
          for (type, form) in fileEntryFormat {
            attributes[type] = try read(form: form,
                                        at: &cursor,
                                        addressSize: addressSize ?? 4,
                                        isDwarf64: dwarf64,
                                        unit: nil,
                                        shouldFetchIndirect: true)
          }

          let path: String
          if let pathVal = attributes[.DW_LNCT_path],
             case let .string(thePath) = pathVal {
            path = thePath
          } else {
            path = "<unknown>"
          }

          let dirIndex = attributes[.DW_LNCT_directory_index]?.intValue()
          let timestamp = attributes[.DW_LNCT_timestamp]?.intValue()
          let size = attributes[.DW_LNCT_size]?.uint64Value()
          let md5sum: [UInt8]?
          if let md5sumVal = attributes[.DW_LNCT_MD5],
             case let .data(theSum) = md5sumVal {
            md5sum = theSum
          } else {
            md5sum = nil
          }

          fileInfo.append(DwarfFileInfo(
            path: path,
            directoryIndex: dirIndex,
            timestamp: timestamp,
            size: size,
            md5sum: md5sum))
        }
      }

      // The actual program comes next
      let program = cursor.source[cursor.pos..<nextOffset]
      cursor.pos = nextOffset

      result.append(DwarfLineNumberInfo(
        baseOffset: baseOffset,
        version: version,
        addressSize: addressSize,
        selectorSize: segmentSelectorSize,
        headerLength: headerLength,
        minimumInstructionLength: minimumInstructionLength,
        maximumOpsPerInstruction: maximumOpsPerInstruction,
        defaultIsStmt: defaultIsStmt,
        lineBase: lineBase,
        lineRange: lineRange,
        opcodeBase: opcodeBase,
        standardOpcodeLengths: standardOpcodeLengths,
        directories: dirNames,
        files: fileInfo,
        program: program,
        shouldSwap: shouldSwap
      ))
    }

    return result
  }

  /// Decode the abbreviation table at `offset` in .debug_abbrev.
  ///
  /// The table is a sequence of entries terminated by abbreviation code 0;
  /// each entry's attribute list is terminated by a (0, 0) pair.
  private func readAbbrevs(
    at offset: UInt64
  ) throws -> [DwarfAbbrev: AbbrevInfo] {
    var abbrevs: [DwarfAbbrev: AbbrevInfo] = [:]
    var cursor = ImageSourceCursor(source: abbrevSection, offset: offset)
    while true {
      let abbrev = try cursor.readULEB128()

      if abbrev == 0 {
        break
      }

      let rawTag = try cursor.readULEB128()

      guard let tag = Dwarf_Tag(rawValue: rawTag) else {
        throw DwarfError.badTag(rawTag)
      }

      let children = try cursor.read(as: Dwarf_ChildDetermination.self)

      // Fetch attributes
      var attributes: [(Dwarf_Attribute, Dwarf_Form, Int64?)] = []
      while true {
        let rawAttr = try cursor.readULEB128()
        let rawForm = try cursor.readULEB128()

        if rawAttr == 0 && rawForm == 0 {
          break
        }

        guard let attr = Dwarf_Attribute(rawValue: UInt32(rawAttr)) else {
          throw DwarfError.badAttribute(rawAttr)
        }
        guard let form = Dwarf_Form(rawValue: Dwarf_Byte(rawForm)) else {
          throw DwarfError.badForm(rawForm)
        }

        if form == .DW_FORM_implicit_const {
          // The constant lives in the abbrev table itself, not in the DIE.
          let value = try cursor.readSLEB128()
          attributes.append((attr, form, value))
        } else {
          attributes.append((attr, form, nil))
        }
      }

      abbrevs[abbrev] = AbbrevInfo(tag: tag,
                                   hasChildren: children != .DW_CHILDREN_no,
                                   attributes: attributes)
    }

    return abbrevs
  }

  /// A decoded attribute value; the case records which DWARF form family
  /// produced it (some cases are deferred references, e.g. the
  /// string-table indirections, resolved elsewhere).
  enum DwarfValue {
    case flag(Bool)
    case string(String)
    case address(UInt64)
    case integer(Int)
    case unsignedInt8(UInt8)
    case unsignedInt16(UInt16)
    case unsignedInt32(UInt32)
    case signedInt64(Int64)
    case unsignedInt64(UInt64)
    case dieOffset(UInt64)
    case data([UInt8])
    case expression([UInt8])
    case locationList(UInt64)
    case rangeList(UInt64)
    case sectionOffset(UInt64)
    case reference(UInt64)
    case signature([UInt8])
    case supplementaryReference(UInt64)
    case supplementaryString(UInt64)
    case indirectAddress(UInt64)
    case stringFromStrTab(UInt64)
    case stringFromLineStrTab(UInt64)
    case stringViaStrOffsets(UInt64)

    /// Widen any unsigned integer case to UInt64; `nil` for other cases.
    func uint64Value() -> UInt64? {
      switch self {
      case let .unsignedInt8(value): return UInt64(value)
      case let .unsignedInt16(value): return UInt64(value)
      case let .unsignedInt32(value): return UInt64(value)
      case let .unsignedInt64(value): return value
      default:
        return nil
      }
    }

    /// Convert any unsigned integer case to Int; `nil` for other cases.
    /// NOTE(review): `Int(unsignedInt64)` can trap if the value exceeds
    /// Int.max — presumably never the case for the fields this is used on.
    func intValue() -> Int? {
      switch self {
      case let .unsignedInt8(value): return Int(value)
      case let .unsignedInt16(value): return Int(value)
      case let .unsignedInt32(value): return Int(value)
      case let .unsignedInt64(value): return Int(value)
      default:
        return nil
      }
    }
  }

  // Assemble a 24-bit little-endian-in-the-image offset from three raw
  // bytes, honouring both host endianness and the image swap flag.
  private func threeByteToOffset(_ bytes: (UInt8, UInt8, UInt8)) -> UInt64 {
    let offset: UInt64
    #if _endian(big)
    if shouldSwap {
      offset = UInt64(bytes.0) | UInt64(bytes.1) << 8 | UInt64(bytes.2) << 16
    } else {
      offset = UInt64(bytes.2) | UInt64(bytes.1) << 8 | UInt64(bytes.0) << 16
    }
    #else
    if shouldSwap {
      offset = UInt64(bytes.2) | UInt64(bytes.1) << 8 | UInt64(bytes.0) << 16
    } else {
      offset = UInt64(bytes.0) | UInt64(bytes.1) << 8 | UInt64(bytes.2) << 16
    }
    #endif
    return offset
  }

  // Decode one attribute value of the given form at the cursor, advancing
  // past it.  DW_FORM_indirect is resolved here by reading the real form
  // first.  (Definition continues beyond this chunk.)
  private func read(form theForm: Dwarf_Form,
                    at cursor: inout ImageSourceCursor,
                    addressSize: Int, isDwarf64: Bool,
                    unit: Unit?,
                    shouldFetchIndirect: Bool,
                    constantValue: Int64? = nil) throws -> DwarfValue {
    let form: Dwarf_Form
    if theForm == .DW_FORM_indirect {
      let rawForm = try cursor.readULEB128()
      guard let theForm = Dwarf_Form(rawValue: Dwarf_Byte(rawForm)) else {
        throw DwarfError.badForm(rawForm)
      }
      form = theForm
    } else {
      form = theForm
    }

    switch form {
    case .DW_FORM_implicit_const:
      // Value came from the abbrev table; caller must supply it.
      return .signedInt64(constantValue!)

    case .DW_FORM_addr:
      let address: UInt64
      switch addressSize {
      case 4:
        address = UInt64(maybeSwap(try cursor.read(as: UInt32.self)))
      case 8:
        address = maybeSwap(try cursor.read(as: UInt64.self))
      default:
        throw DwarfError.badAddressSize(addressSize)
      }
      return .address(address)
    case .DW_FORM_addrx, .DW_FORM_addrx1, .DW_FORM_addrx2,
         .DW_FORM_addrx3, .DW_FORM_addrx4:
      guard let addrSection = addrSection else {
        throw DwarfError.missingAddrSection
      }

      let ndx: UInt64
      switch form {
      case .DW_FORM_addrx:
        ndx = try cursor.readULEB128()
      case .DW_FORM_addrx1:
        ndx = UInt64(try cursor.read(as: UInt8.self))
      case 
.DW_FORM_addrx2:\n ndx = UInt64(maybeSwap(try cursor.read(as: UInt16.self)))\n case .DW_FORM_addrx3:\n let bytes = try cursor.read(as: (UInt8, UInt8, UInt8).self)\n ndx = threeByteToOffset(bytes)\n case .DW_FORM_addrx4:\n ndx = UInt64(maybeSwap(try cursor.read(as: UInt32.self)))\n default:\n fatalError("unreachable")\n }\n\n if !shouldFetchIndirect {\n return .indirectAddress(ndx)\n } else {\n guard let addrBase = unit?.addrBase else {\n throw DwarfError.missingAddrBase\n }\n\n let address: UInt64\n switch addressSize {\n case 4:\n address = UInt64(maybeSwap(\n try addrSection.fetch(from: ndx * 4 + addrBase,\n as: UInt32.self)))\n case 8:\n address = maybeSwap(try addrSection.fetch(from: ndx * 8 + addrBase,\n as: UInt64.self))\n default:\n throw DwarfError.badAddressSize(addressSize)\n }\n return .address(address)\n }\n case .DW_FORM_block:\n let length = try cursor.readULEB128()\n let bytes = try cursor.read(count: Int(length), as: UInt8.self)\n return .data(bytes)\n case .DW_FORM_block1:\n let length = try cursor.read(as: UInt8.self)\n let bytes = try cursor.read(count: Int(length), as: UInt8.self)\n return .data(bytes)\n case .DW_FORM_block2:\n let length = maybeSwap(try cursor.read(as: UInt16.self))\n let bytes = try cursor.read(count: Int(length), as: UInt8.self)\n return .data(bytes)\n case .DW_FORM_block4:\n let length = maybeSwap(try cursor.read(as: UInt32.self))\n let bytes = try cursor.read(count: Int(length), as: UInt8.self)\n return .data(bytes)\n\n case .DW_FORM_sdata:\n let data = try cursor.readSLEB128()\n return .signedInt64(data)\n\n case .DW_FORM_udata:\n let data = try cursor.readULEB128()\n return .unsignedInt64(data)\n\n case .DW_FORM_data1:\n let data = try cursor.read(as: UInt8.self)\n return .unsignedInt8(data)\n\n case .DW_FORM_data2:\n let data = maybeSwap(try cursor.read(as: UInt16.self))\n return .unsignedInt16(data)\n\n case .DW_FORM_data4:\n let data = maybeSwap(try cursor.read(as: UInt32.self))\n return .unsignedInt32(data)\n\n case 
.DW_FORM_data8:\n let data = maybeSwap(try cursor.read(as: UInt64.self))\n return .unsignedInt64(data)\n\n case .DW_FORM_data16:\n let data = try cursor.read(count: 16, as: UInt8.self)\n return .data(data)\n\n case .DW_FORM_exprloc:\n let length = try cursor.readULEB128()\n let bytes = try cursor.read(count: Int(length), as: UInt8.self)\n return .expression(bytes)\n\n case .DW_FORM_flag:\n let flag = try cursor.read(as: UInt8.self)\n return .flag(flag != 0)\n\n case .DW_FORM_flag_present:\n return .flag(true)\n\n case .DW_FORM_loclistx:\n let offset = try cursor.readULEB128()\n return .locationList(offset)\n\n case .DW_FORM_sec_offset:\n let offset: UInt64\n if isDwarf64 {\n offset = maybeSwap(try cursor.read(as: UInt64.self))\n } else {\n offset = UInt64(maybeSwap(try cursor.read(as: UInt32.self)))\n }\n return .sectionOffset(offset)\n\n case .DW_FORM_rnglistx:\n let offset = try cursor.readULEB128()\n return .rangeList(offset)\n\n case .DW_FORM_ref1, .DW_FORM_ref2, .DW_FORM_ref4, .DW_FORM_ref8,\n .DW_FORM_ref_udata:\n guard let baseOffset = unit?.baseOffset else {\n throw DwarfError.missingBaseOffset\n }\n\n let offset: Address\n switch form {\n case .DW_FORM_ref1:\n offset = UInt64(try cursor.read(as: UInt8.self))\n case .DW_FORM_ref2:\n offset = UInt64(maybeSwap(try cursor.read(as: UInt16.self)))\n case .DW_FORM_ref4:\n offset = UInt64(maybeSwap(try cursor.read(as: UInt32.self)))\n case .DW_FORM_ref8:\n offset = maybeSwap(try cursor.read(as: UInt64.self))\n case .DW_FORM_ref_udata:\n offset = try cursor.readULEB128()\n default:\n fatalError("unreachable")\n }\n return .reference(offset + baseOffset)\n\n case .DW_FORM_ref_addr:\n let offset: UInt64\n if isDwarf64 {\n offset = maybeSwap(try cursor.read(as: UInt64.self))\n } else {\n offset = UInt64(maybeSwap(try cursor.read(as: UInt32.self)))\n }\n return .reference(offset)\n\n case .DW_FORM_ref_sig8:\n let signature = try cursor.read(count: 8, as: UInt8.self)\n return .signature(signature)\n\n case 
.DW_FORM_ref_sup4:\n let offset = maybeSwap(try cursor.read(as: UInt32.self))\n return .supplementaryReference(Address(offset))\n\n case .DW_FORM_ref_sup8:\n let offset = maybeSwap(try cursor.read(as: UInt64.self))\n return .supplementaryReference(Address(offset))\n\n case .DW_FORM_string:\n guard let string = try cursor.readString() else {\n throw DwarfError.badString\n }\n return .string(string)\n\n case .DW_FORM_strp:\n guard let strSection = strSection else {\n throw DwarfError.missingStrSection\n }\n\n let offset: UInt64\n if isDwarf64 {\n offset = maybeSwap(try cursor.read(as: UInt64.self))\n } else {\n offset = UInt64(maybeSwap(try cursor.read(as: UInt32.self)))\n }\n\n if !shouldFetchIndirect {\n return .stringFromStrTab(offset)\n } else {\n guard let string = try strSection.fetchString(from: offset) else {\n throw DwarfError.badString\n }\n return .string(string)\n }\n\n case .DW_FORM_strp_sup:\n let offset: UInt64\n if isDwarf64 {\n offset = maybeSwap(try cursor.read(as: UInt64.self))\n } else {\n offset = UInt64(maybeSwap(try cursor.read(as: UInt32.self)))\n }\n return .supplementaryString(offset)\n\n case .DW_FORM_line_strp:\n guard let lineStrSection = lineStrSection else {\n throw DwarfError.missingLineStrSection\n }\n\n let offset: UInt64\n if isDwarf64 {\n offset = maybeSwap(try cursor.read(as: UInt64.self))\n } else {\n offset = UInt64(maybeSwap(try cursor.read(as: UInt32.self)))\n }\n\n if !shouldFetchIndirect {\n return .stringFromLineStrTab(offset)\n } else {\n guard let string = try lineStrSection.fetchString(from: offset) else {\n throw DwarfError.badString\n }\n return .string(string)\n }\n\n case .DW_FORM_strx,\n .DW_FORM_strx1, .DW_FORM_strx2, .DW_FORM_strx3,.DW_FORM_strx4:\n guard let strOffsetsSection = strOffsetsSection else {\n throw DwarfError.missingStrOffsetsSection\n }\n guard let strSection = strSection else {\n throw DwarfError.missingStrSection\n }\n\n let offset: UInt64\n switch form {\n case .DW_FORM_strx:\n offset = try 
cursor.readULEB128()\n case .DW_FORM_strx1:\n offset = UInt64(try cursor.read(as: UInt8.self))\n case .DW_FORM_strx2:\n offset = UInt64(maybeSwap(try cursor.read(as: UInt16.self)))\n case .DW_FORM_strx3:\n let bytes = try cursor.read(as: (UInt8, UInt8, UInt8).self)\n offset = threeByteToOffset(bytes)\n case .DW_FORM_strx4:\n offset = UInt64(maybeSwap(try cursor.read(as: UInt32.self)))\n default:\n fatalError("unreachable")\n }\n\n if !shouldFetchIndirect {\n return .stringViaStrOffsets(offset)\n } else {\n guard let strBase = unit?.strOffsetsBase else {\n throw DwarfError.missingStrOffsetsBase\n }\n\n let actualOffset: UInt64\n if isDwarf64 {\n actualOffset = maybeSwap(try strOffsetsSection.fetch(\n from: offset * 8 + strBase,\n as: UInt64.self))\n } else {\n actualOffset = UInt64(maybeSwap(try strOffsetsSection.fetch(\n from: offset * 4 + strBase,\n as: UInt32.self)))\n }\n\n guard let string = try strSection.fetchString(from: actualOffset)\n else {\n throw DwarfError.badString\n }\n return .string(string)\n }\n\n case .DW_FORM_indirect:\n // We should have handled this already\n throw DwarfError.doubleIndirectForm\n default:\n throw DwarfError.unknownForm(theForm)\n }\n }\n\n private func readDieAttributes(\n at cursor: inout ImageSourceCursor,\n unit: Unit,\n abbrevInfo: AbbrevInfo,\n shouldFetchIndirect: Bool\n ) throws -> [Dwarf_Attribute:DwarfValue] {\n var attributes: [Dwarf_Attribute:DwarfValue] = [:]\n\n for (attribute, form, constantValue) in abbrevInfo.attributes {\n attributes[attribute] = try read(form: form,\n at: &cursor,\n addressSize: unit.addressSize,\n isDwarf64: unit.isDwarf64,\n unit: unit,\n shouldFetchIndirect: shouldFetchIndirect,\n constantValue: constantValue)\n }\n\n return attributes\n }\n\n struct CallSiteInfo {\n var depth: Int\n var rawName: String?\n var name: String?\n var lowPC: Address\n var highPC: Address\n var filename: String\n var line: Int\n var column: Int\n }\n\n private func buildCallSiteInfo(\n depth: Int,\n unit: 
Unit,\n attributes: [Dwarf_Attribute:DwarfValue],\n _ fn: (CallSiteInfo) -> ()\n ) throws {\n guard let abstractOriginVal = attributes[.DW_AT_abstract_origin],\n let callFile = attributes[.DW_AT_call_file]?.uint64Value(),\n let callLine = attributes[.DW_AT_call_line]?.uint64Value(),\n let callColumn = attributes[.DW_AT_call_column]?.uint64Value(),\n case let .reference(abstractOrigin) = abstractOriginVal else {\n return\n }\n\n var cursor = ImageSourceCursor(source: infoSection,\n offset: abstractOrigin)\n var abbrev = try cursor.readULEB128()\n if abbrev == 0 {\n return\n }\n\n guard let abbrevInfo = unit.abbrevs[abbrev] else {\n throw DwarfError.missingAbbrev(abbrev)\n }\n\n let tag = abbrevInfo.tag\n\n if tag != .DW_TAG_subprogram {\n return\n }\n\n var refAttrs = try readDieAttributes(\n at: &cursor,\n unit: unit,\n abbrevInfo: abbrevInfo,\n shouldFetchIndirect: true\n )\n\n if let specificationVal = refAttrs[.DW_AT_specification],\n case let .reference(specification) = specificationVal {\n cursor = ImageSourceCursor(source: infoSection,\n offset: specification)\n abbrev = try cursor.readULEB128()\n if abbrev == 0 {\n return\n }\n\n guard let abbrevInfo = unit.abbrevs[abbrev] else {\n throw DwarfError.missingAbbrev(abbrev)\n }\n\n let tag = abbrevInfo.tag\n if tag != .DW_TAG_subprogram {\n return\n }\n\n refAttrs = try readDieAttributes(\n at: &cursor,\n unit: unit,\n abbrevInfo: abbrevInfo,\n shouldFetchIndirect: true\n )\n }\n\n var name: String? = nil\n var rawName: String? 
= nil\n\n if let nameVal = refAttrs[.DW_AT_name],\n case let .string(theName) = nameVal {\n name = theName\n }\n\n if let linkageNameVal = refAttrs[.DW_AT_linkage_name],\n case let .string(theRawName) = linkageNameVal {\n rawName = theRawName\n } else {\n rawName = name\n }\n\n var filename: String = "<unknown>"\n for info in lineNumberInfo {\n if info.baseOffset == unit.lineBase {\n filename = info.fullPathForFile(index: Int(callFile))\n break\n }\n }\n\n if let lowPCVal = attributes[.DW_AT_low_pc],\n let highPCVal = attributes[.DW_AT_high_pc],\n case let .address(lowPC) = lowPCVal {\n let highPC: Address\n if case let .address(highPCAddr) = highPCVal {\n highPC = highPCAddr\n } else if let highPCOffset = highPCVal.uint64Value() {\n highPC = lowPC + highPCOffset\n } else {\n return\n }\n\n fn(CallSiteInfo(\n depth: depth,\n rawName: rawName,\n name: name,\n lowPC: lowPC,\n highPC: highPC,\n filename: filename,\n line: Int(callLine),\n column: Int(callColumn)))\n } else if let rangeVal = attributes[.DW_AT_ranges],\n let rangesSection = rangesSection,\n case let .sectionOffset(offset) = rangeVal,\n unit.version < 5 {\n // We don't support .debug_rnglists at present (which is what we'd\n // have if unit.version is 5 or higher).\n var rangeCursor = ImageSourceCursor(source: rangesSection,\n offset: offset)\n var rangeBase: Address = unit.lowPC ?? 
0\n\n while true {\n let beginning: Address\n let ending: Address\n\n switch unit.addressSize {\n case 4:\n beginning = UInt64(maybeSwap(try rangeCursor.read(as: UInt32.self)))\n ending = UInt64(maybeSwap(try rangeCursor.read(as: UInt32.self)))\n if beginning == 0xffffffff {\n rangeBase = ending\n continue\n }\n case 8:\n beginning = maybeSwap(try rangeCursor.read(as: UInt64.self))\n ending = maybeSwap(try rangeCursor.read(as: UInt64.self))\n if beginning == 0xffffffffffffffff {\n rangeBase = ending\n continue\n }\n default:\n throw DwarfError.badAddressSize(unit.addressSize)\n }\n\n if beginning == 0 && ending == 0 {\n break\n }\n\n fn(CallSiteInfo(\n depth: depth,\n rawName: rawName,\n name: name,\n lowPC: beginning + rangeBase,\n highPC: ending + rangeBase,\n filename: filename,\n line: Int(callLine),\n column: Int(callColumn)))\n }\n }\n }\n\n lazy var inlineCallSites: [CallSiteInfo] = _buildCallSiteList()\n\n private func _buildCallSiteList() -> [CallSiteInfo] {\n var callSites: [CallSiteInfo] = []\n\n for unit in units {\n do {\n var cursor = ImageSourceCursor(source: infoSection,\n offset: unit.dieBounds.base)\n var depth = 0\n\n while cursor.pos < unit.dieBounds.end {\n let abbrev = try cursor.readULEB128()\n\n if abbrev == 0 {\n depth -= 1\n if depth == 0 {\n break\n }\n continue\n }\n\n guard let abbrevInfo = unit.abbrevs[abbrev] else {\n throw DwarfError.missingAbbrev(abbrev)\n }\n\n let tag = abbrevInfo.tag\n\n let attributes = try readDieAttributes(\n at: &cursor,\n unit: unit,\n abbrevInfo: abbrevInfo,\n shouldFetchIndirect: tag == .DW_TAG_inlined_subroutine\n )\n\n if tag == .DW_TAG_inlined_subroutine {\n try buildCallSiteInfo(depth: depth,\n unit: unit,\n attributes: attributes) {\n callSites.append($0)\n }\n }\n\n if abbrevInfo.hasChildren {\n depth += 1\n }\n }\n } catch {\n let name: String\n if let value = unit.attributes[.DW_AT_name],\n case let .string(theName) = value {\n name = theName\n } else {\n name = "<unknown at 
\(hex(unit.baseOffset))>"\n }\n swift_reportWarning(0,\n """\n swift-runtime: warning: unable to fetch inline \\n frame data for DWARF unit \(name): \(error)\n """)\n }\n }\n\n callSites.sort(\n by: { (a, b) in\n a.lowPC < b.lowPC || (a.lowPC == b.lowPC) && a.depth > b.depth\n })\n\n return callSites\n }\n\n}\n\nstruct DwarfFileInfo {\n var path: String\n var directoryIndex: Int?\n var timestamp: Int?\n var size: UInt64?\n var md5sum: [UInt8]?\n}\n\nstruct DwarfLineNumberState: CustomStringConvertible {\n typealias Address = UInt64\n\n var address: Address\n var opIndex: UInt\n var file: Int\n var path: String\n var line: Int\n var column: Int\n var isStmt: Bool\n var basicBlock: Bool\n var endSequence: Bool\n var prologueEnd: Bool\n var epilogueBegin: Bool\n var isa: UInt\n var discriminator: UInt\n\n var description: String {\n var flags: [String] = []\n if isStmt {\n flags.append("is_stmt")\n }\n if basicBlock {\n flags.append("basic_block")\n }\n if endSequence {\n flags.append("end_sequence")\n }\n if prologueEnd {\n flags.append("prologue_end")\n }\n if epilogueBegin {\n flags.append("epilogue_begin")\n }\n\n let flagsString = flags.joined(separator:" ")\n\n return """\n \(hex(address)) \(pad(line, 6)) \(pad(column, 6)) \(pad(file, 6)) \\n \(pad(isa, 3)) \(pad(discriminator, 13)) \(flagsString)\n """\n }\n}\n\nstruct DwarfLineNumberInfo {\n typealias Address = UInt64\n\n var baseOffset: Address\n var version: Int\n var addressSize: Int?\n var selectorSize: Int?\n var headerLength: UInt64\n var minimumInstructionLength: UInt\n var maximumOpsPerInstruction: UInt\n var defaultIsStmt: Bool\n var lineBase: Int8\n var lineRange: UInt8\n var opcodeBase: UInt8\n var standardOpcodeLengths: [UInt64]\n var directories: [String] = []\n var files: [DwarfFileInfo] = []\n var program: ImageSource\n var shouldSwap: Bool\n\n /// Compute the full path for a file, given its index in the file table.\n func fullPathForFile(index: Int) -> String {\n if index >= files.count {\n 
return "<unknown>"\n }\n\n let info = files[index]\n if info.path.hasPrefix("/") {\n return info.path\n }\n\n let dirName: String\n if let dirIndex = info.directoryIndex,\n dirIndex < directories.count {\n dirName = directories[dirIndex]\n } else {\n dirName = "<unknown>"\n }\n\n return "\(dirName)/\(info.path)"\n }\n\n /// Execute the line number program, calling a closure for every line\n /// table entry.\n mutating func executeProgram(\n line: (DwarfLineNumberState, inout Bool) -> ()\n ) throws {\n let end = program.bytes.count\n var cursor = ImageSourceCursor(source: program)\n\n func maybeSwap<T: FixedWidthInteger>(_ x: T) -> T {\n if shouldSwap {\n return x.byteSwapped\n }\n return x\n }\n\n // Table 6.4: Line number program initial state\n let initialState = DwarfLineNumberState(\n address: 0,\n opIndex: 0,\n file: 1,\n path: fullPathForFile(index: 1),\n line: 1,\n column: 0,\n isStmt: defaultIsStmt,\n basicBlock: false,\n endSequence: false,\n prologueEnd: false,\n epilogueBegin: false,\n isa: 0,\n discriminator: 0\n )\n\n var state = initialState\n\n // Flag to allow fast exit\n var done = false\n\n while !done && cursor.pos < end {\n let opcode = try cursor.read(as: Dwarf_LNS_Opcode.self)\n\n if opcode.rawValue >= opcodeBase {\n // Special opcode\n let adjustedOpcode = UInt(opcode.rawValue - opcodeBase)\n let advance = adjustedOpcode / UInt(lineRange)\n let lineAdvance = adjustedOpcode % UInt(lineRange)\n let instrAdvance\n = (state.opIndex + advance) / maximumOpsPerInstruction\n let newOp = (state.opIndex + advance) % maximumOpsPerInstruction\n state.address += Address(instrAdvance)\n state.opIndex = newOp\n state.line += Int(lineBase) + Int(lineAdvance)\n\n line(state, &done)\n\n state.discriminator = 0\n state.basicBlock = false\n state.prologueEnd = false\n state.epilogueBegin = false\n } else if opcode == .DW_LNS_extended {\n // Extended opcode\n let length = try cursor.readULEB128()\n let opcode = try cursor.read(as: Dwarf_LNE_Opcode.self)\n\n 
switch opcode {\n case .DW_LNE_end_sequence:\n state.endSequence = true\n line(state, &done)\n state = initialState\n case .DW_LNE_set_address:\n let address: UInt64\n guard let addressSize = addressSize else {\n throw DwarfError.unspecifiedAddressSize\n }\n switch addressSize {\n case 4:\n address = UInt64(maybeSwap(try cursor.read(as: UInt32.self)))\n case 8:\n address = maybeSwap(try cursor.read(as: UInt64.self))\n default:\n throw DwarfError.badAddressSize(addressSize)\n }\n state.address = Address(address)\n case .DW_LNE_define_file:\n guard let path = try cursor.readString() else {\n throw DwarfError.badString\n }\n let directoryIndex = try cursor.readULEB128()\n let timestamp = try cursor.readULEB128()\n let size = try cursor.readULEB128()\n files.append(DwarfFileInfo(\n path: path,\n directoryIndex: Int(directoryIndex),\n timestamp: timestamp != 0 ? Int(timestamp) : nil,\n size: size != 0 ? size : nil,\n md5sum: nil\n ))\n case .DW_LNE_set_discriminator:\n let discriminator = try cursor.readULEB128()\n state.discriminator = UInt(discriminator)\n default:\n cursor.pos += length - 1\n }\n } else {\n // Standard opcode\n switch opcode {\n case .DW_LNS_copy:\n line(state, &done)\n state.discriminator = 0\n state.basicBlock = false\n state.prologueEnd = false\n state.epilogueBegin = false\n case .DW_LNS_advance_pc:\n let advance = UInt(try cursor.readULEB128())\n let instrAdvance\n = (state.opIndex + advance) / maximumOpsPerInstruction\n let newOp = (state.opIndex + advance) % maximumOpsPerInstruction\n state.address += Address(instrAdvance)\n state.opIndex = newOp\n case .DW_LNS_advance_line:\n let advance = try cursor.readSLEB128()\n state.line += Int(advance)\n case .DW_LNS_set_file:\n let file = Int(try cursor.readULEB128())\n state.file = file\n state.path = fullPathForFile(index: state.file)\n case .DW_LNS_set_column:\n let column = Int(try cursor.readULEB128())\n state.column = column\n case .DW_LNS_negate_stmt:\n state.isStmt = !state.isStmt\n case 
.DW_LNS_set_basic_block:\n state.basicBlock = true\n case .DW_LNS_const_add_pc:\n let adjustedOpcode = UInt(255 - opcodeBase)\n let advance = adjustedOpcode / UInt(lineRange)\n let instrAdvance\n = (state.opIndex + advance) / maximumOpsPerInstruction\n let newOp = (state.opIndex + advance) % maximumOpsPerInstruction\n state.address += Address(instrAdvance)\n state.opIndex = newOp\n case .DW_LNS_fixed_advance_pc:\n let advance = try cursor.read(as: Dwarf_Half.self)\n state.address += Address(advance)\n state.opIndex = 0\n case .DW_LNS_set_prologue_end:\n state.prologueEnd = true\n case .DW_LNS_set_epilogue_begin:\n state.epilogueBegin = true\n case .DW_LNS_set_isa:\n let isa = UInt(try cursor.readULEB128())\n state.isa = isa\n default:\n // Skip this unknown opcode\n let length = standardOpcodeLengths[Int(opcode.rawValue)]\n for _ in 0..<length {\n _ = try cursor.readULEB128()\n }\n }\n }\n }\n }\n}\n\n// .. Testing ..................................................................\n\n@_spi(DwarfTest)\npublic func testDwarfReaderFor(path: String) -> Bool {\n guard let source = try? ImageSource(path: path) else {\n print("\(path) was not accessible")\n return false\n }\n\n if let elfImage = try? Elf32Image(source: source) {\n print("\(path) is a 32-bit ELF image")\n\n var reader: DwarfReader<Elf32Image>\n do {\n reader = try DwarfReader(source: elfImage)\n } catch {\n print("Unable to create reader - \(error)")\n return false\n }\n\n print("Units:")\n print(reader.units)\n\n print("Call Sites:")\n print(reader.inlineCallSites)\n return true\n } else if let elfImage = try? 
Elf64Image(source: source) {\n print("\(path) is a 64-bit ELF image")\n\n var reader: DwarfReader<Elf64Image>\n do {\n reader = try DwarfReader(source: elfImage)\n } catch {\n print("Unable to create reader - \(error)")\n return false\n }\n\n print("Units:")\n for unit in reader.units {\n if let value = unit.attributes[.DW_AT_name],\n case let .string(name) = value {\n print(" \(name)")\n } else {\n print(" <unnamed>")\n }\n }\n\n print("Call Sites:")\n print(reader.inlineCallSites)\n return true\n } else {\n print("\(path) is not an ELF image")\n return false\n }\n}\n
dataset_sample\swift\swift\cpp_apple_swift_stdlib_public_RuntimeModule_Dwarf.swift
cpp_apple_swift_stdlib_public_RuntimeModule_Dwarf.swift
Swift
69,056
0.75
0.148544
0.050781
python-kit
336
2025-07-01T20:14:10.394880
Apache-2.0
false
89eb6ee6b693d194474915b5ffac1a69
//===--- EightByteBuffer.swift --------------------------------*- swift -*-===//\n//\n// This source file is part of the Swift.org open source project\n//\n// Copyright (c) 2024 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n//\n// A statically allocated buffer for holding a small number of bytes.\n//\n//===----------------------------------------------------------------------===//\n\nimport Swift\n\nstruct EightByteBuffer {\n var word: UInt64\n\n init() {\n word = 0\n }\n\n init(_ qword: UInt64) {\n word = qword.bigEndian\n }\n\n init(_ qword: Int64) {\n self.init(UInt64(bitPattern: qword))\n }\n\n init<T: FixedWidthInteger>(_ value: T) where T: SignedInteger {\n self.init(Int64(value))\n }\n\n init<T: FixedWidthInteger>(_ value: T) {\n self.init(UInt64(value))\n }\n\n subscript(ndx: Int) -> UInt8 {\n get {\n if ndx < 0 || ndx >= 8 {\n fatalError("Index out of range")\n }\n return withUnsafeBytes(of: word) { buffer in\n return buffer[ndx]\n }\n }\n set(newValue) {\n if ndx < 0 || ndx >= 8 {\n fatalError("Index out of range")\n }\n withUnsafeMutableBytes(of: &word) { buffer in\n buffer[ndx] = newValue\n }\n }\n }\n}\n
dataset_sample\swift\swift\cpp_apple_swift_stdlib_public_RuntimeModule_EightByteBuffer.swift
cpp_apple_swift_stdlib_public_RuntimeModule_EightByteBuffer.swift
Swift
1,482
0.95
0.083333
0.288462
node-utils
307
2023-07-17T00:21:21.899142
BSD-3-Clause
false
979391e9966dce0113600b47016cc3c6
//===--- Elf.swift - ELF support for Swift --------------------------------===//\n//\n// This source file is part of the Swift.org open source project\n//\n// Copyright (c) 2023 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n//\n// Defines various ELF structures and provides types for working with ELF\n// images on disk and in memory.\n//\n//===----------------------------------------------------------------------===//\n\n// ###FIXME: We shouldn't really use String for paths.\n\nimport Swift\n\n#if os(macOS) || os(iOS) || os(tvOS) || os(watchOS)\ninternal import Darwin\n#elseif os(Windows)\ninternal import ucrt\n#elseif canImport(Glibc)\ninternal import Glibc\n#elseif canImport(Musl)\ninternal import Musl\n#endif\ninternal import BacktracingImpl.ImageFormats.Elf\n\n// .. 
Use *our* Elf definitions ................................................\n\n// On Linux there is an <elf.h> header that can be dragged in via Glibc, which\n// contains slightly different definitions that don't work so well with Swift.\n// We don't want those, so we're using C++ interop and putting ours in the\n// swift::runtime namespace.\n\n// This means we need a lot of typealiases, and also aliases for untyped\n// constants.\n\ntypealias Elf_Byte = swift.runtime.Elf_Byte\ntypealias Elf_Half = swift.runtime.Elf_Half\ntypealias Elf_Word = swift.runtime.Elf_Word\ntypealias Elf_Xword = swift.runtime.Elf_Xword\ntypealias Elf_Sword = swift.runtime.Elf_Sword\ntypealias Elf_Sxword = swift.runtime.Elf_Sxword\n\ntypealias Elf32_Byte = swift.runtime.Elf32_Byte\ntypealias Elf32_Half = swift.runtime.Elf32_Half\ntypealias Elf32_Word = swift.runtime.Elf32_Word\ntypealias Elf32_Sword = swift.runtime.Elf32_Sword\n\ntypealias Elf64_Byte = swift.runtime.Elf64_Byte\ntypealias Elf64_Half = swift.runtime.Elf64_Half\ntypealias Elf64_Word = swift.runtime.Elf64_Word\ntypealias Elf64_Xword = swift.runtime.Elf64_Xword\ntypealias Elf64_Sword = swift.runtime.Elf64_Sword\ntypealias Elf64_Sxword = swift.runtime.Elf64_Sxword\n\ntypealias Elf_Ehdr_Type = swift.runtime.Elf_Ehdr_Type\ntypealias Elf_Ehdr_Machine = swift.runtime.Elf_Ehdr_Machine\ntypealias Elf_Ehdr_Version = swift.runtime.Elf_Ehdr_Version\n\nlet EI_MAG0 = swift.runtime.EI_MAG0\nlet EI_MAG1 = swift.runtime.EI_MAG1\nlet EI_MAG2 = swift.runtime.EI_MAG2\nlet EI_MAG3 = swift.runtime.EI_MAG3\nlet EI_CLASS = swift.runtime.EI_CLASS\nlet EI_DATA = swift.runtime.EI_DATA\nlet EI_VERSION = swift.runtime.EI_VERSION\nlet EI_OSABI = swift.runtime.EI_OSABI\nlet EI_ABIVERSION = swift.runtime.EI_ABIVERSION\nlet EI_PAD = swift.runtime.EI_PAD\n\nlet ELFMAG0 = swift.runtime.ELFMAG0\nlet ELFMAG1 = swift.runtime.ELFMAG1\nlet ELFMAG2 = swift.runtime.ELFMAG2\nlet ELFMAG3 = swift.runtime.ELFMAG3\n\ntypealias Elf_Ehdr_Class = 
swift.runtime.Elf_Ehdr_Class\ntypealias Elf_Ehdr_Data = swift.runtime.Elf_Ehdr_Data\ntypealias Elf_Ehdr_OsAbi = swift.runtime.Elf_Ehdr_OsAbi\n\nlet SHN_UNDEF = swift.runtime.SHN_UNDEF\nlet SHN_LORESERVE = swift.runtime.SHN_LORESERVE\nlet SHN_LOPROC = swift.runtime.SHN_LOPROC\nlet SHN_HIPROC = swift.runtime.SHN_HIPROC\nlet SHN_LOOS = swift.runtime.SHN_LOOS\nlet SHN_HIOS = swift.runtime.SHN_HIOS\nlet SHN_ABS = swift.runtime.SHN_ABS\nlet SHN_COMMON = swift.runtime.SHN_COMMON\nlet SHN_XINDEX = swift.runtime.SHN_XINDEX\nlet SHN_HIRESERVE = swift.runtime.SHN_HIRESERVE\n\ntypealias Elf_Shdr_Type = swift.runtime.Elf_Shdr_Type\n\nlet SHF_WRITE = swift.runtime.SHF_WRITE\nlet SHF_ALLOC = swift.runtime.SHF_ALLOC\nlet SHF_EXECINSTR = swift.runtime.SHF_EXECINSTR\nlet SHF_MERGE = swift.runtime.SHF_MERGE\nlet SHF_STRINGS = swift.runtime.SHF_STRINGS\nlet SHF_INFO_LINK = swift.runtime.SHF_INFO_LINK\nlet SHF_LINK_ORDER = swift.runtime.SHF_LINK_ORDER\nlet SHF_OS_NONCONFORMING = swift.runtime.SHF_OS_NONCONFORMING\nlet SHF_GROUP = swift.runtime.SHF_GROUP\nlet SHF_TLS = swift.runtime.SHF_TLS\nlet SHF_COMPRESSED = swift.runtime.SHF_COMPRESSED\nlet SHF_MASKOS = swift.runtime.SHF_MASKOS\nlet SHF_MASKPROC = swift.runtime.SHF_MASKPROC\n\nlet GRP_COMDAT = swift.runtime.GRP_COMDAT\nlet GRP_MASKOS = swift.runtime.GRP_MASKOS\nlet GRP_MASKPROC = swift.runtime.GRP_MASKPROC\n\ntypealias Elf_Chdr_Type = swift.runtime.Elf_Chdr_Type\n\ntypealias Elf_Sym_Binding = swift.runtime.Elf_Sym_Binding\ntypealias Elf_Sym_Type = swift.runtime.Elf_Sym_Type\ntypealias Elf_Sym_Visibility = swift.runtime.Elf_Sym_Visibility\n\ntypealias Elf_Phdr_Type = swift.runtime.Elf_Phdr_Type\ntypealias Elf_Phdr_Flags = swift.runtime.Elf_Phdr_Flags\n\nlet PF_X = swift.runtime.PF_X\nlet PF_W = swift.runtime.PF_W\nlet PF_R = swift.runtime.PF_R\n\nlet PF_MASKOS = swift.runtime.PF_MASKOS\nlet PF_MASKPROC = swift.runtime.PF_MASKPROC\n\nlet DT_NULL = swift.runtime.DT_NULL\nlet DT_NEEDED = swift.runtime.DT_NEEDED\nlet DT_PLTRELSZ = 
swift.runtime.DT_PLTRELSZ\nlet DT_PLTGOT = swift.runtime.DT_PLTGOT\nlet DT_HASH = swift.runtime.DT_HASH\nlet DT_STRTAB = swift.runtime.DT_STRTAB\nlet DT_SYMTAB = swift.runtime.DT_SYMTAB\nlet DT_RELA = swift.runtime.DT_RELA\nlet DT_RELASZ = swift.runtime.DT_RELASZ\nlet DT_RELAENT = swift.runtime.DT_RELAENT\nlet DT_STRSZ = swift.runtime.DT_STRSZ\nlet DT_SYMENT = swift.runtime.DT_SYMENT\nlet DT_INIT = swift.runtime.DT_INIT\nlet DT_FINI = swift.runtime.DT_FINI\nlet DT_SONAME = swift.runtime.DT_SONAME\nlet DT_RPATH = swift.runtime.DT_RPATH\nlet DT_SYMBOLIC = swift.runtime.DT_SYMBOLIC\nlet DT_REL = swift.runtime.DT_REL\nlet DT_RELSZ = swift.runtime.DT_RELSZ\nlet DT_RELENT = swift.runtime.DT_RELENT\nlet DT_PLTREL = swift.runtime.DT_PLTREL\nlet DT_DEBUG = swift.runtime.DT_DEBUG\nlet DT_TEXTREL = swift.runtime.DT_TEXTREL\nlet DT_JMPREL = swift.runtime.DT_JMPREL\nlet DT_BIND_NOW = swift.runtime.DT_BIND_NOW\nlet DT_INIT_ARRAY = swift.runtime.DT_INIT_ARRAY\nlet DT_FINI_ARRAY = swift.runtime.DT_FINI_ARRAY\nlet DT_INIT_ARRAYSZ = swift.runtime.DT_INIT_ARRAYSZ\nlet DT_FINI_ARRAYSZ = swift.runtime.DT_FINI_ARRAYSZ\nlet DT_RUNPATH = swift.runtime.DT_RUNPATH\nlet DT_FLAGS = swift.runtime.DT_FLAGS\nlet DT_ENCODING = swift.runtime.DT_ENCODING\nlet DT_PREINIT_ARRAY = swift.runtime.DT_PREINIT_ARRAY\nlet DT_PREINIT_ARRAYSZ = swift.runtime.DT_PREINIT_ARRAYSZ\nlet DT_LOOS = swift.runtime.DT_LOOS\nlet DT_HIOS = swift.runtime.DT_HIOS\nlet DT_LOPROC = swift.runtime.DT_LOPROC\nlet DT_HIPROC = swift.runtime.DT_HIPROC\n\nlet DF_ORIGIN = swift.runtime.DF_ORIGIN\nlet DF_SYMBOLIC = swift.runtime.DF_SYMBOLIC\nlet DF_TEXTREL = swift.runtime.DF_TEXTREL\nlet DF_BIND_NOW = swift.runtime.DF_BIND_NOW\nlet DF_STATIC_TLS = swift.runtime.DF_STATIC_TLS\n\nlet NT_GNU_ABI_TAG = swift.runtime.NT_GNU_ABI_TAG\nlet NT_GNU_HWCAP = swift.runtime.NT_GNU_HWCAP\nlet NT_GNU_BUILD_ID = swift.runtime.NT_GNU_BUILD_ID\nlet NT_GNU_GOLD_VERSION = swift.runtime.NT_GNU_GOLD_VERSION\nlet NT_GNU_PROPERTY_TYPE_0 = 
swift.runtime.NT_GNU_PROPERTY_TYPE_0\n\ntypealias Elf32_Ehdr = swift.runtime.Elf32_Ehdr\ntypealias Elf64_Ehdr = swift.runtime.Elf64_Ehdr\n\ntypealias Elf32_Shdr = swift.runtime.Elf32_Shdr\ntypealias Elf64_Shdr = swift.runtime.Elf64_Shdr\n\ntypealias Elf32_Chdr = swift.runtime.Elf32_Chdr\ntypealias Elf64_Chdr = swift.runtime.Elf64_Chdr\n\ntypealias Elf32_Sym = swift.runtime.Elf32_Sym\ntypealias Elf64_Sym = swift.runtime.Elf64_Sym\n\nlet ELF32_ST_BIND = swift.runtime.ELF32_ST_BIND\nlet ELF32_ST_TYPE = swift.runtime.ELF32_ST_TYPE\nlet ELF32_ST_INFO = swift.runtime.ELF32_ST_INFO\nlet ELF32_ST_VISIBILITY = swift.runtime.ELF32_ST_VISIBILITY\n\nlet ELF64_ST_BIND = swift.runtime.ELF64_ST_BIND\nlet ELF64_ST_TYPE = swift.runtime.ELF64_ST_TYPE\nlet ELF64_ST_INFO = swift.runtime.ELF64_ST_INFO\nlet ELF64_ST_VISIBILITY = swift.runtime.ELF64_ST_VISIBILITY\n\ntypealias Elf32_Rel = swift.runtime.Elf32_Rel\ntypealias Elf32_Rela = swift.runtime.Elf32_Rela\ntypealias Elf64_Rel = swift.runtime.Elf64_Rel\ntypealias Elf64_Rela = swift.runtime.Elf64_Rela\n\nlet ELF32_R_SYM = swift.runtime.ELF32_R_SYM\nlet ELF32_R_TYPE = swift.runtime.ELF32_R_TYPE\nlet ELF32_R_INFO = swift.runtime.ELF32_R_INFO\n\nlet ELF64_R_SYM = swift.runtime.ELF64_R_SYM\nlet ELF64_R_TYPE = swift.runtime.ELF64_R_TYPE\nlet ELF64_R_INFO = swift.runtime.ELF64_R_INFO\n\ntypealias Elf32_Phdr = swift.runtime.Elf32_Phdr\ntypealias Elf64_Phdr = swift.runtime.Elf64_Phdr\n\ntypealias Elf32_Nhdr = swift.runtime.Elf32_Nhdr\ntypealias Elf64_Nhdr = swift.runtime.Elf64_Nhdr\n\ntypealias Elf32_Dyn = swift.runtime.Elf32_Dyn\ntypealias Elf64_Dyn = swift.runtime.Elf64_Dyn\n\ntypealias Elf32_Hash = swift.runtime.Elf32_Hash\ntypealias Elf64_Hash = swift.runtime.Elf64_Hash\n\nlet elf_hash = swift.runtime.elf_hash\n\n// .. Utilities ................................................................\n\nprivate func realPath(_ path: String) -> String? 
{\n guard let result = realpath(path, nil) else {\n return nil\n }\n\n let s = String(cString: result)\n\n free(result)\n\n return s\n}\n\nprivate func dirname(_ path: String) -> Substring {\n guard let lastSlash = path.lastIndex(of: "/") else {\n return ""\n }\n return path.prefix(upTo: lastSlash)\n}\n\nprivate let crc32Table: [UInt32] = [\n 0x00000000, 0x77073096, 0xee0e612c, 0x990951ba, 0x076dc419, 0x706af48f,\n 0xe963a535, 0x9e6495a3, 0x0edb8832, 0x79dcb8a4, 0xe0d5e91e, 0x97d2d988,\n 0x09b64c2b, 0x7eb17cbd, 0xe7b82d07, 0x90bf1d91, 0x1db71064, 0x6ab020f2,\n 0xf3b97148, 0x84be41de, 0x1adad47d, 0x6ddde4eb, 0xf4d4b551, 0x83d385c7,\n 0x136c9856, 0x646ba8c0, 0xfd62f97a, 0x8a65c9ec, 0x14015c4f, 0x63066cd9,\n 0xfa0f3d63, 0x8d080df5, 0x3b6e20c8, 0x4c69105e, 0xd56041e4, 0xa2677172,\n 0x3c03e4d1, 0x4b04d447, 0xd20d85fd, 0xa50ab56b, 0x35b5a8fa, 0x42b2986c,\n 0xdbbbc9d6, 0xacbcf940, 0x32d86ce3, 0x45df5c75, 0xdcd60dcf, 0xabd13d59,\n 0x26d930ac, 0x51de003a, 0xc8d75180, 0xbfd06116, 0x21b4f4b5, 0x56b3c423,\n 0xcfba9599, 0xb8bda50f, 0x2802b89e, 0x5f058808, 0xc60cd9b2, 0xb10be924,\n 0x2f6f7c87, 0x58684c11, 0xc1611dab, 0xb6662d3d, 0x76dc4190, 0x01db7106,\n 0x98d220bc, 0xefd5102a, 0x71b18589, 0x06b6b51f, 0x9fbfe4a5, 0xe8b8d433,\n 0x7807c9a2, 0x0f00f934, 0x9609a88e, 0xe10e9818, 0x7f6a0dbb, 0x086d3d2d,\n 0x91646c97, 0xe6635c01, 0x6b6b51f4, 0x1c6c6162, 0x856530d8, 0xf262004e,\n 0x6c0695ed, 0x1b01a57b, 0x8208f4c1, 0xf50fc457, 0x65b0d9c6, 0x12b7e950,\n 0x8bbeb8ea, 0xfcb9887c, 0x62dd1ddf, 0x15da2d49, 0x8cd37cf3, 0xfbd44c65,\n 0x4db26158, 0x3ab551ce, 0xa3bc0074, 0xd4bb30e2, 0x4adfa541, 0x3dd895d7,\n 0xa4d1c46d, 0xd3d6f4fb, 0x4369e96a, 0x346ed9fc, 0xad678846, 0xda60b8d0,\n 0x44042d73, 0x33031de5, 0xaa0a4c5f, 0xdd0d7cc9, 0x5005713c, 0x270241aa,\n 0xbe0b1010, 0xc90c2086, 0x5768b525, 0x206f85b3, 0xb966d409, 0xce61e49f,\n 0x5edef90e, 0x29d9c998, 0xb0d09822, 0xc7d7a8b4, 0x59b33d17, 0x2eb40d81,\n 0xb7bd5c3b, 0xc0ba6cad, 0xedb88320, 0x9abfb3b6, 0x03b6e20c, 0x74b1d29a,\n 0xead54739, 0x9dd277af, 
0x04db2615, 0x73dc1683, 0xe3630b12, 0x94643b84,\n 0x0d6d6a3e, 0x7a6a5aa8, 0xe40ecf0b, 0x9309ff9d, 0x0a00ae27, 0x7d079eb1,\n 0xf00f9344, 0x8708a3d2, 0x1e01f268, 0x6906c2fe, 0xf762575d, 0x806567cb,\n 0x196c3671, 0x6e6b06e7, 0xfed41b76, 0x89d32be0, 0x10da7a5a, 0x67dd4acc,\n 0xf9b9df6f, 0x8ebeeff9, 0x17b7be43, 0x60b08ed5, 0xd6d6a3e8, 0xa1d1937e,\n 0x38d8c2c4, 0x4fdff252, 0xd1bb67f1, 0xa6bc5767, 0x3fb506dd, 0x48b2364b,\n 0xd80d2bda, 0xaf0a1b4c, 0x36034af6, 0x41047a60, 0xdf60efc3, 0xa867df55,\n 0x316e8eef, 0x4669be79, 0xcb61b38c, 0xbc66831a, 0x256fd2a0, 0x5268e236,\n 0xcc0c7795, 0xbb0b4703, 0x220216b9, 0x5505262f, 0xc5ba3bbe, 0xb2bd0b28,\n 0x2bb45a92, 0x5cb36a04, 0xc2d7ffa7, 0xb5d0cf31, 0x2cd99e8b, 0x5bdeae1d,\n 0x9b64c2b0, 0xec63f226, 0x756aa39c, 0x026d930a, 0x9c0906a9, 0xeb0e363f,\n 0x72076785, 0x05005713, 0x95bf4a82, 0xe2b87a14, 0x7bb12bae, 0x0cb61b38,\n 0x92d28e9b, 0xe5d5be0d, 0x7cdcefb7, 0x0bdbdf21, 0x86d3d2d4, 0xf1d4e242,\n 0x68ddb3f8, 0x1fda836e, 0x81be16cd, 0xf6b9265b, 0x6fb077e1, 0x18b74777,\n 0x88085ae6, 0xff0f6a70, 0x66063bca, 0x11010b5c, 0x8f659eff, 0xf862ae69,\n 0x616bffd3, 0x166ccf45, 0xa00ae278, 0xd70dd2ee, 0x4e048354, 0x3903b3c2,\n 0xa7672661, 0xd06016f7, 0x4969474d, 0x3e6e77db, 0xaed16a4a, 0xd9d65adc,\n 0x40df0b66, 0x37d83bf0, 0xa9bcae53, 0xdebb9ec5, 0x47b2cf7f, 0x30b5ffe9,\n 0xbdbdf21c, 0xcabac28a, 0x53b39330, 0x24b4a3a6, 0xbad03605, 0xcdd70693,\n 0x54de5729, 0x23d967bf, 0xb3667a2e, 0xc4614ab8, 0x5d681b02, 0x2a6f2b94,\n 0xb40bbe37, 0xc30c8ea1, 0x5a05df1b, 0x2d02ef8d\n]\n\nprivate func updateCrc(_ crc: UInt32,\n _ bytes: UnsafeRawBufferPointer) -> UInt32 {\n var theCrc = ~crc\n for byte in bytes {\n theCrc = crc32Table[Int(UInt8(truncatingIfNeeded: theCrc)\n ^ byte)] ^ (theCrc >> 8)\n }\n return ~theCrc\n}\n\n// .. 
Byte swapping ............................................................\n\nextension Elf32_Ehdr: ByteSwappable {\n var byteSwapped: Self {\n return Elf32_Ehdr(\n e_ident: e_ident,\n e_type: Elf_Ehdr_Type(rawValue: e_type.rawValue.byteSwapped)!,\n e_machine: Elf_Ehdr_Machine(rawValue: e_machine.rawValue.byteSwapped)!,\n e_version: Elf_Ehdr_Version(rawValue: e_version.rawValue.byteSwapped)!,\n e_entry: e_entry.byteSwapped,\n e_phoff: e_phoff.byteSwapped,\n e_shoff: e_shoff.byteSwapped,\n e_flags: e_flags.byteSwapped,\n e_ehsize: e_ehsize.byteSwapped,\n e_phentsize: e_phentsize.byteSwapped,\n e_phnum: e_phnum.byteSwapped,\n e_shentsize: e_shentsize.byteSwapped,\n e_shnum: e_shnum.byteSwapped,\n e_shstrndx: e_shstrndx.byteSwapped\n )\n }\n}\n\nextension Elf64_Ehdr: ByteSwappable {\n var byteSwapped: Self {\n return Elf64_Ehdr(\n e_ident: e_ident,\n e_type: Elf_Ehdr_Type(rawValue: e_type.rawValue.byteSwapped)!,\n e_machine: Elf_Ehdr_Machine(rawValue: e_machine.rawValue.byteSwapped)!,\n e_version: Elf_Ehdr_Version(rawValue: e_version.rawValue.byteSwapped)!,\n e_entry: e_entry.byteSwapped,\n e_phoff: e_phoff.byteSwapped,\n e_shoff: e_shoff.byteSwapped,\n e_flags: e_flags.byteSwapped,\n e_ehsize: e_ehsize.byteSwapped,\n e_phentsize: e_phentsize.byteSwapped,\n e_phnum: e_phnum.byteSwapped,\n e_shentsize: e_shentsize.byteSwapped,\n e_shnum: e_shnum.byteSwapped,\n e_shstrndx: e_shstrndx.byteSwapped\n )\n }\n}\n\nextension Elf32_Shdr: ByteSwappable {\n var byteSwapped: Self {\n return Elf32_Shdr(\n sh_name: sh_name.byteSwapped,\n sh_type: Elf_Shdr_Type(rawValue: sh_type.rawValue.byteSwapped)!,\n sh_flags: sh_flags.byteSwapped,\n sh_addr: sh_addr.byteSwapped,\n sh_offset: sh_offset.byteSwapped,\n sh_size: sh_size.byteSwapped,\n sh_link: sh_link.byteSwapped,\n sh_info: sh_info.byteSwapped,\n sh_addralign: sh_addralign.byteSwapped,\n sh_entsize: sh_entsize.byteSwapped\n )\n }\n}\n\nextension Elf64_Shdr: ByteSwappable {\n var byteSwapped: Self {\n return Elf64_Shdr(\n sh_name: 
sh_name.byteSwapped,\n sh_type: Elf_Shdr_Type(rawValue: sh_type.rawValue.byteSwapped)!,\n sh_flags: sh_flags.byteSwapped,\n sh_addr: sh_addr.byteSwapped,\n sh_offset: sh_offset.byteSwapped,\n sh_size: sh_size.byteSwapped,\n sh_link: sh_link.byteSwapped,\n sh_info: sh_info.byteSwapped,\n sh_addralign: sh_addralign.byteSwapped,\n sh_entsize: sh_entsize.byteSwapped\n )\n }\n}\n\nprotocol Elf_Chdr: ByteSwappable {\n associatedtype Size: FixedWidthInteger\n\n init()\n\n var ch_type: Elf_Chdr_Type { get set }\n var ch_size: Size { get set }\n var ch_addralign: Size { get set }\n}\n\nextension Elf32_Chdr: Elf_Chdr {\n var byteSwapped: Self {\n return Elf32_Chdr(\n ch_type: Elf_Chdr_Type(rawValue: ch_type.rawValue.byteSwapped)!,\n ch_size: ch_size.byteSwapped,\n ch_addralign: ch_addralign.byteSwapped\n )\n }\n}\n\nextension Elf64_Chdr: Elf_Chdr {\n var byteSwapped: Self {\n return Elf64_Chdr(\n ch_type: Elf_Chdr_Type(rawValue: ch_type.rawValue.byteSwapped)!,\n ch_reserved: ch_reserved.byteSwapped,\n ch_size: ch_size.byteSwapped,\n ch_addralign: ch_addralign.byteSwapped\n )\n }\n}\n\nextension Elf_Chdr_Type: ByteSwappable {\n var byteSwapped: Self {\n return Elf_Chdr_Type(rawValue: rawValue.byteSwapped)!\n }\n}\n\nextension Elf32_Sym: ByteSwappable {\n var byteSwapped: Self {\n return Elf32_Sym(\n st_name: st_name.byteSwapped,\n st_value: st_value.byteSwapped,\n st_size: st_size.byteSwapped,\n st_info: st_info.byteSwapped,\n st_other: st_other.byteSwapped,\n st_shndx: st_shndx.byteSwapped\n )\n }\n}\n\nextension Elf64_Sym: ByteSwappable {\n var byteSwapped: Self {\n return Elf64_Sym(\n st_name: st_name.byteSwapped,\n st_info: st_info.byteSwapped,\n st_other: st_other.byteSwapped,\n st_shndx: st_shndx.byteSwapped,\n st_value: st_value.byteSwapped,\n st_size: st_size.byteSwapped\n )\n }\n}\n\nextension Elf32_Rel: ByteSwappable {\n var byteSwapped: Self {\n return Elf32_Rel(\n r_offset: r_offset.byteSwapped,\n r_info: r_info.byteSwapped\n )\n }\n}\n\nextension Elf32_Rela: 
ByteSwappable {\n var byteSwapped: Self {\n return Elf32_Rela(\n r_offset: r_offset.byteSwapped,\n r_info: r_info.byteSwapped,\n r_addend: r_addend.byteSwapped\n )\n }\n}\n\nextension Elf64_Rel: ByteSwappable {\n var byteSwapped: Self {\n return Elf64_Rel(\n r_offset: r_offset.byteSwapped,\n r_info: r_info.byteSwapped\n )\n }\n}\n\nextension Elf64_Rela: ByteSwappable {\n var byteSwapped: Self {\n return Elf64_Rela(\n r_offset: r_offset.byteSwapped,\n r_info: r_info.byteSwapped,\n r_addend: r_addend.byteSwapped\n )\n }\n}\n\nextension Elf32_Phdr: ByteSwappable {\n var byteSwapped: Self {\n return Elf32_Phdr(\n p_type: Elf_Phdr_Type(rawValue: p_type.rawValue.byteSwapped)!,\n p_offset: p_offset.byteSwapped,\n p_vaddr: p_vaddr.byteSwapped,\n p_paddr: p_paddr.byteSwapped,\n p_filesz: p_filesz.byteSwapped,\n p_memsz: p_memsz.byteSwapped,\n p_flags: p_flags.byteSwapped,\n p_align: p_align.byteSwapped\n )\n }\n}\n\nextension Elf64_Phdr: ByteSwappable {\n var byteSwapped: Self {\n return Elf64_Phdr(\n p_type: Elf_Phdr_Type(rawValue: p_type.rawValue.byteSwapped)!,\n p_flags: p_flags.byteSwapped,\n p_offset: p_offset.byteSwapped,\n p_vaddr: p_vaddr.byteSwapped,\n p_paddr: p_paddr.byteSwapped,\n p_filesz: p_filesz.byteSwapped,\n p_memsz: p_memsz.byteSwapped,\n p_align: p_align.byteSwapped\n )\n }\n}\n\nextension Elf32_Nhdr: ByteSwappable {\n var byteSwapped: Self {\n return Elf32_Nhdr(\n n_namesz: n_namesz.byteSwapped,\n n_descsz: n_descsz.byteSwapped,\n n_type: n_type.byteSwapped\n )\n }\n}\n\nextension Elf64_Nhdr: ByteSwappable {\n var byteSwapped: Self {\n return Elf64_Nhdr(\n n_namesz: n_namesz.byteSwapped,\n n_descsz: n_descsz.byteSwapped,\n n_type: n_type.byteSwapped\n )\n }\n}\n\nextension Elf32_Dyn: ByteSwappable {\n var byteSwapped: Self {\n return Elf32_Dyn(\n d_tag: d_tag.byteSwapped,\n d_un: .init(d_val: d_un.d_val.byteSwapped)\n )\n }\n}\n\nextension Elf64_Dyn: ByteSwappable {\n var byteSwapped: Self {\n return Elf64_Dyn(\n d_tag: d_tag.byteSwapped,\n d_un: 
.init(d_val: d_un.d_val.byteSwapped)\n )\n }\n}\n\nextension Elf32_Hash: ByteSwappable {\n var byteSwapped: Self {\n return Elf32_Hash(\n h_nbucket: h_nbucket.byteSwapped,\n h_nchain: h_nchain.byteSwapped\n )\n }\n}\n\nextension Elf64_Hash: ByteSwappable {\n var byteSwapped: Self {\n return Elf64_Hash(\n h_nbucket: h_nbucket.byteSwapped,\n h_nchain: h_nchain.byteSwapped\n )\n }\n}\n\n// .. Protocols ................................................................\n\ntypealias Elf_Magic = (UInt8, UInt8, UInt8, UInt8)\n\ntypealias Elf_Ident = (\n UInt8, UInt8, UInt8, UInt8,\n UInt8, UInt8, UInt8, UInt8,\n UInt8, UInt8, UInt8, UInt8,\n UInt8, UInt8, UInt8, UInt8\n)\n\nlet ElfMagic: Elf_Magic = (0x7f, 0x45, 0x4c, 0x46)\n\nprotocol Elf_Ehdr : ByteSwappable {\n associatedtype Address: FixedWidthInteger\n associatedtype Offset: FixedWidthInteger\n\n init()\n\n var e_ident: Elf_Ident { get set }\n var ei_magic: Elf_Magic { get set }\n var ei_class: Elf_Ehdr_Class { get set }\n var ei_data: Elf_Ehdr_Data { get set }\n var ei_version: Elf_Byte { get set }\n var ei_osabi: Elf_Ehdr_OsAbi { get set }\n var ei_abiversion: Elf_Byte { get set }\n\n var e_type: Elf_Ehdr_Type { get set }\n var e_machine: Elf_Ehdr_Machine { get set }\n var e_version: Elf_Ehdr_Version { get set }\n var e_entry: Address { get set }\n var e_phoff: Offset { get set }\n var e_shoff: Offset { get set }\n var e_flags: Elf_Word { get set }\n var e_ehsize: Elf_Half { get set }\n var e_phentsize: Elf_Half { get set }\n var e_phnum: Elf_Half { get set }\n var e_shentsize: Elf_Half { get set }\n var e_shnum: Elf_Half { get set }\n var e_shstrndx: Elf_Half { get set }\n\n var shouldByteSwap: Bool { get }\n}\n\nextension Elf_Ehdr {\n var ei_magic: Elf_Magic {\n get {\n return (e_ident.0, e_ident.1, e_ident.2, e_ident.3)\n }\n set {\n e_ident.0 = newValue.0\n e_ident.1 = newValue.1\n e_ident.2 = newValue.2\n e_ident.3 = newValue.3\n }\n }\n var ei_class: Elf_Ehdr_Class {\n get {\n return Elf_Ehdr_Class(rawValue: 
e_ident.4)!\n }\n set {\n e_ident.4 = newValue.rawValue\n }\n }\n var ei_data: Elf_Ehdr_Data {\n get {\n return Elf_Ehdr_Data(rawValue: e_ident.5)!\n }\n set {\n e_ident.5 = newValue.rawValue\n }\n }\n var ei_version: UInt8 {\n get {\n return e_ident.6\n }\n set {\n e_ident.6 = newValue\n }\n }\n var ei_osabi: Elf_Ehdr_OsAbi {\n get {\n return Elf_Ehdr_OsAbi(rawValue: e_ident.7)!\n }\n set {\n e_ident.7 = newValue.rawValue\n }\n }\n var ei_abiversion: UInt8 {\n get {\n return e_ident.8\n }\n set {\n e_ident.8 = newValue\n }\n }\n var ei_pad: (UInt8, UInt8, UInt8, UInt8, UInt8, UInt8, UInt8) {\n get {\n return (e_ident.9, e_ident.10, e_ident.11,\n e_ident.12, e_ident.13, e_ident.14,\n e_ident.15)\n }\n set {\n e_ident.9 = newValue.0\n e_ident.10 = newValue.1\n e_ident.11 = newValue.2\n e_ident.12 = newValue.3\n e_ident.13 = newValue.4\n e_ident.14 = newValue.5\n e_ident.15 = newValue.6\n }\n }\n\n var shouldByteSwap: Bool {\n #if _endian(big)\n return ei_data == .ELFDATA2LSB\n #else\n return ei_data == .ELFDATA2MSB\n #endif\n }\n}\n\nextension Elf32_Ehdr : Elf_Ehdr {\n}\n\nextension Elf64_Ehdr : Elf_Ehdr {\n}\n\nprotocol Elf_Shdr : ByteSwappable {\n associatedtype Flags: FixedWidthInteger\n associatedtype Address: FixedWidthInteger\n associatedtype Offset: FixedWidthInteger\n associatedtype Size: FixedWidthInteger\n\n init()\n\n var sh_name: Elf_Word { get set }\n var sh_type: Elf_Shdr_Type { get set }\n var sh_flags: Flags { get set }\n var sh_addr: Address { get set }\n var sh_offset: Offset { get set }\n var sh_size: Size { get set }\n var sh_link: Elf_Word { get set }\n var sh_info: Elf_Word { get set }\n var sh_addralign: Size { get set }\n var sh_entsize: Size { get set }\n}\n\nextension Elf32_Shdr : Elf_Shdr {\n}\n\nextension Elf64_Shdr : Elf_Shdr {\n}\n\nprotocol Elf_Phdr : ByteSwappable {\n associatedtype Address: FixedWidthInteger\n associatedtype Offset: FixedWidthInteger\n associatedtype Size: FixedWidthInteger\n\n init()\n\n var p_type: Elf_Phdr_Type { 
get set }\n var p_flags: Elf_Phdr_Flags { get set }\n var p_offset: Offset { get set }\n var p_vaddr: Address { get set }\n var p_paddr: Address { get set }\n var p_filesz: Size { get set }\n var p_memsz: Size { get set }\n var p_align: Size { get set }\n}\n\nextension Elf32_Phdr : Elf_Phdr {\n}\n\nextension Elf64_Phdr : Elf_Phdr {\n}\n\nprotocol Elf_Nhdr : ByteSwappable {\n init()\n\n var n_namesz: Elf_Word { get set }\n var n_descsz: Elf_Word { get set }\n var n_type: Elf_Word { get set }\n}\n\nextension Elf32_Nhdr : Elf_Nhdr {\n}\n\nextension Elf64_Nhdr : Elf_Nhdr {\n}\n\nprotocol Elf_Sym {\n associatedtype Address: FixedWidthInteger\n associatedtype Size: FixedWidthInteger\n\n var st_name: Elf_Word { get set }\n var st_value: Address { get set }\n var st_size: Size { get set }\n var st_info: Elf_Byte { get set }\n var st_other: Elf_Byte { get set }\n var st_shndx: Elf_Half { get set }\n\n var st_binding: Elf_Sym_Binding { get set }\n var st_type: Elf_Sym_Type { get set }\n var st_visibility: Elf_Sym_Visibility { get set }\n}\n\nextension Elf32_Sym: Elf_Sym {\n var st_binding: Elf_Sym_Binding {\n get {\n return ELF32_ST_BIND(st_info)\n }\n set {\n st_info = ELF32_ST_INFO(newValue, ELF32_ST_TYPE(st_info))\n }\n }\n\n var st_type: Elf_Sym_Type {\n get {\n return ELF32_ST_TYPE(st_info)\n }\n set {\n st_info = ELF32_ST_INFO(ELF32_ST_BIND(st_info), newValue)\n }\n }\n\n var st_visibility: Elf_Sym_Visibility {\n get {\n return ELF32_ST_VISIBILITY(st_other)\n }\n set {\n st_other = (st_other & ~3) | newValue.rawValue\n }\n }\n}\n\nextension Elf64_Sym: Elf_Sym {\n var st_binding: Elf_Sym_Binding {\n get {\n return ELF64_ST_BIND(st_info)\n }\n set {\n st_info = ELF64_ST_INFO(newValue, ELF64_ST_TYPE(st_info))\n }\n }\n\n var st_type: Elf_Sym_Type {\n get {\n return ELF64_ST_TYPE(st_info)\n }\n set {\n st_info = ELF64_ST_INFO(ELF64_ST_BIND(st_info), newValue)\n }\n }\n\n var st_visibility: Elf_Sym_Visibility {\n get {\n return ELF64_ST_VISIBILITY(st_other)\n }\n set {\n 
st_other = (st_other & ~3) | newValue.rawValue\n }\n }\n}\n\nextension Elf32_Rel {\n var r_sym: Elf32_Byte {\n get {\n return ELF32_R_SYM(r_info)\n }\n set {\n r_info = ELF32_R_INFO(newValue, ELF32_R_TYPE(r_info))\n }\n }\n\n var r_type: Elf32_Byte {\n get {\n return ELF32_R_TYPE(r_info)\n }\n set {\n r_info = ELF32_R_INFO(ELF32_R_SYM(r_info), newValue)\n }\n }\n}\n\nextension Elf32_Rela {\n var r_sym: Elf32_Byte {\n get {\n return ELF32_R_SYM(r_info)\n }\n set {\n r_info = ELF32_R_INFO(newValue, ELF32_R_TYPE(r_info))\n }\n }\n\n var r_type: Elf32_Byte {\n get {\n return ELF32_R_TYPE(r_info)\n }\n set {\n r_info = ELF32_R_INFO(ELF32_R_SYM(r_info), newValue)\n }\n }\n}\n\nextension Elf64_Rel {\n var r_sym: Elf64_Word {\n get {\n return ELF64_R_SYM(r_info)\n }\n set {\n r_info = ELF64_R_INFO(newValue, ELF64_R_TYPE(r_info))\n }\n }\n\n var r_type: Elf64_Word {\n get {\n return ELF64_R_TYPE(r_info)\n }\n set {\n r_info = ELF64_R_INFO(ELF64_R_SYM(r_info), newValue)\n }\n }\n}\n\nextension Elf64_Rela {\n var r_sym: Elf64_Word {\n get {\n return ELF64_R_SYM(r_info)\n }\n set {\n r_info = ELF64_R_INFO(newValue, ELF64_R_TYPE(r_info))\n }\n }\n\n var r_type: Elf64_Word {\n get {\n return ELF64_R_TYPE(r_info)\n }\n set {\n r_info = ELF64_R_INFO(ELF64_R_SYM(r_info), newValue)\n }\n }\n}\n\n// .. 
Traits ...................................................................\n\nprotocol ElfTraits {\n associatedtype Address: FixedWidthInteger\n associatedtype Offset: FixedWidthInteger\n associatedtype Size: FixedWidthInteger\n\n associatedtype Ehdr: Elf_Ehdr where Ehdr.Address == Address,\n Ehdr.Offset == Offset\n associatedtype Phdr: Elf_Phdr where Phdr.Address == Address,\n Phdr.Offset == Offset,\n Phdr.Size == Size\n associatedtype Shdr: Elf_Shdr where Shdr.Address == Address,\n Shdr.Offset == Offset,\n Shdr.Size == Size\n associatedtype Nhdr: Elf_Nhdr\n associatedtype Chdr: Elf_Chdr where Chdr.Size == Size\n associatedtype Sym: Elf_Sym where Sym.Address == Address, Sym.Size == Size\n\n static var elfClass: Elf_Ehdr_Class { get }\n}\n\nstruct Elf32Traits: ElfTraits {\n typealias Address = UInt32\n typealias Offset = UInt32\n typealias Size = UInt32\n\n typealias Ehdr = Elf32_Ehdr\n typealias Phdr = Elf32_Phdr\n typealias Shdr = Elf32_Shdr\n typealias Nhdr = Elf32_Nhdr\n typealias Chdr = Elf32_Chdr\n typealias Sym = Elf32_Sym\n\n static let elfClass: Elf_Ehdr_Class = .ELFCLASS32\n}\n\nstruct Elf64Traits: ElfTraits {\n typealias Address = UInt64\n typealias Offset = UInt64\n typealias Size = UInt64\n\n typealias Ehdr = Elf64_Ehdr\n typealias Phdr = Elf64_Phdr\n typealias Shdr = Elf64_Shdr\n typealias Nhdr = Elf64_Nhdr\n typealias Chdr = Elf64_Chdr\n typealias Sym = Elf64_Sym\n\n static let elfClass: Elf_Ehdr_Class = .ELFCLASS64\n}\n\n// .. ElfStringSection .........................................................\n\nstruct ElfStringSection {\n let source: ImageSource\n\n func getStringAt(index: Int) -> String? {\n if index < 0 || index >= source.bytes.count {\n return nil\n }\n\n let slice = UnsafeRawBufferPointer(rebasing: source.bytes[index...])\n var len: Int = 0\n len = strnlen(slice.baseAddress!, slice.count)\n return String(decoding: source.bytes[index..<index+len], as: UTF8.self)\n }\n}\n\n// .. 
ElfImage .................................................................\n\nenum ElfImageError: Error {\n case notAnElfImage\n case wrongClass\n case badNoteName\n case badStringTableSectionIndex\n}\n\nprotocol ElfSymbolProtocol: Equatable {\n associatedtype Address: FixedWidthInteger\n associatedtype Size: FixedWidthInteger\n\n var name: String { get set }\n var value: Address { get set }\n var size: Size { get set }\n var sectionIndex: Int { get set }\n var binding: Elf_Sym_Binding { get set }\n var type: Elf_Sym_Type { get set }\n var visibility: Elf_Sym_Visibility { get set }\n}\n\nprotocol ElfSymbolTableProtocol {\n associatedtype Traits: ElfTraits\n associatedtype Symbol: ElfSymbolProtocol where Symbol.Address == Traits.Address,\n Symbol.Size == Traits.Size\n\n func lookupSymbol(address: Traits.Address) -> Symbol?\n}\n\nprotocol ElfSymbolLookupProtocol {\n associatedtype Traits: ElfTraits\n typealias CallSiteInfo = DwarfReader<ElfImage<Traits>>.CallSiteInfo\n typealias SourceLocation = SymbolicatedBacktrace.SourceLocation\n\n func lookupSymbol(address: Traits.Address) -> ImageSymbol?\n func inlineCallSites(at address: Traits.Address) -> ArraySlice<CallSiteInfo>\n func sourceLocation(for address: Traits.Address) throws -> SourceLocation?\n}\n\nstruct ElfSymbolTable<SomeElfTraits: ElfTraits>: ElfSymbolTableProtocol {\n typealias Traits = SomeElfTraits\n\n struct Symbol: ElfSymbolProtocol {\n typealias Address = Traits.Address\n typealias Size = Traits.Size\n\n var name: String\n var value: Address\n var size: Size\n var sectionIndex: Int\n var binding: Elf_Sym_Binding\n var type: Elf_Sym_Type\n var visibility: Elf_Sym_Visibility\n }\n\n private var _symbols: [Symbol] = []\n\n init() {}\n\n @_specialize(kind: full, where SomeElfTraits == Elf32Traits)\n @_specialize(kind: full, where SomeElfTraits == Elf64Traits)\n init?(image: ElfImage<Traits>) {\n guard let strtab = image.getSection(".strtab", debug: false),\n let symtab = image.getSection(".symtab", debug: 
false) else {\n return nil\n }\n\n let stringSect = ElfStringSection(source: strtab)\n\n // Extract all the data\n symtab.bytes.withMemoryRebound(to: Traits.Sym.self) { symbols in\n for symbol in symbols {\n // Ignore things that are not functions\n if symbol.st_type != .STT_FUNC {\n continue\n }\n\n // Ignore anything undefined\n if symbol.st_shndx == SHN_UNDEF {\n continue\n }\n\n _symbols.append(\n Symbol(\n name: (stringSect.getStringAt(index: Int(symbol.st_name))\n ?? "<unknown>"),\n value: symbol.st_value,\n size: symbol.st_size,\n sectionIndex: Int(symbol.st_shndx),\n binding: symbol.st_binding,\n type: symbol.st_type,\n visibility: symbol.st_visibility\n )\n )\n }\n }\n\n // Now sort by address\n _symbols.sort(by: {\n $0.value < $1.value || (\n $0.value == $1.value && $0.size < $1.size\n )\n })\n }\n\n private init(sortedSymbols: [Symbol]) {\n _symbols = sortedSymbols\n }\n\n @_specialize(kind: full, where SomeElfTraits == Elf32Traits)\n @_specialize(kind: full, where SomeElfTraits == Elf64Traits)\n public func merged(with other: ElfSymbolTable<Traits>) -> ElfSymbolTable<Traits> {\n var merged: [Symbol] = []\n\n var ourNdx = 0, theirNdx = 0\n\n while ourNdx < _symbols.count && theirNdx < other._symbols.count {\n let ourSym = _symbols[ourNdx]\n let theirSym = other._symbols[theirNdx]\n\n if ourSym.value < theirSym.value {\n merged.append(ourSym)\n ourNdx += 1\n } else if ourSym.value > theirSym.value {\n merged.append(theirSym)\n theirNdx += 1\n } else if ourSym == theirSym {\n merged.append(ourSym)\n ourNdx += 1\n theirNdx += 1\n } else {\n if ourSym.size <= theirSym.size {\n merged.append(ourSym)\n }\n merged.append(theirSym)\n if ourSym.size > theirSym.size {\n merged.append(theirSym)\n }\n ourNdx += 1\n theirNdx += 1\n }\n }\n\n if ourNdx < _symbols.count {\n merged.append(contentsOf:_symbols[ourNdx...])\n }\n if theirNdx < other._symbols.count {\n merged.append(contentsOf:other._symbols[theirNdx...])\n }\n\n return ElfSymbolTable(sortedSymbols: 
merged)\n }\n\n @_specialize(kind: full, where SomeElfTraits == Elf32Traits)\n @_specialize(kind: full, where SomeElfTraits == Elf64Traits)\n public func lookupSymbol(address: Traits.Address) -> Symbol? {\n var min = 0\n var max = _symbols.count\n\n while min < max {\n let mid = min + (max - min) / 2\n let symbol = _symbols[mid]\n let nextValue: Traits.Address\n if mid == _symbols.count - 1 {\n nextValue = ~Traits.Address(0)\n } else {\n nextValue = _symbols[mid + 1].value\n }\n\n if symbol.value <= address && nextValue > address {\n var ndx = mid\n while ndx > 0 && _symbols[ndx - 1].value == address {\n ndx -= 1\n }\n return _symbols[ndx]\n } else if symbol.value <= address {\n min = mid + 1\n } else if symbol.value > address {\n max = mid\n }\n }\n\n return nil\n }\n}\n\nfinal class ElfImage<SomeElfTraits: ElfTraits>\n : DwarfSource, ElfSymbolLookupProtocol {\n typealias Traits = SomeElfTraits\n typealias SymbolTable = ElfSymbolTable<SomeElfTraits>\n\n // This is arbitrary and it isn't in the spec\n let maxNoteNameLength = 256\n\n var baseAddress: ImageSource.Address\n var endAddress: ImageSource.Address\n\n var source: ImageSource\n var header: Traits.Ehdr\n var programHeaders: [Traits.Phdr]\n var sectionHeaders: [Traits.Shdr]?\n var shouldByteSwap: Bool { return header.shouldByteSwap }\n\n @_specialize(kind: full, where SomeElfTraits == Elf32Traits)\n @_specialize(kind: full, where SomeElfTraits == Elf64Traits)\n required init(source: ImageSource,\n baseAddress: ImageSource.Address = 0,\n endAddress: ImageSource.Address = 0) throws {\n self.source = source\n self.baseAddress = baseAddress\n self.endAddress = endAddress\n\n header = try source.fetch(from: 0, as: Traits.Ehdr.self)\n if header.ei_magic != ElfMagic {\n throw ElfImageError.notAnElfImage\n }\n\n if header.ei_class != Traits.elfClass {\n throw ElfImageError.wrongClass\n }\n\n if header.shouldByteSwap {\n header = header.byteSwapped\n }\n\n let byteSwap = header.shouldByteSwap\n func maybeSwap<T: 
ByteSwappable>(_ x: T) -> T {\n if byteSwap {\n return x.byteSwapped\n }\n return x\n }\n\n var phdrs: [Traits.Phdr] = []\n var phAddr = ImageSource.Address(header.e_phoff)\n for _ in 0..<header.e_phnum {\n let phdr = maybeSwap(try source.fetch(from: phAddr, as: Traits.Phdr.self))\n phdrs.append(phdr)\n phAddr += ImageSource.Address(header.e_phentsize)\n }\n programHeaders = phdrs\n\n if source.isMappedImage {\n sectionHeaders = nil\n } else {\n var shdrs: [Traits.Shdr] = []\n var shAddr = ImageSource.Address(header.e_shoff)\n for _ in 0..<header.e_shnum {\n let shdr = maybeSwap(try source.fetch(from: shAddr, as: Traits.Shdr.self))\n shdrs.append(shdr)\n shAddr += ImageSource.Address(header.e_shentsize)\n }\n sectionHeaders = shdrs\n }\n\n if header.e_shstrndx >= header.e_shnum {\n throw ElfImageError.badStringTableSectionIndex\n }\n }\n\n struct Note {\n let name: String\n let type: UInt32\n let desc: [UInt8]\n }\n\n struct Notes: Sequence {\n var image: ElfImage<Traits>\n\n struct NoteIterator: IteratorProtocol {\n var image: ElfImage<Traits>\n\n var hdrNdx = -1\n var noteAddr = ImageSource.Address()\n var noteEnd = ImageSource.Address()\n\n @_specialize(kind: full, where SomeElfTraits == Elf32Traits)\n @_specialize(kind: full, where SomeElfTraits == Elf64Traits)\n init(image: ElfImage<Traits>) {\n self.image = image\n }\n\n mutating func startHeader() {\n let ph = image.programHeaders[hdrNdx]\n\n if image.source.isMappedImage {\n noteAddr = ImageSource.Address(ph.p_vaddr)\n noteEnd = noteAddr + ImageSource.Address(ph.p_memsz)\n } else {\n noteAddr = ImageSource.Address(ph.p_offset)\n noteEnd = noteAddr + ImageSource.Address(ph.p_filesz)\n }\n }\n\n @_specialize(kind: full, where SomeElfTraits == Elf32Traits)\n @_specialize(kind: full, where SomeElfTraits == Elf64Traits)\n mutating func next() -> Note? 
{\n let byteSwap = image.shouldByteSwap\n func maybeSwap<T: ByteSwappable>(_ x: T) -> T {\n if byteSwap {\n return x.byteSwapped\n }\n return x\n }\n\n if hdrNdx >= image.programHeaders.count {\n return nil\n }\n while true {\n while noteAddr >= noteEnd {\n repeat {\n hdrNdx += 1\n if hdrNdx >= image.programHeaders.count {\n return nil\n }\n } while image.programHeaders[hdrNdx].p_type != .PT_NOTE\n startHeader()\n }\n\n do {\n let nhdr = maybeSwap(try image.source.fetch(from: noteAddr,\n as: Traits.Nhdr.self))\n\n noteAddr += ImageSource.Address(MemoryLayout<Traits.Nhdr>.size)\n\n if noteEnd - noteAddr < nhdr.n_namesz {\n // The segment is probably corrupted\n noteAddr = noteEnd\n continue\n }\n\n let nameLen = nhdr.n_namesz > 0 ? nhdr.n_namesz - 1 : 0\n guard let name = try image.source.fetchString(from: noteAddr,\n length: Int(nameLen))\n else {\n // Bad note name\n noteAddr = noteEnd\n continue\n }\n\n noteAddr += ImageSource.Address(nhdr.n_namesz)\n if (noteAddr & 3) != 0 {\n noteAddr += 4 - (noteAddr & 3)\n }\n\n if noteEnd - noteAddr < nhdr.n_descsz {\n // The segment is probably corrupted\n noteAddr = noteEnd\n continue\n }\n\n let desc = try image.source.fetch(from: noteAddr,\n count: Int(nhdr.n_descsz),\n as: UInt8.self)\n\n noteAddr += ImageSource.Address(nhdr.n_descsz)\n if (noteAddr & 3) != 0 {\n noteAddr += 4 - (noteAddr & 3)\n }\n\n return Note(name: name, type: UInt32(nhdr.n_type), desc: desc)\n } catch {\n hdrNdx = image.programHeaders.count\n return nil\n }\n }\n }\n }\n\n func makeIterator() -> NoteIterator {\n return NoteIterator(image: image)\n }\n }\n\n var notes: Notes {\n return Notes(image: self)\n }\n\n private var _uuid: [UInt8]?\n var uuid: [UInt8]? 
{\n if let uuid = _uuid {\n return uuid\n }\n\n for note in notes {\n if note.name == "GNU" && note.type == NT_GNU_BUILD_ID {\n _uuid = note.desc\n return _uuid\n }\n }\n\n return nil\n }\n\n private var _debugLinkCRC: UInt32?\n var debugLinkCRC: UInt32 {\n if let crc = _debugLinkCRC {\n return crc\n }\n\n let crc = updateCrc(0, source.bytes)\n _debugLinkCRC = crc\n return crc\n }\n\n struct Range {\n var base: ImageSource.Address\n var size: ImageSource.Size\n }\n\n struct EHFrameInfo {\n var ehFrameSection: Range?\n var ehFrameHdrSection: Range?\n }\n\n private var _ehFrameInfo: EHFrameInfo?\n var ehFrameInfo: EHFrameInfo? {\n if let ehFrameInfo = _ehFrameInfo {\n return ehFrameInfo\n }\n\n var ehFrameInfo = EHFrameInfo()\n\n for phdr in programHeaders {\n if phdr.p_type == .PT_GNU_EH_FRAME {\n var ehFrameHdrRange: Range\n if source.isMappedImage {\n ehFrameHdrRange = Range(base: ImageSource.Address(phdr.p_vaddr),\n size: ImageSource.Size(phdr.p_memsz))\n } else {\n ehFrameHdrRange = Range(base: ImageSource.Address(phdr.p_offset),\n size: ImageSource.Size(phdr.p_filesz))\n }\n\n if (ehFrameHdrRange.size < MemoryLayout<EHFrameHdr>.size) {\n continue\n }\n\n guard let ehdr = try? source.fetch(\n from: ImageSource.Address(ehFrameHdrRange.base),\n as: EHFrameHdr.self\n ) else {\n continue\n }\n\n if ehdr.version != 1 {\n continue\n }\n\n let pc = ehFrameHdrRange.base + ImageSource.Address(MemoryLayout<EHFrameHdr>.size)\n guard let (_, eh_frame_ptr) =\n try? source.fetchEHValue(from: ImageSource.Address(pc),\n with: ehdr.eh_frame_ptr_enc,\n pc: ImageSource.Address(pc)) else {\n continue\n }\n\n ehFrameInfo.ehFrameHdrSection = ehFrameHdrRange\n\n // The .eh_frame_hdr section doesn't specify the size of the\n // .eh_frame section, so we just rely on it being properly\n // terminated. 
This does mean that bulk fetching the entire\n // thing isn't a good idea.\n ehFrameInfo.ehFrameSection = Range(base: ImageSource.Address(eh_frame_ptr),\n size: ~ImageSource.Size(0))\n }\n }\n\n if let sectionHeaders = sectionHeaders {\n let stringShdr = sectionHeaders[Int(header.e_shstrndx)]\n let base = ImageSource.Address(stringShdr.sh_offset)\n let end = base + ImageSource.Size(stringShdr.sh_size)\n let stringSource = source[base..<end]\n let stringSect = ElfStringSection(source: stringSource)\n\n for shdr in sectionHeaders {\n guard let name = stringSect.getStringAt(index: Int(shdr.sh_name)) else {\n continue\n }\n\n if name == ".eh_frame" {\n ehFrameInfo.ehFrameSection = Range(base: ImageSource.Address(shdr.sh_offset),\n size: ImageSource.Size(shdr.sh_size))\n }\n }\n }\n\n return ehFrameInfo\n }\n\n // Image name\n private var _imageName: String?\n var imageName: String {\n if let imageName = _imageName {\n return imageName\n }\n\n let name: String\n if let path = source.path {\n name = path\n } else if let uuid = uuid {\n name = "image \(hex(uuid))"\n } else {\n name = "<unknown image>"\n }\n\n _imageName = name\n return name\n }\n\n // If we have external debug information, this points at it\n private var _checkedDebugImage: Bool?\n private var _debugImage: ElfImage<Traits>?\n var debugImage: ElfImage<Traits>? {\n if let checked = _checkedDebugImage, checked {\n return _debugImage\n }\n\n let tryPath = { [self] (_ path: String) -> ElfImage<Traits>? 
in\n do {\n let fileSource = try ImageSource(path: path)\n let image = try ElfImage<Traits>(source: fileSource)\n _debugImage = image\n return image\n } catch {\n return nil\n }\n }\n\n if let uuid = uuid {\n let uuidString = hex(uuid)\n let uuidSuffix = uuidString.dropFirst(2)\n let uuidPrefix = uuidString.prefix(2)\n let path = "/usr/lib/debug/.build-id/\(uuidPrefix)/\(uuidSuffix).debug"\n if let image = tryPath(path) {\n _debugImage = image\n _checkedDebugImage = true\n return image\n }\n }\n\n if let imagePath = source.path, let realImagePath = realPath(imagePath) {\n let imageDir = dirname(realImagePath)\n let debugLink = getDebugLink()\n let debugAltLink = getDebugAltLink()\n\n let tryLink = { (_ link: String) -> ElfImage<Traits>? in\n if let image = tryPath("\(imageDir)/\(link)") {\n return image\n }\n if let image = tryPath("\(imageDir)/.debug/\(link)") {\n return image\n }\n if let image = tryPath("/usr/lib/debug/\(imageDir)/\(link)") {\n return image\n }\n return nil\n }\n\n if let debugAltLink = debugAltLink, let image = tryLink(debugAltLink.link),\n image.uuid == debugAltLink.uuid {\n _debugImage = image\n _checkedDebugImage = true\n return image\n }\n\n if let debugLink = debugLink, let image = tryLink(debugLink.link),\n image.debugLinkCRC == debugLink.crc {\n _debugImage = image\n _checkedDebugImage = true\n return image\n }\n }\n\n if let debugData = getSection(".gnu_debugdata") {\n do {\n let source = try ImageSource(lzmaCompressedImageSource: debugData)\n _debugImage = try ElfImage<Traits>(source: source)\n _checkedDebugImage = true\n return _debugImage\n } catch let CompressedImageSourceError.libraryNotFound(library) {\n swift_reportWarning(0,\n """\n swift-runtime: warning: \(library) not found, \\n unable to decode the .gnu_debugdata section in \\n \(imageName)\n """)\n } catch {\n }\n }\n\n _checkedDebugImage = true\n return nil\n }\n\n /// Find the named section and return an ImageSource pointing at it.\n ///\n /// In general, the section may 
be compressed or even in a different image;\n /// this is particularly the case for debug sections. We will only attempt\n /// to look for other images if `debug` is `true`.\n @_specialize(kind: full, where SomeElfTraits == Elf32Traits)\n @_specialize(kind: full, where SomeElfTraits == Elf64Traits)\n func getSection(_ name: String, debug: Bool = false) -> ImageSource? {\n if let sectionHeaders = sectionHeaders {\n let zname = ".z" + name.dropFirst()\n let stringShdr = sectionHeaders[Int(header.e_shstrndx)]\n do {\n let base = ImageSource.Address(stringShdr.sh_offset)\n let end = base + ImageSource.Size(stringShdr.sh_size)\n let stringSource = source[base..<end]\n let stringSect = ElfStringSection(source: stringSource)\n\n for shdr in sectionHeaders {\n guard let sname\n = stringSect.getStringAt(index: Int(shdr.sh_name)) else {\n continue\n }\n\n if name == sname {\n let base = ImageSource.Address(shdr.sh_offset)\n let end = base + ImageSource.Size(shdr.sh_size)\n let subSource = source[base..<end]\n\n if (shdr.sh_flags & Traits.Shdr.Flags(SHF_COMPRESSED)) != 0 {\n return try ImageSource(elfCompressedImageSource: subSource,\n traits: Traits.self)\n } else {\n return subSource\n }\n }\n\n if zname == sname {\n let base = ImageSource.Address(shdr.sh_offset)\n let end = base + ImageSource.Size(shdr.sh_size)\n let subSource = source[base..<end]\n\n return try ImageSource(gnuCompressedImageSource: subSource)\n }\n }\n } catch let CompressedImageSourceError.libraryNotFound(library) {\n swift_reportWarning(0,\n """\n swift-runtime: warning: \(library) not found, \\n unable to decode the \(name) section in \\n \(imageName)\n """)\n } catch {\n }\n }\n\n if debug, let image = debugImage {\n return image.getSection(name)\n }\n\n return nil\n }\n\n struct DebugLinkInfo {\n var link: String\n var crc: UInt32\n }\n\n struct DebugAltLinkInfo {\n var link: String\n var uuid: [UInt8]\n }\n\n /// Get and decode a .gnu_debuglink section\n func getDebugLink() -> DebugLinkInfo? 
{\n guard let section = getSection(".gnu_debuglink") else {\n return nil\n }\n\n guard let link = try? section.fetchString(from: 0) else {\n return nil\n }\n\n let nullIndex = ImageSource.Address(link.utf8.count)\n let crcIndex = (nullIndex + 4) & ~3\n\n guard let unswappedCrc = try? section.fetch(\n from: crcIndex, as: UInt32.self\n ) else {\n return nil\n }\n\n let crc: UInt32\n if shouldByteSwap {\n crc = unswappedCrc.byteSwapped\n } else {\n crc = unswappedCrc\n }\n\n return DebugLinkInfo(link: link, crc: crc)\n }\n\n /// Get and decode a .gnu_debugaltlink section\n func getDebugAltLink() -> DebugAltLinkInfo? {\n guard let section = getSection(".gnu_debugaltlink") else {\n return nil\n }\n\n guard let link = try? section.fetchString(from: 0) else {\n return nil\n }\n\n let nullIndex = link.utf8.count\n\n let uuid = [UInt8](section.bytes[(nullIndex + 1)...])\n\n return DebugAltLinkInfo(link: link, uuid: uuid)\n }\n\n /// Find the named section and read a string out of it.\n func getSectionAsString(_ name: String) -> String? {\n guard let sectionSource = getSection(name) else {\n return nil\n }\n\n return String(decoding: sectionSource.bytes, as: UTF8.self)\n }\n\n struct ElfSymbol {\n var name: String\n var value: Traits.Address\n var size: Traits.Size\n var sectionIndex: Int\n var binding: Elf_Sym_Binding\n var type: Elf_Sym_Type\n var visibility: Elf_Sym_Visibility\n }\n\n var _symbolTable: SymbolTable? = nil\n var symbolTable: SymbolTable { return _getSymbolTable(debug: false) }\n\n func _getSymbolTable(debug: Bool) -> SymbolTable {\n if let table = _symbolTable {\n return table\n }\n\n let debugTable: SymbolTable?\n if !debug, let debugImage = debugImage {\n debugTable = debugImage._getSymbolTable(debug: true)\n as any ElfSymbolTableProtocol\n as? SymbolTable\n } else {\n debugTable = nil\n }\n\n guard let localTable = SymbolTable(image: self) else {\n // If we have no symbol table, try the debug image\n let table = debugTable ?? 
SymbolTable()\n _symbolTable = table\n return table\n }\n\n // Check if we have a debug image; if we do, get its symbol table and\n // merge it with this one. This means that it doesn't matter which\n // symbols have been stripped in both images.\n if let debugTable = debugTable {\n let merged = localTable.merged(with: debugTable)\n _symbolTable = merged\n return merged\n }\n\n _symbolTable = localTable\n return localTable\n }\n\n public func lookupSymbol(address: Traits.Address) -> ImageSymbol? {\n let relativeAddress = address - Traits.Address(baseAddress)\n guard let symbol = symbolTable.lookupSymbol(address: relativeAddress) else {\n return nil\n }\n\n return ImageSymbol(name: symbol.name,\n offset: Int(relativeAddress - symbol.value))\n }\n\n func getDwarfSection(_ section: DwarfSection) -> ImageSource? {\n switch section {\n case .debugAbbrev: return getSection(".debug_abbrev")\n case .debugAddr: return getSection(".debug_addr")\n case .debugARanges: return getSection(".debug_aranges")\n case .debugFrame: return getSection(".debug_frame")\n case .debugInfo: return getSection(".debug_info")\n case .debugLine: return getSection(".debug_line")\n case .debugLineStr: return getSection(".debug_line_str")\n case .debugLoc: return getSection(".debug_loc")\n case .debugLocLists: return getSection(".debug_loclists")\n case .debugMacInfo: return getSection(".debug_macinfo")\n case .debugMacro: return getSection(".debug_macro")\n case .debugNames: return getSection(".debug_names")\n case .debugPubNames: return getSection(".debug_pubnames")\n case .debugPubTypes: return getSection(".debug_pubtypes")\n case .debugRanges: return getSection(".debug_ranges")\n case .debugRngLists: return getSection(".debug_rnglists")\n case .debugStr: return getSection(".debug_str")\n case .debugStrOffsets: return getSection(".debug_str_offsets")\n case .debugSup: return getSection(".debug_sup")\n case .debugTypes: return getSection(".debug_types")\n case .debugCuIndex: return 
getSection(".debug_cu_index")\n case .debugTuIndex: return getSection(".debug_tu_index")\n }\n }\n\n private lazy var dwarfReader = { [unowned self] in\n try? DwarfReader(source: self, shouldSwap: header.shouldByteSwap)\n }()\n\n typealias CallSiteInfo = DwarfReader<ElfImage>.CallSiteInfo\n\n func inlineCallSites(\n at address: Traits.Address\n ) -> ArraySlice<CallSiteInfo> {\n guard let callSiteInfo = dwarfReader?.inlineCallSites else {\n return [][0..<0]\n }\n\n var min = 0\n var max = callSiteInfo.count\n\n while min < max {\n let mid = min + (max - min) / 2\n let callSite = callSiteInfo[mid]\n\n if callSite.lowPC <= address && callSite.highPC > address {\n var first = mid, last = mid\n while first > 0\n && callSiteInfo[first - 1].lowPC <= address\n && callSiteInfo[first - 1].highPC > address {\n first -= 1\n }\n while last < callSiteInfo.count - 1\n && callSiteInfo[last + 1].lowPC <= address\n && callSiteInfo[last + 1].highPC > address {\n last += 1\n }\n\n return callSiteInfo[first...last]\n } else if callSite.highPC <= address {\n min = mid + 1\n } else if callSite.lowPC > address {\n max = mid\n }\n }\n\n return []\n }\n\n typealias SourceLocation = SymbolicatedBacktrace.SourceLocation\n\n func sourceLocation(\n for address: Traits.Address\n ) throws -> SourceLocation? {\n var result: SourceLocation? = nil\n var prevState: DwarfLineNumberState? 
= nil\n guard let dwarfReader = dwarfReader else {\n return nil\n }\n for ndx in 0..<dwarfReader.lineNumberInfo.count {\n var info = dwarfReader.lineNumberInfo[ndx]\n try info.executeProgram { (state, done) in\n if let oldState = prevState,\n address >= oldState.address && address < state.address {\n result = SourceLocation(\n path: oldState.path,\n line: oldState.line,\n column: oldState.column\n )\n done = true\n }\n\n if state.endSequence {\n prevState = nil\n } else {\n prevState = state\n }\n }\n }\n\n return result\n }\n}\n\ntypealias Elf32Image = ElfImage<Elf32Traits>\ntypealias Elf64Image = ElfImage<Elf64Traits>\n\n// .. Checking for ELF images ..................................................\n\n/// Test if there is a valid ELF image at the specified address; if there is,\n/// extract the address range for the text segment and the UUID, if any.\n@_specialize(kind: full, where R == UnsafeLocalMemoryReader)\n#if os(macOS) || os(Linux)\n@_specialize(kind: full, where R == RemoteMemoryReader)\n#endif\n#if os(Linux)\n@_specialize(kind: full, where R == MemserverMemoryReader)\n#endif\nfunc getElfImageInfo<R: MemoryReader>(at address: R.Address,\n using reader: R)\n -> (endOfText: R.Address, uuid: [UInt8]?)?\n{\n do {\n // Check the magic number first\n let magic = try reader.fetch(from: address, as: Elf_Magic.self)\n\n if magic != ElfMagic {\n return nil\n }\n\n // Read the class from the next byte\n let elfClass = Elf_Ehdr_Class(rawValue: try reader.fetch(from: address + 4,\n as: UInt8.self))\n\n if elfClass == .ELFCLASS32 {\n return try getElfImageInfo(at: address, using: reader,\n traits: Elf32Traits.self)\n } else if elfClass == .ELFCLASS64 {\n return try getElfImageInfo(at: address, using: reader,\n traits: Elf64Traits.self)\n } else {\n return nil\n }\n } catch {\n return nil\n }\n}\n\n@_specialize(kind: full, where R == UnsafeLocalMemoryReader, Traits == Elf32Traits)\n@_specialize(kind: full, where R == UnsafeLocalMemoryReader, Traits == 
Elf64Traits)\n#if os(macOS) || os(Linux)\n@_specialize(kind: full, where R == RemoteMemoryReader, Traits == Elf32Traits)\n@_specialize(kind: full, where R == RemoteMemoryReader, Traits == Elf64Traits)\n#endif\n#if os(Linux)\n@_specialize(kind: full, where R == MemserverMemoryReader, Traits == Elf32Traits)\n@_specialize(kind: full, where R == MemserverMemoryReader, Traits == Elf64Traits)\n#endif\nfunc getElfImageInfo<R: MemoryReader, Traits: ElfTraits>(\n at address: R.Address,\n using reader: R,\n traits: Traits.Type\n) throws -> (endOfText: R.Address, uuid: [UInt8]?)? {\n // Grab the whole 32-bit header\n let unswappedHeader = try reader.fetch(from: address, as: Traits.Ehdr.self)\n\n let header: Traits.Ehdr\n if unswappedHeader.shouldByteSwap {\n header = unswappedHeader.byteSwapped\n } else {\n header = unswappedHeader\n }\n\n let byteSwap = header.shouldByteSwap\n func maybeSwap<T: ByteSwappable>(_ x: T) -> T {\n if byteSwap {\n return x.byteSwapped\n }\n return x\n }\n\n var endOfText = address\n var uuid: [UInt8]? 
= nil\n\n // Find the last loadable executable segment, and scan for PT_NOTE\n // segments that contain the UUID\n var phAddr = ImageSource.Address(address) + ImageSource.Size(header.e_phoff)\n for _ in 0..<header.e_phnum {\n let phdr = maybeSwap(try reader.fetch(from: phAddr, as: Traits.Phdr.self))\n if phdr.p_type == .PT_LOAD && (phdr.p_flags & PF_X) != 0 {\n endOfText = max(endOfText, address + ImageSource.Address(phdr.p_vaddr)\n + ImageSource.Size(phdr.p_memsz))\n }\n if phdr.p_type == .PT_NOTE {\n var noteAddr = address + ImageSource.Address(phdr.p_vaddr)\n let noteEnd = noteAddr + ImageSource.Size(phdr.p_memsz)\n\n while noteAddr < noteEnd {\n let nhdr = maybeSwap(try reader.fetch(\n from: noteAddr, as: Traits.Nhdr.self))\n\n noteAddr += ImageSource.Size(MemoryLayout<Traits.Nhdr>.size)\n\n if noteEnd - noteAddr < nhdr.n_namesz {\n // This segment is probably corrupted, so skip it\n noteAddr = noteEnd\n continue\n }\n\n var isBuildId = false\n let nameLen = nhdr.n_namesz > 0 ? nhdr.n_namesz - 1 : 0\n\n // Test if this is a "GNU" NT_GNU_BUILD_ID note\n if nameLen == 3 {\n let byte0 = try reader.fetch(from: noteAddr, as: UInt8.self)\n let byte1 = try reader.fetch(from: noteAddr + 1, as: UInt8.self)\n let byte2 = try reader.fetch(from: noteAddr + 2, as: UInt8.self)\n\n if byte0 == 0x47 && byte1 == 0x4e && byte2 == 0x55 &&\n UInt32(nhdr.n_type) == NT_GNU_BUILD_ID {\n isBuildId = true\n }\n }\n\n noteAddr += ImageSource.Size(nhdr.n_namesz)\n if (noteAddr & 3) != 0 {\n noteAddr += 4 - (noteAddr & 3)\n }\n\n if noteEnd - noteAddr < nhdr.n_descsz {\n // Corrupted segment, skip\n noteAddr = noteEnd\n continue\n }\n\n if isBuildId {\n uuid = try reader.fetch(from: noteAddr,\n count: Int(nhdr.n_descsz),\n as: UInt8.self)\n }\n\n noteAddr += ImageSource.Size(nhdr.n_descsz)\n if (noteAddr & 3) != 0 {\n noteAddr += 4 - (noteAddr & 3)\n }\n }\n }\n\n phAddr += ImageSource.Address(header.e_phentsize)\n }\n\n return (endOfText: endOfText, uuid: uuid)\n}\n\n// .. 
Testing ..................................................................\n\n@_spi(ElfTest)\npublic func testElfImageAt(path: String) -> Bool {\n guard let source = try? ImageSource(path: path) else {\n print("\(path) was not accessible")\n return false\n }\n\n let debugSections: [String] = [\n ".debug_info",\n ".debug_line",\n ".debug_abbrev",\n ".debug_ranges",\n ".debug_str",\n ".debug_addr",\n ".debug_str_offsets",\n ".debug_line_str",\n ".debug_rnglists"\n ]\n\n if let elfImage = try? Elf32Image(source: source) {\n print("\(path) is a 32-bit ELF image")\n\n if let uuid = elfImage.uuid {\n print(" uuid: \(hex(uuid))")\n } else {\n print(" uuid: <no uuid>")\n }\n\n if let debugImage = elfImage.debugImage {\n print(" debug image: \(debugImage.imageName)")\n } else {\n print(" debug image: <none>")\n }\n\n for section in debugSections {\n if let _ = elfImage.getSection(section, debug: true) {\n print(" \(section): found")\n } else {\n print(" \(section): not found")\n }\n }\n\n return true\n } else if let elfImage = try? Elf64Image(source: source) {\n print("\(path) is a 64-bit ELF image")\n\n if let uuid = elfImage.uuid {\n print(" uuid: \(hex(uuid))")\n } else {\n print(" uuid: <no uuid>")\n }\n\n if let debugImage = elfImage.debugImage {\n print(" debug image: \(debugImage.imageName)")\n } else {\n print(" debug image: <none>")\n }\n\n for section in debugSections {\n if let _ = elfImage.getSection(section, debug: true) {\n print(" \(section): found")\n } else {\n print(" \(section): not found")\n }\n }\n\n return true\n } else {\n print("\(path) is not an ELF image")\n return false\n }\n}\n
dataset_sample\swift\swift\cpp_apple_swift_stdlib_public_RuntimeModule_Elf.swift
cpp_apple_swift_stdlib_public_RuntimeModule_Elf.swift
Swift
62,450
0.75
0.085319
0.046823
react-lib
862
2024-05-08T08:42:35.543705
BSD-3-Clause
false
a8e42296384113e97e10d3d266b5678e
//===--- ElfImageCache.swift - ELF support for Swift ----------------------===//\n//\n// This source file is part of the Swift.org open source project\n//\n// Copyright (c) 2024 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n//\n// Provides a per-thread Elf image cache that improves efficiency when\n// taking multiple backtraces by avoiding loading ELF images multiple times.\n//\n//===----------------------------------------------------------------------===//\n\nimport Swift\n\n#if os(macOS) || os(iOS) || os(tvOS) || os(watchOS)\ninternal import Darwin\n#elseif os(Windows)\ninternal import ucrt\n#elseif canImport(Glibc)\ninternal import Glibc\n#elseif canImport(Musl)\ninternal import Musl\n#endif\n\n/// Provides a per-thread image cache for ELF image processing. This means\n/// if you take multiple backtraces from a thread, you won't load the same\n/// image multiple times.\nfinal class ElfImageCache {\n var elf32: [String: Elf32Image] = [:]\n var elf64: [String: Elf64Image] = [:]\n\n func purge() {\n elf32 = [:]\n elf64 = [:]\n }\n\n enum Result {\n case elf32Image(Elf32Image)\n case elf64Image(Elf64Image)\n }\n func lookup(path: String?) -> Result? {\n guard let path = path else {\n return nil\n }\n if let image = elf32[path] {\n return .elf32Image(image)\n }\n if let image = elf64[path] {\n return .elf64Image(image)\n }\n if let source = try? ImageSource(path: path) {\n if let elfImage = try? Elf32Image(source: source) {\n elf32[path] = elfImage\n return .elf32Image(elfImage)\n }\n if let elfImage = try? 
Elf64Image(source: source) {\n elf64[path] = elfImage\n return .elf64Image(elfImage)\n }\n }\n return nil\n }\n\n private static var key: pthread_key_t = {\n var theKey = pthread_key_t()\n let err = pthread_key_create(\n &theKey,\n { rawPtr in\n let ptr = Unmanaged<ElfImageCache>.fromOpaque(\n notMutable(notOptional(rawPtr))\n )\n ptr.release()\n }\n )\n if err != 0 {\n fatalError("Unable to create TSD key for ElfImageCache")\n }\n return theKey\n }()\n\n static var threadLocal: ElfImageCache {\n guard let rawPtr = pthread_getspecific(key) else {\n let cache = Unmanaged<ElfImageCache>.passRetained(ElfImageCache())\n pthread_setspecific(key, cache.toOpaque())\n return cache.takeUnretainedValue()\n }\n let cache = Unmanaged<ElfImageCache>.fromOpaque(rawPtr)\n return cache.takeUnretainedValue()\n }\n}\n
dataset_sample\swift\swift\cpp_apple_swift_stdlib_public_RuntimeModule_ElfImageCache.swift
cpp_apple_swift_stdlib_public_RuntimeModule_ElfImageCache.swift
Swift
2,757
0.95
0.178947
0.272727
node-utils
862
2025-03-11T17:29:11.951938
Apache-2.0
false
ef36840940db28a798ac79bc100a9d3a
//===--- FramePointerUnwinder.swift ---------------------------*- swift -*-===//\n//\n// This source file is part of the Swift.org open source project\n//\n// Copyright (c) 2023 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n//\n// Unwind the stack by chasing the frame pointer.\n//\n//===----------------------------------------------------------------------===//\n\nimport Swift\n\n@_spi(Unwinders)\npublic struct FramePointerUnwinder<C: Context, M: MemoryReader>: Sequence, IteratorProtocol {\n public typealias Context = C\n public typealias MemoryReader = M\n public typealias Address = Context.Address\n\n var pc: Address\n var fp: Address\n var asyncContext: Address\n var first: Bool\n var isAsync: Bool\n var done: Bool\n\n #if os(Linux)\n var images: ImageMap?\n #endif\n\n var reader: MemoryReader\n\n @_specialize(exported: true, kind: full, where C == HostContext, M == UnsafeLocalMemoryReader)\n #if os(macOS) || os(Linux)\n @_specialize(exported: true, kind: full, where C == HostContext, M == RemoteMemoryReader)\n #endif\n #if os(Linux)\n @_specialize(exported: true, kind: full, where C == HostContext, M == MemserverMemoryReader)\n #endif\n public init(context: Context,\n images: ImageMap?,\n memoryReader: MemoryReader) {\n\n pc = Address(context.programCounter)\n fp = Address(context.framePointer)\n first = true\n isAsync = false\n done = false\n asyncContext = 0\n reader = memoryReader\n\n // On Linux, the unwinder needs images in order to spot async frames\n #if os(Linux)\n self.images = images\n #endif\n }\n\n private func isAsyncSymbol(_ mangledName: String) -> Bool {\n let mangledUTF8 = mangledName.utf8\n if mangledUTF8.last == UInt8(ascii: "_") {\n let 
withoutUnderscore = mangledUTF8.dropLast(1)\n if let beforeIndexNdx = withoutUnderscore.lastIndex(\n where: { $0 < UInt8(ascii: "0") || $0 > UInt8(ascii: "9") }\n ) {\n let beforeIndex = withoutUnderscore[...beforeIndexNdx]\n let suffix = beforeIndex.suffix(2)\n let awaitResume = "TY".utf8\n let suspendResume = "TQ".utf8\n return suffix.elementsEqual(awaitResume) ||\n suffix.elementsEqual(suspendResume)\n }\n }\n return false\n }\n\n @_specialize(exported: true, kind: full, where C == HostContext, M == UnsafeLocalMemoryReader)\n #if os(macOS) || os(Linux)\n @_specialize(exported: true, kind: full, where C == HostContext, M == RemoteMemoryReader)\n #endif\n #if os(Linux)\n @_specialize(exported: true, kind: full, where C == HostContext, M == MemserverMemoryReader)\n #endif\n private mutating func isAsyncPC(_ pc: Address) -> Bool {\n // On Linux, we need to examine the PC to see if this is an async frame\n #if os(Linux)\n let address = MemoryReader.Address(pc)\n\n if let images = images,\n let imageNdx = images.indexOfImage(at: Backtrace.Address(address)) {\n let base = MemoryReader.Address(images[imageNdx].baseAddress)!\n let relativeAddress = address - base\n let cache = ElfImageCache.threadLocal\n\n if let hit = cache.lookup(path: images[imageNdx].path) {\n switch hit {\n case let .elf32Image(image):\n if let theSymbol = image.lookupSymbol(\n address: Elf32Image.Traits.Address(relativeAddress)\n ) {\n return isAsyncSymbol(theSymbol.name)\n }\n case let .elf64Image(image):\n if let theSymbol = image.lookupSymbol(\n address: Elf64Image.Traits.Address(relativeAddress)) {\n return isAsyncSymbol(theSymbol.name)\n }\n }\n }\n }\n #endif\n\n return false\n }\n\n @_specialize(exported: true, kind: full, where C == HostContext, M == UnsafeLocalMemoryReader)\n #if os(macOS) || os(Linux)\n @_specialize(exported: true, kind: full, where C == HostContext, M == RemoteMemoryReader)\n #endif\n #if os(Linux)\n @_specialize(exported: true, kind: full, where C == HostContext, M == 
MemserverMemoryReader)\n #endif\n private func isAsyncFrame(_ storedFp: Address) -> Bool {\n #if (os(macOS) || os(iOS) || os(watchOS)) && (arch(arm64) || arch(arm64_32) || arch(x86_64))\n // On Darwin, we borrow a bit of the frame pointer to indicate async\n // stack frames\n return (storedFp & (1 << 60)) != 0\n #else\n return false\n #endif\n }\n\n @_specialize(exported: true, kind: full, where C == HostContext, M == UnsafeLocalMemoryReader)\n #if os(macOS) || os(Linux)\n @_specialize(exported: true, kind: full, where C == HostContext, M == RemoteMemoryReader)\n #endif\n #if os(Linux)\n @_specialize(exported: true, kind: full, where C == HostContext, M == MemserverMemoryReader)\n #endif\n private func stripPtrAuth(_ address: Address) -> Address {\n return Context.stripPtrAuth(address: address)\n }\n\n @_specialize(exported: true, kind: full, where C == HostContext, M == UnsafeLocalMemoryReader)\n #if os(macOS) || os(Linux)\n @_specialize(exported: true, kind: full, where C == HostContext, M == RemoteMemoryReader)\n #endif\n #if os(Linux)\n @_specialize(exported: true, kind: full, where C == HostContext, M == MemserverMemoryReader)\n #endif\n private mutating func fetchAsyncContext() -> Bool {\n let strippedFp = stripPtrAuth(fp)\n\n do {\n asyncContext = try reader.fetch(from: MemoryReader.Address(strippedFp - 8),\n as: Address.self)\n return true\n } catch {\n return false\n }\n }\n\n @_specialize(exported: true, kind: full, where C == HostContext, M == UnsafeLocalMemoryReader)\n #if os(macOS) || os(Linux)\n @_specialize(exported: true, kind: full, where C == HostContext, M == RemoteMemoryReader)\n #endif\n #if os(Linux)\n @_specialize(exported: true, kind: full, where C == HostContext, M == MemserverMemoryReader)\n #endif\n public mutating func next() -> RichFrame<Address>? 
{\n if done {\n return nil\n }\n\n if first {\n first = false\n pc = stripPtrAuth(pc)\n return .programCounter(pc)\n }\n\n if !isAsync {\n if !isAsyncPC(pc) {\n // Try to read the next fp/pc pair\n var next: Address = 0\n let strippedFp = stripPtrAuth(fp)\n\n if strippedFp == 0\n || !Context.isAlignedForStack(framePointer:strippedFp) {\n done = true\n return nil\n }\n\n do {\n pc = stripPtrAuth(try reader.fetch(\n from:MemoryReader.Address(\n strippedFp\n + Address(MemoryLayout<Address>.size)\n ),\n as: Address.self))\n next = try reader.fetch(from: MemoryReader.Address(strippedFp),\n as: Address.self)\n } catch {\n done = true\n return nil\n }\n\n if next <= fp || pc == 0 {\n done = true\n return nil\n }\n\n if !isAsyncFrame(next) {\n fp = next\n return .returnAddress(pc)\n }\n }\n\n isAsync = true\n if !fetchAsyncContext() {\n done = true\n return nil\n }\n }\n\n // If we get here, we're in async mode\n\n var next: Address = 0\n let strippedCtx = stripPtrAuth(asyncContext)\n\n if strippedCtx == 0 {\n done = true\n return nil\n }\n\n #if arch(arm64_32)\n\n // On arm64_32, the two pointers at the start of the context are 32-bit,\n // although the stack layout is identical to vanilla arm64\n do {\n var next32 = try reader.fetch(from: MemoryReader.Address(strippedCtx),\n as: UInt32.self)\n var pc32 = try reader.fetch(from: MemoryReader.Address(strippedCtx + 4),\n as: UInt32.self)\n\n next = Address(next32)\n pc = stripPtrAuth(Address(pc32))\n } catch {\n done = true\n return nil\n }\n #else\n\n // Otherwise it's two 64-bit words\n do {\n next = try reader.fetch(from: MemoryReader.Address(strippedCtx),\n as: Address.self)\n pc = stripPtrAuth(try reader.fetch(from: MemoryReader.Address(strippedCtx + 8),\n as: Address.self))\n } catch {\n done = true\n return nil\n }\n\n #endif\n\n asyncContext = next\n return .asyncResumePoint(pc)\n }\n}\n
dataset_sample\swift\swift\cpp_apple_swift_stdlib_public_RuntimeModule_FramePointerUnwinder.swift
cpp_apple_swift_stdlib_public_RuntimeModule_FramePointerUnwinder.swift
Swift
8,593
0.95
0.172161
0.251046
python-kit
986
2023-09-08T17:22:10.075325
MIT
false
7b9806fbbac361f79caf3e7a1ea03f72
//===--- Image.swift - Binary image protocol for Swift --------------------===//\n//\n// This source file is part of the Swift.org open source project\n//\n// Copyright (c) 2022 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n//\n// Defines a protocol for binary image files that allows us to fetch what\n// we need without knowing all of the gory details.\n//\n//===----------------------------------------------------------------------===//\n\nimport Swift\n\nstruct ImageSymbol {\n var name: String\n var offset: Int\n}\n\ninternal protocol Image {\n typealias UUID = [UInt8]\n typealias Address = ImageSource.Address\n\n init(source: ImageSource, baseAddress: Address, endAddress: Address) throws\n\n var baseAddress: Address { get set }\n var endAddress: Address { get set }\n\n var source: ImageSource { get }\n var uuid: UUID? 
{ get }\n var shouldByteSwap: Bool { get }\n\n func swapIfRequired<T: FixedWidthInteger>(_ x: T) -> T\n func swapIfRequired<T: ByteSwappable>(_ x: T) -> T\n func swapIfRequired<T>(_ x: T) -> T\n\n func swapIfRequired<T: FixedWidthInteger>(array: inout [T])\n func swapIfRequired<T: ByteSwappable>(array: inout [T])\n func swapIfRequired<T>(array: inout [T])\n\n func swapIfRequired<T: FixedWidthInteger>(buffer: UnsafeMutableBufferPointer<T>)\n func swapIfRequired<T: ByteSwappable>(buffer: UnsafeMutableBufferPointer<T>)\n func swapIfRequired<T>(buffer: UnsafeMutableBufferPointer<T>)\n\n func swapIfRequired<T: FixedWidthInteger>(pointer: UnsafeMutablePointer<T>)\n func swapIfRequired<T: ByteSwappable>(pointer: UnsafeMutablePointer<T>)\n func swapIfRequired<T>(pointer: UnsafeMutablePointer<T>)\n\n func fetch<T>(from addr: Address,\n into buffer: UnsafeMutableBufferPointer<T>) throws\n func fetch<T>(from addr: Address, into pointer: UnsafeMutablePointer<T>) throws\n func fetch<T>(from addr: Address, count: Int, as: T.Type) throws -> [T]\n func fetch<T>(from addr: Address, as type: T.Type) throws -> T\n\n func fetchUnswapped<T>(from addr: Address,\n into buffer: UnsafeMutableBufferPointer<T>) throws\n func fetchUnswapped<T>(from addr: Address,\n into pointer: UnsafeMutablePointer<T>) throws\n func fetchUnswapped<T>(from addr: Address, count: Int, as: T.Type) throws -> [T]\n func fetchUnswapped<T>(from addr: Address, as type: T.Type) throws -> T\n\n func lookupSymbol(address: Address) -> ImageSymbol?\n}\n\nextension Image {\n public func swapIfRequired<T: FixedWidthInteger>(_ x: T) -> T {\n if shouldByteSwap {\n return x.byteSwapped\n }\n return x\n }\n\n public func swapIfRequired<T: ByteSwappable>(_ x: T) -> T {\n if shouldByteSwap {\n return x.byteSwapped\n }\n return x\n }\n\n public func swapIfRequired<T>(_ x: T) -> T {\n return x\n }\n\n public func swapIfRequired<T: ByteSwappable>(array: inout [T]) {\n if shouldByteSwap {\n array.swapBytes()\n }\n }\n public func 
swapIfRequired<T: FixedWidthInteger>(array: inout [T]) {\n if shouldByteSwap {\n array.swapBytes()\n }\n }\n public func swapIfRequired<T>(array: inout [T]) {\n // Nothing to do\n }\n\n public func swapIfRequired<T: ByteSwappable>(buffer: UnsafeMutableBufferPointer<T>) {\n if shouldByteSwap {\n buffer.swapBytes()\n }\n }\n public func swapIfRequired<T: FixedWidthInteger>(buffer: UnsafeMutableBufferPointer<T>) {\n if shouldByteSwap {\n buffer.swapBytes()\n }\n }\n public func swapIfRequired<T>(buffer: UnsafeMutableBufferPointer<T>) {\n // Nothing to do\n }\n\n public func swapIfRequired<T: ByteSwappable>(pointer: UnsafeMutablePointer<T>) {\n if shouldByteSwap {\n pointer.pointee = pointer.pointee.byteSwapped\n }\n }\n public func swapIfRequired<T: FixedWidthInteger>(pointer: UnsafeMutablePointer<T>) {\n if shouldByteSwap {\n pointer.pointee = pointer.pointee.byteSwapped\n }\n }\n public func swapIfRequired<T>(pointer: UnsafeMutablePointer<T>) {\n // Nothing to do\n }\n\n\n public func fetchUnswapped<T>(from addr: Address,\n into buffer: UnsafeMutableBufferPointer<T>) throws {\n return try source.fetch(from: addr, into: buffer)\n }\n public func fetchUnswapped<T>(from addr: Address,\n into pointer: UnsafeMutablePointer<T>) throws {\n return try source.fetch(from: addr, into: pointer)\n }\n public func fetchUnswapped<T>(from addr: Address, count: Int, as type: T.Type) throws -> [T] {\n return try source.fetch(from: addr, count: count, as: type)\n }\n public func fetchUnswapped<T>(from addr: Address, as type: T.Type) throws -> T {\n return try source.fetch(from: addr, as: type)\n }\n\n public func fetch<T>(from addr: Address,\n into buffer: UnsafeMutableBufferPointer<T>) throws {\n try fetchUnswapped(from: addr, into: buffer)\n swapIfRequired(buffer: buffer)\n }\n public func fetch<T>(from addr: Address,\n into pointer: UnsafeMutablePointer<T>) throws {\n try fetchUnswapped(from: addr, into: pointer)\n swapIfRequired(pointer: pointer)\n }\n public func fetch<T>(from 
addr: Address, count: Int, as type: T.Type) throws -> [T]{\n var result = try fetchUnswapped(from: addr, count: count, as: type)\n swapIfRequired(array: &result)\n return result\n }\n public func fetch<T>(from addr: Address, as type: T.Type) throws -> T {\n return swapIfRequired(try fetchUnswapped(from: addr, as: type))\n }\n}\n
dataset_sample\swift\swift\cpp_apple_swift_stdlib_public_RuntimeModule_Image.swift
cpp_apple_swift_stdlib_public_RuntimeModule_Image.swift
Swift
5,601
0.95
0.121212
0.132867
vue-tools
110
2024-10-04T13:28:11.189615
BSD-3-Clause
false
385219b931c3df99c1f1ce9faab4ddf2
//===--- ImageMap+Darwin.swift --------------------------------*- swift -*-===//\n//\n// This source file is part of the Swift.org open source project\n//\n// Copyright (c) 2024 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n//\n// Darwin specifics for ImageMap capture.\n//\n//===----------------------------------------------------------------------===//\n\n#if os(macOS) || os(iOS) || os(watchOS) || os(tvOS)\n\nimport Swift\n\ninternal import Darwin\ninternal import BacktracingImpl.OS.Darwin\n\nfileprivate func getSysCtlString(_ name: String) -> String? {\n return withUnsafeTemporaryAllocation(byteCount: 256, alignment: 16) {\n (buffer: UnsafeMutableRawBufferPointer) -> String? in\n\n var len = buffer.count\n let ret = sysctlbyname(name,\n buffer.baseAddress, &len,\n nil, 0)\n if ret != 0 {\n return nil\n }\n\n return String(validatingUTF8:\n buffer.baseAddress!.assumingMemoryBound(to: CChar.self))\n }\n}\n\nextension ImageMap {\n\n private static let platform = {\n #if os(macOS)\n var platform = "macOS"\n #elseif os(iOS)\n var platform = "iOS"\n #elseif os(watchOS)\n var platform = "watchOS"\n #elseif os(tvOS)\n var platform = "tvOS"\n #elseif os(visionOS)\n var platform = "visionOS"\n #endif\n\n let osVersion = getSysCtlString("kern.osversion") ?? "<unknown>"\n let osProductVersion = getSysCtlString("kern.osproductversion") ?? "<unknown>"\n\n return "\(platform) \(osProductVersion) (\(osVersion))"\n }()\n\n private static func withDyldProcessInfo<T>(for task: task_t,\n fn: (OpaquePointer?) 
throws -> T)\n rethrows -> T {\n var kret = kern_return_t(KERN_SUCCESS)\n let dyldInfo = _dyld_process_info_create(task, 0, &kret)\n\n if kret != KERN_SUCCESS {\n fatalError("error: cannot create dyld process info")\n }\n\n defer {\n _dyld_process_info_release(dyldInfo)\n }\n\n return try fn(dyldInfo)\n }\n\n @_spi(Internal)\n public static func capture(for process: Any) -> ImageMap {\n var images: [Image] = []\n let task = process as! task_t\n\n withDyldProcessInfo(for: task) { dyldInfo in\n _dyld_process_info_for_each_image(dyldInfo) {\n (machHeaderAddress, uuid, path) in\n\n if let path = path, let uuid = uuid {\n let pathString = String(cString: path)\n let theUUID = Array(UnsafeBufferPointer(start: uuid,\n count: MemoryLayout<uuid_t>.size))\n let name: String\n if let slashIndex = pathString.lastIndex(of: "/") {\n name = String(pathString.suffix(from:\n pathString.index(after:slashIndex)))\n } else {\n name = pathString\n }\n\n // Find the end of the __TEXT segment\n var endOfText = machHeaderAddress + 4096\n\n _dyld_process_info_for_each_segment(dyldInfo, machHeaderAddress) {\n address, size, name in\n\n if let name = String(validatingCString: name!), name == "__TEXT" {\n endOfText = address + size\n }\n }\n\n images.append(Image(name: name,\n path: pathString,\n uniqueID: theUUID,\n baseAddress: machHeaderAddress,\n endOfText: endOfText))\n }\n }\n }\n\n images.sort(by: { $0.baseAddress < $1.baseAddress })\n\n return ImageMap(\n platform: ImageMap.platform,\n images: images,\n wordSize: .sixtyFourBit\n )\n }\n\n}\n\n#endif // os(macOS) || os(iOS) || os(watchOS) || os(tvOS)\n
dataset_sample\swift\swift\cpp_apple_swift_stdlib_public_RuntimeModule_ImageMap+Darwin.swift
cpp_apple_swift_stdlib_public_RuntimeModule_ImageMap+Darwin.swift
Swift
3,964
0.95
0.10687
0.226415
react-lib
348
2023-07-21T09:33:30.280700
Apache-2.0
false
9a3c2b5e5a3497621394d186900f1933
//===--- ImageMap+Linux.swift --------------------------------*- swift -*-===//\n//\n// This source file is part of the Swift.org open source project\n//\n// Copyright (c) 2024 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n//\n// Linux specifics for ImageMap capture.\n//\n//===----------------------------------------------------------------------===//\n\n#if os(Linux)\n\nimport Swift\n\n#if canImport(Glibc)\ninternal import Glibc\n#elseif canImport(Musl)\ninternal import Musl\n#endif\n\ninternal import BacktracingImpl.ImageFormats.Elf\n\nfileprivate func readOSRelease(fd: CInt) -> [String:String]? {\n let len = lseek(fd, 0, SEEK_END)\n guard len >= 0 else {\n return nil\n }\n return withUnsafeTemporaryAllocation(byteCount: len, alignment: 16) {\n (buffer: UnsafeMutableRawBufferPointer) -> [String:String]? in\n\n _ = lseek(fd, 0, SEEK_SET)\n let bytesRead = read(fd, buffer.baseAddress, buffer.count)\n guard bytesRead == buffer.count else {\n return nil\n }\n\n let asString = String(decoding: buffer, as: UTF8.self)\n return Dictionary(OSReleaseScanner(asString),\n uniquingKeysWith: { $1 })\n }\n}\n\nfileprivate func readOSRelease() -> [String:String]? 
{\n var fd = open("/etc/os-release", O_RDONLY)\n if fd == -1 {\n fd = open("/usr/lib/os-release", O_RDONLY)\n }\n if fd == -1 {\n return nil\n }\n defer {\n close(fd)\n }\n\n return readOSRelease(fd: fd)\n}\n\nextension ImageMap {\n\n private static var platform = {\n guard let info = readOSRelease(),\n let pretty = info["PRETTY_NAME"] else {\n return "Linux (unknown)"\n }\n\n return "Linux (\(pretty))"\n }()\n\n private struct AddressRange {\n var low: Address = 0\n var high: Address = 0\n }\n\n @_specialize(exported: true, kind: full, where M == UnsafeLocalMemoryReader)\n @_specialize(exported: true, kind: full, where M == RemoteMemoryReader)\n @_specialize(exported: true, kind: full, where M == LocalMemoryReader)\n @_spi(Internal)\n public static func capture<M: MemoryReader>(\n using reader: M,\n forProcess pid: Int? = nil\n ) -> ImageMap {\n var images: [Image] = []\n\n let wordSize: WordSize\n\n #if arch(x86_64) || arch(arm64) || arch(arm64_32)\n wordSize = .sixtyFourBit\n #elseif arch(i386) || arch(arm)\n wordSize = .thirtyTwoBit\n #endif\n\n let path: String\n if let pid = pid {\n path = "/proc/\(pid)/maps"\n } else {\n path = "/proc/self/maps"\n }\n\n guard let procMaps = readString(from: path) else {\n return ImageMap(platform: ImageMap.platform, images: [], wordSize: wordSize)\n }\n\n // Find all the mapped files and get high/low ranges\n var mappedFiles: [Substring:AddressRange] = [:]\n for match in ProcMapsScanner(procMaps) {\n let path = stripWhitespace(match.pathname)\n if match.inode == "0" || path == "" {\n continue\n }\n guard let start = Address(match.start, radix: 16),\n let end = Address(match.end, radix: 16) else {\n continue\n }\n\n if let range = mappedFiles[path] {\n mappedFiles[path] = AddressRange(low: Swift.min(start, range.low),\n high: Swift.max(end, range.high))\n } else {\n mappedFiles[path] = AddressRange(low: start,\n high: end)\n }\n }\n\n // Look at each mapped file to see if it's an ELF image\n for (path, range) in mappedFiles 
{\n // Extract the filename from path\n let name: Substring\n if let slashIndex = path.lastIndex(of: "/") {\n name = path.suffix(from: path.index(after: slashIndex))\n } else {\n name = path\n }\n\n // Inspect the image and extract the UUID and end of text\n guard let (endOfText, uuid) = getElfImageInfo(\n at: M.Address(exactly: range.low)!,\n using: reader\n ) else {\n // Not an ELF image\n continue\n }\n\n let image = Image(name: String(name),\n path: String(path),\n uniqueID: uuid,\n baseAddress: range.low,\n endOfText: Address(endOfText))\n\n images.append(image)\n }\n\n images.sort(by: { $0.baseAddress < $1.baseAddress })\n\n return ImageMap(\n platform: ImageMap.platform,\n images: images,\n wordSize: wordSize\n )\n }\n\n}\n\n#endif // os(Linux)\n
dataset_sample\swift\swift\cpp_apple_swift_stdlib_public_RuntimeModule_ImageMap+Linux.swift
cpp_apple_swift_stdlib_public_RuntimeModule_ImageMap+Linux.swift
Swift
4,643
0.95
0.088757
0.198582
node-utils
90
2024-06-17T01:24:02.830928
BSD-3-Clause
false
247179fe290f719258a1e4d4d8efc722
//===--- ImageMap.swift ----------------------------------------*- swift -*-===//\n//\n// This source file is part of the Swift.org open source project\n//\n// Copyright (c) 2024 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n//\n// Defines the `ImageMap` struct that represents a captured list of loaded\n// images.\n//\n//===----------------------------------------------------------------------===//\n\nimport Swift\n\n#if os(macOS) || os(iOS) || os(watchOS) || os(tvOS)\ninternal import Darwin\ninternal import BacktracingImpl.OS.Darwin\n#endif\n\n/// Holds a map of the process's address space.\npublic struct ImageMap: Collection, Sendable, Hashable {\n\n /// A type representing the sequence's elements.\n public typealias Element = Backtrace.Image\n\n /// A type that represents a position in the collection.\n public typealias Index = Int\n\n /// Tells us what size of machine words were used when capturing the\n /// image map.\n enum WordSize: Sendable {\n case sixteenBit\n case thirtyTwoBit\n case sixtyFourBit\n }\n\n /// We use UInt64s for addresses here.\n typealias Address = UInt64\n\n /// The internal representation of an image.\n struct Image: Sendable, Hashable {\n var name: String?\n var path: String?\n var uniqueID: [UInt8]?\n var baseAddress: Address\n var endOfText: Address\n }\n\n /// The name of the platform that captured this image map.\n public private(set) var platform: String\n\n /// The actual image storage.\n var images: [Image]\n\n /// The size of words used when capturing.\n var wordSize: WordSize\n\n /// Construct an ImageMap.\n init(platform: String, images: [Image], wordSize: WordSize) {\n self.platform = platform\n self.images = images\n self.wordSize 
= wordSize\n }\n\n /// Construct an ImageMap from CompactImageMap data {\n @_spi(Internal)\n public init?(compactImageMapData: some Sequence<UInt8>) {\n var decoder = CompactImageMapFormat.Decoder(compactImageMapData)\n guard let (platform, images, wordSize) = decoder.decode() else {\n return nil\n }\n self.init(platform: platform, images: images, wordSize: wordSize)\n }\n\n /// The position of the first element in a non-empty collection.\n public var startIndex: Self.Index {\n return 0\n }\n\n /// The collection's "past the end" position---that is, the position one\n /// greater than the last valid subscript argument.\n public var endIndex: Self.Index {\n return images.count\n }\n\n /// Accesses the element at the specified position.\n public subscript(_ ndx: Self.Index) -> Self.Element {\n return Backtrace.Image(images[ndx], wordSize: wordSize)\n }\n\n /// Look-up an image by address.\n public func indexOfImage(at address: Backtrace.Address) -> Int? {\n let addr = UInt64(address)!\n var lo = 0, hi = images.count\n while lo < hi {\n let mid = (lo + hi) / 2\n if images[mid].baseAddress > addr {\n hi = mid\n } else if images[mid].endOfText <= addr {\n lo = mid + 1\n } else {\n return mid\n }\n }\n\n return nil\n }\n\n /// Returns the position immediately after the given index.\n public func index(after ndx: Self.Index) -> Self.Index {\n return ndx + 1\n }\n\n /// Capture the image map for the current process.\n public static func capture() -> ImageMap {\n #if os(macOS) || os(iOS) || os(watchOS) || os(tvOS)\n return capture(for: mach_task_self())\n #else\n return capture(using: UnsafeLocalMemoryReader())\n #endif\n }\n}\n\nextension ImageMap: CustomStringConvertible {\n /// Generate a description of an ImageMap\n public var description: String {\n var lines: [String] = ["Platform: \(platform)", ""]\n let addressWidth: Int\n switch wordSize {\n case .sixteenBit: addressWidth = 4\n case .thirtyTwoBit: addressWidth = 8\n case .sixtyFourBit: addressWidth = 16\n }\n\n for 
image in images {\n let hexBase = hex(image.baseAddress, width: addressWidth)\n let hexEnd = hex(image.endOfText, width: addressWidth)\n let buildId: String\n if let bytes = image.uniqueID {\n buildId = hex(bytes)\n } else {\n buildId = "<no build ID>"\n }\n let path = image.path ?? "<unknown>"\n let name = image.name ?? "<unknown>"\n\n lines.append("\(hexBase)-\(hexEnd) \(buildId) \(name) \(path)")\n }\n\n return lines.joined(separator: "\n")\n }\n}\n\nextension Backtrace.Image {\n /// Convert an ImageMap.Image to a Backtrace.Image.\n ///\n /// Backtrace.Image is the public, user-visible type; ImageMap.Image\n /// is an in-memory representation.\n init(_ image: ImageMap.Image, wordSize: ImageMap.WordSize) {\n let baseAddress: Backtrace.Address\n let endOfText: Backtrace.Address\n\n switch wordSize {\n case .sixteenBit:\n baseAddress = Backtrace.Address(\n UInt16(truncatingIfNeeded: image.baseAddress)\n )\n endOfText = Backtrace.Address(\n UInt16(truncatingIfNeeded: image.endOfText)\n )\n case .thirtyTwoBit:\n baseAddress = Backtrace.Address(\n UInt32(truncatingIfNeeded: image.baseAddress)\n )\n endOfText = Backtrace.Address(\n UInt32(truncatingIfNeeded: image.endOfText)\n )\n case .sixtyFourBit:\n baseAddress = Backtrace.Address(image.baseAddress)\n endOfText = Backtrace.Address(image.endOfText)\n }\n\n self.init(name: image.name,\n path: image.path,\n uniqueID: image.uniqueID,\n baseAddress: baseAddress,\n endOfText: endOfText)\n }\n}\n\nextension ImageMap: Codable {\n\n public func encode(to encoder: any Encoder) throws {\n var container = encoder.singleValueContainer()\n let cimfEncoder = CompactImageMapFormat.Encoder(self)\n let base64 = stringFrom(sequence: Base64Encoder(source: cimfEncoder))\n try container.encode(base64)\n }\n\n public init(from decoder: any Decoder) throws {\n let container = try decoder.singleValueContainer()\n let base64 = try container.decode(String.self)\n self.init(compactImageMapData: Base64Decoder(source: base64.utf8))!\n }\n\n}\n
dataset_sample\swift\swift\cpp_apple_swift_stdlib_public_RuntimeModule_ImageMap.swift
cpp_apple_swift_stdlib_public_RuntimeModule_ImageMap.swift
Swift
6,289
0.95
0.080569
0.25
node-utils
236
2024-02-13T03:40:30.321079
BSD-3-Clause
false
2d89fce5247ce6771337f7762f05d19c
//===--- ImageSource.swift - A place from which to read image data --------===//\n//\n// This source file is part of the Swift.org open source project\n//\n// Copyright (c) 2024 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n//\n// Defines ImageSource, which tells us where to look for image data.\n//\n//===----------------------------------------------------------------------===//\n\nimport Swift\n\n#if os(macOS) || os(iOS) || os(tvOS) || os(watchOS)\ninternal import Darwin\n#elseif os(Windows)\ninternal import ucrt\n#elseif canImport(Glibc)\ninternal import Glibc\n#elseif canImport(Musl)\ninternal import Musl\n#endif\n\nenum ImageSourceError: Error {\n case outOfBoundsRead\n case posixError(Int32)\n}\n\nstruct ImageSource {\n\n private class Storage {\n /// Says how we allocated the buffer.\n private enum MemoryBufferKind {\n /// Currently empty\n case empty\n\n /// Allocated with UnsafeRawBufferPointer.allocate()\n case allocated(Int)\n\n /// Allocated by mapping memory with mmap() or similar\n case mapped\n\n /// A reference to a subordinate storage\n case substorage(Storage)\n\n /// Not allocated (probably points to a loaded image)\n case unowned\n }\n\n private var kind: MemoryBufferKind\n\n /// The pointer to the actual memory\n private(set) var bytes: UnsafeRawBufferPointer!\n\n /// Gets a mutable pointer to the actual memory\n var mutableBytes: UnsafeMutableRawBufferPointer {\n guard case let .allocated(count) = kind else {\n fatalError("attempted to get mutable reference to immutable ImageSource")\n }\n return UnsafeMutableRawBufferPointer(\n mutating: UnsafeRawBufferPointer(rebasing: bytes[0..<count])\n )\n }\n\n /// Gets a mutable pointer to the unused space\n 
var unusedBytes: UnsafeMutableRawBufferPointer {\n guard case let .allocated(count) = kind else {\n fatalError("attempted to get mutable reference to immutable ImageSource")\n }\n return UnsafeMutableRawBufferPointer(\n mutating: UnsafeRawBufferPointer(rebasing: bytes[count...])\n )\n }\n\n /// Return the number of bytes in this ImageSource\n var count: Int {\n switch kind {\n case .empty:\n return 0\n case let .allocated(count):\n return count\n case .mapped, .substorage, .unowned:\n return bytes.count\n }\n }\n\n @inline(__always)\n private func _rangeCheck(_ ndx: Int) {\n if ndx < 0 || ndx >= count {\n fatalError("ImageSource access out of range")\n }\n }\n\n init() {\n self.kind = .empty\n self.bytes = nil\n }\n\n init(unowned buffer: UnsafeRawBufferPointer) {\n self.kind = .unowned\n self.bytes = buffer\n }\n\n init(mapped buffer: UnsafeRawBufferPointer) {\n self.kind = .mapped\n self.bytes = buffer\n }\n\n init(allocated buffer: UnsafeMutableRawBufferPointer, count: Int? = nil) {\n self.kind = .allocated(count ?? buffer.count)\n self.bytes = UnsafeRawBufferPointer(buffer)\n }\n\n convenience init(capacity: Int, alignment: Int = 0x4000) {\n self.init(allocated: UnsafeMutableRawBufferPointer.allocate(\n byteCount: capacity,\n alignment: 0x1000\n ),\n count: 0)\n }\n\n init(parent: Storage, range: Range<Int>) {\n let chunk = UnsafeRawBufferPointer(rebasing: parent.bytes[range])\n\n self.kind = .substorage(parent)\n self.bytes = chunk\n }\n\n convenience init(path: String) throws {\n let fd = open(path, O_RDONLY, 0)\n if fd < 0 {\n throw ImageSourceError.posixError(errno)\n }\n defer { close(fd) }\n let size = lseek(fd, 0, SEEK_END)\n if size < 0 {\n throw ImageSourceError.posixError(errno)\n }\n let base = mmap(nil, Int(size), PROT_READ, MAP_FILE|MAP_PRIVATE, fd, 0)\n if base == nil || base! == UnsafeRawPointer(bitPattern: -1)! 
{\n throw ImageSourceError.posixError(errno)\n }\n\n self.init(mapped: UnsafeRawBufferPointer(\n start: base, count: Int(size)))\n }\n\n deinit {\n switch kind {\n case .allocated:\n mutableBytes.deallocate()\n case .mapped:\n munmap(UnsafeMutableRawPointer(mutating: bytes.baseAddress),\n bytes.count)\n case .substorage, .unowned, .empty:\n break\n }\n }\n\n /// Subscripting (read-only, for subranges)\n subscript(range: Range<Int>) -> Storage {\n return Storage(parent: self, range: range)\n }\n\n /// Resize the buffer; only supported for allocated or empty storage\n func resize(newSize: Int) -> UnsafeMutableRawBufferPointer {\n let newBuffer = UnsafeMutableRawBufferPointer.allocate(\n byteCount: newSize,\n alignment: 0x1000\n )\n switch kind {\n case .empty:\n kind = .allocated(0)\n case let .allocated(count):\n assert(newSize >= count)\n\n let oldPart = UnsafeMutableRawBufferPointer(\n rebasing: newBuffer[0..<count]\n )\n oldPart.copyMemory(from: bytes)\n mutableBytes.deallocate()\n kind = .allocated(count)\n default:\n fatalError("Cannot resize immutable image source storage")\n }\n\n bytes = UnsafeRawBufferPointer(newBuffer)\n\n return newBuffer\n }\n\n /// Make sure the buffer has at least a certain number of bytes;\n /// only supported for allocated or empty storage.\n func requireAtLeast(byteCount: Int) -> UnsafeMutableRawBufferPointer {\n let capacity: Int\n switch kind {\n case .empty:\n capacity = 0\n case .allocated:\n capacity = bytes.count\n default:\n fatalError("Cannot resize immutable image source storage")\n }\n\n if capacity >= byteCount {\n return mutableBytes\n }\n\n let extra = byteCount - capacity\n\n let increment: Int\n if capacity < 1048576 {\n let roundedExtra = (extra + 0xffff) & ~0xffff\n increment = max(roundedExtra, capacity)\n } else {\n let roundedExtra = (extra + 0xfffff) & ~0xfffff\n let topBit = capacity.bitWidth - capacity.leadingZeroBitCount\n increment = max(roundedExtra, 1048576 * (topBit - 20))\n }\n\n return resize(newSize: 
capacity + increment)\n }\n\n /// Mark a number of bytes in the mutable buffer as in use. This is\n /// used when passing `unusedBytes` to some other code that fills in\n /// part of the buffer.\n func used(bytes: Int) {\n guard bytes >= 0 else {\n fatalError("Bytes should not be less than zero")\n }\n guard case let .allocated(count) = kind else {\n fatalError("Cannot append to immutable image source storage")\n }\n guard mutableBytes.count - count <= bytes else {\n fatalError("Buffer overrun detected")\n }\n kind = .allocated(count + bytes)\n }\n\n /// Append bytes to the mutable buffer; this is only supported for\n /// allocated or empty storage.\n func append(bytes toAppend: UnsafeRawBufferPointer) {\n // Short circuit, otherwise we get in a muddle in requireAtLeast()\n if toAppend.count == 0 {\n return\n }\n\n let newCount = count + toAppend.count\n\n let mutableBytes = requireAtLeast(byteCount: newCount)\n\n guard case let .allocated(count) = kind else {\n fatalError("Cannot append to immutable image source storage")\n }\n\n let dest = UnsafeMutableRawBufferPointer(\n rebasing: mutableBytes[count..<newCount]\n )\n dest.copyMemory(from: toAppend)\n kind = .allocated(newCount)\n }\n }\n\n /// The storage holding the image data.\n private var storage: Storage\n\n /// The number of bytes of data this ImageSource holds.\n var count: Int { return storage.count }\n\n /// The memory holding the image data.\n var bytes: UnsafeRawBufferPointer { return storage.bytes }\n\n /// A mutable refernece to the image data (only for allocated storage)\n var mutableBytes: UnsafeMutableRawBufferPointer { return storage.mutableBytes }\n\n /// A mutable reference to unused bytes in the storage\n var unusedBytes: UnsafeMutableRawBufferPointer { return storage.unusedBytes }\n\n /// Says whether we are looking at a loaded (i.e. 
with ld.so or dyld) image.\n private(set) var isMappedImage: Bool\n\n /// If this ImageSource knows its path, this will be non-nil.\n private(set) var path: String?\n\n /// Private initialiser, not for general use\n private init(storage: Storage, isMappedImage: Bool, path: String?) {\n self.storage = storage\n self.isMappedImage = isMappedImage\n self.path = path\n }\n\n /// Initialise an empty storage\n init(isMappedImage: Bool, path: String? = nil) {\n self.init(storage: Storage(), isMappedImage: isMappedImage, path: path)\n }\n\n /// Initialise from unowned storage\n init(unowned: UnsafeRawBufferPointer, isMappedImage: Bool, path: String? = nil) {\n self.init(storage: Storage(unowned: unowned),\n isMappedImage: isMappedImage, path: path)\n }\n\n /// Initialise from mapped storage\n init(mapped: UnsafeRawBufferPointer, isMappedImage: Bool, path: String? = nil) {\n self.init(storage: Storage(mapped: mapped),\n isMappedImage: isMappedImage, path: path)\n }\n\n /// Initialise with a specified capacity\n init(capacity: Int, isMappedImage: Bool, path: String? 
= nil) {\n self.init(storage: Storage(capacity: capacity),\n isMappedImage: isMappedImage, path: path)\n }\n\n /// Initialise with a mapped file\n init(path: String) throws {\n self.init(storage: try Storage(path: path),\n isMappedImage: false, path: path)\n }\n\n /// Get a sub-range of this ImageSource as an ImageSource\n subscript(range: Range<Address>) -> ImageSource {\n let intRange = Int(range.lowerBound)..<Int(range.upperBound)\n return ImageSource(storage: storage[intRange],\n isMappedImage: isMappedImage,\n path: path)\n }\n\n /// Mark unused bytes in the storage as used\n func used(bytes: Int) {\n storage.used(bytes: bytes)\n }\n\n /// Append bytes to an empty or allocated storage\n func append(bytes toAppend: UnsafeRawBufferPointer) {\n storage.append(bytes: toAppend)\n }\n}\n\n// MemoryReader support\nextension ImageSource: MemoryReader {\n public func fetch(from address: Address,\n into buffer: UnsafeMutableRawBufferPointer) throws {\n let offset = Int(address)\n guard bytes.count >= buffer.count &&\n offset <= bytes.count - buffer.count else {\n throw ImageSourceError.outOfBoundsRead\n }\n buffer.copyMemory(from: UnsafeRawBufferPointer(\n rebasing: bytes[offset..<offset + buffer.count]))\n }\n\n public func fetch<T>(from address: Address, as type: T.Type) throws -> T {\n let size = MemoryLayout<T>.size\n let offset = Int(address)\n guard offset <= bytes.count - size else {\n throw ImageSourceError.outOfBoundsRead\n }\n return bytes.loadUnaligned(fromByteOffset: offset, as: type)\n }\n\n public func fetchString(from address: Address) throws -> String? {\n let offset = Int(address)\n let len = strnlen(bytes.baseAddress! + offset, bytes.count - offset)\n let stringBytes = bytes[offset..<offset+len]\n return String(decoding: stringBytes, as: UTF8.self)\n }\n\n public func fetchString(from address: Address, length: Int) throws -> String? 
{\n let offset = Int(address)\n let stringBytes = bytes[offset..<offset+length]\n return String(decoding: stringBytes, as: UTF8.self)\n }\n}\n\n/// Used as a cursor by the DWARF code\nstruct ImageSourceCursor {\n typealias Address = ImageSource.Address\n typealias Size = ImageSource.Size\n\n var source: ImageSource\n var pos: Address\n\n init(source: ImageSource, offset: Address = 0) {\n self.source = source\n self.pos = offset\n }\n\n mutating func read(into buffer: UnsafeMutableRawBufferPointer) throws {\n try source.fetch(from: pos, into: buffer)\n pos += Size(buffer.count)\n }\n\n mutating func read<T>(into buffer: UnsafeMutableBufferPointer<T>) throws {\n try source.fetch(from: pos, into: buffer)\n pos += Size(MemoryLayout<T>.stride * buffer.count)\n }\n\n mutating func read<T>(into pointer: UnsafeMutablePointer<T>) throws {\n try source.fetch(from: pos, into: pointer)\n pos += Size(MemoryLayout<T>.stride)\n }\n\n mutating func read<T>(as type: T.Type) throws -> T {\n let result = try source.fetch(from: pos, as: type)\n pos += Size(MemoryLayout<T>.stride)\n return result\n }\n\n mutating func read<T>(count: Int, as type: T.Type) throws -> [T] {\n let result = try source.fetch(from: pos, count: count, as: type)\n pos += Size(MemoryLayout<T>.stride * count)\n return result\n }\n\n mutating func readString() throws -> String? {\n guard let result = try source.fetchString(from: pos) else {\n return nil\n }\n pos += Size(result.utf8.count + 1) // +1 for the NUL\n return result\n }\n\n mutating func readString(length: Int) throws -> String? {\n guard let result = try source.fetchString(from: pos, length: length) else {\n return nil\n }\n pos += Size(length)\n return result\n }\n}\n
dataset_sample\swift\swift\cpp_apple_swift_stdlib_public_RuntimeModule_ImageSource.swift
cpp_apple_swift_stdlib_public_RuntimeModule_ImageSource.swift
Swift
13,389
0.95
0.071101
0.158904
awesome-app
79
2024-03-18T21:42:32.872639
BSD-3-Clause
false
477ba1a48835446af96b8a6866da6c7e
//===--- Libc.swift - libc utility functions ------------------------------===//\n//\n// This source file is part of the Swift.org open source project\n//\n// Copyright (c) 2023 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n//\n// Since we are using C++ interop, some utility functions we've defined in\n// modules/OS/Libc.h end up in a namespace. This file moves them out again.\n//\n//===----------------------------------------------------------------------===//\n\ninternal import BacktracingImpl.OS.Libc\n\nlet _swift_open = swift.runtime.backtrace._swift_open\nlet _swift_get_errno = swift.runtime.backtrace._swift_get_errno\nlet _swift_set_errno = swift.runtime.backtrace._swift_set_errno\n
dataset_sample\swift\swift\cpp_apple_swift_stdlib_public_RuntimeModule_Libc.swift
cpp_apple_swift_stdlib_public_RuntimeModule_Libc.swift
Swift
975
0.95
0.090909
0.8
awesome-app
709
2025-06-05T15:35:18.519153
Apache-2.0
false
c1b3e18aa120ee61b2bf24c45e9a1586
//===--- LimitSequence.swift ----------------------------------*- swift -*-===//\n//\n// This source file is part of the Swift.org open source project\n//\n// Copyright (c) 2024 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n//\n// Defines a sequence adapter that implements the ability to limit the\n// number of items in its output in various ways.\n//\n//===----------------------------------------------------------------------===//\n\nimport Swift\n\n/// Sequences you wish to use with `LimitSequence` must use an element type\n/// that implements this protocol, so that `LimitSequence` can indicate when\n/// it omits or truncates the sequence.\n@usableFromInline\nprotocol LimitableElement {\n static func omitted(_: Int) -> Self\n static var truncated: Self { get }\n}\n\n/// A `Sequence` that adds the ability to limit the output of another sequence.\n@usableFromInline\nstruct LimitSequence<T: LimitableElement, S: Sequence>: Sequence\n where S.Element == T\n{\n /// The element type, which must conform to `LimitableElement`\n @usableFromInline\n typealias Element = T\n\n /// The source sequence\n @usableFromInline\n typealias Source = S\n\n var source: Source\n\n /// The maximum number of items that we want in the output of this sequence.\n /// This includes `.omitted()` and `.truncated` items.\n var limit: Int\n\n /// The number of items to drop from the head of the sequence.\n var offset: Int\n\n /// The minimum number of items to capture at the tail end of the input\n /// sequence. 
This can be _at most_ `limit - 1`.\n var top: Int\n\n /// Initialise the `LimitSequence`\n ///\n /// - source: The sequence to draw items from.\n /// - limit: The maximum number of items of output we desire.\n /// - offset: The number of items to drop from the head of the input sequence.\n /// - top: The minimum number of items to capture at the tail end of the\n /// input sequence.\n ///\n /// A `LimitSequence` will read from `source` and emit at most `limit` items,\n /// after discarding the first `offset` items from `source`, including a\n /// minimum of `top` items.\n ///\n /// When `LimitSequence` omits items or truncates the sequence, it will\n /// insert `.omitted(count)` or `.truncated` items into its output.\n @usableFromInline\n init(_ source: Source, limit: Int, offset: Int = 0, top: Int = 0) {\n self.source = source\n self.limit = limit\n self.offset = offset\n self.top = top\n }\n\n /// Create an iterator for this sequence.\n public func makeIterator() -> Iterator {\n return Iterator(source.makeIterator(), limit: limit, offset: offset, top: top)\n }\n\n /// The `LimitSequence` Iterator implementation.\n ///\n /// This works by buffering an element ahead of where we are in the input\n /// sequence, so that it can tell whether or not there is more input to\n /// follow at any given point.\n @usableFromInline\n struct Iterator: IteratorProtocol {\n /// The iterator for the input sequence.\n var iterator: Source.Iterator\n\n /// We read one element ahead in the input sequence; that element is\n /// stored here.\n var readAhead: Element?\n\n /// Tracks the number of items emitted before getting to `top`.\n var count = 0\n\n /// The maximum number of items to emit, including the `.truncated`\n /// or `.omitted()` markers.\n var limit: Int\n\n /// The minimum number of items to capture from the tail of the input\n /// sequence. 
Must be strictly less than `limit`.\n var top: Int\n\n /// A ring buffer that we use to capture the tail.\n var topBuffer: [Element]\n\n /// Points at the first item in `topBuffer`.\n var topBase: Int\n\n /// The index in `topBuffer` that we should output from the next\n /// call to `next()`.\n var topNdx: Int\n\n /// Tracks the iterator state.\n var state: State\n\n enum State {\n case normal\n case outputTop\n case done\n }\n\n /// Fill `readAhead` with the next element from the input sequence.\n private mutating func readNext() {\n if let elt = self.iterator.next() {\n readAhead = elt\n } else {\n readAhead = nil\n }\n }\n\n /// Initialise the iterator, and fill in the first read ahead element.\n init(_ iterator: Source.Iterator, limit: Int, offset: Int, top: Int) {\n self.iterator = iterator\n\n for _ in 0..<offset {\n if self.iterator.next() == nil {\n break\n }\n }\n\n self.readAhead = nil\n self.limit = limit\n self.top = Swift.min(top, limit - 1)\n self.state = .normal\n self.topBuffer = []\n self.topBuffer.reserveCapacity(top)\n self.topBase = 0\n self.topNdx = 0\n\n readNext()\n }\n\n /// Retrieve the next element in the output sequence.\n public mutating func next() -> Element? 
{\n switch state {\n case .done:\n return nil\n case .outputTop:\n let result = topBuffer[topNdx]\n topNdx += 1\n if topNdx == top {\n topNdx = 0\n }\n if topNdx == topBase {\n state = .done\n }\n return result\n case .normal:\n break\n }\n\n guard let element = readAhead else {\n state = .done\n return nil\n }\n\n readNext()\n\n // Capture the easy part\n if count < limit - top - 1 {\n count += 1\n return element\n }\n\n if top == 0 && readAhead != nil {\n state = .done\n return .truncated\n }\n\n let beforeTop = element\n\n // Fill the top buffer\n while let elt = readAhead, topBuffer.count < top{\n topBuffer.append(elt)\n\n readNext()\n }\n\n if readAhead == nil {\n // No elements means we just output beforeTop and we're done\n if topBuffer.count == 0 {\n state = .done\n return beforeTop\n }\n\n // Otherwise, output beforeTop and then the top buffer\n topNdx = 0\n if topBuffer.count < top {\n topBase = topBuffer.count\n }\n state = .outputTop\n return beforeTop\n }\n\n // Use the top buffer as a circular buffer\n var omitted = 1\n while let elt = readAhead {\n topBuffer[topBase] = elt\n topBase += 1\n omitted += 1\n if topBase == top {\n topBase = 0\n }\n\n readNext()\n }\n\n topNdx = topBase\n state = .outputTop\n return .omitted(omitted)\n }\n }\n}\n
dataset_sample\swift\swift\cpp_apple_swift_stdlib_public_RuntimeModule_LimitSequence.swift
cpp_apple_swift_stdlib_public_RuntimeModule_LimitSequence.swift
Swift
6,657
0.95
0.075949
0.341709
vue-tools
238
2024-05-09T03:45:47.259427
GPL-3.0
false
2678fcac567c948801704ecfe51f2e44
//===--- MemoryReader.swift -----------------------------------*- swift -*-===//\n//\n// This source file is part of the Swift.org open source project\n//\n// Copyright (c) 2023 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n//\n// Provides the ability to read memory, both in the current process and\n// remotely.\n//\n//===----------------------------------------------------------------------===//\n\nimport Swift\n\n#if os(macOS) || os(iOS) || os(tvOS) || os(watchOS)\ninternal import Darwin\n#elseif os(Windows)\ninternal import ucrt\n#elseif canImport(Glibc)\ninternal import Glibc\n#elseif canImport(Musl)\ninternal import Musl\n#endif\n\n#if os(macOS)\ninternal import BacktracingImpl.OS.Darwin\n#endif\n\n@_spi(MemoryReaders) public protocol MemoryReader {\n typealias Address = UInt64\n typealias Size = UInt64\n\n /// Fill the specified buffer with data from the specified location in\n /// the source.\n func fetch(from address: Address,\n into buffer: UnsafeMutableRawBufferPointer) throws\n\n /// Fill the specified buffer with data from the specified location in\n /// the source.\n func fetch<T>(from address: Address,\n into buffer: UnsafeMutableBufferPointer<T>) throws\n\n /// Write data from the specified location in the source through a pointer\n func fetch<T>(from addr: Address,\n into pointer: UnsafeMutablePointer<T>) throws\n\n /// Fetch an array of Ts from the specified location in the source\n func fetch<T>(from addr: Address, count: Int, as: T.Type) throws -> [T]\n\n /// Fetch a T from the specified location in the source\n func fetch<T>(from addr: Address, as: T.Type) throws -> T\n\n /// Fetch a NUL terminated string from the specified location in the source\n 
func fetchString(from addr: Address) throws -> String?\n\n /// Fetch a fixed-length string from the specified location in the source\n func fetchString(from addr: Address, length: Int) throws -> String?\n}\n\nextension MemoryReader {\n\n public func fetch<T>(from address: Address,\n into buffer: UnsafeMutableBufferPointer<T>) throws {\n try fetch(from: address, into: UnsafeMutableRawBufferPointer(buffer))\n }\n\n public func fetch<T>(from addr: Address,\n into pointer: UnsafeMutablePointer<T>) throws {\n try fetch(from: addr,\n into: UnsafeMutableBufferPointer(start: pointer, count: 1))\n }\n\n public func fetch<T>(from addr: Address, count: Int, as: T.Type) throws -> [T] {\n let array = try Array<T>(unsafeUninitializedCapacity: count){\n buffer, initializedCount in\n\n try fetch(from: addr, into: buffer)\n\n initializedCount = count\n }\n\n return array\n }\n\n public func fetch<T>(from addr: Address, as: T.Type) throws -> T {\n return try withUnsafeTemporaryAllocation(of: T.self, capacity: 1) { buf in\n try fetch(from: addr, into: buf)\n return buf[0]\n }\n }\n\n public func fetchString(from addr: Address) throws -> String? {\n var bytes: [UInt8] = []\n var ptr = addr\n while true {\n let ch = try fetch(from: ptr, as: UInt8.self)\n if ch == 0 {\n break\n }\n bytes.append(ch)\n ptr += 1\n }\n\n return String(decoding: bytes, as: UTF8.self)\n }\n\n public func fetchString(from addr: Address, length: Int) throws -> String? 
{\n let bytes = try fetch(from: addr, count: length, as: UInt8.self)\n return String(decoding: bytes, as: UTF8.self)\n }\n}\n\n@_spi(MemoryReaders) public struct UnsafeLocalMemoryReader: MemoryReader {\n public init() {}\n\n public func fetch(from address: Address,\n into buffer: UnsafeMutableRawBufferPointer) throws {\n buffer.baseAddress!.copyMemory(\n from: UnsafeRawPointer(bitPattern: UInt(address))!,\n byteCount: buffer.count\n )\n }\n\n public func fetch<T>(from address: Address, as type: T.Type) throws -> T {\n let ptr = UnsafeRawPointer(bitPattern: UInt(address))!\n return ptr.loadUnaligned(fromByteOffset: 0, as: type)\n }\n\n public func fetchString(from address: Address) throws -> String? {\n let ptr = UnsafeRawPointer(bitPattern: UInt(address))!\n return String(validatingUTF8: ptr.assumingMemoryBound(to: CChar.self))\n }\n}\n\n#if os(macOS)\n@_spi(MemoryReaders) public struct MachError: Error {\n var result: kern_return_t\n}\n\n@_spi(MemoryReaders)\npublic struct UncachedRemoteMemoryReader: MemoryReader {\n private var task: task_t\n\n // Sadly we can't expose the type of this argument\n public init(task: Any) {\n self.task = task as! 
task_t\n }\n\n public func fetch(from address: Address,\n into buffer: UnsafeMutableRawBufferPointer) throws {\n let size = buffer.count\n var sizeOut = UInt64(0)\n let result = mach_vm_read_overwrite(task,\n UInt64(address),\n UInt64(size),\n mach_vm_address_t(\n Int(bitPattern: buffer.baseAddress)\n ),\n &sizeOut)\n\n if result != KERN_SUCCESS {\n throw MachError(result: result)\n }\n }\n}\n\n@_spi(MemoryReaders)\npublic struct UncachedLocalMemoryReader: MemoryReader {\n public typealias Address = UInt64\n public typealias Size = UInt64\n\n public func fetch(from address: Address,\n into buffer: UnsafeMutableRawBufferPointer) throws {\n let reader = UncachedRemoteMemoryReader(task: mach_task_self())\n return try reader.fetch(from: address, into: buffer)\n }\n}\n#endif\n\n#if os(Linux)\n@_spi(MemoryReaders) public struct POSIXError: Error {\n var errno: CInt\n}\n\n@_spi(MemoryReaders) public struct MemserverError: Error {\n var message: String\n}\n\n@_spi(MemoryReaders)\npublic struct UncachedMemserverMemoryReader: MemoryReader {\n private var fd: CInt\n\n public init(fd: CInt) {\n self.fd = fd\n }\n\n private func safeRead(_ fd: CInt, _ buffer: UnsafeMutableRawBufferPointer) throws -> Int {\n var done = 0\n while done < buffer.count {\n var ret: ssize_t = 0\n repeat {\n ret = read(fd, buffer.baseAddress! + done, buffer.count - done)\n } while ret < 0 && _swift_get_errno() == EINTR\n if ret < 0 {\n throw POSIXError(errno: _swift_get_errno())\n }\n if ret == 0 {\n break\n }\n done += Int(ret)\n }\n\n return done\n }\n\n private func safeWrite(_ fd: CInt, _ buffer: UnsafeRawBufferPointer) throws -> Int {\n var done = 0\n while done < buffer.count {\n var ret: ssize_t = 0\n repeat {\n ret = write(fd, buffer.baseAddress! 
+ done, buffer.count - done)\n } while ret < 0 && _swift_get_errno() == EINTR\n if ret < 0 {\n throw POSIXError(errno: _swift_get_errno())\n }\n if ret == 0 {\n break\n }\n done += Int(ret)\n }\n\n return done\n }\n\n private func sendRequest(for bytes: Size, from addr: Address) throws {\n var request = memserver_req(addr: addr, len: bytes)\n try withUnsafeBytes(of: &request){ ptr in\n let ret = try safeWrite(fd, ptr)\n if ret != ptr.count {\n throw MemserverError(message: "Channel closed prematurely")\n }\n }\n }\n\n private func receiveReply() throws -> memserver_resp {\n var response = memserver_resp(addr: 0, len: 0)\n try withUnsafeMutableBytes(of: &response){ ptr in\n let ret = try safeRead(fd, ptr)\n if ret != ptr.count {\n throw MemserverError(message: "Channel closed prematurely")\n }\n }\n return response\n }\n\n public func fetch(from addr: Address,\n into buffer: UnsafeMutableRawBufferPointer) throws {\n try sendRequest(for: Size(buffer.count), from: addr)\n\n var done = 0\n while done < buffer.count {\n let reply = try receiveReply()\n\n if reply.len < 0 {\n throw MemserverError(message: "Unreadable at \(hex(addr))")\n }\n\n if buffer.count - done < Int(reply.len) {\n throw MemserverError(message: "Overrun at \(hex(addr)) trying to read \(buffer.count) bytes")\n }\n\n let ret = try safeRead(fd,\n UnsafeMutableRawBufferPointer(\n rebasing: buffer[done..<done+Int(reply.len)]))\n\n if ret != reply.len {\n throw MemserverError(message: "Channel closed prematurely")\n }\n\n done += Int(reply.len)\n }\n }\n}\n\n@_spi(MemoryReaders)\npublic struct UncachedRemoteMemoryReader: MemoryReader {\n private var pid: pid_t\n\n public init(pid: Any) {\n self.pid = pid as! 
pid_t\n }\n\n public func fetch(from address: Address,\n into buffer: UnsafeMutableRawBufferPointer) throws {\n let size = buffer.count\n var fromIOVec = iovec(iov_base: UnsafeMutableRawPointer(\n bitPattern: UInt(address)),\n iov_len: size)\n var toIOVec = iovec(iov_base: buffer.baseAddress, iov_len: size)\n let result = process_vm_readv(pid, &toIOVec, 1, &fromIOVec, 1, 0)\n if result != size {\n throw POSIXError(errno: _swift_get_errno())\n }\n }\n}\n\n@_spi(MemoryReaders)\npublic struct UncachedLocalMemoryReader: MemoryReader {\n private var reader: RemoteMemoryReader\n\n init() {\n reader = RemoteMemoryReader(pid: getpid())\n }\n\n public func fetch(from address: Address,\n into buffer: UnsafeMutableRawBufferPointer) throws {\n return try reader.fetch(from: address, into: buffer)\n }\n}\n#endif\n
dataset_sample\swift\swift\cpp_apple_swift_stdlib_public_RuntimeModule_MemoryReader.swift
cpp_apple_swift_stdlib_public_RuntimeModule_MemoryReader.swift
Swift
9,650
0.95
0.132308
0.137546
node-utils
697
2025-03-13T19:50:35.473171
GPL-3.0
false
1922aa5875b3774007b475fae3314abd
//===--- OSReleaseScanner.swift --------------------------------*- swift -*-===//\n//\n// This source file is part of the Swift.org open source project\n//\n// Copyright (c) 2023 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n//\n// Defines OSReleaseScanner, which is for scanning the /etc/os-release\n// file on Linux.\n//\n//===----------------------------------------------------------------------===//\n\n#if os(Linux)\n\nimport Swift\n\n// Lines in /etc/os-release consist of KEY=VALUE pairs.\n//\n// The VALUE may be quoted with single quotes, in which case its contents\n// are left alone.\n//\n// It may also be quoted with double quotes, in which case slash escapes\n// are processed.\n//\n// If it is unquoted, whitespace will be stripped.\n\nstruct OSReleaseScanner<S: StringProtocol>: Sequence, IteratorProtocol {\n typealias SS = S.SubSequence\n\n private enum State {\n case normal\n case badLine\n case comment\n case key\n case beforeEquals\n case beforeValue\n case value\n case valueWhitespace\n case singleQuote\n case doubleQuote\n case escape\n case awaitingNewline\n }\n\n private var asString: S\n private var asUTF8: S.UTF8View\n private var pos: S.UTF8View.Index\n private var state: State\n\n init(_ string: S) {\n asString = string\n asUTF8 = string.utf8\n pos = asUTF8.startIndex\n state = .normal\n }\n\n mutating func next() -> (String, String)? 
{\n var chunkStart = pos\n var whitespaceStart = pos\n var key: String = ""\n var quotedValue: String = ""\n\n while pos < asUTF8.endIndex {\n let ch = asUTF8[pos]\n switch state {\n case .normal:\n if ch == 32 || ch == 9 || ch == 13 || ch == 10 {\n break\n }\n if ch == UInt8(ascii: "#") {\n state = .comment\n break\n }\n chunkStart = pos\n state = .key\n case .badLine, .comment, .awaitingNewline:\n if ch == 13 || ch == 10 {\n state = .normal\n }\n case .key:\n if ch == 32 || ch == 9 {\n key = String(asString[chunkStart..<pos])\n state = .beforeEquals\n break\n }\n if ch == 13 || ch == 10 {\n state = .normal\n break\n }\n if ch == UInt8(ascii: "=") {\n key = String(asString[chunkStart..<pos])\n state = .beforeValue\n break\n }\n case .beforeEquals:\n if ch == UInt8(ascii: "=") {\n state = .beforeValue\n break\n }\n if ch == 32 || ch == 9 {\n break\n }\n state = .badLine\n case .beforeValue:\n if ch == 32 || ch == 9 {\n break\n }\n if ch == UInt8(ascii: "\"") {\n state = .doubleQuote\n chunkStart = asUTF8.index(after: pos)\n quotedValue = ""\n break\n }\n if ch == UInt8(ascii: "'") {\n state = .singleQuote\n chunkStart = asUTF8.index(after: pos)\n break\n }\n chunkStart = pos\n state = .value\n case .value:\n if ch == 13 || ch == 10 {\n let value = String(asString[chunkStart..<pos])\n state = .normal\n return (key, value)\n }\n if ch == 32 || ch == 9 {\n state = .valueWhitespace\n whitespaceStart = pos\n }\n case .valueWhitespace:\n if ch == 13 || ch == 10 {\n let value = String(asString[chunkStart..<whitespaceStart])\n state = .normal\n return (key, value)\n }\n if ch != 32 && ch != 9 {\n state = .value\n }\n case .singleQuote:\n if ch == UInt8(ascii: "'") {\n let value = String(asString[chunkStart..<pos])\n state = .awaitingNewline\n return (key, value)\n }\n case .doubleQuote:\n if ch == UInt8(ascii: "\\") {\n let chunk = String(asString[chunkStart..<pos])\n quotedValue += chunk\n chunkStart = asUTF8.index(after: pos)\n state = .escape\n break\n }\n if ch == 
UInt8(ascii: "\"") {\n let chunk = String(asString[chunkStart..<pos])\n quotedValue += chunk\n state = .awaitingNewline\n return (key, quotedValue)\n }\n case .escape:\n let toEscape = asString[chunkStart...pos]\n switch toEscape {\n case "n":\n quotedValue += "\n"\n case "t":\n quotedValue += "\t"\n default:\n quotedValue += toEscape\n }\n chunkStart = asUTF8.index(after: pos)\n state = .doubleQuote\n }\n\n pos = asUTF8.index(after: pos)\n }\n\n return nil\n }\n}\n\n#endif // os(Linux)\n
dataset_sample\swift\swift\cpp_apple_swift_stdlib_public_RuntimeModule_OSReleaseScanner.swift
cpp_apple_swift_stdlib_public_RuntimeModule_OSReleaseScanner.swift
Swift
5,118
0.95
0.134409
0.155172
react-lib
369
2025-06-01T03:55:40.554118
MIT
false
9b3868084a716af6811dc9d9049f73c3
//===--- ProcMapsScanner.swift --------------------------------*- swift -*-===//
//
// This source file is part of the Swift.org open source project
//
// Copyright (c) 2023 Apple Inc. and the Swift project authors
// Licensed under Apache License v2.0 with Runtime Library Exception
//
// See https://swift.org/LICENSE.txt for license information
// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
//
//===----------------------------------------------------------------------===//
//
// Defines ProcMapsScanner, which is for scanning the /proc/<pid>/maps
// pseudofiles on Linux.
//
//===----------------------------------------------------------------------===//

#if os(Linux)

import Swift

// Lines in /proc/pid/maps files match the following regex:
//
//   ^(?<start>[A-Fa-f0-9]+)-(?<end>[A-Fa-f0-9]+)\s+
//   (?<perms>[-rwxsp]{4})\s+
//   (?<offset>[A-Fa-f0-9]+)\s+
//   (?<major>[A-Fa-f0-9]+):(?<minor>[A-Fa-f0-9]+)\s+
//   (?<inode>\d+)\s+
//   (?<pathname>.*)\s*$

/// Scans the text of a /proc/<pid>/maps pseudofile, yielding one `Match`
/// per well-formed line.  Malformed lines are skipped.
struct ProcMapsScanner<S: StringProtocol>: Sequence, IteratorProtocol {
  typealias SS = S.SubSequence

  /// The fields of a single maps line, as substrings of the input.
  struct Match {
    var start: SS
    var end: SS
    var perms: SS
    var offset: SS
    var major: SS
    var minor: SS
    var inode: SS
    var pathname: SS
  }

  /// States for the hand-rolled scanner below; each corresponds to a
  /// position within the line format documented above.
  private enum State {
    case start
    case scanningStart
    case scanningEnd
    case afterEnd
    case scanningPerms(Int)
    case afterPerms
    case scanningOffset
    case afterOffset
    case scanningMajor
    case scanningMinor
    case afterMinor
    case scanningInode
    case afterInode
    case scanningPathname
    case scanningPathnameWhitespace
  }

  private var procMaps: S
  private var procMapsUTF8: S.UTF8View
  private var ndx: S.UTF8View.Index

  /// Create a scanner over `string`, which holds the full contents of a
  /// /proc/<pid>/maps file.
  init(_ string: S) {
    procMaps = string
    procMapsUTF8 = string.utf8
    ndx = procMapsUTF8.startIndex
  }

  /// Scan a single maps line, starting at `ndx`.
  ///
  /// On success, leaves `ndx` just past the line's terminating newline (or
  /// at the end of input) and returns the captured fields.  Returns `nil`
  /// as soon as a character that cannot appear in the current field is
  /// seen; `next()` then skips the rest of the line and retries.
  mutating func scanMatch() -> Match? {
    var match: Match = Match(start: "",
                             end: "",
                             perms: "",
                             offset: "",
                             major: "",
                             minor: "",
                             inode: "",
                             pathname: "")
    var currentChunk = ndx
    var state: State = .start

    // Character classes used by the state machine.
    func isPerm(_ ch: UInt8) -> Bool {
      return ch == UInt8(ascii: "-") || ch == UInt8(ascii: "r")
        || ch == UInt8(ascii: "w") || ch == UInt8(ascii: "x")
        || ch == UInt8(ascii: "s") || ch == UInt8(ascii: "p")
    }

    func isDecimal(_ ch: UInt8) -> Bool {
      return ch >= UInt8(ascii: "0") && ch <= UInt8(ascii: "9")
    }

    func isHex(_ ch: UInt8) -> Bool {
      return ch >= UInt8(ascii: "A") && ch <= UInt8(ascii: "F")
        || ch >= UInt8(ascii: "a") && ch <= UInt8(ascii: "f")
        || ch >= UInt8(ascii: "0") && ch <= UInt8(ascii: "9")
    }

    // Note: deliberately does NOT include newline; end-of-line is handled
    // explicitly in the states below.
    func isWhitespace(_ ch: UInt8) -> Bool {
      return ch == UInt8(ascii: " ") || ch == UInt8(ascii: "\t")
    }

    while ndx < procMapsUTF8.endIndex {
      let ch = procMapsUTF8[ndx]
      let next = procMapsUTF8.index(after: ndx)

      switch state {
      case .start:
        if !isHex(ch) {
          return nil
        }
        state = .scanningStart
      case .scanningStart:
        if ch == UInt8(ascii: "-") {
          match.start = procMaps[currentChunk..<ndx]
          state = .scanningEnd
          currentChunk = next
        } else if !isHex(ch) {
          return nil
        }
      case .scanningEnd:
        if isWhitespace(ch) {
          match.end = procMaps[currentChunk..<ndx]
          state = .afterEnd
        } else if !isHex(ch) {
          return nil
        }
      case .afterEnd:
        if isPerm(ch) {
          currentChunk = ndx
          state = .scanningPerms(1)
        } else if !isWhitespace(ch) {
          return nil
        }
      case let .scanningPerms(length):
        // The perms field is exactly four characters from [-rwxsp].
        if length == 4 {
          if isWhitespace(ch) {
            match.perms = procMaps[currentChunk..<ndx]
            state = .afterPerms
          } else {
            return nil
          }
        } else if isPerm(ch) {
          state = .scanningPerms(length + 1)
        } else {
          return nil
        }
      case .afterPerms:
        if isHex(ch) {
          currentChunk = ndx
          state = .scanningOffset
        } else if !isWhitespace(ch) {
          return nil
        }
      case .scanningOffset:
        if isWhitespace(ch) {
          match.offset = procMaps[currentChunk..<ndx]
          state = .afterOffset
        } else if !isHex(ch) {
          return nil
        }
      case .afterOffset:
        if isHex(ch) {
          currentChunk = ndx
          state = .scanningMajor
        } else if !isWhitespace(ch) {
          return nil
        }
      case .scanningMajor:
        if ch == UInt8(ascii: ":") {
          match.major = procMaps[currentChunk..<ndx]
          state = .scanningMinor
          currentChunk = next
        } else if !isHex(ch) {
          return nil
        }
      case .scanningMinor:
        if isWhitespace(ch) {
          match.minor = procMaps[currentChunk..<ndx]
          state = .afterMinor
        } else if !isHex(ch) {
          return nil
        }
      case .afterMinor:
        if isDecimal(ch) {
          currentChunk = ndx
          state = .scanningInode
        } else if !isWhitespace(ch) {
          return nil
        }
      case .scanningInode:
        if isWhitespace(ch) {
          match.inode = procMaps[currentChunk..<ndx]
          state = .afterInode
        } else if !isDecimal(ch) {
          return nil
        }
      case .afterInode:
        if ch == 0x0a {
          // Anonymous mapping with no pathname.
          ndx = next
          return match
        } else if !isWhitespace(ch) {
          currentChunk = ndx
          state = .scanningPathname
        }
      case .scanningPathname:
        if isWhitespace(ch) || ch == 0x0a {
          match.pathname = procMaps[currentChunk..<ndx]
          state = .scanningPathnameWhitespace
          if ch == 0x0a {
            ndx = next
            return match
          }
        }
      case .scanningPathnameWhitespace:
        // Check for newline *first*: '\n' is not matched by isWhitespace(),
        // so testing !isWhitespace(ch) before the newline check (as an
        // earlier version did) would re-enter .scanningPathname and run
        // past the end of the line, corrupting the pathname.
        if ch == 0x0a {
          ndx = next
          return match
        } else if !isWhitespace(ch) {
          // More pathname after embedded whitespace; currentChunk is kept
          // so that spaces *inside* a pathname are preserved while
          // trailing whitespace is still trimmed.
          state = .scanningPathname
        }
      }

      ndx = next
    }

    // Input ended without a trailing newline; close out any field that was
    // still being scanned.
    switch state {
    case .scanningPathname:
      match.pathname = procMaps[currentChunk...]
    case .scanningInode:
      match.inode = procMaps[currentChunk...]
    default:
      break
    }

    return match
  }

  /// Return the next successfully parsed maps line, or `nil` when the
  /// input is exhausted.  Lines that fail to parse are skipped.
  mutating func next() -> Match? {
    while ndx != procMapsUTF8.endIndex {
      if let match = scanMatch() {
        return match
      }

      // If we failed to match, skip to end of line and retry
      while ndx != procMapsUTF8.endIndex {
        let ch = procMapsUTF8[ndx]
        ndx = procMapsUTF8.index(after: ndx)
        if ch == 0x0a {
          break
        }
      }
    }

    return nil
  }
}

#endif // os(Linux)
dataset_sample\swift\swift\cpp_apple_swift_stdlib_public_RuntimeModule_ProcMapsScanner.swift
cpp_apple_swift_stdlib_public_RuntimeModule_ProcMapsScanner.swift
Swift
7,094
0.95
0.160156
0.115385
vue-tools
337
2025-06-23T01:52:04.617612
GPL-3.0
false
b9c92f35cd4f3b59fe77594aef166e59
//===--- Registers.swift - Dwarf register mapping -------------------------===//
//
// This source file is part of the Swift.org open source project
//
// Copyright (c) 2022 Apple Inc. and the Swift project authors
// Licensed under Apache License v2.0 with Runtime Library Exception
//
// See https://swift.org/LICENSE.txt for license information
// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
//
//===----------------------------------------------------------------------===//
//
// Holds enums that define DWARF register mappings for the architectures we
// care about.
//
//===----------------------------------------------------------------------===//

import Swift

// .. x86-64 .................................................................

// https://gitlab.com/x86-psABIs/x86-64-ABI
/// DWARF register numbers for x86-64, per the x86-64 psABI.
@_spi(Registers) public enum X86_64Register: Int, Strideable, Comparable {

  // Strideable support so ranges of registers can be expressed; the
  // force-unwrap assumes callers never stride into a reserved number.
  public func advanced(by n: Int) -> X86_64Register {
    return X86_64Register(rawValue: self.rawValue + n)!
  }

  public func distance(to other: X86_64Register) -> Int {
    return other.rawValue - self.rawValue
  }

  public static func < (lhs: Self, rhs: Self) -> Bool {
    return lhs.rawValue < rhs.rawValue
  }

case rax = 0
case rdx = 1
case rcx = 2
case rbx = 3
case rsi = 4
case rdi = 5
case rbp = 6
case rsp = 7
case r8 = 8
case r9 = 9
case r10 = 10
case r11 = 11
case r12 = 12
case r13 = 13
case r14 = 14
case r15 = 15
case ra = 16
case xmm0 = 17
case xmm1 = 18
case xmm2 = 19
case xmm3 = 20
case xmm4 = 21
case xmm5 = 22
case xmm6 = 23
case xmm7 = 24
case xmm8 = 25
case xmm9 = 26
case xmm10 = 27
case xmm11 = 28
case xmm12 = 29
case xmm13 = 30
case xmm14 = 31
case xmm15 = 32
case st0 = 33
case st1 = 34
case st2 = 35
case st3 = 36
case st4 = 37
case st5 = 38
case st6 = 39
case st7 = 40
case mm0 = 41
case mm1 = 42
case mm2 = 43
case mm3 = 44
case mm4 = 45
case mm5 = 46
case mm6 = 47
case mm7 = 48
case rflags = 49
case es = 50
case cs = 51
case ss = 52
case ds = 53
case fs = 54
case gs = 55
  // 56-57 are reserved
case fs_base = 58
case gs_base = 59
  // 60-61 are reserved
case tr = 62
case ldtr = 63
case mxcsr = 64
case fcw = 65
case fsw = 66
case xmm16 = 67
case xmm17 = 68
case xmm18 = 69
case xmm19 = 70
case xmm20 = 71
case xmm21 = 72
case xmm22 = 73
case xmm23 = 74
case xmm24 = 75
case xmm25 = 76
case xmm26 = 77
case xmm27 = 78
case xmm28 = 79
case xmm29 = 80
case xmm30 = 81
case xmm31 = 82
  // 83-117 are reserved
case k0 = 118
case k1 = 119
case k2 = 120
case k3 = 121
case k4 = 122
case k5 = 123
case k6 = 124
case k7 = 125
  // 126-129 are reserved
}

// .. i386 ...................................................................

// https://gitlab.com/x86-psABIs/i386-ABI
/// DWARF register numbers for i386, per the i386 psABI.
@_spi(Registers) public enum I386Register: Int, Strideable, Comparable {

  // Strideable support so ranges of registers can be expressed; the
  // force-unwrap assumes callers never stride into a reserved number.
  public func advanced(by n: Int) -> I386Register {
    return I386Register(rawValue: self.rawValue + n)!
  }

  public func distance(to other: I386Register) -> Int {
    return other.rawValue - self.rawValue
  }

  public static func < (lhs: Self, rhs: Self) -> Bool {
    return lhs.rawValue < rhs.rawValue
  }

case eax = 0
case ecx = 1
case edx = 2
case ebx = 3
case esp = 4
case ebp = 5
case esi = 6
case edi = 7
case ra = 8
case eflags = 9
  // 10 is reserved
case st0 = 11
case st1 = 12
case st2 = 13
case st3 = 14
case st4 = 15
case st5 = 16
case st6 = 17
case st7 = 18
  // 19-20 are reserved
case xmm0 = 21
case xmm1 = 22
case xmm2 = 23
case xmm3 = 24
case xmm4 = 25
case xmm5 = 26
case xmm6 = 27
case xmm7 = 28
case mm0 = 29
case mm1 = 30
case mm2 = 31
case mm3 = 32
case mm4 = 33
case mm5 = 34
case mm6 = 35
case mm7 = 36
  // 37-38 are reserved
case mxcsr = 39
case es = 40
case cs = 41
case ss = 42
case ds = 43
case fs = 44
case gs = 45
  // 46-47 are reserved
case tr = 48
case ldtr = 49
  // 50-92 are reserved
case fs_base = 93
case gs_base = 94
}

// .. arm64 ..................................................................

// https://github.com/ARM-software/abi-aa/tree/main/aadwarf64
/// DWARF register numbers for AArch64, per the AADWARF64 specification.
@_spi(Registers) public enum ARM64Register: Int, Strideable, Comparable {

  // Strideable support so ranges of registers can be expressed; the
  // force-unwrap assumes callers never stride into a reserved number.
  public func advanced(by n: Int) -> ARM64Register {
    return ARM64Register(rawValue: self.rawValue + n)!
  }

  public func distance(to other: ARM64Register) -> Int {
    return other.rawValue - self.rawValue
  }

  public static func < (lhs: Self, rhs: Self) -> Bool {
    return lhs.rawValue < rhs.rawValue
  }

case x0 = 0
case x1 = 1
case x2 = 2
case x3 = 3
case x4 = 4
case x5 = 5
case x6 = 6
case x7 = 7
case x8 = 8
case x9 = 9
case x10 = 10
case x11 = 11
case x12 = 12
case x13 = 13
case x14 = 14
case x15 = 15
case x16 = 16
case x17 = 17
case x18 = 18
case x19 = 19
case x20 = 20
case x21 = 21
case x22 = 22
case x23 = 23
case x24 = 24
case x25 = 25
case x26 = 26
case x27 = 27
case x28 = 28
case x29 = 29 // fp
case x30 = 30 // lr
case sp = 31 // x31
case pc = 32
case elr_mode = 33
case ra_sign_state = 34
case tpidrro_el0 = 35
case tpidr_el0 = 36
case tpidr_el1 = 37
case tpidr_el2 = 38
case tpidr_el3 = 39
  // 40-45 are reserved
case vg = 46
case ffr = 47
case p0 = 48
case p1 = 49
case p2 = 50
case p3 = 51
case p4 = 52
case p5 = 53
case p6 = 54
case p7 = 55
case p8 = 56
case p9 = 57
case p10 = 58
case p11 = 59
case p12 = 60
case p13 = 61
case p14 = 62
case p15 = 63
case v0 = 64
case v1 = 65
case v2 = 66
case v3 = 67
case v4 = 68
case v5 = 69
case v6 = 70
case v7 = 71
case v8 = 72
case v9 = 73
case v10 = 74
case v11 = 75
case v12 = 76
case v13 = 77
case v14 = 78
case v15 = 79
case v16 = 80
case v17 = 81
case v18 = 82
case v19 = 83
case v20 = 84
case v21 = 85
case v22 = 86
case v23 = 87
case v24 = 88
case v25 = 89
case v26 = 90
case v27 = 91
case v28 = 92
case v29 = 93
case v30 = 94
case v31 = 95
case z0 = 96
case z1 = 97
case z2 = 98
case z3 = 99
case z4 = 100
case z5 = 101
case z6 = 102
case z7 = 103
case z8 = 104
case z9 = 105
case z10 = 106
case z11 = 107
case z12 = 108
case z13 = 109
case z14 = 110
case z15 = 111
case z16 = 112
case z17 = 113
case z18 = 114
case z19 = 115
case z20 = 116
case z21 = 117
case z22 = 118
case z23 = 119
case z24 = 120
case z25 = 121
case z26 = 122
case z27 = 123
case z28 = 124
case z29 = 125
case z30 = 126
case z31 = 127
}

// .. arm ....................................................................

// https://github.com/ARM-software/abi-aa/tree/main/aadwarf32
/// DWARF register numbers for 32-bit Arm, per the AADWARF32 specification.
@_spi(Registers) public enum ARMRegister: Int, Strideable, Comparable {

  // Strideable support so ranges of registers can be expressed; the
  // force-unwrap assumes callers never stride into a reserved number.
  public func advanced(by n: Int) -> ARMRegister {
    return ARMRegister(rawValue: self.rawValue + n)!
  }

  public func distance(to other: ARMRegister) -> Int {
    return other.rawValue - self.rawValue
  }

  public static func < (lhs: Self, rhs: Self) -> Bool {
    return lhs.rawValue < rhs.rawValue
  }

case r0 = 0
case r1 = 1
case r2 = 2
case r3 = 3
case r4 = 4
case r5 = 5
case r6 = 6
case r7 = 7
case r8 = 8
case r9 = 9
case r10 = 10
case r11 = 11 // fp
case r12 = 12 // ip - scratch register (NOT "instruction pointer")
case r13 = 13 // sp
case r14 = 14 // lr
case r15 = 15 // pc

  // Obsolescent, overlapping mappings for FPA and VFP
case old_f0_s0 = 16
case old_f1_s1 = 17
case old_f2_s2 = 18
case old_f3_s3 = 19
case old_f4_s4 = 20
case old_f5_s5 = 21
case old_f6_s6 = 22
case old_f7_s7 = 23
case old_s8 = 24
case old_s9 = 25
case old_s10 = 26
case old_s11 = 27
case old_s12 = 28
case old_s13 = 29
case old_s14 = 30
case old_s15 = 31
case old_s16 = 32
case old_s17 = 33
case old_s18 = 34
case old_s19 = 35
case old_s20 = 36
case old_s21 = 37
case old_s22 = 38
case old_s23 = 39
case old_s24 = 40
case old_s25 = 41
case old_s26 = 42
case old_s27 = 43
case old_s28 = 44
case old_s29 = 45
case old_s30 = 46
case old_s31 = 47

  // Legacy VFPv2
case s0 = 64
case s1 = 65
case s2 = 66
case s3 = 67
case s4 = 68
case s5 = 69
case s6 = 70
case s7 = 71
case s8 = 72
case s9 = 73
case s10 = 74
case s11 = 75
case s12 = 76
case s13 = 77
case s14 = 78
case s15 = 79
case s16 = 80
case s17 = 81
case s18 = 82
case s19 = 83
case s20 = 84
case s21 = 85
case s22 = 86
case s23 = 87
case s24 = 88
case s25 = 89
case s26 = 90
case s27 = 91
case s28 = 92
case s29 = 93
case s30 = 94
case s31 = 95

  // Obsolescent FPA registers
case f0 = 96
case f1 = 97
case f2 = 98
case f3 = 99
case f4 = 100
case f5 = 101
case f6 = 102
case f7 = 103

  // Intel wireless MMX GPRs / XScale accumulators
case wcgr0_acc0 = 104
case wcgr1_acc1 = 105
case wcgr2_acc2 = 106
case wcgr3_acc3 = 107
case wcgr4_acc4 = 108
case wcgr5_acc5 = 109
case wcgr6_acc6 = 110
case wcgr7_acc7 = 111

  // Intel wireless MMX data registers
case wr0 = 112
case wr1 = 113
case wr2 = 114
case wr3 = 115
case wr4 = 116
case wr5 = 117
case wr6 = 118
case wr7 = 119
case wr8 = 120
case wr9 = 121
case wr10 = 122
case wr11 = 123
case wr12 = 124
case wr13 = 125
case wr14 = 126
case wr15 = 127

case spsr = 128
case spsr_fiq = 129
case spsr_irq = 130
case spsr_abt = 131
case spsr_und = 132
case spsr_svc = 133

  // 134-142 are reserved

case ra_auth_code = 143

case r8_usr = 144
case r9_usr = 145
case r10_usr = 146
case r11_usr = 147
case r12_usr = 148
case r13_usr = 149
case r14_usr = 150

case r8_fiq = 151
case r9_fiq = 152
case r10_fiq = 153
case r11_fiq = 154
case r12_fiq = 155
case r13_fiq = 156
case r14_fiq = 157

case r13_irq = 158
case r14_irq = 159

case r13_abt = 160
case r14_abt = 161

case r13_und = 162
case r14_und = 163

case r13_svc = 164
case r14_svc = 165

  // 166-191 are reserved

  // Intel wireless MMX control registers
case wc0 = 192
case wc1 = 193
case wc2 = 194
case wc3 = 195
case wc4 = 196
case wc5 = 197
case wc6 = 198
case wc7 = 199

  // 200-255 are reserved

case d0 = 256
case d1 = 257
case d2 = 258
case d3 = 259
case d4 = 260
case d5 = 261
case d6 = 262
case d7 = 263
case d8 = 264
case d9 = 265
case d10 = 266
case d11 = 267
case d12 = 268
case d13 = 269
case d14 = 270
case d15 = 271
case d16 = 272
case d17 = 273
case d18 = 274
case d19 = 275
case d20 = 276
case d21 = 277
case d22 = 278
case d23 = 279
case d24 = 280
case d25 = 281
case d26 = 282
case d27 = 283
case d28 = 284
case d29 = 285
case d30 = 286
case d31 = 287

  // 288-319 are reserved

case tpidruro = 320
case tpidrurw = 321
case tpidpr = 322
case htpidpr = 323

  // 324-8191 are reserved
  // 8192-16383 are for vendor co-processors
}

// The DWARF register mapping for whatever architecture we were compiled for.
#if arch(x86_64)
@_spi(Registers) public typealias HostRegister = X86_64Register
#elseif arch(i386)
@_spi(Registers) public typealias HostRegister = I386Register
#elseif arch(arm64) || arch(arm64_32)
@_spi(Registers) public typealias HostRegister = ARM64Register
#elseif arch(arm)
@_spi(Registers) public typealias HostRegister = ARMRegister
#endif
dataset_sample\swift\swift\cpp_apple_swift_stdlib_public_RuntimeModule_Registers.swift
cpp_apple_swift_stdlib_public_RuntimeModule_Registers.swift
Swift
10,540
0.95
0.010239
0.09462
vue-tools
856
2025-01-12T09:23:47.295909
GPL-3.0
false
ee8b5eeb193d4fa629f6206f5184f1a1
//===--- RichFrame.swift --------------------------------------*- swift -*-===//
//
// This source file is part of the Swift.org open source project
//
// Copyright (c) 2024 Apple Inc. and the Swift project authors
// Licensed under Apache License v2.0 with Runtime Library Exception
//
// See https://swift.org/LICENSE.txt for license information
// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
//
//===----------------------------------------------------------------------===//
//
// Defines the default rich frame storage type used by `Backtrace`
//
//===----------------------------------------------------------------------===//

import Swift

/// The default rich frame storage type used by `Backtrace`.
///
/// Each address-bearing case records how the address was captured, which
/// determines whether it needs adjusting before symbolication; the two
/// remaining cases represent gaps in the captured stack.
@_spi(Internal)
public enum RichFrame<T: FixedWidthInteger>: CustomStringConvertible, Equatable {
  public typealias Address = T

  /// An address captured directly as a program counter — for example from
  /// a signal handler or an exception.
  ///
  /// Symbolicate as-is; no adjustment is required.
  case programCounter(Address)

  /// A return address from an ordinary function call.
  ///
  /// Points at the instruction *after* the one that created the child
  /// frame, so it must be adjusted before symbolication.
  case returnAddress(Address)

  /// An async resume point — an `await` in an async task.
  ///
  /// Symbolicate as-is.
  case asyncResumePoint(Address)

  /// A discontinuity of known length.
  ///
  /// Produced when both a frame limit and a minimum number of top frames
  /// are in effect; e.g. with a limit of 10 and 4 top frames over a
  /// 100-frame stack, frame 5 becomes `omittedFrames(92)`.  Note that the
  /// limit *includes* the discontinuity entry itself.  Useful for deep
  /// recursion.
  case omittedFrames(Int)

  /// A discontinuity of unknown length.
  ///
  /// Can only appear at the end of a backtrace; in every other position
  /// the number of omitted frames is known.
  case truncated

  /// The captured program counter, without any adjustment.
  public var originalProgramCounter: Address {
    switch self {
    case let .programCounter(addr),
         let .returnAddress(addr),
         let .asyncResumePoint(addr):
      return addr
    case .omittedFrames, .truncated:
      return 0
    }
  }

  /// The program counter to use for symbolication.
  ///
  /// Return addresses are stepped back by one byte so they fall inside the
  /// calling instruction; everything else is unchanged.
  public var adjustedProgramCounter: Address {
    switch self {
    case let .returnAddress(addr):
      return addr - 1
    case let .programCounter(addr),
         let .asyncResumePoint(addr):
      return addr
    case .omittedFrames, .truncated:
      return 0
    }
  }

  /// A textual description of this frame.
  public var description: String {
    switch self {
    case let .programCounter(addr): return "\(hex(addr))"
    case let .returnAddress(addr): return "\(hex(addr)) [ra]"
    case let .asyncResumePoint(addr): return "\(hex(addr)) [async]"
    case .omittedFrames, .truncated: return "..."
    }
  }
}

// MARK: - LimitableElement

extension RichFrame: LimitableElement {
  // LimitableElement spells this case "omitted".
  public static func omitted(_ count: Int) -> Self {
    return .omittedFrames(count)
  }
}

// MARK: - Conversion to Backtrace.Frame

extension Backtrace.Frame {
  /// Convert a `RichFrame` to the fixed-width `Backtrace.Frame` form.
  init<T>(_ frame: RichFrame<T>) {
    switch frame {
    case let .programCounter(addr):
      self = .programCounter(Backtrace.Address(addr)!)
    case let .returnAddress(addr):
      self = .returnAddress(Backtrace.Address(addr)!)
    case let .asyncResumePoint(addr):
      self = .asyncResumePoint(Backtrace.Address(addr)!)
    case let .omittedFrames(count):
      self = .omittedFrames(count)
    case .truncated:
      self = .truncated
    }
  }
}
dataset_sample\swift\swift\cpp_apple_swift_stdlib_public_RuntimeModule_RichFrame.swift
cpp_apple_swift_stdlib_public_RuntimeModule_RichFrame.swift
Swift
4,461
0.95
0.075342
0.5
node-utils
939
2024-03-11T16:04:31.644881
GPL-3.0
false
5d24af50ada86f2473310533fd8f4be9
//===--- Runtime.swift - Imports from libswiftCore ------------------------===//
//
// This source file is part of the Swift.org open source project
//
// Copyright (c) 2023 Apple Inc. and the Swift project authors
// Licensed under Apache License v2.0 with Runtime Library Exception
//
// See https://swift.org/LICENSE.txt for license information
// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
//
//===----------------------------------------------------------------------===//
//
// Since we are using C++ interop, the imported APIs end up inside a namespace,
// which makes everything very verbose. Define aliases in this file.
//
//===----------------------------------------------------------------------===//

import Swift

internal import BacktracingImpl.Runtime

// Crash information structure, imported from the C++ runtime's
// swift::runtime::backtrace namespace.
typealias CrashInfo = swift.runtime.backtrace.CrashInfo

#if os(Linux)
// Linux-only runtime types: the memory server request/response structures
// and the thread description structure, also from the C++ namespace.
typealias memserver_req = swift.runtime.backtrace.memserver_req
typealias memserver_resp = swift.runtime.backtrace.memserver_resp
typealias thread = swift.runtime.backtrace.thread
#endif
dataset_sample\swift\swift\cpp_apple_swift_stdlib_public_RuntimeModule_Runtime.swift
cpp_apple_swift_stdlib_public_RuntimeModule_Runtime.swift
Swift
1,063
0.95
0.107143
0.75
vue-tools
1
2024-04-01T08:48:48.915932
BSD-3-Clause
false
7e0c21b080df1509d337ca05a4280f07
//===--- Backtrace.swift --------------------------------------*- swift -*-===//\n//\n// This source file is part of the Swift.org open source project\n//\n// Copyright (c) 2023 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n//\n// Defines the `SymbolicatedBacktrace` struct that represents a captured\n// backtrace with symbols.\n//\n//===----------------------------------------------------------------------===//\n\nimport Swift\n\n#if os(macOS) || os(iOS) || os(watchOS) || os(tvOS)\ninternal import BacktracingImpl.OS.Darwin\n#endif\n\n#if os(macOS) || os(iOS) || os(tvOS) || os(watchOS)\ninternal import Darwin\n#elseif os(Windows)\ninternal import ucrt\n#elseif canImport(Glibc)\ninternal import Glibc\n#elseif canImport(Musl)\ninternal import Musl\n#endif\ninternal import BacktracingImpl.Runtime\n\n/// A symbolicated backtrace\npublic struct SymbolicatedBacktrace: CustomStringConvertible {\n /// The `Backtrace` from which this was constructed\n public var backtrace: Backtrace\n\n /// Represents a location in source code.\n ///\n /// The information in this structure comes from compiler-generated\n /// debug information and may not correspond to the current state of\n /// the filesystem --- it might even hold a path that only works\n /// from an entirely different machine.\n public struct SourceLocation: CustomStringConvertible, Sendable, Hashable {\n /// The path of the source file.\n public var path: String\n\n /// The line number.\n public var line: Int\n\n /// The column number.\n public var column: Int\n\n /// Provide a textual description.\n public var description: String {\n if column > 0 && line > 0 {\n return "\(path):\(line):\(column)"\n } else if line > 0 {\n 
return "\(path):\(line)"\n } else {\n return path\n }\n }\n }\n\n /// Represents an individual frame in the backtrace.\n public struct Frame: CustomStringConvertible {\n /// The captured frame from the `Backtrace`.\n public var captured: Backtrace.Frame\n\n /// The result of doing a symbol lookup for this frame.\n public var symbol: Symbol?\n\n /// If `true`, then this frame was inlined\n public var inlined: Bool = false\n\n /// `true` if this frame represents a Swift runtime failure.\n public var isSwiftRuntimeFailure: Bool {\n symbol?.isSwiftRuntimeFailure ?? false\n }\n\n /// `true` if this frame represents a Swift thunk function.\n public var isSwiftThunk: Bool {\n symbol?.isSwiftThunk ?? false\n }\n\n /// `true` if this frame is a system frame.\n public var isSystem: Bool {\n symbol?.isSystemFunction ?? false\n }\n\n /// A textual description of this frame.\n public var description: String {\n if let symbol = symbol {\n let isInlined = inlined ? " [inlined]" : ""\n let isThunk = isSwiftThunk ? 
" [thunk]" : ""\n return "\(captured.description)\(isInlined)\(isThunk) \(symbol)"\n } else {\n return captured.description\n }\n }\n }\n\n /// Represents a symbol we've located\n public class Symbol: CustomStringConvertible {\n /// The index of the image in which the symbol for this address is located.\n public var imageIndex: Int\n\n /// The name of the image in which the symbol for this address is located.\n public var imageName: String\n\n /// The raw symbol name, before demangling.\n public var rawName: String\n\n /// The demangled symbol name.\n public lazy var name: String = demangleRawName()\n\n /// The offset from the symbol.\n public var offset: Int\n\n /// The source location, if available.\n public var sourceLocation: SourceLocation?\n\n /// True if this symbol represents a Swift runtime failure.\n public var isSwiftRuntimeFailure: Bool {\n guard let sourceLocation = sourceLocation else {\n return false\n }\n\n let symName: Substring\n if rawName.hasPrefix("_") {\n symName = rawName.dropFirst()\n } else {\n symName = rawName.dropFirst(0)\n }\n\n return symName.hasPrefix("Swift runtime failure: ")\n && sourceLocation.line == 0\n && sourceLocation.column == 0\n && sourceLocation.path.hasSuffix("<compiler-generated>")\n }\n\n /// True if this symbol is a Swift thunk function.\n public var isSwiftThunk: Bool {\n return _swift_backtrace_isThunkFunction(rawName)\n }\n\n private func maybeUnderscore(_ sym: String) -> String {\n #if os(macOS) || os(iOS) || os(watchOS) || os(tvOS)\n return "_" + sym\n #else\n return sym\n #endif\n }\n\n private func dylibName(_ dylib: String) -> String {\n #if os(macOS) || os(iOS) || os(watchOS) || os(tvOS)\n return dylib + ".dylib"\n #else\n return dylib + ".so"\n #endif\n }\n\n /// True if this symbol represents a system function.\n ///\n /// For instance, the `start` function from `dyld` on macOS is a system\n /// function, and we don't need to display it under normal circumstances.\n public var isSystemFunction: Bool {\n #if 
os(macOS) || os(iOS) || os(watchOS) || os(tvOS)\n if rawName == "start" && imageName == "dyld" {\n return true\n }\n #endif\n if rawName.hasSuffix("5$mainyyFZ")\n || rawName.hasSuffix("5$mainyyYaFZTQ0_")\n || rawName == maybeUnderscore("async_MainTQ0_") {\n return true\n }\n if rawName == maybeUnderscore("_ZL23completeTaskWithClosurePN5swift12AsyncContextEPNS_10SwiftErrorE") && imageName == dylibName("libswift_Concurrency") {\n return true\n }\n if let location = sourceLocation,\n ((location.line == 0 && location.column == 0)\n || location.path.hasSuffix("<compiler-generated>"))\n && !_swift_backtrace_isThunkFunction(rawName) {\n return true\n }\n return false\n }\n\n /// Construct a new Symbol.\n public init(imageIndex: Int, imageName: String,\n rawName: String, offset: Int, sourceLocation: SourceLocation?) {\n self.imageIndex = imageIndex\n self.imageName = imageName\n self.rawName = rawName\n self.offset = offset\n self.sourceLocation = sourceLocation\n }\n\n /// Demangle the raw name, if possible.\n private func demangleRawName() -> String {\n var length: size_t = 0\n if let demangled = _swift_backtrace_demangle(rawName, rawName.utf8.count,\n nil, &length) {\n defer { free(demangled) }\n\n // length is the size of the buffer that was allocated, *not* the\n // length of the string.\n let stringLen = strlen(demangled)\n if stringLen > 0 {\n return demangled.withMemoryRebound(to: UInt8.self,\n capacity: stringLen) {\n let demangledBytes = UnsafeBufferPointer<UInt8>(start: $0,\n count: stringLen)\n return String(decoding: demangledBytes, as: UTF8.self)\n }\n }\n }\n return rawName\n }\n\n /// A textual description of this symbol.\n public var description: String {\n let symPlusOffset: String\n\n if offset > 0 {\n symPlusOffset = "\(name) + \(offset)"\n } else if offset < 0 {\n symPlusOffset = "\(name) - \(-offset)"\n } else {\n symPlusOffset = name\n }\n\n let location: String\n if let sourceLocation = sourceLocation {\n location = " at \(sourceLocation)"\n } else 
{\n location = ""\n }\n\n return "[\(imageIndex)] \(imageName) \(symPlusOffset)\(location)"\n }\n }\n\n /// The architecture on which this backtrace was captured.\n public var architecture: String { return backtrace.architecture }\n\n /// A list of captured frame information.\n public private(set) var frames: [Frame]\n\n /// A list of images found in the process.\n public private(set) var images: ImageMap\n\n /// True if this backtrace is a Swift runtime failure.\n public var isSwiftRuntimeFailure: Bool {\n guard let frame = frames.first else { return false }\n return frame.isSwiftRuntimeFailure\n }\n\n /// If this backtrace is a Swift runtime failure, return the description.\n public var swiftRuntimeFailure: String? {\n guard let frame = frames.first else { return nil }\n if !frame.isSwiftRuntimeFailure { return nil }\n\n let symbolName = frame.symbol!.rawName\n if symbolName.hasPrefix("_") {\n return String(symbolName.dropFirst())\n }\n return symbolName\n }\n\n /// Construct a SymbolicatedBacktrace from a backtrace and a list of images.\n private init(backtrace: Backtrace, images: ImageMap, frames: [Frame]) {\n self.backtrace = backtrace\n self.images = images\n self.frames = frames\n }\n\n #if os(macOS) || os(iOS) || os(watchOS) || os(tvOS)\n /// Convert a build ID to a CFUUIDBytes.\n private static func uuidBytesFromBuildID(_ buildID: [UInt8])\n -> CFUUIDBytes {\n return withUnsafeTemporaryAllocation(of: CFUUIDBytes.self,\n capacity: 1) { buf in\n buf.withMemoryRebound(to: UInt8.self) {\n _ = $0.initialize(from: buildID)\n }\n return buf[0]\n }\n }\n\n /// Create a symbolicator.\n private static func withSymbolicator<T>(images: ImageMap,\n useSymbolCache: Bool,\n fn: (CSSymbolicatorRef) throws -> T) rethrows -> T {\n let binaryImageList = images.map{ image in\n BinaryImageInformation(\n base: vm_address_t(image.baseAddress)!,\n extent: vm_address_t(image.endOfText)!,\n uuid: uuidBytesFromBuildID(image.uniqueID!),\n arch: 
HostContext.coreSymbolicationArchitecture,\n path: image.path ?? "",\n relocations: [\n BinaryRelocationInformation(\n base: vm_address_t(image.baseAddress)!,\n extent: vm_address_t(image.endOfText)!,\n name: "__TEXT"\n )\n ],\n flags: 0\n )\n }\n\n let symbolicator = CSSymbolicatorCreateWithBinaryImageList(\n binaryImageList,\n useSymbolCache ? 0 : kCSSymbolicatorDisallowDaemonCommunication,\n nil\n )\n\n defer { CSRelease(symbolicator) }\n\n return try fn(symbolicator)\n }\n\n /// Generate a frame from a symbol and source info pair\n private static func buildFrame(from capturedFrame: Backtrace.Frame,\n with owner: CSSymbolOwnerRef,\n isInline: Bool,\n symbol: CSSymbolRef,\n sourceInfo: CSSourceInfoRef?,\n images: ImageMap) -> Frame {\n if CSIsNull(symbol) {\n return Frame(captured: capturedFrame, symbol: nil)\n }\n\n let address = capturedFrame.originalProgramCounter\n let rawName = CSSymbolGetMangledName(symbol) ?? "<unknown>"\n let name = CSSymbolGetName(symbol) ?? rawName\n let range = CSSymbolGetRange(symbol)\n\n let location: SourceLocation?\n\n if let sourceInfo = sourceInfo, !CSIsNull(sourceInfo) {\n let path = CSSourceInfoGetPath(sourceInfo) ?? "<unknown>"\n let line = CSSourceInfoGetLineNumber(sourceInfo)\n let column = CSSourceInfoGetColumn(sourceInfo)\n\n location = SourceLocation(\n path: path,\n line: Int(line),\n column: Int(column)\n )\n } else {\n location = nil\n }\n\n let imageBase = CSSymbolOwnerGetBaseAddress(owner)\n var imageIndex = -1\n var imageName = ""\n for (ndx, image) in images.enumerated() {\n if vm_address_t(image.baseAddress) == imageBase {\n imageIndex = ndx\n imageName = image.name ?? "<unknown>"\n break\n }\n }\n\n let theSymbol = Symbol(imageIndex: imageIndex,\n imageName: imageName,\n rawName: rawName,\n offset: Int(UInt64(address)! 
- UInt64(range.location)),\n sourceLocation: location)\n theSymbol.name = name\n\n return Frame(captured: capturedFrame, symbol: theSymbol, inlined: isInline)\n }\n #endif\n\n /// Actually symbolicate.\n internal static func symbolicate(backtrace: Backtrace,\n images: ImageMap?,\n options: Backtrace.SymbolicationOptions)\n -> SymbolicatedBacktrace? {\n\n let theImages: ImageMap\n if let images = images {\n theImages = images\n } else if let images = backtrace.images {\n theImages = images\n } else {\n theImages = ImageMap.capture()\n }\n\n var frames: [Frame] = []\n\n #if os(macOS) || os(iOS) || os(watchOS) || os(tvOS)\n withSymbolicator(images: theImages,\n useSymbolCache: options.contains(.useSymbolCache)) {\n symbolicator in\n for frame in backtrace.frames {\n switch frame {\n case .omittedFrames(_), .truncated:\n frames.append(Frame(captured: frame, symbol: nil))\n default:\n let address = vm_address_t(frame.adjustedProgramCounter)!\n let owner\n = CSSymbolicatorGetSymbolOwnerWithAddressAtTime(symbolicator,\n address,\n kCSBeginningOfTime)\n\n if CSIsNull(owner) {\n frames.append(Frame(captured: frame, symbol: nil))\n } else if options.contains(.showInlineFrames) {\n // These present in *reverse* order (i.e. 
the real one first,\n // then the inlined frames from callee to caller).\n let pos = frames.count\n var first = true\n\n _ = CSSymbolOwnerForEachStackFrameAtAddress(owner, address) {\n symbol, sourceInfo in\n\n frames.insert(buildFrame(from: frame,\n with: owner,\n isInline: !first,\n symbol: symbol,\n sourceInfo: sourceInfo,\n images: theImages),\n at: pos)\n\n first = false\n }\n } else if options.contains(.showSourceLocations) {\n let symbol = CSSymbolOwnerGetSymbolWithAddress(owner, address)\n let sourceInfo = CSSymbolOwnerGetSourceInfoWithAddress(owner,\n address)\n\n frames.append(buildFrame(from: frame,\n with: owner,\n isInline: false,\n symbol: symbol,\n sourceInfo: sourceInfo,\n images: theImages))\n } else {\n let symbol = CSSymbolOwnerGetSymbolWithAddress(owner, address)\n\n frames.append(buildFrame(from: frame,\n with: owner,\n isInline: false,\n symbol: symbol,\n sourceInfo: nil,\n images: theImages))\n }\n }\n }\n }\n #elseif os(Linux)\n let cache = ElfImageCache.threadLocal\n\n // This could be more efficient; at the moment we execute the line\n // number programs once per frame, whereas we could just run them once\n // for all the addresses we're interested in.\n\n for frame in backtrace.frames {\n let address = frame.adjustedProgramCounter\n if let imageNdx = theImages.indexOfImage(at: address) {\n let relativeAddress = ImageSource.Address(\n address - theImages[imageNdx].baseAddress\n )\n let name = theImages[imageNdx].name ?? "<unknown>"\n var symbol: Symbol = Symbol(imageIndex: imageNdx,\n imageName: name,\n rawName: "<unknown>",\n offset: 0,\n sourceLocation: nil)\n\n func lookupSymbol<ElfImage: ElfSymbolLookupProtocol>(\n image: ElfImage?,\n at imageNdx: Int,\n named name: String,\n address imageAddr: ImageSource.Address\n ) -> Symbol? 
{\n let address = ElfImage.Traits.Address(imageAddr)\n\n guard let image = image else {\n return nil\n }\n guard let theSymbol = image.lookupSymbol(address: address) else {\n return nil\n }\n\n var location: SourceLocation?\n\n if options.contains(.showSourceLocations)\n || options.contains(.showInlineFrames) {\n location = try? image.sourceLocation(for: address)\n } else {\n location = nil\n }\n\n if options.contains(.showInlineFrames) {\n for inline in image.inlineCallSites(at: address) {\n let fakeSymbol = Symbol(imageIndex: imageNdx,\n imageName: name,\n rawName: inline.rawName ?? "<unknown>",\n offset: 0,\n sourceLocation: location)\n frames.append(Frame(captured: frame,\n symbol: fakeSymbol,\n inlined: true))\n\n location = SourceLocation(path: inline.filename,\n line: inline.line,\n column: inline.column)\n }\n }\n\n return Symbol(imageIndex: imageNdx,\n imageName: name,\n rawName: theSymbol.name,\n offset: theSymbol.offset,\n sourceLocation: location)\n }\n\n if let hit = cache.lookup(path: theImages[imageNdx].path) {\n switch hit {\n case let .elf32Image(image):\n if let theSymbol = lookupSymbol(image: image,\n at: imageNdx,\n named: name,\n address: relativeAddress) {\n symbol = theSymbol\n }\n case let .elf64Image(image):\n if let theSymbol = lookupSymbol(image: image,\n at: imageNdx,\n named: name,\n address: relativeAddress) {\n symbol = theSymbol\n }\n }\n }\n\n frames.append(Frame(captured: frame, symbol: symbol))\n continue\n }\n\n frames.append(Frame(captured: frame, symbol: nil))\n }\n #else\n frames = backtrace.frames.map{ Frame(captured: $0, symbol: nil) }\n #endif\n\n return SymbolicatedBacktrace(backtrace: backtrace,\n images: theImages,\n frames: frames)\n }\n\n /// Provide a textual version of the backtrace.\n public var description: String {\n var lines: [String] = []\n\n var n = 0\n for frame in frames {\n lines.append("\(n)\t\(frame.description)")\n switch frame.captured {\n case let .omittedFrames(count):\n n += count\n default:\n n += 
1\n }\n }\n\n lines.append("")\n lines.append("Images:")\n lines.append("")\n for (n, image) in images.enumerated() {\n lines.append("\(n)\t\(image.description)")\n }\n\n return lines.joined(separator: "\n")\n }\n}\n
dataset_sample\swift\swift\cpp_apple_swift_stdlib_public_RuntimeModule_SymbolicatedBacktrace.swift
cpp_apple_swift_stdlib_public_RuntimeModule_SymbolicatedBacktrace.swift
Swift
19,895
0.95
0.117949
0.180915
awesome-app
347
2023-07-28T18:20:51.078296
GPL-3.0
false
6648548487d276f75b3636451717db5e
//===--- Utils.swift - Utility functions ----------------------------------===//\n//\n// This source file is part of the Swift.org open source project\n//\n// Copyright (c) 2023 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n//\n// Utility functions that are used in the backtracing library.\n//\n//===----------------------------------------------------------------------===//\n\nimport Swift\n\n#if os(macOS) || os(iOS) || os(tvOS) || os(watchOS)\ninternal import Darwin\n#elseif os(Windows)\ninternal import ucrt\n#elseif canImport(Glibc)\ninternal import Glibc\n#elseif canImport(Musl)\ninternal import Musl\n#endif\n\ninternal func hex<T: FixedWidthInteger>(_ value: T,\n prefix shouldPrefix: Bool = true,\n width: Int = MemoryLayout<T>.size * 2)\n -> String {\n let digits = String(value, radix: 16)\n let padding = digits.count >= width ? "" : String(repeating: "0",\n count: width - digits.count)\n let prefix = shouldPrefix ? "0x" : ""\n\n return "\(prefix)\(padding)\(digits)"\n}\n\ninternal func hex(_ bytes: some Sequence<UInt8>) -> String {\n return bytes.map{ hex($0, prefix: false) }.joined(separator: "")\n}\n\nenum PadAlignment {\n case left\n case right\n}\n\nfunc pad<T>(_ value: T, _ width: Int, align: PadAlignment = .left) -> String {\n let string = String(describing: value)\n let padding = string.count >= width ? "" : String(repeating: " ",\n count: width - string.count)\n switch align {\n case .left:\n return string + padding\n case .right:\n return padding + string\n }\n}\n\n@_spi(Utils)\npublic func readString(from file: String) -> String? 
{\n let fd = open(file, O_RDONLY, 0)\n if fd < 0 {\n return nil\n }\n defer { close(fd) }\n\n // Files in /proc are awkward; you can't get their length and then\n // read the data in one chunk, because they have zero length and don't\n // support seeking.\n var bytes: [UInt8] = []\n withUnsafeTemporaryAllocation(of: UInt8.self, capacity: 4096) { buffer in\n while true {\n let bytesRead = read(fd, buffer.baseAddress, buffer.count)\n if bytesRead <= 0 {\n break\n }\n\n bytes.append(contentsOf: buffer[0..<bytesRead])\n }\n }\n\n return String(decoding: bytes, as: UTF8.self)\n}\n\n@_spi(Utils)\npublic func stripWhitespace<S: StringProtocol>(_ s: S)\n -> S.SubSequence {\n guard let firstNonWhitespace = s.firstIndex(where: { !$0.isWhitespace })\n else {\n return ""\n }\n let lastNonWhitespace = s.lastIndex(where: { !$0.isWhitespace })!\n return s[firstNonWhitespace...lastNonWhitespace]\n}\n\n/// Strip any Optional from a value.\n///\n/// This is useful when interfacing with the system C library, because some\n/// C libraries have nullability annotations while others do not.\nfunc notOptional<T>(_ optional: T?) -> T {\n return optional!\n}\n\nfunc notOptional<T>(_ value: T) -> T {\n return value\n}\n\n/// Convert mutable pointers to non-mutable\n///\n/// This is useful when interfacing with the system C library, because some\n/// C libraries have const annotations in places others do not.\nfunc notMutable<T>(_ mutable: UnsafeMutablePointer<T>) -> UnsafePointer<T> {\n return UnsafePointer<T>(mutable)\n}\nfunc notMutable<T>(_ immutable: UnsafePointer<T>) -> UnsafePointer<T> {\n return immutable\n}\nfunc notMutable(_ mutable: UnsafeMutableRawPointer) -> UnsafeRawPointer {\n return UnsafeRawPointer(mutable)\n}\nfunc notMutable(_ immutable: UnsafeRawPointer) -> UnsafeRawPointer {\n return immutable\n}\n
dataset_sample\swift\swift\cpp_apple_swift_stdlib_public_RuntimeModule_Utils.swift
cpp_apple_swift_stdlib_public_RuntimeModule_Utils.swift
Swift
3,838
0.95
0.063492
0.279279
python-kit
427
2023-07-21T04:19:40.528123
Apache-2.0
false
90bec0e89e67a9265b2423cd3ba5d52d
//===----------------------------------------------------------------------===//\n//\n// This source file is part of the Swift.org open source project\n//\n// Copyright (c) 2014 - 2017 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n// Pre-specialization of some popular generic classes and functions.\n//===----------------------------------------------------------------------===//\nimport Swift\n\n// =============================================================================\n// Definitions of proxy functions that mimic a generic function signature in the\n// standard library and are annotated with the standard library's\n// actual generic function name. The "prespecialize" annotation forces\n// the actual generic function to be specialized based on the argument\n// types passed to the proxy function.\n// =============================================================================\n\nextension Collection {\n // _failEarlyRangeCheck(_: A.Index, bounds: Swift.Range<A.Index>) -> ()\n @_semantics("prespecialize.$sSlsE20_failEarlyRangeCheck_6boundsy5IndexQz_SnyADGtF")\n func _prespecializeCollection(index: Index, range: Range<Index>) {}\n}\n\nextension Collection where Iterator == IndexingIterator<Self> {\n // makeIterator() -> Swift.IndexingIterator<A>\n @_semantics("prespecialize.$sSlss16IndexingIteratorVyxG0B0RtzrlE04makeB0ACyF")\n func _prespecializeIndexingIterator() {}\n}\n\nextension BidirectionalCollection {\n // reversed() -> ReversedCollection<A>\n @_semantics("prespecialize.$sSKsE8reverseds18ReversedCollectionVyxGyF")\n func _prespecializeBidirectionalCollection() {}\n}\n\nextension MutableCollection where Self: BidirectionalCollection {\n // _reverse(within: 
Swift.Range<A.Index>) -> ()\n @_semantics("prespecialize.$sSMsSKRzrlE8_reverse6withinySny5IndexSlQzG_tF")\n mutating func _prespecializeMutableBirectionalCollection(range: Range<Index>) {}\n\n // _insertionSort(within: Swift.Range<A.Index>,\n // by: (A.Element, A.Element\n // ) throws -> Swift.Bool) throws -> ()\n @_semantics("prespecialize.$sSMsSKRzrlE14_insertionSort6within2byySny5IndexSlQzG_Sb7ElementSTQz_AHtKXEtKF")\n mutating func _prespecializeMutableBirectionalCollection(range: Range<Index>, cmp: (Element, Element) throws -> Bool) {}\n\n // _insertionSort(\n // within: Swift.Range<A.Index>,\n // sortedEnd: A.Index,\n // by: (A.Element, A.Element) throws -> Swift.Bool\n // ) throws -> ()\n @_semantics("prespecialize.$sSMsSKRzrlE14_insertionSort6within9sortedEnd2byySny5IndexSlQzG_AFSb7ElementSTQz_AItKXEtKF")\n mutating func _prespecializeMutableBirectionalCollection(range: Range<Index>, end: Index, cmp: (Element, Element) throws -> Bool) {}\n} // extension MutableCollection where Self: BidirectionalCollection\n\nextension MutableCollection where Self: RandomAccessCollection {\n // sort(by: (A.Element, A.Element) throws -> Swift.Bool) throws -> ()\n @_semantics("prespecialize.$sSMsSkRzrlE4sort2byySb7ElementSTQz_ADtKXE_tKF")\n mutating func _prespecializeMutableRandomAccessCollection(cmp: (Element, Element) throws -> Bool) throws {}\n}\n\nextension RandomAccessCollection where Index : Strideable, Index.Stride == Int {\n // index(after: A.Index) -> A.Index\n @_semantics("prespecialize.$sSksSx5IndexRpzSnyABG7IndicesRtzSiAA_6StrideRTzrlE5index5afterA2B_tF")\n func _prespecializeRandomAccessCollection(after: Index) {}\n\n // indices.getter : Swift.Range<A.Index>\n @_semantics("prespecialize.$sSksSx5IndexRpzSnyABG7IndicesRtzSiAA_6StrideRTzrlE7indicesACvg")\n func _prespecializeRandomAccessCollection() {}\n}\n\n// _allocateUninitializedArray<A>(Builtin.Word) -> ([A], Builtin.RawPointer)\n@_semantics("prespecialize.$ss27_allocateUninitializedArrayySayxG_BptBwlF")\nfunc 
_prespecializeArray<T>(_ word: Builtin.Word) -> ([T], Builtin.RawPointer) {\n return ([], Builtin.inttoptr_Word(word))\n}\n\nextension Array {\n // init() -> [A]\n @_semantics("prespecialize.$sS2ayxGycfC")\n // startIndex.getter : Swift.Int\n @_semantics("prespecialize.$sSa10startIndexSivg")\n // _getCapacity() -> Swift.Int\n @_semantics("prespecialize.$sSa12_getCapacitySiyF")\n // _makeMutableAndUnique() -> ()\n @_semantics("prespecialize.$sSa21_makeMutableAndUniqueyyF")\n // _copyToContiguousArray() -> Swift.ContiguousArray<A>\n @_semantics("prespecialize.$sSa22_copyToContiguousArrays0cD0VyxGyF")\n // _hoistableIsNativeTypeChecked() -> Swift.Bool\n @_semantics("prespecialize.$sSa29_hoistableIsNativeTypeCheckedSbyF")\n // count.getter : Swift.Int\n @_semantics("prespecialize.$sSa5countSivg")\n // capacity.getter : Swift.Int\n @_semantics("prespecialize.$sSa8capacitySivg")\n // endIndex.getter : Swift.Int\n @_semantics("prespecialize.$sSa8endIndexSivg")\n // formIndex(before: inout Swift.Int) -> ()\n @_semantics("prespecialize.$sSa9formIndex6beforeySiz_tF")\n func _prespecializeArray() {}\n\n // _makeUniqueAndReserveCapacityIfNotUnique() -> ()\n @_semantics("prespecialize.$sSa034_makeUniqueAndReserveCapacityIfNotB0yyF")\n func _prespecializeMutableArray() {}\n\n // _checkSubscript(_: Swift.Int, wasNativeTypeChecked: Swift.Bool) -> Swift._DependenceToken\n @_semantics("prespecialize.$sSa15_checkSubscript_20wasNativeTypeCheckeds16_DependenceTokenVSi_SbtF")\n func _prespecializeArray(index: Int, flag: Bool) {}\n\n // _getElement(_: Swift.Int, wasNativeTypeChecked: Swift.Bool, matchingSubscriptCheck: Swift._DependenceToken) -> A\n @_semantics("prespecialize.$sSa11_getElement_20wasNativeTypeChecked22matchingSubscriptCheckxSi_Sbs16_DependenceTokenVtF")\n func _prespecializeArray(index: Int, flag: Bool, token: _DependenceToken) {}\n\n // init(arrayLiteral: A...) 
-> [A]\n @_semantics("prespecialize.$sSa12arrayLiteralSayxGxd_tcfC")\n func _prespecializeArray(arrayLiteral: Element...) {}\n\n // init(_unsafeUninitializedCapacity: Swift.Int, initializingWith: (inout Swift.UnsafeMutableBufferPointer<A>, inout Swift.Int) throws -> ()) throws -> [A]\n @_semantics("prespecialize.$sSa28_unsafeUninitializedCapacity16initializingWithSayxGSi_ySryxGz_SiztKXEtKcfC")\n func _prespecializeArray(capacity: Int, generator: (inout UnsafeMutableBufferPointer<Element>, inout Int) throws -> ()) {}\n\n // removeAll(keepingCapacity: Swift.Bool) -> ()\n @_semantics("prespecialize.$sSa9removeAll15keepingCapacityySb_tF")\n // default argument 0 of Swift.Array.removeAll(keepingCapacity: Swift.Bool) -> ()\n @_semantics("prespecialize.$sSa9removeAll15keepingCapacityySb_tFfA_")\n func _prespecializeArray(flag: Bool) {}\n\n // init(_uninitializedCount: Swift.Int) -> [A]\n @_semantics("prespecialize.$sSa19_uninitializedCountSayxGSi_tcfC")\n // _reserveCapacityAssumingUniqueBuffer(oldCount: Swift.Int) -> ()\n @_semantics("prespecialize.$sSa36_reserveCapacityAssumingUniqueBuffer8oldCountySi_tF")\n // reserveCapacity(Swift.Int) -> ()\n @_semantics("prespecialize.$sSa15reserveCapacityyySiF")\n // _copyToNewBuffer(oldCount: Swift.Int) -> ()\n @_semantics("prespecialize.$sSa16_copyToNewBuffer8oldCountySi_tF")\n // _getCount() -> Swift.Int\n @_semantics("prespecialize.$sSa9_getCountSiyF")\n // formIndex(after: inout Swift.Int) -> ()\n @_semantics("prespecialize.$sSa9formIndex5afterySiz_tF")\n // subscript.modify : (Swift.Int) -> A\n @_semantics("prespecialize.$sSayxSiciM")\n // subscript.getter : (Swift.Int) -> A\n @_semantics("prespecialize.$sSayxSicig")\n // subscript.read : (Swift.Int) -> A\n @_semantics("prespecialize.$sSayxSicir")\n func _prespecializeArray(index: Int) {}\n\n // _appendElementAssumeUniqueAndCapacity(_: Swift.Int, newElement: __owned A) -> ()\n @_semantics("prespecialize.$sSa37_appendElementAssumeUniqueAndCapacity_03newB0ySi_xntF")\n func 
_prespecializeArray(index: Int, element: Element) {}\n\n // append(__owned A) -> ()\n @_semantics("prespecialize.$sSa6appendyyxnF")\n // init(repeating: A, count: Swift.Int) -> [A]\n @_semantics("prespecialize.$sSa9repeating5countSayxGx_SitcfC")\n func _prespecializeArray(element: Element, index: Int) {}\n\n // replaceSubrange<A where A == A1.Element, A1: Swift.Collection>(\n // _: Swift.Range<Swift.Int>, with: __owned A1\n // ) -> ()\n @_semantics("prespecialize.$sSa15replaceSubrange_4withySnySiG_qd__nt7ElementQyd__RszSlRd__lF")\n func _prespecializeArray<C: Collection>(range: Range<C.Index>, collection: C) where Element == C.Element {}\n\n // _withUnsafeMutableBufferPointerIfSupported<A>(\n // (inout Swift.UnsafeMutableBufferPointer<A>) throws -> A1\n // ) throws -> A1?\n @_semantics("prespecialize.$sSa42_withUnsafeMutableBufferPointerIfSupportedyqd__Sgqd__SryxGzKXEKlF")\n func _prespecializeArray<R>(with: (inout UnsafeMutableBufferPointer<Element>) throws -> R) {}\n} // extension Array\n\nextension _ContiguousArrayBuffer {\n // startIndex.getter : Swift.Int\n @_semantics("prespecialize.$ss22_ContiguousArrayBufferV10startIndexSivg")\n // firstElementAddress.getter : Swift.UnsafeMutablePointer<A>\n @_semantics("prespecialize.$ss22_ContiguousArrayBufferV19firstElementAddressSpyxGvg")\n // count.getter : Swift.Int\n @_semantics("prespecialize.$ss22_ContiguousArrayBufferV7_buffer19shiftedToStartIndexAByxGAE_SitcfC")\n // endIndex.getter : Swift.Int\n @_semantics("prespecialize.$ss22_ContiguousArrayBufferV8endIndexSivg")\n // init() -> Swift._ContiguousArrayBuffer<A>\n @_semantics("prespecialize.$ss22_ContiguousArrayBufferVAByxGycfC")\n func _prespecializeContiguousArrayBuffer() {}\n\n // _copyContents(subRange: Swift.Range<Swift.Int>, initializing: Swift.UnsafeMutablePointer<A>) -> Swift.UnsafeMutablePointer<A>\n @_semantics("prespecialize.$ss22_ContiguousArrayBufferV13_copyContents8subRange12initializingSpyxGSnySiG_AFtF")\n func 
_prespecializeContiguousArrayBuffer(range: Range<Int>, pointer: UnsafeMutablePointer<Element>) {}\n\n // _initStorageHeader(count: Swift.Int, capacity: Swift.Int) -> ()\n @_semantics("prespecialize.$ss22_ContiguousArrayBufferV18_initStorageHeader5count8capacityySi_SitF")\n func _prespecializeContiguousArrayBuffer(count: Int, capacity: Int) {}\n\n @_semantics("prespecialize.$ss22_ContiguousArrayBufferV5countSivg")\n // init(_buffer: Swift._ContiguousArrayBuffer<A>, shiftedToStartIndex: Swift.Int) -> Swift._ContiguousArrayBuffer<A>\n func _prespecializeContiguousArrayBuffer(buffer: _ContiguousArrayBuffer<Element>, index: Int) {}\n}\n\n#if _runtime(_ObjC)\nextension _ArrayBuffer {\n // requestNativeBuffer() -> Swift._ContiguousArrayBuffer<A>?\n @_semantics("prespecialize.$ss12_ArrayBufferV013requestNativeB0s011_ContiguousaB0VyxGSgyF")\n // _nonNative.getter : Swift._CocoaArrayWrapper\n @_semantics("prespecialize.$ss12_ArrayBufferV10_nonNatives06_CocoaA7WrapperVvg")\n // startIndex.getter : Swift.Int\n @_semantics("prespecialize.$ss12_ArrayBufferV10startIndexSivg")\n // firstElementAddress.getter : Swift.UnsafeMutablePointer<A>\n @_semantics("prespecialize.$ss12_ArrayBufferV19firstElementAddressSpyxGvg")\n // isUniquelyReferenced() -> Swift.Bool\n @_semantics("prespecialize.$ss12_ArrayBufferV20isUniquelyReferencedSbyF")\n // count.setter : Swift.Int\n @_semantics("prespecialize.$ss12_ArrayBufferV5countSivs")\n // _native.getter : Swift._ContiguousArrayBuffer<A>\n @_semantics("prespecialize.$ss12_ArrayBufferV7_natives011_ContiguousaB0VyxGvg")\n // _isNative.getter : Swift.Bool\n @_semantics("prespecialize.$ss12_ArrayBufferV9_isNativeSbvg")\n // capacity.getter : Swift.Int\n @_semantics("prespecialize.$ss12_ArrayBufferV8capacitySivg")\n // endIndex.getter : Swift.Int\n @_semantics("prespecialize.$ss12_ArrayBufferV8endIndexSivg")\n func _prespecializeArrayBuffer() {}\n\n // requestUniqueMutableBackingBuffer(minimumCapacity: Swift.Int) -> Swift._ContiguousArrayBuffer<A>?\n 
@_semantics("prespecialize.$ss12_ArrayBufferV027requestUniqueMutableBackingB015minimumCapacitys011_ContiguousaB0VyxGSgSi_tF")\n // _getElementSlowPath(Swift.Int) -> Swift.AnyObject\n @_semantics("prespecialize.$ss12_ArrayBufferV19_getElementSlowPathyyXlSiF")\n // subscript.getter : (Swift.Int) -> A\n @_semantics("prespecialize.$ss12_ArrayBufferVyxSicig")\n // subscript.read : (Swift.Int) -> A\n @_semantics("prespecialize.$ss12_ArrayBufferVyxSicir")\n func _prespecializeArrayBuffer(index: Int) {}\n\n // _typeCheck(Swift.Range<Swift.Int>) -> ()\n @_semantics("prespecialize.$ss12_ArrayBufferV10_typeCheckyySnySiGF")\n func _prespecializeArrayBuffer(range: Range<Int>) {}\n\n // _copyContents(subRange: Swift.Range<Swift.Int>, initializing: Swift.UnsafeMutablePointer<A>) -> Swift.UnsafeMutablePointer<A>\n @_semantics("prespecialize.$ss12_ArrayBufferV13_copyContents8subRange12initializingSpyxGSnySiG_AFtF")\n func _prespecializeArrayBuffer(range: Range<Int>, pointer: UnsafeMutablePointer<Element>) {}\n\n // _checkInoutAndNativeTypeCheckedBounds(_: Swift.Int, wasNativeTypeChecked: Swift.Bool) -> ()\n @_semantics("prespecialize.$ss12_ArrayBufferV37_checkInoutAndNativeTypeCheckedBounds_03wasfgH0ySi_SbtF")\n func _prespecializeArrayBuffer(index: Int, flag: Bool) {}\n\n // init(_buffer: Swift._ContiguousArrayBuffer<A>, shiftedToStartIndex: Swift.Int) -> Swift._ArrayBuffer<A>\n @_semantics("prespecialize.$ss12_ArrayBufferV7_buffer19shiftedToStartIndexAByxGs011_ContiguousaB0VyxG_SitcfC")\n func _prespecializeArrayBuffer(buffer: _ContiguousArrayBuffer<Element>, index: Int) {}\n}\n#endif // ObjC\n\nextension Range {\n // contains(A) -> Swift.Bool\n @_semantics("prespecialize.$sSn8containsySbxF")\n func _prespecializeRange(bound: Bound) {}\n\n // init(uncheckedBounds: (lower: A, upper: A)) -> Swift.Range<A>\n @_semantics("prespecialize.$sSn15uncheckedBoundsSnyxGx5lower_x5uppert_tcfC")\n func _prespecializeRange(bounds: (lower: Bound, upper: Bound)) {}\n}\n\nextension Range where 
Bound: Strideable, Bound.Stride : SignedInteger {\n // startIndex.getter\n @_semantics("prespecialize.$sSnsSxRzSZ6StrideRpzrlE10startIndexxvg")\n // endIndex.getter\n @_semantics("prespecialize.$sSnsSxRzSZ6StrideRpzrlE8endIndexxvg")\n // index(after: A) -> A\n @_semantics("prespecialize.$sSnsSxRzSZ6StrideRpzrlE5index5afterxx_tF")\n // subscript.read\n @_semantics("prespecialize.$sSnsSxRzSZ6StrideRpzrlEyxxcir")\n func _prespecializeIntegerRange(bound: Bound) {}\n}\n\nextension ClosedRange {\n // init(uncheckedBounds: (lower: A, upper: A)) -> Swift.ClosedRange<A>\n @_semantics("prespecialize.$sSN15uncheckedBoundsSNyxGx5lower_x5uppert_tcfC")\n func _prespecializeClosedRange() {}\n}\n\nextension ClosedRange where Bound: Strideable, Bound.Stride : SignedInteger {\n // startIndex.getter\n @_semantics("prespecialize.$sSNsSxRzSZ6StrideRpzrlE10startIndexSNsSxRzSZABRQrlE0C0Oyx_Gvg")\n // endIndex.getter\n @_semantics("prespecialize.$sSNsSxRzSZ6StrideRpzrlE8endIndexSNsSxRzSZABRQrlE0C0Oyx_Gvg")\n // subscript.read\n @_semantics("prespecialize.$sSNsSxRzSZ6StrideRpzrlEyxSNsSxRzSZABRQrlE5IndexOyx_Gcir")\n func _prespecializeIntegerClosedRange() {}\n\n // index(after: ClosedRange<A>< where A: Swift.Strideable, A.Stride: Swift.SignedInteger>.Index)\n // -> ClosedRange<A>< where A: Swift.Strideable, A.Stride: Swift.SignedInteger>.Index\n @_semantics("prespecialize.$sSNsSxRzSZ6StrideRpzrlE5index5afterSNsSxRzSZABRQrlE5IndexOyx_GAG_tF")\n func _prespecializeIntegerClosedRange(range: Self) {}\n}\n\n// IndexingIterator.next() -> A.Element?\n@_semantics("prespecialize.$ss16IndexingIteratorV4next7ElementQzSgyF")\nfunc _prespecializeIndexingIterator<Elements>(_ x: IndexingIterator<Elements>) where Elements : Collection {}\n\n// =============================================================================\n// Helpers that construct arguments of the necessary specialized types,\n// passing them to the above generic proxy functions.\n// 
=============================================================================\n\nfunc prespecializeCollections<T>(_ element: T) {\n var umbp = UnsafeMutableBufferPointer<T>.allocate(capacity: 1)\n let cmp = { (_: T, _: T) in return false }\n unsafe umbp._prespecializeMutableBirectionalCollection(range: 0..<0)\n unsafe umbp._prespecializeMutableBirectionalCollection(range: 0..<0, cmp: cmp)\n unsafe umbp._prespecializeMutableBirectionalCollection(range: 0..<0, end: 0, cmp: cmp)\n try! unsafe umbp._prespecializeMutableRandomAccessCollection(cmp: cmp)\n\n let _: (Array<T>, Builtin.RawPointer) = _prespecializeArray(0._builtinWordValue)\n\n var array = Array<T>()\n array._prespecializeArray()\n array._prespecializeMutableArray()\n array._prespecializeArray(index: 0, flag: false)\n array._prespecializeArray(index: 0, flag: false, token: _DependenceToken())\n array._prespecializeArray(arrayLiteral: element)\n unsafe array._prespecializeArray(capacity: 0) { (_: inout UnsafeMutableBufferPointer<T>, _: inout Int) in return }\n array._prespecializeArray(flag: false)\n array._prespecializeArray(index: 0)\n array._prespecializeArray(index: 0, element: element)\n array._prespecializeArray(element: element, index: 0)\n array._prespecializeArray(range: 0..<0, collection: EmptyCollection())\n unsafe array._prespecializeArray(with: { (_: inout UnsafeMutableBufferPointer<T>) -> Optional<()> in return () })\n array._prespecializeBidirectionalCollection()\n array._prespecializeRandomAccessCollection()\n try! 
array._prespecializeMutableRandomAccessCollection(cmp: cmp)\n\n let cab = _ContiguousArrayBuffer<T>()\n cab._prespecializeContiguousArrayBuffer()\n unsafe cab._prespecializeContiguousArrayBuffer(range: (0..<0), pointer: umbp.baseAddress!)\n cab._prespecializeContiguousArrayBuffer(count: 0, capacity: 0)\n cab._prespecializeContiguousArrayBuffer(buffer: cab, index: 0)\n\n#if _runtime(_ObjC)\n let ab = _ArrayBuffer<T>()\n ab._prespecializeArrayBuffer()\n ab._prespecializeArrayBuffer(index: 0)\n ab._prespecializeArrayBuffer(range: (0..<0))\n unsafe ab._prespecializeArrayBuffer(range: (0..<0), pointer: umbp.baseAddress!)\n ab._prespecializeArrayBuffer(index: 0, flag: false)\n ab._prespecializeArrayBuffer(buffer: cab, index: 0)\n ab._prespecializeRandomAccessCollection(after: 0)\n ab._prespecializeRandomAccessCollection()\n ab._prespecializeCollection(index: 0, range: (0..<0))\n#endif // ObjC\n\n var ca = ContiguousArray<T>()\n ca._prespecializeRandomAccessCollection()\n try! ca._prespecializeMutableRandomAccessCollection(cmp: cmp)\n\n let cb = _ContiguousArrayBuffer<T>()\n cb._prespecializeRandomAccessCollection()\n}\n\nfunc prespecializeRanges() {\n // Range<Int>\n (0..<0)._prespecializeCollection(index: 0, range: (0..<0))\n (0..<0)._prespecializeRange(bound: 0)\n (0..<0)._prespecializeRange(bounds: (0, 0))\n (0..<0)._prespecializeIntegerRange(bound: 0)\n (0..<0)._prespecializeIndexingIterator()\n _prespecializeIndexingIterator((0..<0).makeIterator())\n // ClosedRange<Int>\n (0...0)._prespecializeClosedRange()\n (0...0)._prespecializeIntegerClosedRange()\n (0...0)._prespecializeIntegerClosedRange(range: (0...0))\n (0...0)._prespecializeIndexingIterator()\n _prespecializeIndexingIterator((0...0).makeIterator())\n}\n\n// =============================================================================\n// Top-level function that statically calls all generic entry points\n// that require prespecialization.\n// 
=============================================================================\n\n// Allow optimization here so that specialization occurs.\nfunc prespecializeAll() {\n prespecializeCollections(() as Any)\n prespecializeCollections("a" as Character)\n prespecializeCollections("a" as Unicode.Scalar)\n prespecializeCollections("a".utf8)\n prespecializeCollections("a".utf16)\n prespecializeCollections("a".unicodeScalars)\n prespecializeCollections("a" as String)\n prespecializeCollections(1.5 as Double)\n prespecializeCollections(1.5 as Float)\n prespecializeCollections(1 as Int)\n prespecializeCollections(1 as UInt)\n prespecializeCollections(1 as Int8)\n prespecializeCollections(1 as Int16)\n prespecializeCollections(1 as Int32)\n prespecializeCollections(1 as Int64)\n prespecializeCollections(1 as UInt8)\n prespecializeCollections(1 as UInt16)\n prespecializeCollections(1 as UInt32)\n prespecializeCollections(1 as UInt64)\n\n prespecializeRanges()\n}\n\n// Mark with optimize(none) to make sure its not get\n// rid of by dead function elimination. \n@_optimize(none)\ninternal func _swift_forcePrespecializations() {\n prespecializeAll()\n}\n
dataset_sample\swift\swift\cpp_apple_swift_stdlib_public_SwiftOnoneSupport_SwiftOnoneSupport.swift
cpp_apple_swift_stdlib_public_SwiftOnoneSupport_SwiftOnoneSupport.swift
Swift
20,263
0.95
0.03163
0.354749
react-lib
225
2024-01-31T12:57:50.787981
GPL-3.0
false
4dd3483bd04f456fa052a00e9b69a32b
//===----------------------------------------------------------------------===//\n//\n// This source file is part of the Swift.org open source project\n//\n// Copyright (c) 2023 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n\nimport Builtin\n\n/// An atomic value.\n@available(SwiftStdlib 6.0, *)\n@frozen\n@_rawLayout(like: Value.AtomicRepresentation)\n@_staticExclusiveOnly\npublic struct Atomic<Value: AtomicRepresentable>: ~Copyable {\n @available(SwiftStdlib 6.0, *)\n @_alwaysEmitIntoClient\n @_transparent\n var _address: UnsafeMutablePointer<Value.AtomicRepresentation> {\n unsafe UnsafeMutablePointer<Value.AtomicRepresentation>(_rawAddress)\n }\n\n @available(SwiftStdlib 6.0, *)\n @_alwaysEmitIntoClient\n @_transparent\n var _rawAddress: Builtin.RawPointer {\n Builtin.unprotectedAddressOfBorrow(self)\n }\n\n /// Initializes a value of this atomic with the given initial value.\n ///\n /// - Parameter initialValue: The initial value to set this atomic.\n @available(SwiftStdlib 6.0, *)\n @_alwaysEmitIntoClient\n @_transparent\n public init(_ initialValue: consuming Value) {\n unsafe _address.initialize(to: Value.encodeAtomicRepresentation(initialValue))\n }\n\n // Deinit's can't be marked @_transparent. Do these things need all of these\n // attributes..?\n @available(SwiftStdlib 6.0, *)\n @_alwaysEmitIntoClient\n @inlinable\n deinit {\n let oldValue = unsafe Value.decodeAtomicRepresentation(_address.pointee)\n _ = consume oldValue\n\n unsafe _address.deinitialize(count: 1)\n }\n}\n\n@available(SwiftStdlib 6.0, *)\nextension Atomic: @unchecked Sendable where Value: Sendable {}\n
dataset_sample\swift\swift\cpp_apple_swift_stdlib_public_Synchronization_Atomics_Atomic.swift
cpp_apple_swift_stdlib_public_Synchronization_Atomics_Atomic.swift
Swift
1,887
0.95
0.033898
0.326923
awesome-app
751
2023-09-06T19:57:45.466512
Apache-2.0
false
cbbc411515d698dc524bee22045f2a7d
//===----------------------------------------------------------------------===//\n//\n// This source file is part of the Swift.org open source project\n//\n// Copyright (c) 2023 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n\nimport Builtin\n\n//===----------------------------------------------------------------------===//\n// Bool AtomicRepresentable conformance\n//===----------------------------------------------------------------------===//\n\n@available(SwiftStdlib 6.0, *)\nextension Bool: AtomicRepresentable {\n /// The storage representation type that `Self` encodes to and decodes from\n /// which is a suitable type when used in atomic operations.\n @available(SwiftStdlib 6.0, *)\n public typealias AtomicRepresentation = UInt8.AtomicRepresentation\n\n /// Destroys a value of `Self` and prepares an `AtomicRepresentation` storage\n /// type to be used for atomic operations.\n ///\n /// - Note: This is not an atomic operation. 
This simply encodes the logical\n /// type `Self` into its storage representation suitable for atomic\n /// operations, `AtomicRepresentation`.\n ///\n /// - Parameter value: A valid instance of `Self` that's about to be destroyed\n /// to encode an instance of its `AtomicRepresentation`.\n /// - Returns: The newly encoded `AtomicRepresentation` storage.\n @available(SwiftStdlib 6.0, *)\n @_alwaysEmitIntoClient\n @_transparent\n public static func encodeAtomicRepresentation(\n _ value: borrowing Bool\n ) -> AtomicRepresentation {\n UInt8.encodeAtomicRepresentation(\n UInt8(Builtin.zext_Int1_Int8(value._value))\n )\n }\n\n /// Recovers the logical atomic type `Self` by destroying some\n /// `AtomicRepresentation` storage instance returned from an atomic operation.\n ///\n /// - Note: This is not an atomic operation. This simply decodes the storage\n /// representation used in atomic operations back into the logical type for\n /// normal use, `Self`.\n ///\n /// - Parameter storage: The storage representation for `Self` that's used\n /// within atomic operations.\n /// - Returns: The newly decoded logical type `Self`.\n @available(SwiftStdlib 6.0, *)\n @_alwaysEmitIntoClient\n @_transparent\n public static func decodeAtomicRepresentation(\n _ representation: consuming AtomicRepresentation\n ) -> Bool {\n Bool(Builtin.trunc_Int8_Int1(\n UInt8.decodeAtomicRepresentation(representation)._value)\n )\n }\n}\n\n//===----------------------------------------------------------------------===//\n// Bool atomic operations\n//===----------------------------------------------------------------------===//\n\n@available(SwiftStdlib 6.0, *)\nextension Atomic where Value == Bool {\n /// Perform an atomic logical AND operation and return the old and new value,\n /// applying the specified memory ordering.\n ///\n /// - Parameter operand: A boolean value.\n /// - Parameter ordering: The memory ordering to apply on this operation.\n /// - Returns: A tuple with the old value before the 
operation a the new value\n /// after the operation.\n @available(SwiftStdlib 6.0, *)\n @discardableResult\n @_semantics("atomics.requires_constant_orderings")\n @_alwaysEmitIntoClient\n @_transparent\n public func logicalAnd(\n _ operand: Bool,\n ordering: AtomicUpdateOrdering\n ) -> (oldValue: Bool, newValue: Bool) {\n let builtinOperand = Bool.encodeAtomicRepresentation(operand)._storage\n\n let original = switch ordering {\n case .relaxed:\n Builtin.atomicrmw_and_monotonic_Int8(\n _rawAddress,\n builtinOperand\n )\n\n case .acquiring:\n Builtin.atomicrmw_and_acquire_Int8(\n _rawAddress,\n builtinOperand\n )\n\n case .releasing:\n Builtin.atomicrmw_and_release_Int8(\n _rawAddress,\n builtinOperand\n )\n\n case .acquiringAndReleasing:\n Builtin.atomicrmw_and_acqrel_Int8(\n _rawAddress,\n builtinOperand\n )\n\n case .sequentiallyConsistent:\n Builtin.atomicrmw_and_seqcst_Int8(\n _rawAddress,\n builtinOperand\n )\n\n default:\n Builtin.unreachable()\n }\n\n let old = Bool.decodeAtomicRepresentation(UInt8.AtomicRepresentation(original))\n\n return (oldValue: old, newValue: old && operand)\n }\n\n /// Perform an atomic logical OR operation and return the old and new value,\n /// applying the specified memory ordering.\n ///\n /// - Parameter operand: A boolean value.\n /// - Parameter ordering: The memory ordering to apply on this operation.\n /// - Returns: A tuple with the old value before the operation a the new value\n /// after the operation.\n @available(SwiftStdlib 6.0, *)\n @discardableResult\n @_semantics("atomics.requires_constant_orderings")\n @_alwaysEmitIntoClient\n @_transparent\n public func logicalOr(\n _ operand: Bool,\n ordering: AtomicUpdateOrdering\n ) -> (oldValue: Bool, newValue: Bool) {\n let builtinOperand = Bool.encodeAtomicRepresentation(operand)._storage\n\n let original = switch ordering {\n case .relaxed:\n Builtin.atomicrmw_or_monotonic_Int8(\n _rawAddress,\n builtinOperand\n )\n\n case .acquiring:\n Builtin.atomicrmw_or_acquire_Int8(\n 
_rawAddress,\n builtinOperand\n )\n\n case .releasing:\n Builtin.atomicrmw_or_release_Int8(\n _rawAddress,\n builtinOperand\n )\n\n case .acquiringAndReleasing:\n Builtin.atomicrmw_or_acqrel_Int8(\n _rawAddress,\n builtinOperand\n )\n\n case .sequentiallyConsistent:\n Builtin.atomicrmw_or_seqcst_Int8(\n _rawAddress,\n builtinOperand\n )\n\n default:\n Builtin.unreachable()\n }\n\n let old = Bool.decodeAtomicRepresentation(UInt8.AtomicRepresentation(original))\n\n return (oldValue: old, newValue: old || operand)\n }\n\n /// Perform an atomic logical XOR operation and return the old and new value,\n /// applying the specified memory ordering.\n ///\n /// - Parameter operand: A boolean value.\n /// - Parameter ordering: The memory ordering to apply on this operation.\n /// - Returns: A tuple with the old value before the operation a the new value\n /// after the operation.\n @available(SwiftStdlib 6.0, *)\n @discardableResult\n @_semantics("atomics.requires_constant_orderings")\n @_alwaysEmitIntoClient\n @_transparent\n public func logicalXor(\n _ operand: Bool,\n ordering: AtomicUpdateOrdering\n ) -> (oldValue: Bool, newValue: Bool) {\n let builtinOperand = Bool.encodeAtomicRepresentation(operand)._storage\n\n let original = switch ordering {\n case .relaxed:\n Builtin.atomicrmw_xor_monotonic_Int8(\n _rawAddress,\n builtinOperand\n )\n\n case .acquiring:\n Builtin.atomicrmw_xor_acquire_Int8(\n _rawAddress,\n builtinOperand\n )\n\n case .releasing:\n Builtin.atomicrmw_xor_release_Int8(\n _rawAddress,\n builtinOperand\n )\n\n case .acquiringAndReleasing:\n Builtin.atomicrmw_xor_acqrel_Int8(\n _rawAddress,\n builtinOperand\n )\n\n case .sequentiallyConsistent:\n Builtin.atomicrmw_xor_seqcst_Int8(\n _rawAddress,\n builtinOperand\n )\n\n default:\n Builtin.unreachable()\n }\n\n let old = Bool.decodeAtomicRepresentation(UInt8.AtomicRepresentation(original))\n\n return (oldValue: old, newValue: old != operand)\n }\n}\n
dataset_sample\swift\swift\cpp_apple_swift_stdlib_public_Synchronization_Atomics_AtomicBool.swift
cpp_apple_swift_stdlib_public_Synchronization_Atomics_AtomicBool.swift
Swift
7,554
0.95
0.03629
0.27907
react-lib
231
2024-06-14T21:45:17.743970
GPL-3.0
false
8ffe2e5de61258ee7e0e9be42a394a01
//===----------------------------------------------------------------------===//\n//\n// This source file is part of the Swift.org open source project\n//\n// Copyright (c) 2023 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n\n//===----------------------------------------------------------------------===//\n// Float16 AtomicRepresentable conformance\n//===----------------------------------------------------------------------===//\n\n#if !((os(macOS) || targetEnvironment(macCatalyst)) && arch(x86_64))\n\n@available(SwiftStdlib 6.0, *)\nextension Float16: AtomicRepresentable {\n /// The storage representation type that `Self` encodes to and decodes from\n /// which is a suitable type when used in atomic operations.\n @available(SwiftStdlib 6.0, *)\n public typealias AtomicRepresentation = UInt16.AtomicRepresentation\n\n /// Destroys a value of `Self` and prepares an `AtomicRepresentation` storage\n /// type to be used for atomic operations.\n ///\n /// - Note: This is not an atomic operation. 
This simply encodes the logical\n /// type `Self` into its storage representation suitable for atomic\n /// operations, `AtomicRepresentation`.\n ///\n /// - Parameter value: A valid instance of `Self` that's about to be destroyed\n /// to encode an instance of its `AtomicRepresentation`.\n /// - Returns: The newly encoded `AtomicRepresentation` storage.\n @available(SwiftStdlib 6.0, *)\n @_alwaysEmitIntoClient\n @_transparent\n public static func encodeAtomicRepresentation(\n _ value: consuming Float16\n ) -> AtomicRepresentation {\n UInt16.encodeAtomicRepresentation(value.bitPattern)\n }\n\n /// Recovers the logical atomic type `Self` by destroying some\n /// `AtomicRepresentation` storage instance returned from an atomic operation.\n ///\n /// - Note: This is not an atomic operation. This simply decodes the storage\n /// representation used in atomic operations back into the logical type for\n /// normal use, `Self`.\n ///\n /// - Parameter storage: The storage representation for `Self` that's used\n /// within atomic operations.\n /// - Returns: The newly decoded logical type `Self`.\n @available(SwiftStdlib 6.0, *)\n @_alwaysEmitIntoClient\n @_transparent\n public static func decodeAtomicRepresentation(\n _ representation: consuming AtomicRepresentation\n ) -> Float16 {\n Float16(bitPattern: UInt16.decodeAtomicRepresentation(representation))\n }\n}\n\n#endif\n\n//===----------------------------------------------------------------------===//\n// Float AtomicRepresentable conformance\n//===----------------------------------------------------------------------===//\n\n@available(SwiftStdlib 6.0, *)\nextension Float: AtomicRepresentable {\n /// The storage representation type that `Self` encodes to and decodes from\n /// which is a suitable type when used in atomic operations.\n @available(SwiftStdlib 6.0, *)\n public typealias AtomicRepresentation = UInt32.AtomicRepresentation\n\n /// Destroys a value of `Self` and prepares an `AtomicRepresentation` storage\n /// 
type to be used for atomic operations.\n ///\n /// - Note: This is not an atomic operation. This simply encodes the logical\n /// type `Self` into its storage representation suitable for atomic\n /// operations, `AtomicRepresentation`.\n ///\n /// - Parameter value: A valid instance of `Self` that's about to be destroyed\n /// to encode an instance of its `AtomicRepresentation`.\n /// - Returns: The newly encoded `AtomicRepresentation` storage.\n @available(SwiftStdlib 6.0, *)\n @_alwaysEmitIntoClient\n @_transparent\n public static func encodeAtomicRepresentation(\n _ value: consuming Float\n ) -> AtomicRepresentation {\n UInt32.encodeAtomicRepresentation(value.bitPattern)\n }\n\n /// Recovers the logical atomic type `Self` by destroying some\n /// `AtomicRepresentation` storage instance returned from an atomic operation.\n ///\n /// - Note: This is not an atomic operation. This simply decodes the storage\n /// representation used in atomic operations back into the logical type for\n /// normal use, `Self`.\n ///\n /// - Parameter storage: The storage representation for `Self` that's used\n /// within atomic operations.\n /// - Returns: The newly decoded logical type `Self`.\n @available(SwiftStdlib 6.0, *)\n @_alwaysEmitIntoClient\n @_transparent\n public static func decodeAtomicRepresentation(\n _ representation: consuming AtomicRepresentation\n ) -> Float {\n Float(bitPattern: UInt32.decodeAtomicRepresentation(representation))\n }\n}\n\n//===----------------------------------------------------------------------===//\n// Double AtomicRepresentable conformance\n//===----------------------------------------------------------------------===//\n\n#if (_pointerBitWidth(_32) && _hasAtomicBitWidth(_64)) || _pointerBitWidth(_64)\n\n@available(SwiftStdlib 6.0, *)\nextension Double: AtomicRepresentable {\n /// The storage representation type that `Self` encodes to and decodes from\n /// which is a suitable type when used in atomic operations.\n @available(SwiftStdlib 6.0, 
*)\n public typealias AtomicRepresentation = UInt64.AtomicRepresentation\n\n /// Destroys a value of `Self` and prepares an `AtomicRepresentation` storage\n /// type to be used for atomic operations.\n ///\n /// - Note: This is not an atomic operation. This simply encodes the logical\n /// type `Self` into its storage representation suitable for atomic\n /// operations, `AtomicRepresentation`.\n ///\n /// - Parameter value: A valid instance of `Self` that's about to be destroyed\n /// to encode an instance of its `AtomicRepresentation`.\n /// - Returns: The newly encoded `AtomicRepresentation` storage.\n @available(SwiftStdlib 6.0, *)\n @_alwaysEmitIntoClient\n @_transparent\n public static func encodeAtomicRepresentation(\n _ value: consuming Double\n ) -> AtomicRepresentation {\n UInt64.encodeAtomicRepresentation(value.bitPattern)\n }\n\n /// Recovers the logical atomic type `Self` by destroying some\n /// `AtomicRepresentation` storage instance returned from an atomic operation.\n ///\n /// - Note: This is not an atomic operation. This simply decodes the storage\n /// representation used in atomic operations back into the logical type for\n /// normal use, `Self`.\n ///\n /// - Parameter storage: The storage representation for `Self` that's used\n /// within atomic operations.\n /// - Returns: The newly decoded logical type `Self`.\n @available(SwiftStdlib 6.0, *)\n @_alwaysEmitIntoClient\n @_transparent\n public static func decodeAtomicRepresentation(\n _ representation: consuming AtomicRepresentation\n ) -> Double {\n Double(bitPattern: UInt64.decodeAtomicRepresentation(representation))\n }\n}\n\n#endif\n
dataset_sample\swift\swift\cpp_apple_swift_stdlib_public_Synchronization_Atomics_AtomicFloats.swift
cpp_apple_swift_stdlib_public_Synchronization_Atomics_AtomicFloats.swift
Swift
6,952
0.8
0.094675
0.588235
vue-tools
840
2025-04-09T00:24:28.281184
BSD-3-Clause
false
72e67bc4f53afa4dcf47cd2a65d81077
//===----------------------------------------------------------------------===//\n//\n// This source file is part of the Swift.org open source project\n//\n// Copyright (c) 2023 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n\n/// A lazily initializable atomic strong reference.\n///\n/// These values can be set (initialized) exactly once, but read many\n/// times.\n@available(SwiftStdlib 6.0, *)\n@frozen\n@_staticExclusiveOnly\n@safe\npublic struct AtomicLazyReference<Instance: AnyObject>: ~Copyable {\n @usableFromInline\n let storage: Atomic<Unmanaged<Instance>?>\n\n @available(SwiftStdlib 6.0, *)\n @inlinable\n public init() {\n unsafe storage = Atomic<Unmanaged<Instance>?>(nil)\n }\n\n @inlinable\n deinit {\n if let unmanaged = unsafe storage.load(ordering: .acquiring) {\n unsafe unmanaged.release()\n }\n }\n}\n\n@available(SwiftStdlib 6.0, *)\nextension AtomicLazyReference {\n /// Atomically initializes this reference if its current value is nil, then\n /// returns the initialized value. 
If this reference is already initialized,\n /// then `storeIfNil(_:)` discards its supplied argument and returns the\n /// current value without updating it.\n ///\n /// The following example demonstrates how this can be used to implement a\n /// thread-safe lazily initialized reference:\n ///\n /// class Image {\n /// let _histogram = AtomicLazyReference<Histogram>()\n ///\n /// // This is safe to call concurrently from multiple threads.\n /// var atomicLazyHistogram: Histogram {\n /// if let histogram = _histogram.load() { return histogram }\n /// // Note that code here may run concurrently on\n /// // multiple threads, but only one of them will get to\n /// // succeed setting the reference.\n /// let histogram = ...\n /// return _histogram.storeIfNil(histogram)\n /// }\n /// }\n ///\n /// - Note: This operation uses acquiring-and-releasing memory ordering.\n ///\n /// - Parameter desired: A value of `Instance` that we will attempt to store\n /// if the lazy reference is currently nil.\n /// - Returns: The value of `Instance` that was successfully stored within the\n /// lazy reference. This may or may not be the same value of `Instance` that\n /// was passed to this function.\n @available(SwiftStdlib 6.0, *)\n public func storeIfNil(_ desired: consuming Instance) -> Instance {\n let desiredUnmanaged = unsafe Unmanaged.passRetained(desired)\n let (exchanged, current) = unsafe storage.compareExchange(\n expected: nil,\n desired: desiredUnmanaged,\n ordering: .acquiringAndReleasing\n )\n\n if !exchanged {\n // The reference has already been initialized. 
Balance the retain that we\n // performed on 'desired'.\n unsafe desiredUnmanaged.release()\n return unsafe current!.takeUnretainedValue()\n }\n\n return unsafe desiredUnmanaged.takeUnretainedValue()\n }\n\n /// Atomically loads and returns the current value of this reference.\n ///\n /// - Note: The load operation is performed with the memory ordering\n /// `AtomicLoadOrdering.acquiring`.\n ///\n /// - Returns: A value of `Instance` if the lazy reference was written to, or\n /// `nil` if it has not been written to yet.\n @available(SwiftStdlib 6.0, *)\n public func load() -> Instance? {\n let value = unsafe storage.load(ordering: .acquiring)\n return unsafe value?.takeUnretainedValue()\n }\n}\n\n@available(SwiftStdlib 6.0, *)\nextension AtomicLazyReference: @unchecked Sendable where Instance: Sendable {}\n
dataset_sample\swift\swift\cpp_apple_swift_stdlib_public_Synchronization_Atomics_AtomicLazyReference.swift
cpp_apple_swift_stdlib_public_Synchronization_Atomics_AtomicLazyReference.swift
Swift
3,813
0.95
0.105769
0.552083
vue-tools
855
2024-09-11T18:23:57.287057
Apache-2.0
false
81fdcdbf84256e5ed439d9f8405c8e56
//===----------------------------------------------------------------------===//\n//\n// This source file is part of the Swift Atomics open source project\n//\n// Copyright (c) 2023 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n\nimport Builtin\n\n//===----------------------------------------------------------------------===//\n// Load Orderings\n//===----------------------------------------------------------------------===//\n\n/// Specifies the memory ordering semantics of an atomic load operation.\n@available(SwiftStdlib 6.0, *)\n@frozen\npublic struct AtomicLoadOrdering {\n @usableFromInline\n internal var _rawValue: Int\n\n @available(SwiftStdlib 6.0, *)\n @inlinable\n @_semantics("constant_evaluable")\n @_transparent\n internal init(_rawValue: Int) {\n self._rawValue = _rawValue\n }\n}\n\n@available(SwiftStdlib 6.0, *)\nextension AtomicLoadOrdering {\n // FIXME: Explain these ordering levels in more detail.\n\n /// Guarantees the atomicity of the specific operation on which it is applied,\n /// but imposes no ordering constraints on any other variable accesses.\n ///\n /// This value corresponds to `std::memory_order_relaxed` in C++.\n @available(SwiftStdlib 6.0, *)\n @_semantics("constant_evaluable")\n @_alwaysEmitIntoClient\n @_transparent\n public static var relaxed: Self {\n Self(_rawValue: 0)\n }\n\n /// An acquiring load synchronizes with a releasing operation whose\n /// value its reads. 
It ensures that the releasing and acquiring\n /// threads agree that all subsequent variable accesses on the\n /// acquiring thread happen after the atomic operation itself.\n ///\n /// This value corresponds to `std::memory_order_acquire` in C++.\n @available(SwiftStdlib 6.0, *)\n @_semantics("constant_evaluable")\n @_alwaysEmitIntoClient\n @_transparent\n public static var acquiring: Self {\n Self(_rawValue: 2)\n }\n\n /// A sequentially consistent load performs an acquiring load and\n /// also guarantees that it and all other sequentially consistent\n /// atomic operations (loads, stores, updates) appear to be executed\n /// in a single, total sequential ordering.\n ///\n /// This value corresponds to `std::memory_order_seq_cst` in C++.\n @available(SwiftStdlib 6.0, *)\n @_semantics("constant_evaluable")\n @_alwaysEmitIntoClient\n @_transparent\n public static var sequentiallyConsistent: Self {\n Self(_rawValue: 5)\n }\n}\n\n@available(SwiftStdlib 6.0, *)\nextension AtomicLoadOrdering: Equatable {\n @available(SwiftStdlib 6.0, *)\n @_transparent\n public static func ==(left: Self, right: Self) -> Bool {\n left._rawValue == right._rawValue\n }\n}\n\n@available(SwiftStdlib 6.0, *)\nextension AtomicLoadOrdering: Hashable {\n @available(SwiftStdlib 6.0, *)\n @inlinable\n public func hash(into hasher: inout Hasher) {\n hasher.combine(_rawValue)\n }\n}\n\n@available(SwiftStdlib 6.0, *)\n@_unavailableInEmbedded\nextension AtomicLoadOrdering: CustomStringConvertible {\n @available(SwiftStdlib 6.0, *)\n public var description: String {\n switch self {\n case .relaxed: return "relaxed"\n case .acquiring: return "acquiring"\n case .sequentiallyConsistent: return "sequentiallyConsistent"\n default: return "AtomicLoadOrdering(\(_rawValue))"\n }\n }\n}\n\n//===----------------------------------------------------------------------===//\n// Store Orderings\n//===----------------------------------------------------------------------===//\n\n/// Specifies the memory ordering 
semantics of an atomic store operation.\n@available(SwiftStdlib 6.0, *)\n@frozen\npublic struct AtomicStoreOrdering {\n @usableFromInline\n internal var _rawValue: Int\n\n @available(SwiftStdlib 6.0, *)\n @inlinable\n @_semantics("constant_evaluable")\n @_transparent\n internal init(_rawValue: Int) {\n self._rawValue = _rawValue\n }\n}\n\n@available(SwiftStdlib 6.0, *)\nextension AtomicStoreOrdering {\n // FIXME: Explain these ordering levels in more detail.\n\n /// Guarantees the atomicity of the specific operation on which it is applied,\n /// but imposes no ordering constraints on any other variable accesses.\n ///\n /// This value corresponds to `std::memory_order_relaxed` in C++.\n @available(SwiftStdlib 6.0, *)\n @_semantics("constant_evaluable")\n @_alwaysEmitIntoClient\n @_transparent\n public static var relaxed: Self {\n Self(_rawValue: 0)\n }\n\n /// A releasing store synchronizes with acquiring operations that\n /// read the value it stores. It ensures that the releasing and\n /// acquiring threads agree that all preceding variable accesses on\n /// the releasing thread happen before the atomic operation itself.\n ///\n /// This value corresponds to `std::memory_order_release` in C++.\n @available(SwiftStdlib 6.0, *)\n @_semantics("constant_evaluable")\n @_alwaysEmitIntoClient\n @_transparent\n public static var releasing: Self {\n Self(_rawValue: 3)\n }\n\n /// A sequentially consistent store performs a releasing store and\n /// also guarantees that it and all other sequentially consistent\n /// atomic operations (loads, stores, updates) appear to be executed\n /// in a single, total sequential ordering.\n ///\n /// This value corresponds to `std::memory_order_seq_cst` in C++.\n @available(SwiftStdlib 6.0, *)\n @_semantics("constant_evaluable")\n @_alwaysEmitIntoClient\n @_transparent\n public static var sequentiallyConsistent: Self {\n Self(_rawValue: 5)\n }\n}\n\n@available(SwiftStdlib 6.0, *)\nextension AtomicStoreOrdering: Equatable {\n 
@_transparent\n public static func ==(left: Self, right: Self) -> Bool {\n left._rawValue == right._rawValue\n }\n}\n\n@available(SwiftStdlib 6.0, *)\nextension AtomicStoreOrdering: Hashable {\n @available(SwiftStdlib 6.0, *)\n @inlinable\n public func hash(into hasher: inout Hasher) {\n hasher.combine(_rawValue)\n }\n}\n\n@available(SwiftStdlib 6.0, *)\n@_unavailableInEmbedded\nextension AtomicStoreOrdering: CustomStringConvertible {\n @available(SwiftStdlib 6.0, *)\n public var description: String {\n switch self {\n case .relaxed: return "relaxed"\n case .releasing: return "releasing"\n case .sequentiallyConsistent: return "sequentiallyConsistent"\n default: return "AtomicStoreOrdering(\(_rawValue))"\n }\n }\n}\n\n//===----------------------------------------------------------------------===//\n// Update Orderings\n//===----------------------------------------------------------------------===//\n\n/// Specifies the memory ordering semantics of an atomic read-modify-write\n/// operation.\n@available(SwiftStdlib 6.0, *)\n@frozen\npublic struct AtomicUpdateOrdering {\n @usableFromInline\n internal var _rawValue: Int\n\n @available(SwiftStdlib 6.0, *)\n @inlinable\n @_semantics("constant_evaluable")\n @_transparent\n internal init(_rawValue: Int) {\n self._rawValue = _rawValue\n }\n}\n\n@available(SwiftStdlib 6.0, *)\nextension AtomicUpdateOrdering {\n // FIXME: Explain these ordering levels in more detail.\n\n /// Guarantees the atomicity of the specific operation on which it is applied,\n /// but imposes no ordering constraints on any other variable accesses.\n ///\n /// This value corresponds to `std::memory_order_relaxed` in C++.\n @available(SwiftStdlib 6.0, *)\n @_semantics("constant_evaluable")\n @_alwaysEmitIntoClient\n @_transparent\n public static var relaxed: Self {\n Self(_rawValue: 0)\n }\n\n /// An acquiring update synchronizes with a releasing operation\n /// whose value its reads. 
It ensures that the releasing and\n /// acquiring threads agree that all subsequent variable accesses on\n /// the acquiring thread happen after the atomic operation itself.\n ///\n /// This value corresponds to `std::memory_order_acquire` in C++.\n @available(SwiftStdlib 6.0, *)\n @_semantics("constant_evaluable")\n @_alwaysEmitIntoClient\n @_transparent\n public static var acquiring: Self {\n Self(_rawValue: 2)\n }\n\n /// A releasing update synchronizes with acquiring operations that\n /// read the value it stores. It ensures that the releasing and\n /// acquiring threads agree that all preceding variable accesses on\n /// the releasing thread happen before the atomic operation itself.\n ///\n /// This value corresponds to `std::memory_order_release` in C++.\n @available(SwiftStdlib 6.0, *)\n @_semantics("constant_evaluable")\n @_alwaysEmitIntoClient\n @_transparent\n public static var releasing: Self {\n Self(_rawValue: 3)\n }\n\n /// An acquiring-and-releasing operation is a combination of\n /// `.acquiring` and `.releasing` operation on the same variable.\n ///\n /// This value corresponds to `std::memory_order_acq_rel` in C++.\n @available(SwiftStdlib 6.0, *)\n @_semantics("constant_evaluable")\n @_alwaysEmitIntoClient\n @_transparent\n public static var acquiringAndReleasing: Self {\n Self(_rawValue: 4)\n }\n\n /// A sequentially consistent update performs an\n /// acquiring-and-releasing update and also guarantees that it and\n /// all other sequentially consistent atomic operations (loads, stores,\n /// updates) appear to be executed in a single, total sequential\n /// ordering.\n ///\n /// This value corresponds to `std::memory_order_seq_cst` in C++.\n @available(SwiftStdlib 6.0, *)\n @_semantics("constant_evaluable")\n @_alwaysEmitIntoClient\n @_transparent\n public static var sequentiallyConsistent: Self {\n Self(_rawValue: 5)\n }\n}\n\n@available(SwiftStdlib 6.0, *)\nextension AtomicUpdateOrdering: Equatable {\n @available(SwiftStdlib 6.0, *)\n 
@_transparent\n public static func ==(left: Self, right: Self) -> Bool {\n left._rawValue == right._rawValue\n }\n}\n\n@available(SwiftStdlib 6.0, *)\nextension AtomicUpdateOrdering: Hashable {\n @available(SwiftStdlib 6.0, *)\n @inlinable\n public func hash(into hasher: inout Hasher) {\n hasher.combine(_rawValue)\n }\n}\n\n@available(SwiftStdlib 6.0, *)\n@_unavailableInEmbedded\nextension AtomicUpdateOrdering: CustomStringConvertible {\n @available(SwiftStdlib 6.0, *)\n public var description: String {\n switch self {\n case .relaxed: return "relaxed"\n case .acquiring: return "acquiring"\n case .releasing: return "releasing"\n case .acquiringAndReleasing: return "acquiringAndReleasing"\n case .sequentiallyConsistent: return "sequentiallyConsistent"\n default: return "AtomicUpdateOrdering(\(_rawValue))"\n }\n }\n}\n\n@available(SwiftStdlib 6.0, *)\nextension AtomicLoadOrdering {\n @available(SwiftStdlib 6.0, *)\n @_semantics("constant_evaluable")\n @_semantics("atomics.requires_constant_orderings")\n @_alwaysEmitIntoClient\n @_transparent\n static func _failureOrdering(\n for ordering: AtomicUpdateOrdering\n ) -> AtomicLoadOrdering {\n switch ordering {\n case .relaxed: return .relaxed\n case .acquiring: return .acquiring\n case .releasing: return .relaxed\n case .acquiringAndReleasing: return .acquiring\n case .sequentiallyConsistent: return .sequentiallyConsistent\n default: fatalError("Unsupported ordering")\n }\n }\n}\n\n//===----------------------------------------------------------------------===//\n// Atomic Memory Fence\n//===----------------------------------------------------------------------===//\n\n/// Establishes a memory ordering without associating it with a\n/// particular atomic operation.\n///\n/// - A relaxed fence has no effect.\n/// - An acquiring fence ties to any preceding atomic operation that\n/// reads a value, and synchronizes with any releasing operation whose\n/// value was read.\n/// - A releasing fence ties to any subsequent atomic 
operation that\n/// modifies a value, and synchronizes with any acquiring operation\n/// that reads the result.\n/// - An acquiring and releasing fence is a combination of an\n/// acquiring and a releasing fence.\n/// - A sequentially consistent fence behaves like an acquiring and\n/// releasing fence, and ensures that the fence itself is part of\n/// the single, total ordering for all sequentially consistent\n/// operations.\n///\n/// This operation corresponds to `std::atomic_thread_fence` in C++.\n///\n/// Be aware that Thread Sanitizer does not support fences and may report\n/// false-positive races for data protected by a fence.\n@available(SwiftStdlib 6.0, *)\n@_semantics("atomics.requires_constant_orderings")\n@_alwaysEmitIntoClient\n@_transparent\npublic func atomicMemoryFence(\n ordering: AtomicUpdateOrdering\n) {\n switch ordering {\n case .relaxed:\n return\n\n case .acquiring:\n Builtin.fence_acquire()\n\n case .releasing:\n Builtin.fence_release()\n\n case .acquiringAndReleasing:\n Builtin.fence_acqrel()\n\n case .sequentiallyConsistent:\n Builtin.fence_seqcst()\n\n default:\n Builtin.unreachable()\n }\n}\n
dataset_sample\swift\swift\cpp_apple_swift_stdlib_public_Synchronization_Atomics_AtomicMemoryOrderings.swift
cpp_apple_swift_stdlib_public_Synchronization_Atomics_AtomicMemoryOrderings.swift
Swift
12,738
0.95
0.02457
0.300546
python-kit
476
2023-08-30T12:52:30.704396
MIT
false
e61a93117fe06e65ceb181b5f06a447f
//===----------------------------------------------------------------------===//\n//\n// This source file is part of the Swift Atomics open source project\n//\n// Copyright (c) 2023 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n\n/// An atomic value that also supports atomic operations when wrapped\n/// in an `Optional`. Atomic optional representable types come with a standalone\n/// atomic representation for their optional-wrapped variants.\n@available(SwiftStdlib 6.0, *)\npublic protocol AtomicOptionalRepresentable: AtomicRepresentable {\n /// The storage representation type that encodes to and decodes from\n /// `Optional<Self>` which is a suitable type when used in atomic operations\n /// on `Optional`.\n associatedtype AtomicOptionalRepresentation: BitwiseCopyable\n\n /// Destroys a value of `Self` and prepares an `AtomicOptionalRepresentation`\n /// storage type to be used for atomic operations on `Optional`.\n ///\n /// - Note: This is not an atomic operation. 
This simply encodes the logical\n /// type `Self` into its storage representation suitable for atomic\n /// operations, `AtomicOptionalRepresentation`.\n ///\n /// - Parameter value: An optional instance of `Self` that's about to be\n /// destroyed to encode an instance of its `AtomicOptionalRepresentation`.\n /// - Returns: The newly encoded `AtomicOptionalRepresentation` storage.\n static func encodeAtomicOptionalRepresentation(\n _ value: consuming Self?\n ) -> AtomicOptionalRepresentation\n\n /// Recovers the logical atomic type `Self?` by destroying some\n /// `AtomicOptionalRepresentation` storage instance returned from an atomic\n /// operation on `Optional`.\n ///\n /// - Note: This is not an atomic operation. This simply decodes the storage\n /// representation used in atomic operations on `Optional` back into the\n /// logical type for normal use, `Self?`.\n ///\n /// - Parameter storage: The optional storage representation for `Self?`\n /// that's used within atomic operations on `Optional`.\n /// - Returns: The newly decoded logical type `Self?`.\n static func decodeAtomicOptionalRepresentation(\n _ representation: consuming AtomicOptionalRepresentation\n ) -> Self?\n}\n\n//===----------------------------------------------------------------------===//\n// RawRepresentable AtomicOptionalRepresentable conformance\n//===----------------------------------------------------------------------===//\n\n@available(SwiftStdlib 6.0, *)\nextension RawRepresentable\nwhere\n Self: AtomicOptionalRepresentable,\n RawValue: AtomicOptionalRepresentable\n{\n /// The storage representation type that encodes to and decodes from\n /// `Optional<Self>` which is a suitable type when used in atomic operations\n /// on `Optional`.\n @available(SwiftStdlib 6.0, *)\n public typealias AtomicOptionalRepresentation = RawValue.AtomicOptionalRepresentation\n\n /// Destroys a value of `Self` and prepares an `AtomicOptionalRepresentation`\n /// storage type to be used for atomic 
operations on `Optional`.\n ///\n /// - Note: This is not an atomic operation. This simply encodes the logical\n /// type `Self` into its storage representation suitable for atomic\n /// operations, `AtomicOptionalRepresentation`.\n ///\n /// - Parameter value: An optional instance of `Self` that's about to be\n /// destroyed to encode an instance of its `AtomicOptionalRepresentation`.\n /// - Returns: The newly encoded `AtomicOptionalRepresentation` storage.\n @available(SwiftStdlib 6.0, *)\n @_alwaysEmitIntoClient\n @_transparent\n public static func encodeAtomicOptionalRepresentation(\n _ value: consuming Self?\n ) -> RawValue.AtomicOptionalRepresentation {\n // FIXME: There is currently a compiler crash with the following:\n //\n // RawValue.encodeAtomicOptionalRepresentation(value?.rawValue)\n\n if let value = value {\n return RawValue.encodeAtomicOptionalRepresentation(value.rawValue)\n }\n\n return RawValue.encodeAtomicOptionalRepresentation(nil)\n }\n\n /// Recovers the logical atomic type `Self?` by destroying some\n /// `AtomicOptionalRepresentation` storage instance returned from an atomic\n /// operation on `Optional`.\n ///\n /// - Note: This is not an atomic operation. This simply decodes the storage\n /// representation used in atomic operations on `Optional` back into the\n /// logical type for normal use, `Self?`.\n ///\n /// - Parameter storage: The optional storage representation for `Self?`\n /// that's used within atomic operations on `Optional`.\n /// - Returns: The newly decoded logical type `Self?`.\n @available(SwiftStdlib 6.0, *)\n @_alwaysEmitIntoClient\n @_transparent\n public static func decodeAtomicOptionalRepresentation(\n _ representation: consuming RawValue.AtomicOptionalRepresentation\n ) -> Self? 
{\n RawValue.decodeAtomicOptionalRepresentation(representation).flatMap {\n Self(rawValue: $0)\n }\n }\n}\n\n//===----------------------------------------------------------------------===//\n// Optional AtomicRepresentable conformance\n//===----------------------------------------------------------------------===//\n\n@available(SwiftStdlib 6.0, *)\nextension Optional: AtomicRepresentable where Wrapped: AtomicOptionalRepresentable {\n /// The storage representation type that `Self` encodes to and decodes from\n /// which is a suitable type when used in atomic operations.\n @available(SwiftStdlib 6.0, *)\n public typealias AtomicRepresentation = Wrapped.AtomicOptionalRepresentation\n\n /// Destroys a value of `Self` and prepares an `AtomicRepresentation` storage\n /// type to be used for atomic operations.\n ///\n /// - Note: This is not an atomic operation. This simply encodes the logical\n /// type `Self` into its storage representation suitable for atomic\n /// operations, `AtomicRepresentation`.\n ///\n /// - Parameter value: A valid instance of `Self` that's about to be destroyed\n /// to encode an instance of its `AtomicRepresentation`.\n /// - Returns: The newly encoded `AtomicRepresentation` storage.\n @available(SwiftStdlib 6.0, *)\n @_alwaysEmitIntoClient\n @_transparent\n public static func encodeAtomicRepresentation(\n _ value: consuming Wrapped?\n ) -> AtomicRepresentation {\n Wrapped.encodeAtomicOptionalRepresentation(value)\n }\n\n /// Recovers the logical atomic type `Self` by destroying some\n /// `AtomicRepresentation` storage instance returned from an atomic operation.\n ///\n /// - Note: This is not an atomic operation. 
This simply decodes the storage\n /// representation used in atomic operations back into the logical type for\n /// normal use, `Self`.\n ///\n /// - Parameter storage: The storage representation for `Self` that's used\n /// within atomic operations.\n /// - Returns: The newly decoded logical type `Self`.\n @available(SwiftStdlib 6.0, *)\n @_alwaysEmitIntoClient\n @_transparent\n public static func decodeAtomicRepresentation(\n _ representation: consuming AtomicRepresentation\n ) -> Wrapped? {\n Wrapped.decodeAtomicOptionalRepresentation(representation)\n }\n}\n
dataset_sample\swift\swift\cpp_apple_swift_stdlib_public_Synchronization_Atomics_AtomicOptional.swift
cpp_apple_swift_stdlib_public_Synchronization_Atomics_AtomicOptional.swift
Swift
7,300
0.8
0.095808
0.603896
node-utils
203
2025-06-30T23:44:18.563305
GPL-3.0
false
b5ba7ca7cbcfe083fc4810d830b50cd6
//===----------------------------------------------------------------------===//\n//\n// This source file is part of the Swift.org open source project\n//\n// Copyright (c) 2023 - 2024 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n\n//===----------------------------------------------------------------------===//\n// UnsafePointer AtomicRepresentable and AtomicOptionalRepresentable conformance\n//===----------------------------------------------------------------------===//\n\n@available(SwiftStdlib 6.0, *)\nextension UnsafePointer: @unsafe AtomicRepresentable where Pointee: ~Copyable {\n /// The storage representation type that `Self` encodes to and decodes from\n /// which is a suitable type when used in atomic operations.\n @available(SwiftStdlib 6.0, *)\n public typealias AtomicRepresentation = Int.AtomicRepresentation\n\n /// Destroys a value of `Self` and prepares an `AtomicRepresentation` storage\n /// type to be used for atomic operations.\n ///\n /// - Note: This is not an atomic operation. 
This simply encodes the logical
  /// type `Self` into its storage representation suitable for atomic
  /// operations, `AtomicRepresentation`.
  ///
  /// - Parameter value: A valid instance of `Self` that's about to be destroyed
  /// to encode an instance of its `AtomicRepresentation`.
  /// - Returns: The newly encoded `AtomicRepresentation` storage.
  @available(SwiftStdlib 6.0, *)
  @_alwaysEmitIntoClient
  @_transparent
  public static func encodeAtomicRepresentation(
    _ value: consuming UnsafePointer<Pointee>
  ) -> AtomicRepresentation {
    Int.encodeAtomicRepresentation(
      Int(bitPattern: value)
    )
  }

  /// Recovers the logical atomic type `Self` by destroying some
  /// `AtomicRepresentation` storage instance returned from an atomic operation.
  ///
  /// - Note: This is not an atomic operation. This simply decodes the storage
  /// representation used in atomic operations back into the logical type for
  /// normal use, `Self`.
  ///
  /// - Parameter storage: The storage representation for `Self` that's used
  /// within atomic operations.
  /// - Returns: The newly decoded logical type `Self`.
  @available(SwiftStdlib 6.0, *)
  @_alwaysEmitIntoClient
  @_transparent
  public static func decodeAtomicRepresentation(
    _ representation: consuming AtomicRepresentation
  ) -> UnsafePointer<Pointee> {
    // The non-optional conformance only ever encodes valid (non-null)
    // pointers, so the bit pattern is nonzero and the force-unwrap cannot
    // trap in correct usage.
    unsafe UnsafePointer<Pointee>(
      bitPattern: Int.decodeAtomicRepresentation(representation)
    )!
  }
}

@available(SwiftStdlib 6.0, *)
extension UnsafePointer: @unsafe AtomicOptionalRepresentable where Pointee: ~Copyable {
  /// The storage representation type that encodes to and decodes from
  /// `Optional<Self>` which is a suitable type when used in atomic operations
  /// on `Optional`.
  @available(SwiftStdlib 6.0, *)
  public typealias AtomicOptionalRepresentation = Int.AtomicRepresentation

  /// Destroys a value of `Self` and prepares an `AtomicOptionalRepresentation`
  /// storage type to be used for atomic operations on `Optional`.
  ///
  /// - Note: This is not an atomic operation. This simply encodes the logical
  /// type `Self` into its storage representation suitable for atomic
  /// operations, `AtomicOptionalRepresentation`.
  ///
  /// - Parameter value: An optional instance of `Self` that's about to be
  /// destroyed to encode an instance of its `AtomicOptionalRepresentation`.
  /// - Returns: The newly encoded `AtomicOptionalRepresentation` storage.
  @available(SwiftStdlib 6.0, *)
  @_alwaysEmitIntoClient
  @_transparent
  public static func encodeAtomicOptionalRepresentation(
    _ value: consuming UnsafePointer<Pointee>?
  ) -> AtomicOptionalRepresentation {
    // Int(bitPattern:) maps nil to 0, which is the representation for nil.
    Int.encodeAtomicRepresentation(
      Int(bitPattern: value)
    )
  }

  /// Recovers the logical atomic type `Self?` by destroying some
  /// `AtomicOptionalRepresentation` storage instance returned from an atomic
  /// operation on `Optional`.
  ///
  /// - Note: This is not an atomic operation. This simply decodes the storage
  /// representation used in atomic operations on `Optional` back into the
  /// logical type for normal use, `Self?`.
  ///
  /// - Parameter storage: The optional storage representation for `Self?`
  /// that's used within atomic operations on `Optional`.
  /// - Returns: The newly decoded logical type `Self?`.
  @available(SwiftStdlib 6.0, *)
  @_alwaysEmitIntoClient
  @_transparent
  public static func decodeAtomicOptionalRepresentation(
    _ representation: consuming AtomicOptionalRepresentation
  ) -> UnsafePointer<Pointee>? {
    // A zero bit pattern decodes to nil via the failable bitPattern init.
    unsafe UnsafePointer<Pointee>(
      bitPattern: Int.decodeAtomicRepresentation(representation)
    )
  }
}

//===----------------------------------------------------------------------===//
// UnsafeMutablePointer AtomicRepresentable and AtomicOptionalRepresentable conformance
//===----------------------------------------------------------------------===//

@available(SwiftStdlib 6.0, *)
extension UnsafeMutablePointer: @unsafe AtomicRepresentable where Pointee: ~Copyable {
  /// The storage representation type that `Self` encodes to and decodes from
  /// which is a suitable type when used in atomic operations.
  @available(SwiftStdlib 6.0, *)
  public typealias AtomicRepresentation = Int.AtomicRepresentation

  /// Destroys a value of `Self` and prepares an `AtomicRepresentation` storage
  /// type to be used for atomic operations.
  ///
  /// - Note: This is not an atomic operation. This simply encodes the logical
  /// type `Self` into its storage representation suitable for atomic
  /// operations, `AtomicRepresentation`.
  ///
  /// - Parameter value: A valid instance of `Self` that's about to be destroyed
  /// to encode an instance of its `AtomicRepresentation`.
  /// - Returns: The newly encoded `AtomicRepresentation` storage.
  @available(SwiftStdlib 6.0, *)
  @_alwaysEmitIntoClient
  @_transparent
  public static func encodeAtomicRepresentation(
    _ value: consuming UnsafeMutablePointer<Pointee>
  ) -> AtomicRepresentation {
    Int.encodeAtomicRepresentation(
      Int(bitPattern: value)
    )
  }

  /// Recovers the logical atomic type `Self` by destroying some
  /// `AtomicRepresentation` storage instance returned from an atomic operation.
  ///
  /// - Note: This is not an atomic operation. This simply decodes the storage
  /// representation used in atomic operations back into the logical type for
  /// normal use, `Self`.
  ///
  /// - Parameter storage: The storage representation for `Self` that's used
  /// within atomic operations.
  /// - Returns: The newly decoded logical type `Self`.
  @available(SwiftStdlib 6.0, *)
  @_alwaysEmitIntoClient
  @_transparent
  public static func decodeAtomicRepresentation(
    _ representation: consuming AtomicRepresentation
  ) -> UnsafeMutablePointer<Pointee> {
    // The non-optional conformance only ever encodes valid (non-null)
    // pointers, so the bit pattern is nonzero and the force-unwrap cannot
    // trap in correct usage.
    unsafe UnsafeMutablePointer<Pointee>(
      bitPattern: Int.decodeAtomicRepresentation(representation)
    )!
  }
}

@available(SwiftStdlib 6.0, *)
extension UnsafeMutablePointer: @unsafe AtomicOptionalRepresentable
where Pointee: ~Copyable {
  /// The storage representation type that encodes to and decodes from
  /// `Optional<Self>` which is a suitable type when used in atomic operations
  /// on `Optional`.
  @available(SwiftStdlib 6.0, *)
  public typealias AtomicOptionalRepresentation = Int.AtomicRepresentation

  /// Destroys a value of `Self` and prepares an `AtomicOptionalRepresentation`
  /// storage type to be used for atomic operations on `Optional`.
  ///
  /// - Note: This is not an atomic operation. This simply encodes the logical
  /// type `Self` into its storage representation suitable for atomic
  /// operations, `AtomicOptionalRepresentation`.
  ///
  /// - Parameter value: An optional instance of `Self` that's about to be
  /// destroyed to encode an instance of its `AtomicOptionalRepresentation`.
  /// - Returns: The newly encoded `AtomicOptionalRepresentation` storage.
  @available(SwiftStdlib 6.0, *)
  @_alwaysEmitIntoClient
  @_transparent
  public static func encodeAtomicOptionalRepresentation(
    _ value: consuming UnsafeMutablePointer<Pointee>?
  ) -> AtomicOptionalRepresentation {
    // Int(bitPattern:) maps nil to 0, which is the representation for nil.
    Int.encodeAtomicRepresentation(
      Int(bitPattern: value)
    )
  }

  /// Recovers the logical atomic type `Self?` by destroying some
  /// `AtomicOptionalRepresentation` storage instance returned from an atomic
  /// operation on `Optional`.
  ///
  /// - Note: This is not an atomic operation. This simply decodes the storage
  /// representation used in atomic operations on `Optional` back into the
  /// logical type for normal use, `Self?`.
  ///
  /// - Parameter storage: The optional storage representation for `Self?`
  /// that's used within atomic operations on `Optional`.
  /// - Returns: The newly decoded logical type `Self?`.
  @available(SwiftStdlib 6.0, *)
  @_alwaysEmitIntoClient
  @_transparent
  public static func decodeAtomicOptionalRepresentation(
    _ representation: consuming AtomicOptionalRepresentation
  ) -> UnsafeMutablePointer<Pointee>? {
    // A zero bit pattern decodes to nil via the failable bitPattern init.
    unsafe UnsafeMutablePointer<Pointee>(
      bitPattern: Int.decodeAtomicRepresentation(representation)
    )
  }
}

//===----------------------------------------------------------------------===//
// UnsafeRawPointer AtomicRepresentable and AtomicOptionalRepresentable conformance
//===----------------------------------------------------------------------===//

@available(SwiftStdlib 6.0, *)
extension UnsafeRawPointer: @unsafe AtomicRepresentable {
  /// The storage representation type that `Self` encodes to and decodes from
  /// which is a suitable type when used in atomic operations.
  @available(SwiftStdlib 6.0, *)
  public typealias AtomicRepresentation = Int.AtomicRepresentation

  /// Destroys a value of `Self` and prepares an `AtomicRepresentation` storage
  /// type to be used for atomic operations.
  ///
  /// - Note: This is not an atomic operation. This simply encodes the logical
  /// type `Self` into its storage representation suitable for atomic
  /// operations, `AtomicRepresentation`.
  ///
  /// - Parameter value: A valid instance of `Self` that's about to be destroyed
  /// to encode an instance of its `AtomicRepresentation`.
  /// - Returns: The newly encoded `AtomicRepresentation` storage.
  @available(SwiftStdlib 6.0, *)
  @_alwaysEmitIntoClient
  @_transparent
  public static func encodeAtomicRepresentation(
    _ value: consuming UnsafeRawPointer
  ) -> AtomicRepresentation {
    Int.encodeAtomicRepresentation(
      Int(bitPattern: value)
    )
  }

  /// Recovers the logical atomic type `Self` by destroying some
  /// `AtomicRepresentation` storage instance returned from an atomic operation.
  ///
  /// - Note: This is not an atomic operation. This simply decodes the storage
  /// representation used in atomic operations back into the logical type for
  /// normal use, `Self`.
  ///
  /// - Parameter storage: The storage representation for `Self` that's used
  /// within atomic operations.
  /// - Returns: The newly decoded logical type `Self`.
  @available(SwiftStdlib 6.0, *)
  @_alwaysEmitIntoClient
  @_transparent
  public static func decodeAtomicRepresentation(
    _ representation: consuming AtomicRepresentation
  ) -> UnsafeRawPointer {
    // The non-optional conformance only ever encodes valid (non-null)
    // pointers, so the bit pattern is nonzero and the force-unwrap cannot
    // trap in correct usage.
    unsafe UnsafeRawPointer(
      bitPattern: Int.decodeAtomicRepresentation(representation)
    )!
  }
}

@available(SwiftStdlib 6.0, *)
extension UnsafeRawPointer: @unsafe AtomicOptionalRepresentable {
  /// The storage representation type that encodes to and decodes from
  /// `Optional<Self>` which is a suitable type when used in atomic operations
  /// on `Optional`.
  @available(SwiftStdlib 6.0, *)
  public typealias AtomicOptionalRepresentation = Int.AtomicRepresentation

  /// Destroys a value of `Self` and prepares an `AtomicOptionalRepresentation`
  /// storage type to be used for atomic operations on `Optional`.
  ///
  /// - Note: This is not an atomic operation. This simply encodes the logical
  /// type `Self` into its storage representation suitable for atomic
  /// operations, `AtomicOptionalRepresentation`.
  ///
  /// - Parameter value: An optional instance of `Self` that's about to be
  /// destroyed to encode an instance of its `AtomicOptionalRepresentation`.
  /// - Returns: The newly encoded `AtomicOptionalRepresentation` storage.
  @available(SwiftStdlib 6.0, *)
  @_alwaysEmitIntoClient
  @_transparent
  public static func encodeAtomicOptionalRepresentation(
    _ value: consuming UnsafeRawPointer?
  ) -> AtomicOptionalRepresentation {
    // Int(bitPattern:) maps nil to 0, which is the representation for nil.
    Int.encodeAtomicRepresentation(
      Int(bitPattern: value)
    )
  }

  /// Recovers the logical atomic type `Self?` by destroying some
  /// `AtomicOptionalRepresentation` storage instance returned from an atomic
  /// operation on `Optional`.
  ///
  /// - Note: This is not an atomic operation. This simply decodes the storage
  /// representation used in atomic operations on `Optional` back into the
  /// logical type for normal use, `Self?`.
  ///
  /// - Parameter storage: The optional storage representation for `Self?`
  /// that's used within atomic operations on `Optional`.
  /// - Returns: The newly decoded logical type `Self?`.
  @available(SwiftStdlib 6.0, *)
  @_alwaysEmitIntoClient
  @_transparent
  public static func decodeAtomicOptionalRepresentation(
    _ representation: consuming AtomicOptionalRepresentation
  ) -> UnsafeRawPointer? {
    // A zero bit pattern decodes to nil via the failable bitPattern init.
    unsafe UnsafeRawPointer(
      bitPattern: Int.decodeAtomicRepresentation(representation)
    )
  }
}

//===----------------------------------------------------------------------===//
// UnsafeMutableRawPointer AtomicRepresentable and AtomicOptionalRepresentable conformance
//===----------------------------------------------------------------------===//

@available(SwiftStdlib 6.0, *)
extension UnsafeMutableRawPointer: @unsafe AtomicRepresentable {
  /// The storage representation type that `Self` encodes to and decodes from
  /// which is a suitable type when used in atomic operations.
  @available(SwiftStdlib 6.0, *)
  public typealias AtomicRepresentation = Int.AtomicRepresentation

  /// Destroys a value of `Self` and prepares an `AtomicRepresentation` storage
  /// type to be used for atomic operations.
  ///
  /// - Note: This is not an atomic operation. This simply encodes the logical
  /// type `Self` into its storage representation suitable for atomic
  /// operations, `AtomicRepresentation`.
  ///
  /// - Parameter value: A valid instance of `Self` that's about to be destroyed
  /// to encode an instance of its `AtomicRepresentation`.
  /// - Returns: The newly encoded `AtomicRepresentation` storage.
  @available(SwiftStdlib 6.0, *)
  @_alwaysEmitIntoClient
  @_transparent
  public static func encodeAtomicRepresentation(
    _ value: consuming UnsafeMutableRawPointer
  ) -> AtomicRepresentation {
    Int.encodeAtomicRepresentation(
      Int(bitPattern: value)
    )
  }

  /// Recovers the logical atomic type `Self` by destroying some
  /// `AtomicRepresentation` storage instance returned from an atomic operation.
  ///
  /// - Note: This is not an atomic operation. This simply decodes the storage
  /// representation used in atomic operations back into the logical type for
  /// normal use, `Self`.
  ///
  /// - Parameter storage: The storage representation for `Self` that's used
  /// within atomic operations.
  /// - Returns: The newly decoded logical type `Self`.
  @available(SwiftStdlib 6.0, *)
  @_alwaysEmitIntoClient
  @_transparent
  public static func decodeAtomicRepresentation(
    _ representation: consuming AtomicRepresentation
  ) -> UnsafeMutableRawPointer {
    // The non-optional conformance only ever encodes valid (non-null)
    // pointers, so the bit pattern is nonzero and the force-unwrap cannot
    // trap in correct usage.
    unsafe UnsafeMutableRawPointer(
      bitPattern: Int.decodeAtomicRepresentation(representation)
    )!
  }
}

@available(SwiftStdlib 6.0, *)
extension UnsafeMutableRawPointer: @unsafe AtomicOptionalRepresentable {
  /// The storage representation type that encodes to and decodes from
  /// `Optional<Self>` which is a suitable type when used in atomic operations
  /// on `Optional`.
  @available(SwiftStdlib 6.0, *)
  public typealias AtomicOptionalRepresentation = Int.AtomicRepresentation

  /// Destroys a value of `Self` and prepares an `AtomicOptionalRepresentation`
  /// storage type to be used for atomic operations on `Optional`.
  ///
  /// - Note: This is not an atomic operation. This simply encodes the logical
  /// type `Self` into its storage representation suitable for atomic
  /// operations, `AtomicOptionalRepresentation`.
  ///
  /// - Parameter value: An optional instance of `Self` that's about to be
  /// destroyed to encode an instance of its `AtomicOptionalRepresentation`.
  /// - Returns: The newly encoded `AtomicOptionalRepresentation` storage.
  @available(SwiftStdlib 6.0, *)
  @_alwaysEmitIntoClient
  @_transparent
  public static func encodeAtomicOptionalRepresentation(
    _ value: consuming UnsafeMutableRawPointer?
  ) -> AtomicOptionalRepresentation {
    // Int(bitPattern:) maps nil to 0, which is the representation for nil.
    Int.encodeAtomicRepresentation(
      Int(bitPattern: value)
    )
  }

  /// Recovers the logical atomic type `Self?` by destroying some
  /// `AtomicOptionalRepresentation` storage instance returned from an atomic
  /// operation on `Optional`.
  ///
  /// - Note: This is not an atomic operation. This simply decodes the storage
  /// representation used in atomic operations on `Optional` back into the
  /// logical type for normal use, `Self?`.
  ///
  /// - Parameter storage: The optional storage representation for `Self?`
  /// that's used within atomic operations on `Optional`.
  /// - Returns: The newly decoded logical type `Self?`.
  @available(SwiftStdlib 6.0, *)
  @_alwaysEmitIntoClient
  @_transparent
  public static func decodeAtomicOptionalRepresentation(
    _ representation: consuming AtomicOptionalRepresentation
  ) -> UnsafeMutableRawPointer? {
    // A zero bit pattern decodes to nil via the failable bitPattern init.
    unsafe UnsafeMutableRawPointer(
      bitPattern: Int.decodeAtomicRepresentation(representation)
    )
  }
}

//===----------------------------------------------------------------------===//
// Unmanaged AtomicRepresentable and AtomicOptionalRepresentable conformance
//===----------------------------------------------------------------------===//

@available(SwiftStdlib 6.0, *)
extension Unmanaged: @unsafe AtomicRepresentable {
  /// The storage representation type that `Self` encodes to and decodes from
  /// which is a suitable type when used in atomic operations.
  @available(SwiftStdlib 6.0, *)
  public typealias AtomicRepresentation = Int.AtomicRepresentation

  /// Destroys a value of `Self` and prepares an `AtomicRepresentation` storage
  /// type to be used for atomic operations.
  ///
  /// - Note: This is not an atomic operation. This simply encodes the logical
  /// type `Self` into its storage representation suitable for atomic
  /// operations, `AtomicRepresentation`.
  ///
  /// - Parameter value: A valid instance of `Self` that's about to be destroyed
  /// to encode an instance of its `AtomicRepresentation`.
  /// - Returns: The newly encoded `AtomicRepresentation` storage.
  @available(SwiftStdlib 6.0, *)
  @_alwaysEmitIntoClient
  @_transparent
  public static func encodeAtomicRepresentation(
    _ value: consuming Unmanaged<Instance>
  ) -> AtomicRepresentation {
    // toOpaque() exposes the reference as a raw pointer without changing
    // its retain count; the stored representation carries no ownership.
    unsafe Int.encodeAtomicRepresentation(
      Int(bitPattern: value.toOpaque())
    )
  }

  /// Recovers the logical atomic type `Self` by destroying some
  /// `AtomicRepresentation` storage instance returned from an atomic operation.
  ///
  /// - Note: This is not an atomic operation. This simply decodes the storage
  /// representation used in atomic operations back into the logical type for
  /// normal use, `Self`.
  ///
  /// - Parameter storage: The storage representation for `Self` that's used
  /// within atomic operations.
  /// - Returns: The newly decoded logical type `Self`.
  @available(SwiftStdlib 6.0, *)
  @_alwaysEmitIntoClient
  @_transparent
  public static func decodeAtomicRepresentation(
    _ representation: consuming AtomicRepresentation
  ) -> Unmanaged<Instance> {
    unsafe Unmanaged<Instance>.fromOpaque(
      UnsafeRawPointer.decodeAtomicRepresentation(representation)
    )
  }
}

@available(SwiftStdlib 6.0, *)
extension Unmanaged: @unsafe AtomicOptionalRepresentable {
  /// The storage representation type that encodes to and decodes from
  /// `Optional<Self>` which is a suitable type when used in atomic operations
  /// on `Optional`.
  @available(SwiftStdlib 6.0, *)
  public typealias AtomicOptionalRepresentation = Int.AtomicRepresentation

  /// Destroys a value of `Self` and prepares an `AtomicOptionalRepresentation`
  /// storage type to be used for atomic operations on `Optional`.
  ///
  /// - Note: This is not an atomic operation. This simply encodes the logical
  /// type `Self` into its storage representation suitable for atomic
  /// operations, `AtomicOptionalRepresentation`.
  ///
  /// - Parameter value: An optional instance of `Self` that's about to be
  /// destroyed to encode an instance of its `AtomicOptionalRepresentation`.
  /// - Returns: The newly encoded `AtomicOptionalRepresentation` storage.
  @available(SwiftStdlib 6.0, *)
  @_alwaysEmitIntoClient
  @_transparent
  public static func encodeAtomicOptionalRepresentation(
    _ value: consuming Unmanaged<Instance>?
  ) -> AtomicOptionalRepresentation {
    // FIXME: The following leads to a compiler crash at the moment.
    //
    // Int.AtomicRepresentation(Int(bitPattern: value?.toOpaque())._value)

    if let unmanaged = unsafe value {
      return unsafe Int.encodeAtomicRepresentation(
        Int(bitPattern: unmanaged.toOpaque())
      )
    }

    // nil encodes as a zero bit pattern.
    return Int.AtomicRepresentation(0._value)
  }

  /// Recovers the logical atomic type `Self?` by destroying some
  /// `AtomicOptionalRepresentation` storage instance returned from an atomic
  /// operation on `Optional`.
  ///
  /// - Note: This is not an atomic operation. This simply decodes the storage
  /// representation used in atomic operations on `Optional` back into the
  /// logical type for normal use, `Self?`.
  ///
  /// - Parameter storage: The optional storage representation for `Self?`
  /// that's used within atomic operations on `Optional`.
  /// - Returns: The newly decoded logical type `Self?`.
  @available(SwiftStdlib 6.0, *)
  @_alwaysEmitIntoClient
  @_transparent
  public static func decodeAtomicOptionalRepresentation(
    _ representation: consuming AtomicOptionalRepresentation
  ) -> Unmanaged<Instance>? {
    // Delegates to the optional raw-pointer decode (zero bit pattern -> nil),
    // then rewraps a present pointer without touching its retain count.
    unsafe UnsafeRawPointer.decodeAtomicOptionalRepresentation(representation).map {
      unsafe Unmanaged.fromOpaque($0)
    }
  }
}

//===----------------------------------------------------------------------===//
// OpaquePointer AtomicRepresentable and AtomicOptionalRepresentable conformance
//===----------------------------------------------------------------------===//

@available(SwiftStdlib 6.0, *)
extension OpaquePointer: @unsafe AtomicRepresentable {
  /// The storage representation type that `Self` encodes to and decodes from
  /// which is a suitable type when used in atomic operations.
  @available(SwiftStdlib 6.0, *)
  public typealias AtomicRepresentation = Int.AtomicRepresentation

  /// Destroys a value of `Self` and prepares an `AtomicRepresentation` storage
  /// type to be used for atomic operations.
  ///
  /// - Note: This is not an atomic operation. This simply encodes the logical
  /// type `Self` into its storage representation suitable for atomic
  /// operations, `AtomicRepresentation`.
  ///
  /// - Parameter value: A valid instance of `Self` that's about to be destroyed
  /// to encode an instance of its `AtomicRepresentation`.
  /// - Returns: The newly encoded `AtomicRepresentation` storage.
  @available(SwiftStdlib 6.0, *)
  @_alwaysEmitIntoClient
  @_transparent
  public static func encodeAtomicRepresentation(
    _ value: consuming OpaquePointer
  ) -> AtomicRepresentation {
    Int.encodeAtomicRepresentation(
      Int(bitPattern: value)
    )
  }

  /// Recovers the logical atomic type `Self` by destroying some
  /// `AtomicRepresentation` storage instance returned from an atomic operation.
  ///
  /// - Note: This is not an atomic operation. This simply decodes the storage
  /// representation used in atomic operations back into the logical type for
  /// normal use, `Self`.
  ///
  /// - Parameter storage: The storage representation for `Self` that's used
  /// within atomic operations.
  /// - Returns: The newly decoded logical type `Self`.
  @available(SwiftStdlib 6.0, *)
  @_alwaysEmitIntoClient
  @_transparent
  public static func decodeAtomicRepresentation(
    _ representation: consuming AtomicRepresentation
  ) -> OpaquePointer {
    // The non-optional conformance only ever encodes valid (non-null)
    // pointers, so the bit pattern is nonzero and the force-unwrap cannot
    // trap in correct usage.
    unsafe OpaquePointer(
      bitPattern: Int.decodeAtomicRepresentation(representation)
    )!
  }
}

@available(SwiftStdlib 6.0, *)
extension OpaquePointer: @unsafe AtomicOptionalRepresentable {
  /// The storage representation type that encodes to and decodes from
  /// `Optional<Self>` which is a suitable type when used in atomic operations
  /// on `Optional`.
  @available(SwiftStdlib 6.0, *)
  public typealias AtomicOptionalRepresentation = Int.AtomicRepresentation

  /// Destroys a value of `Self` and prepares an `AtomicOptionalRepresentation`
  /// storage type to be used for atomic operations on `Optional`.
  ///
  /// - Note: This is not an atomic operation. This simply encodes the logical
  /// type `Self` into its storage representation suitable for atomic
  /// operations, `AtomicOptionalRepresentation`.
  ///
  /// - Parameter value: An optional instance of `Self` that's about to be
  /// destroyed to encode an instance of its `AtomicOptionalRepresentation`.
  /// - Returns: The newly encoded `AtomicOptionalRepresentation` storage.
  @available(SwiftStdlib 6.0, *)
  @_alwaysEmitIntoClient
  @_transparent
  public static func encodeAtomicOptionalRepresentation(
    _ value: consuming OpaquePointer?
  ) -> AtomicOptionalRepresentation {
    // Int(bitPattern:) maps nil to 0, which is the representation for nil.
    Int.encodeAtomicRepresentation(
      Int(bitPattern: value)
    )
  }

  /// Recovers the logical atomic type `Self?` by destroying some
  /// `AtomicOptionalRepresentation` storage instance returned from an atomic
  /// operation on `Optional`.
  ///
  /// - Note: This is not an atomic operation. This simply decodes the storage
  /// representation used in atomic operations on `Optional` back into the
  /// logical type for normal use, `Self?`.
  ///
  /// - Parameter storage: The optional storage representation for `Self?`
  /// that's used within atomic operations on `Optional`.
  /// - Returns: The newly decoded logical type `Self?`.
  @available(SwiftStdlib 6.0, *)
  @_alwaysEmitIntoClient
  @_transparent
  public static func decodeAtomicOptionalRepresentation(
    _ representation: consuming AtomicOptionalRepresentation
  ) -> OpaquePointer? {
    // A zero bit pattern decodes to nil via the failable bitPattern init.
    unsafe OpaquePointer(
      bitPattern: Int.decodeAtomicRepresentation(representation)
    )
  }
}

//===----------------------------------------------------------------------===//
// ObjectIdentifier AtomicRepresentable and AtomicOptionalRepresentable conformance
//===----------------------------------------------------------------------===//

@available(SwiftStdlib 6.0, *)
extension ObjectIdentifier: AtomicRepresentable {
  /// The storage representation type that `Self` encodes to and decodes from
  /// which is a suitable type when used in atomic operations.
  @available(SwiftStdlib 6.0, *)
  public typealias AtomicRepresentation = Int.AtomicRepresentation

  /// Destroys a value of `Self` and prepares an `AtomicRepresentation` storage
  /// type to be used for atomic operations.
  ///
  /// - Note: This is not an atomic operation. This simply encodes the logical
  /// type `Self` into its storage representation suitable for atomic
  /// operations, `AtomicRepresentation`.
  ///
  /// - Parameter value: A valid instance of `Self` that's about to be destroyed
  /// to encode an instance of its `AtomicRepresentation`.
  /// - Returns: The newly encoded `AtomicRepresentation` storage.
  @available(SwiftStdlib 6.0, *)
  @_alwaysEmitIntoClient
  @_transparent
  public static func encodeAtomicRepresentation(
    _ value: consuming ObjectIdentifier
  ) -> AtomicRepresentation {
    Int.encodeAtomicRepresentation(
      Int(bitPattern: value)
    )
  }

  /// Recovers the logical atomic type `Self` by destroying some
  /// `AtomicRepresentation` storage instance returned from an atomic operation.
  ///
  /// - Note: This is not an atomic operation. This simply decodes the storage
  /// representation used in atomic operations back into the logical type for
  /// normal use, `Self`.
  ///
  /// - Parameter storage: The storage representation for `Self` that's used
  /// within atomic operations.
  /// - Returns: The newly decoded logical type `Self`.
  @available(SwiftStdlib 6.0, *)
  @_alwaysEmitIntoClient
  @_transparent
  public static func decodeAtomicRepresentation(
    _ representation: consuming AtomicRepresentation
  ) -> ObjectIdentifier {
    // ObjectIdentifier doesn't have a bitPattern init..?
    unsafe unsafeBitCast(
      Int.decodeAtomicRepresentation(representation),
      to: ObjectIdentifier.self
    )
  }
}

@available(SwiftStdlib 6.0, *)
extension ObjectIdentifier: AtomicOptionalRepresentable {
  /// The storage representation type that encodes to and decodes from
  /// `Optional<Self>` which is a suitable type when used in atomic operations
  /// on `Optional`.
  @available(SwiftStdlib 6.0, *)
  public typealias AtomicOptionalRepresentation = Int.AtomicRepresentation

  /// Destroys a value of `Self` and prepares an `AtomicOptionalRepresentation`
  /// storage type to be used for atomic operations on `Optional`.
  ///
  /// - Note: This is not an atomic operation. This simply encodes the logical
  /// type `Self` into its storage representation suitable for atomic
  /// operations, `AtomicOptionalRepresentation`.
  ///
  /// - Parameter value: An optional instance of `Self` that's about to be
  /// destroyed to encode an instance of its `AtomicOptionalRepresentation`.
  /// - Returns: The newly encoded `AtomicOptionalRepresentation` storage.
  @available(SwiftStdlib 6.0, *)
  @_alwaysEmitIntoClient
  @_transparent
  public static func encodeAtomicOptionalRepresentation(
    _ value: consuming ObjectIdentifier?
  ) -> AtomicOptionalRepresentation {
    unsafe Int.encodeAtomicRepresentation(
      // {U}Int have bitPattern inits for ObjectIdentifier, but not optional
      // ObjectIdentifier :sad:
      unsafeBitCast(value, to: Int.self)
    )
  }

  /// Recovers the logical atomic type `Self?` by destroying some
  /// `AtomicOptionalRepresentation` storage instance returned from an atomic
  /// operation on `Optional`.
  ///
  /// - Note: This is not an atomic operation. This simply decodes the storage
  /// representation used in atomic operations on `Optional` back into the
  /// logical type for normal use, `Self?`.
  ///
  /// - Parameter storage: The optional storage representation for `Self?`
  /// that's used within atomic operations on `Optional`.
  /// - Returns: The newly decoded logical type `Self?`.
  @available(SwiftStdlib 6.0, *)
  @_alwaysEmitIntoClient
  @_transparent
  public static func decodeAtomicOptionalRepresentation(
    _ representation: consuming AtomicOptionalRepresentation
  ) -> ObjectIdentifier? {
    // ObjectIdentifier doesn't have a bitPattern init..?
    unsafe unsafeBitCast(
      Int.decodeAtomicRepresentation(representation),
      to: ObjectIdentifier?.self
    )
  }
}

//===----------------------------------------------------------------------===//
// UnsafeBufferPointer AtomicRepresentable conformance
//===----------------------------------------------------------------------===//

#if (_pointerBitWidth(_32) && _hasAtomicBitWidth(_64)) || (_pointerBitWidth(_64) && _hasAtomicBitWidth(_128))

@available(SwiftStdlib 6.0, *)
extension UnsafeBufferPointer: @unsafe AtomicRepresentable where Element: ~Copyable {
  /// The storage representation type that `Self` encodes to and decodes from
  /// which is a suitable type when used in atomic operations.
  @available(SwiftStdlib 6.0, *)
  public typealias AtomicRepresentation = WordPair.AtomicRepresentation

  /// Destroys a value of `Self` and prepares an `AtomicRepresentation` storage
  /// type to be used for atomic operations.
  ///
  /// - Note: This is not an atomic operation. This simply encodes the logical
  /// type `Self` into its storage representation suitable for atomic
  /// operations, `AtomicRepresentation`.
  ///
  /// - Parameter value: A valid instance of `Self` that's about to be destroyed
  /// to encode an instance of its `AtomicRepresentation`.
  /// - Returns: The newly encoded `AtomicRepresentation` storage.
  @available(SwiftStdlib 6.0, *)
  @_alwaysEmitIntoClient
  @_transparent
  public static func encodeAtomicRepresentation(
    _ value: consuming UnsafeBufferPointer<Element>
  ) -> AtomicRepresentation {
    let valueCopy = unsafe value

    // Pack (base address, count) into a double-wide WordPair; requires the
    // double-word atomic support guarded by the surrounding #if.
    return WordPair.encodeAtomicRepresentation(
      WordPair(
        first: UInt(bitPattern: valueCopy.baseAddress),
        second: UInt(truncatingIfNeeded: valueCopy.count)
      )
    )
  }

  /// Recovers the logical atomic type `Self` by destroying some
  /// `AtomicRepresentation` storage instance returned from an atomic operation.
  ///
  /// - Note: This is not an atomic operation. This simply decodes the storage
  /// representation used in atomic operations back into the logical type for
  /// normal use, `Self`.
  ///
  /// - Parameter storage: The storage representation for `Self` that's used
  /// within atomic operations.
  /// - Returns: The newly decoded logical type `Self`.
  @available(SwiftStdlib 6.0, *)
  @_alwaysEmitIntoClient
  @_transparent
  public static func decodeAtomicRepresentation(
    _ representation: consuming AtomicRepresentation
  ) -> UnsafeBufferPointer<Element> {
    // wp.first is the base address bit pattern; wp.second is the count.
    let wp = WordPair.decodeAtomicRepresentation(representation)

    return unsafe UnsafeBufferPointer<Element>(
      start: UnsafePointer<Element>(bitPattern: wp.first),
      count: Int(truncatingIfNeeded: wp.second)
    )
  }
}

#endif

//===----------------------------------------------------------------------===//
// UnsafeMutableBufferPointer AtomicRepresentable conformance
//===----------------------------------------------------------------------===//

#if (_pointerBitWidth(_32) && _hasAtomicBitWidth(_64)) || (_pointerBitWidth(_64) && _hasAtomicBitWidth(_128))

@available(SwiftStdlib 6.0, *)
extension UnsafeMutableBufferPointer: @unsafe AtomicRepresentable
where Element: ~Copyable
{
  /// The storage representation type that `Self` encodes to and decodes from
  /// which is a suitable type when used in atomic operations.
  @available(SwiftStdlib 6.0, *)
  public typealias AtomicRepresentation = WordPair.AtomicRepresentation

  /// Destroys a value of `Self` and prepares an `AtomicRepresentation` storage
  /// type to be used for atomic operations.
  ///
  /// - Note: This is not an atomic operation. This simply encodes the logical
  /// type `Self` into its storage representation suitable for atomic
  /// operations, `AtomicRepresentation`.
  ///
  /// - Parameter value: A valid instance of `Self` that's about to be destroyed
  /// to encode an instance of its `AtomicRepresentation`.
  /// - Returns: The newly encoded `AtomicRepresentation` storage.
  @available(SwiftStdlib 6.0, *)
  @_alwaysEmitIntoClient
  @_transparent
  public static func encodeAtomicRepresentation(
    _ value: consuming UnsafeMutableBufferPointer<Element>
  ) -> AtomicRepresentation {
    let valueCopy = unsafe value

    // Pack (base address, count) into a double-wide WordPair; requires the
    // double-word atomic support guarded by the surrounding #if.
    return WordPair.encodeAtomicRepresentation(
      WordPair(
        first: UInt(bitPattern: valueCopy.baseAddress),
        second: UInt(truncatingIfNeeded: valueCopy.count)
      )
    )
  }

  /// Recovers the logical atomic type `Self` by destroying some
  /// `AtomicRepresentation` storage instance returned from an atomic operation.
  ///
  /// - Note: This is not an atomic operation. This simply decodes the storage
  /// representation used in atomic operations back into the logical type for
  /// normal use, `Self`.
  ///
  /// - Parameter storage: The storage representation for `Self` that's used
  /// within atomic operations.
  /// - Returns: The newly decoded logical type `Self`.
  @available(SwiftStdlib 6.0, *)
  @_alwaysEmitIntoClient
  @_transparent
  public static func decodeAtomicRepresentation(
    _ representation: consuming AtomicRepresentation
  ) -> UnsafeMutableBufferPointer<Element> {
    // wp.first is the base address bit pattern; wp.second is the count.
    let wp = WordPair.decodeAtomicRepresentation(representation)

    return unsafe UnsafeMutableBufferPointer<Element>(
      start: UnsafeMutablePointer<Element>(bitPattern: wp.first),
      count: Int(truncatingIfNeeded: wp.second)
    )
  }
}

#endif

//===----------------------------------------------------------------------===//
// UnsafeRawBufferPointer AtomicRepresentable conformance
//===----------------------------------------------------------------------===//

#if (_pointerBitWidth(_32) && _hasAtomicBitWidth(_64)) || (_pointerBitWidth(_64) && _hasAtomicBitWidth(_128))

@available(SwiftStdlib 6.0, *)
extension UnsafeRawBufferPointer: @unsafe AtomicRepresentable {
  /// The storage representation type that `Self` encodes to and decodes from
  /// which is a suitable type when used in atomic operations.
  @available(SwiftStdlib 6.0, *)
  public typealias AtomicRepresentation = WordPair.AtomicRepresentation

  /// Destroys a value of `Self` and prepares an `AtomicRepresentation` storage
  /// type to be used for atomic operations.
  ///
  /// - Note: This is not an atomic operation. This simply encodes the logical
  /// type `Self` into its storage representation suitable for atomic
  /// operations, `AtomicRepresentation`.
  ///
  /// - Parameter value: A valid instance of `Self` that's about to be destroyed
  /// to encode an instance of its `AtomicRepresentation`.
  /// - Returns: The newly encoded `AtomicRepresentation` storage.
  @available(SwiftStdlib 6.0, *)
  @_alwaysEmitIntoClient
  @_transparent
  public static func encodeAtomicRepresentation(
    _ value: consuming UnsafeRawBufferPointer
  ) -> AtomicRepresentation {
    let valueCopy = unsafe value

    // Pack (base address, count) into a double-wide WordPair; requires the
    // double-word atomic support guarded by the surrounding #if.
    return WordPair.encodeAtomicRepresentation(
      WordPair(
        first: UInt(bitPattern: valueCopy.baseAddress),
        second: UInt(truncatingIfNeeded: valueCopy.count)
      )
    )
  }

  /// Recovers the logical atomic type `Self` by destroying some
  /// `AtomicRepresentation` storage instance returned from an atomic operation.
  ///
  /// - Note: This is not an atomic operation. This simply decodes the storage
  /// representation used in atomic operations back into the logical type for
  /// normal use, `Self`.
  ///
  /// - Parameter storage: The storage representation for `Self` that's used
  /// within atomic operations.
  /// - Returns: The newly decoded logical type `Self`.
  @available(SwiftStdlib 6.0, *)
  @_alwaysEmitIntoClient
  @_transparent
  public static func decodeAtomicRepresentation(
    _ representation: consuming AtomicRepresentation
  ) -> UnsafeRawBufferPointer {
    // wp.first is the base address bit pattern; wp.second is the byte count.
    let wp = WordPair.decodeAtomicRepresentation(representation)

    return unsafe UnsafeRawBufferPointer(
      start: UnsafeRawPointer(bitPattern: wp.first),
      count: Int(truncatingIfNeeded: wp.second)
    )
  }
}

#endif

//===----------------------------------------------------------------------===//
// UnsafeMutableRawBufferPointer AtomicRepresentable conformance
//===----------------------------------------------------------------------===//

#if (_pointerBitWidth(_32) && _hasAtomicBitWidth(_64)) || 
(_pointerBitWidth(_64) && _hasAtomicBitWidth(_128))\n\n@available(SwiftStdlib 6.0, *)\nextension UnsafeMutableRawBufferPointer: @unsafe AtomicRepresentable {\n /// The storage representation type that `Self` encodes to and decodes from\n /// which is a suitable type when used in atomic operations.\n @available(SwiftStdlib 6.0, *)\n public typealias AtomicRepresentation = WordPair.AtomicRepresentation\n\n /// Destroys a value of `Self` and prepares an `AtomicRepresentation` storage\n /// type to be used for atomic operations.\n ///\n /// - Note: This is not an atomic operation. This simply encodes the logical\n /// type `Self` into its storage representation suitable for atomic\n /// operations, `AtomicRepresentation`.\n ///\n /// - Parameter value: A valid instance of `Self` that's about to be destroyed\n /// to encode an instance of its `AtomicRepresentation`.\n /// - Returns: The newly encoded `AtomicRepresentation` storage.\n @available(SwiftStdlib 6.0, *)\n @_alwaysEmitIntoClient\n @_transparent\n public static func encodeAtomicRepresentation(\n _ value: consuming UnsafeMutableRawBufferPointer\n ) -> AtomicRepresentation {\n let valueCopy = unsafe value\n\n return WordPair.encodeAtomicRepresentation(\n WordPair(\n first: UInt(bitPattern: valueCopy.baseAddress),\n second: UInt(truncatingIfNeeded: valueCopy.count)\n )\n )\n }\n\n /// Recovers the logical atomic type `Self` by destroying some\n /// `AtomicRepresentation` storage instance returned from an atomic operation.\n ///\n /// - Note: This is not an atomic operation. 
This simply decodes the storage\n /// representation used in atomic operations back into the logical type for\n /// normal use, `Self`.\n ///\n /// - Parameter storage: The storage representation for `Self` that's used\n /// within atomic operations.\n /// - Returns: The newly decoded logical type `Self`.\n @available(SwiftStdlib 6.0, *)\n @_alwaysEmitIntoClient\n @_transparent\n public static func decodeAtomicRepresentation(\n _ representation: consuming AtomicRepresentation\n ) -> UnsafeMutableRawBufferPointer {\n let wp = WordPair.decodeAtomicRepresentation(representation)\n\n return unsafe UnsafeMutableRawBufferPointer(\n start: UnsafeMutableRawPointer(bitPattern: wp.first),\n count: Int(truncatingIfNeeded: wp.second)\n )\n }\n}\n\n#endif\n
dataset_sample\swift\swift\cpp_apple_swift_stdlib_public_Synchronization_Atomics_AtomicPointers.swift
cpp_apple_swift_stdlib_public_Synchronization_Atomics_AtomicPointers.swift
Swift
42,513
0.8
0.077369
0.493165
react-lib
926
2024-12-01T07:48:26.226846
Apache-2.0
false
7989705ac7573f5ecfe800cc0ca1a56a
//===----------------------------------------------------------------------===//\n//\n// This source file is part of the Swift Atomics open source project\n//\n// Copyright (c) 2023 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n\n/// A type that supports atomic operations through a separate atomic storage\n/// representation.\n///\n/// Types that conform to the `AtomicRepresentable` protocol can be used as the\n/// `Value` type parameter with the `Atomic` type. Conformances that utilize\n/// existing atomic storage representations as their own representation will get\n/// the primitive atomic operations available on `Atomic` for free. Such\n/// operations include `load`, `store`, `exchange`, `compareExchange`, and\n/// `weakCompareExchange`.\n///\n/// Conforming to the AtomicRepresentable protocol\n/// --------------------------------------\n///\n/// Conforming your own custom types allow them to be used in the `Atomic` type\n/// and get access to all of the primitive atomic operations explained above.\n/// There are two main ways to conform your type to `AtomicRepresentable`:\n///\n/// 1. Using a predefined `RawRepresentable` conformance\n/// 2. Manually conforming to `AtomicRepresentable`\n///\n/// If you custom type already conforms to `RawRepresentable`, then adding the\n/// `AtomicRepresentable` conformance may be really simple! If the `RawValue`\n/// associated type of your type is already itself an `AtomicRepresentable`,\n/// then all you need to do is add the conformance and you're done!\n///\n/// enum TrafficLight: UInt8 {\n/// case red\n/// case yellow\n/// case green\n/// }\n///\n/// extension TrafficLight: AtomicRepresentable {}\n///\n/// And that's it! 
Here, we're utilizing Swift's automatic `RawRepresentable`\n/// conformance synthesis for enums by declaring our "raw value" to be a\n/// `UInt8`. By adding the `AtomicRepresentable` conformance, we automatically\n/// figure out how to do the conformance from the `RawRepresentable`\n/// implementation and do all of th necessary work for you. However, it is still\n/// possible to customize this behavior using the manual method explained below.\n///\n/// Defining your own `AtomicRepresentable` conformance is pretty simple. All\n/// you have to do is decide what atomic storage representation fits best for\n/// your type, and create the bidirectional relationship between the two.\n///\n/// // A point in an x-y coordinate system.\n/// struct GridPoint {\n/// var x: Int\n/// var y: Int\n/// }\n///\n/// extension GridPoint: AtomicRepresentable {\n/// typealias AtomicRepresentation = WordPair.AtomicRepresentation\n///\n/// static func encodeAtomicRepresentation(\n/// _ value: consuming GridPoint\n/// ) -> AtomicRepresentation {\n/// let wordPair = WordPair(\n/// first: UInt(bitPattern: value.x),\n/// second: UInt(bitPattern: value.y)\n/// )\n///\n/// return WordPair.encodeAtomicRepresentation(wordPair)\n/// }\n///\n/// static func decodeAtomicRepresentation(\n/// _ representation: consuming AtomicRepresentation\n/// ) -> GridPoint {\n/// let wordPair = WordPair.decodeAtomicRepresentation(representation)\n///\n/// return GridPoint(\n/// x: Int(bitPattern: wordPair.first),\n/// y: Int(bitPattern: wordPair.second)\n/// )\n/// }\n/// }\n///\n/// Here, we're going to select `WordPair`'s atomic storage representation as\n/// our own. This is very important because we only get the atomic operations\n/// like `load` and `store` if our representation is one of the _fundamental_\n/// storage representations. 
Luckily for us, `WordPair` does use one of these\n/// types as its storage type.\n///\n/// In addition to selecting what storage representation our type will use, we\n/// define two static functions that go from both our custom type to its\n/// representation and the representation back to our own type. Because our\n/// representation is the same as `WordPair.AtomicRepresentation`, we will\n/// actually go through `WordPair`'s `AtomicRepresentable` conformance to help\n/// define our own.\n///\n/// This is all you need to do to conform your custom type to the\n/// `AtomicRepresentable` protocol. From here, you can use this type in all of\n/// the primitive atomic operations like shown below:\n///\n/// func atomicGridPoint(_ gridPoint: Atomic<GridPoint>) {\n/// let newGridPoint = GridPoint(x: 123, y: -456)\n///\n/// let oldGridPoint1 = gridPoint.load(ordering: .relaxed)\n///\n/// gridPoint.store(newGridPoint, ordering: .releasing)\n///\n/// let oldGridPoint2 = gridPoint.exchange(\n/// desired: oldGridPoint1,\n/// ordering: .acquiringAndReleasing\n/// )\n///\n/// let (exchanged1, oldGridPoint2) = gridPoint.compareExchange(\n/// expected: oldGridPoint1,\n/// desired: newGridPoint,\n/// ordering: .sequentiallyConsistent\n/// )\n///\n/// let (exchanged2, oldGridPoint3) = gridPoint.weakCompareExchange(\n/// expected: newGridPoint,\n/// desired: oldGridPoint2,\n/// ordering: .relaxed\n/// )\n/// }\n///\n/// List of Fundamental Atomic Representations\n/// ------------------------------------------\n///\n/// When defining your own `AtomicRepresentable` conformance, it is critical\n/// that your custom type should choose from the following list of types as its\n/// own `AtomicRepresentation`:\n///\n/// - `UInt8.AtomicRepresentation`\n/// - `UInt16.AtomicRepresentation`\n/// - `UInt32.AtomicRepresentation`\n/// - `UInt64.AtomicRepresentation`\n/// - `UInt.AtomicRepresentation`\n/// - `Int8.AtomicRepresentation`\n/// - `Int16.AtomicRepresentation`\n/// - 
`Int32.AtomicRepresentation`\n/// - `Int64.AtomicRepresentation`\n/// - `Int.AtomicRepresentation`\n/// - `WordPair.AtomicRepresentation`\n///\n/// - Note: `Int8.AtomicRepresentation` is the same type as\n/// `UInt8.AtomicRepresentation` and the same is true for all of the same\n/// sized integer types. If your type wraps an unsigned integer, you should\n/// prefer to use an unsigned integer's atomic representation instead of a\n/// signed ones and vice versa. `Int` and `UInt`'s representation will be\n/// 64 bits wide on 64 bit systems and 32 bit wide on 32 bit systems. `Int64`\n/// and `UInt64` always conform to `AtomicRepresentable` on 64 bit systems,\n/// but on 32 bit systems they will only conform if the platform supports\n/// double wide atomics. `WordPair` will only conform to `AtomicRepresentable`\n/// on platforms that support double wide atomics, but if they do it will be\n/// 128 bits wide on 64 bit systems and 64 bits wide on 32 bit systems.\n///\n@available(SwiftStdlib 6.0, *)\npublic protocol AtomicRepresentable {\n /// The storage representation type that `Self` encodes to and decodes from\n /// which is a suitable type when used in atomic operations.\n associatedtype AtomicRepresentation: BitwiseCopyable\n\n /// Destroys a value of `Self` and prepares an `AtomicRepresentation` storage\n /// type to be used for atomic operations.\n ///\n /// - Note: This is not an atomic operation. 
This simply encodes the logical\n /// type `Self` into its storage representation suitable for atomic\n /// operations, `AtomicRepresentation`.\n ///\n /// - Parameter value: A valid instance of `Self` that's about to be destroyed\n /// to encode an instance of its `AtomicRepresentation`.\n /// - Returns: The newly encoded `AtomicRepresentation` storage.\n static func encodeAtomicRepresentation(\n _ value: consuming Self\n ) -> AtomicRepresentation\n\n /// Recovers the logical atomic type `Self` by destroying some\n /// `AtomicRepresentation` storage instance returned from an atomic operation.\n ///\n /// - Note: This is not an atomic operation. This simply decodes the storage\n /// representation used in atomic operations back into the logical type for\n /// normal use, `Self`.\n ///\n /// - Parameter storage: The storage representation for `Self` that's used\n /// within atomic operations.\n /// - Returns: The newly decoded logical type `Self`.\n static func decodeAtomicRepresentation(\n _ storage: consuming AtomicRepresentation\n ) -> Self\n}\n\n//===----------------------------------------------------------------------===//\n// RawRepresentable AtomicRepresentable conformance\n//===----------------------------------------------------------------------===//\n\n@available(SwiftStdlib 6.0, *)\nextension RawRepresentable\nwhere\n Self: AtomicRepresentable,\n RawValue: AtomicRepresentable\n{\n /// The storage representation type that `Self` encodes to and decodes from\n /// which is a suitable type when used in atomic operations.\n @available(SwiftStdlib 6.0, *)\n public typealias AtomicRepresentation = RawValue.AtomicRepresentation\n\n\n /// Destroys a value of `Self` and prepares an `AtomicRepresentation` storage\n /// type to be used for atomic operations.\n ///\n /// - Note: This is not an atomic operation. 
This simply encodes the logical\n /// type `Self` into its storage representation suitable for atomic\n /// operations, `AtomicRepresentation`.\n ///\n /// - Parameter value: A valid instance of `Self` that's about to be destroyed\n /// to encode an instance of its `AtomicRepresentation`.\n /// - Returns: The newly encoded `AtomicRepresentation` storage.\n @available(SwiftStdlib 6.0, *)\n @_alwaysEmitIntoClient\n @_transparent\n public static func encodeAtomicRepresentation(\n _ value: consuming Self\n ) -> RawValue.AtomicRepresentation {\n RawValue.encodeAtomicRepresentation(value.rawValue)\n }\n\n\n /// Recovers the logical atomic type `Self` by destroying some\n /// `AtomicRepresentation` storage instance returned from an atomic operation.\n ///\n /// - Note: This is not an atomic operation. This simply decodes the storage\n /// representation used in atomic operations back into the logical type for\n /// normal use, `Self`.\n ///\n /// - Parameter storage: The storage representation for `Self` that's used\n /// within atomic operations.\n /// - Returns: The newly decoded logical type `Self`.\n @available(SwiftStdlib 6.0, *)\n @_alwaysEmitIntoClient\n @_transparent\n public static func decodeAtomicRepresentation(\n _ representation: consuming RawValue.AtomicRepresentation\n ) -> Self {\n Self(rawValue: RawValue.decodeAtomicRepresentation(representation))!\n }\n}\n\n//===----------------------------------------------------------------------===//\n// Never AtomicRepresentable conformance\n//===----------------------------------------------------------------------===//\n\n@available(SwiftStdlib 6.0, *)\nextension Never: AtomicRepresentable {\n /// The storage representation type that `Self` encodes to and decodes from\n /// which is a suitable type when used in atomic operations.\n @available(SwiftStdlib 6.0, *)\n public typealias AtomicRepresentation = Never\n\n\n /// Destroys a value of `Self` and prepares an `AtomicRepresentation` storage\n /// type to be used 
for atomic operations.\n ///\n /// - Note: This is not an atomic operation. This simply encodes the logical\n /// type `Self` into its storage representation suitable for atomic\n /// operations, `AtomicRepresentation`.\n ///\n /// - Parameter value: A valid instance of `Self` that's about to be destroyed\n /// to encode an instance of its `AtomicRepresentation`.\n /// - Returns: The newly encoded `AtomicRepresentation` storage.\n @available(SwiftStdlib 6.0, *)\n @_alwaysEmitIntoClient\n @_transparent\n public static func encodeAtomicRepresentation(\n _ value: consuming Never\n ) -> Never {}\n\n\n /// Recovers the logical atomic type `Self` by destroying some\n /// `AtomicRepresentation` storage instance returned from an atomic operation.\n ///\n /// - Note: This is not an atomic operation. This simply decodes the storage\n /// representation used in atomic operations back into the logical type for\n /// normal use, `Self`.\n ///\n /// - Parameter storage: The storage representation for `Self` that's used\n /// within atomic operations.\n /// - Returns: The newly decoded logical type `Self`.\n @available(SwiftStdlib 6.0, *)\n @_alwaysEmitIntoClient\n @_transparent\n public static func decodeAtomicRepresentation(\n _ representation: consuming Never\n ) -> Never {}\n}\n\n//===----------------------------------------------------------------------===//\n// Duration AtomicRepresentable conformance\n//===----------------------------------------------------------------------===//\n\n#if _pointerBitWidth(_64) && _hasAtomicBitWidth(_128)\n\n@available(SwiftStdlib 6.0, *)\nextension Duration: AtomicRepresentable {\n /// The storage representation type that `Self` encodes to and decodes from\n /// which is a suitable type when used in atomic operations.\n @available(SwiftStdlib 6.0, *)\n public typealias AtomicRepresentation = WordPair.AtomicRepresentation\n\n\n /// Destroys a value of `Self` and prepares an `AtomicRepresentation` storage\n /// type to be used for atomic 
operations.\n ///\n /// - Note: This is not an atomic operation. This simply encodes the logical\n /// type `Self` into its storage representation suitable for atomic\n /// operations, `AtomicRepresentation`.\n ///\n /// - Parameter value: A valid instance of `Self` that's about to be destroyed\n /// to encode an instance of its `AtomicRepresentation`.\n /// - Returns: The newly encoded `AtomicRepresentation` storage.\n @available(SwiftStdlib 6.0, *)\n @_alwaysEmitIntoClient\n @_transparent\n public static func encodeAtomicRepresentation(\n _ value: consuming Duration\n ) -> AtomicRepresentation {\n WordPair.encodeAtomicRepresentation(\n WordPair(\n first: UInt(truncatingIfNeeded: value._high),\n second: UInt(truncatingIfNeeded: value._low)\n )\n )\n }\n\n\n /// Recovers the logical atomic type `Self` by destroying some\n /// `AtomicRepresentation` storage instance returned from an atomic operation.\n ///\n /// - Note: This is not an atomic operation. This simply decodes the storage\n /// representation used in atomic operations back into the logical type for\n /// normal use, `Self`.\n ///\n /// - Parameter storage: The storage representation for `Self` that's used\n /// within atomic operations.\n /// - Returns: The newly decoded logical type `Self`.\n @available(SwiftStdlib 6.0, *)\n @_alwaysEmitIntoClient\n @_transparent\n public static func decodeAtomicRepresentation(\n _ representation: consuming AtomicRepresentation\n ) -> Duration {\n let wp = WordPair.decodeAtomicRepresentation(representation)\n\n return Duration(\n _high: Int64(truncatingIfNeeded: wp.first),\n low: UInt64(truncatingIfNeeded: wp.second)\n )\n }\n}\n\n#endif\n
dataset_sample\swift\swift\cpp_apple_swift_stdlib_public_Synchronization_Atomics_AtomicRepresentable.swift
cpp_apple_swift_stdlib_public_Synchronization_Atomics_AtomicRepresentable.swift
Swift
14,961
0.95
0.076712
0.759531
awesome-app
774
2025-04-20T18:45:48.747690
GPL-3.0
false
7500dcf1da4dc1faa10405fbbdf3a96a
//===----------------------------------------------------------------------===//\n//\n// This source file is part of the Swift.org open source project\n//\n// Copyright (c) 2023-2025 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n\nimport Builtin\n\n/// A pair of two word sized `UInt`s.\n///\n/// This type's primary purpose is to be used in double wide atomic operations.\n/// On platforms that support it, atomic operations on `WordPair` are done in a\n/// single operation for two words. Users can use this type as itself when used\n/// on `Atomic`, or it could be used as an intermediate step for custom\n/// `AtomicRepresentable` types that are also double wide.\n///\n/// let atomicPair = Atomic<WordPair>(WordPair(first: 0, second: 0))\n/// atomicPair.store(WordPair(first: someVersion, second: .max), ordering: .relaxed)\n///\n/// When used as an intermediate step for custom `AtomicRepresentable` types, it\n/// is critical that their `AtomicRepresentation` be equal to\n/// `WordPair.AtomicRepresentation`.\n///\n/// struct GridPoint {\n/// var x: Int\n/// var y: Int\n/// }\n///\n/// extension GridPoint: AtomicRepresentable {\n/// typealias AtomicRepresentation = WordPair.AtomicRepresentation\n///\n/// ...\n/// }\n///\n/// - Note: This type only conforms to `AtomicRepresentable` on platforms that\n/// support double wide atomics.\n@available(SwiftStdlib 6.0, *)\n@frozen\npublic struct WordPair {\n /// The first element in this word pair.\n public var first: UInt\n\n /// The second element in this word pair.\n public var second: UInt\n\n /// Initialize a new `WordPair` value given both individual words.\n ///\n /// - Parameter first: The first word to use in the pair.\n /// - 
Parameter second: The second word to use in the pair.\n @available(SwiftStdlib 6.0, *)\n @_alwaysEmitIntoClient\n @_transparent\n public init(first: UInt, second: UInt) {\n self.first = first\n self.second = second\n }\n}\n\n#if (_pointerBitWidth(_32) && _hasAtomicBitWidth(_64)) || (_pointerBitWidth(_64) && _hasAtomicBitWidth(_128))\n\n@available(SwiftStdlib 6.0, *)\nextension WordPair: AtomicRepresentable {\n#if _pointerBitWidth(_64)\n /// The storage representation type that `Self` encodes to and decodes from\n /// which is a suitable type when used in atomic operations.\n public typealias AtomicRepresentation = _Atomic128BitStorage\n#elseif _pointerBitWidth(_32)\n /// The storage representation type that `Self` encodes to and decodes from\n /// which is a suitable type when used in atomic operations.\n public typealias AtomicRepresentation = _Atomic64BitStorage\n#else\n#error("Unsupported platform")\n#endif\n\n /// Destroys a value of `Self` and prepares an `AtomicRepresentation` storage\n /// type to be used for atomic operations.\n ///\n /// - Note: This is not an atomic operation. 
This simply encodes the logical\n /// type `Self` into its storage representation suitable for atomic\n /// operations, `AtomicRepresentation`.\n ///\n /// - Parameter value: A valid instance of `Self` that's about to be destroyed\n /// to encode an instance of its `AtomicRepresentation`.\n /// - Returns: The newly encoded `AtomicRepresentation` storage.\n @available(SwiftStdlib 6.0, *)\n @_alwaysEmitIntoClient\n @_transparent\n public static func encodeAtomicRepresentation(\n _ value: consuming WordPair\n ) -> AtomicRepresentation {\n#if _pointerBitWidth(_64)\n var i128 = Builtin.zext_Int64_Int128(value.first._value)\n var high128 = Builtin.zext_Int64_Int128(value.second._value)\n let highShift = Builtin.zext_Int64_Int128(UInt(64)._value)\n high128 = Builtin.shl_Int128(high128, highShift)\n i128 = Builtin.or_Int128(i128, high128)\n\n return AtomicRepresentation(i128)\n#elseif _pointerBitWidth(_32)\n var i64 = Builtin.zext_Int32_Int64(value.first._value)\n var high64 = Builtin.zext_Int32_Int64(value.second._value)\n let highShift = Builtin.zext_Int32_Int64(UInt(32)._value)\n high64 = Builtin.shl_Int64(high64, highShift)\n i64 = Builtin.or_Int64(i64, high64)\n\n return AtomicRepresentation(i64)\n#else\n#error("Unsupported platform")\n#endif\n }\n\n /// Recovers the logical atomic type `Self` by destroying some\n /// `AtomicRepresentation` storage instance returned from an atomic operation.\n ///\n /// - Note: This is not an atomic operation. 
This simply decodes the storage\n /// representation used in atomic operations back into the logical type for\n /// normal use, `Self`.\n ///\n /// - Parameter storage: The storage representation for `Self` that's used\n /// within atomic operations.\n /// - Returns: The newly decoded logical type `Self`.\n @available(SwiftStdlib 6.0, *)\n @_alwaysEmitIntoClient\n @_transparent\n public static func decodeAtomicRepresentation(\n _ representation: consuming AtomicRepresentation\n ) -> WordPair {\n#if _pointerBitWidth(_64)\n let highShift = Builtin.zext_Int64_Int128(UInt(64)._value)\n let high128 = Builtin.lshr_Int128(representation._storage, highShift)\n let high = Builtin.trunc_Int128_Int64(high128)\n let low = Builtin.trunc_Int128_Int64(representation._storage)\n#elseif _pointerBitWidth(_32)\n let highShift = Builtin.zext_Int32_Int64(UInt(32)._value)\n let high64 = Builtin.lshr_Int64(representation._storage, highShift)\n let high = Builtin.trunc_Int64_Int32(high64)\n let low = Builtin.trunc_Int64_Int32(representation._storage)\n#else\n#error("Unsupported platform")\n#endif\n\n return WordPair(first: UInt(low), second: UInt(high))\n }\n}\n\n#endif\n\n@available(SwiftStdlib 6.0, *)\nextension WordPair: Equatable {\n /// Compares two values of this type to determine if they are equivalent to\n /// each other.\n ///\n /// - Parameter lhs: The first value to compare.\n /// - Parameter rhs: The second value to compare.\n /// - Returns: True if both values were equal, or false if they were unequal.\n @available(SwiftStdlib 6.0, *)\n @_alwaysEmitIntoClient\n @_transparent\n public static func ==(lhs: WordPair, rhs: WordPair) -> Bool {\n lhs.first == rhs.first && lhs.second == rhs.second\n }\n}\n\n@available(SwiftStdlib 6.0, *)\nextension WordPair: Hashable {\n /// Hashes the essential components of this value by feeding them into the\n /// given hasher.\n ///\n /// - Parameter hasher: The hasher to use when combining the components\n /// of this instance.\n 
@available(SwiftStdlib 6.0, *)\n @_alwaysEmitIntoClient\n @_transparent\n public func hash(into hasher: inout Hasher) {\n hasher.combine(first)\n hasher.combine(second)\n }\n}\n\n@available(SwiftStdlib 6.1, *)\nextension WordPair: Comparable {\n @available(SwiftStdlib 6.1, *)\n @_alwaysEmitIntoClient\n @_transparent\n public static func <(lhs: WordPair, rhs: WordPair) -> Bool {\n (lhs.first, lhs.second) < (rhs.first, rhs.second)\n }\n}\n\n@available(SwiftStdlib 6.0, *)\n@_unavailableInEmbedded\nextension WordPair: CustomStringConvertible {\n /// A string that represents the contents of the word pair.\n @available(SwiftStdlib 6.0, *)\n public var description: String {\n "WordPair(first: \(first), second: \(second))"\n }\n}\n\n@available(SwiftStdlib 6.0, *)\n@_unavailableInEmbedded\nextension WordPair: CustomDebugStringConvertible {\n /// A string that represents the contents of the word pair, suitable for\n /// debugging.\n @available(SwiftStdlib 6.0, *)\n public var debugDescription: String {\n "WordPair(first: \(first), second: \(second))"\n }\n}\n\n@available(SwiftStdlib 6.0, *)\nextension WordPair: Sendable {}\n
dataset_sample\swift\swift\cpp_apple_swift_stdlib_public_Synchronization_Atomics_WordPair.swift
cpp_apple_swift_stdlib_public_Synchronization_Atomics_WordPair.swift
Swift
7,707
0.95
0.077982
0.5
vue-tools
1
2023-11-13T08:16:24.911644
GPL-3.0
false
5c3d407db960db049cf300e2df14a38c
//===----------------------------------------------------------------------===//\n//\n// This source file is part of the Swift Atomics open source project\n//\n// Copyright (c) 2024 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n\nimport Builtin\n\n@available(SwiftStdlib 6.0, *)\n@frozen\n@_rawLayout(like: Value, movesAsLike)\npublic struct _Cell<Value: ~Copyable>: ~Copyable {\n @available(SwiftStdlib 6.0, *)\n @_alwaysEmitIntoClient\n @_transparent\n public var _address: UnsafeMutablePointer<Value> {\n unsafe UnsafeMutablePointer<Value>(_rawAddress)\n }\n\n @available(SwiftStdlib 6.0, *)\n @_alwaysEmitIntoClient\n @_transparent\n internal var _rawAddress: Builtin.RawPointer {\n Builtin.addressOfRawLayout(self)\n }\n\n @available(SwiftStdlib 6.0, *)\n @_alwaysEmitIntoClient\n @_transparent\n public init(_ initialValue: consuming Value) {\n unsafe _address.initialize(to: initialValue)\n }\n\n @available(SwiftStdlib 6.0, *)\n @_alwaysEmitIntoClient\n @inlinable\n deinit {\n unsafe _address.deinitialize(count: 1)\n }\n}\n
dataset_sample\swift\swift\cpp_apple_swift_stdlib_public_Synchronization_Cell.swift
cpp_apple_swift_stdlib_public_Synchronization_Cell.swift
Swift
1,318
0.95
0.043478
0.268293
awesome-app
338
2023-09-28T00:16:20.467319
BSD-3-Clause
false
17446b26498b16092d65d0d9fd3bb294
//===----------------------------------------------------------------------===//\n//\n// This source file is part of the Swift Atomics open source project\n//\n// Copyright (c) 2024 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n\nimport Darwin\n\n@available(SwiftStdlib 6.0, *)\n@frozen\n@_staticExclusiveOnly\npublic struct _MutexHandle: ~Copyable {\n @usableFromInline\n let value: _Cell<os_unfair_lock>\n\n @available(SwiftStdlib 6.0, *)\n @_alwaysEmitIntoClient\n @_transparent\n public init() {\n value = _Cell(os_unfair_lock())\n }\n\n @available(SwiftStdlib 6.0, *)\n @_alwaysEmitIntoClient\n @_transparent\n internal borrowing func _lock() {\n unsafe os_unfair_lock_lock(value._address)\n }\n\n @available(SwiftStdlib 6.0, *)\n @_alwaysEmitIntoClient\n @_transparent\n internal borrowing func _tryLock() -> Bool {\n unsafe os_unfair_lock_trylock(value._address)\n }\n\n @available(SwiftStdlib 6.0, *)\n @_alwaysEmitIntoClient\n @_transparent\n internal borrowing func _unlock() {\n unsafe os_unfair_lock_unlock(value._address)\n }\n}\n
dataset_sample\swift\swift\cpp_apple_swift_stdlib_public_Synchronization_Mutex_DarwinImpl.swift
cpp_apple_swift_stdlib_public_Synchronization_Mutex_DarwinImpl.swift
Swift
1,327
0.95
0.040816
0.255814
python-kit
479
2025-04-06T11:43:57.730293
GPL-3.0
false
8b9935630f9766c58e02bf0d1ee0cf9e
//===----------------------------------------------------------------------===//\n//\n// This source file is part of the Swift Atomics open source project\n//\n// Copyright (c) 2024 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n\nimport _SynchronizationShims\n#if canImport(Android)\nimport Android\n#elseif canImport(Musl)\nimport Musl\n#else\nimport Glibc\n#endif\n\nextension Atomic where Value == UInt32 {\n // This returns 'false' on success and 'true' on error. Check 'errno' for the\n // specific error value.\n internal borrowing func _futexLock() -> UInt32 {\n _swift_stdlib_futex_lock(.init(_rawAddress))\n }\n\n // This returns 'false' on success and 'true' on error. Check 'errno' for the\n // specific error value.\n internal borrowing func _futexTryLock() -> UInt32 {\n _swift_stdlib_futex_trylock(.init(_rawAddress))\n }\n\n // This returns 'false' on success and 'true' on error. 
Check 'errno' for the\n // specific error value.\n internal borrowing func _futexUnlock() -> UInt32 {\n _swift_stdlib_futex_unlock(.init(_rawAddress))\n }\n}\n\n@available(SwiftStdlib 6.0, *)\n@frozen\n@_staticExclusiveOnly\npublic struct _MutexHandle: ~Copyable {\n // There are only 3 different values that storage can hold at a single time.\n // 0: unlocked\n // TID: locked, current thread's id (uncontended)\n // (TID | FUTEX_WAITERS): locked, current thread's id (contended)\n @usableFromInline\n let storage: Atomic<UInt32>\n\n @available(SwiftStdlib 6.0, *)\n @_alwaysEmitIntoClient\n @_transparent\n public init() {\n storage = Atomic(0)\n }\n}\n\n@available(SwiftStdlib 6.0, *)\nextension _MutexHandle {\n @available(SwiftStdlib 6.0, *)\n @_alwaysEmitIntoClient\n @_transparent\n internal borrowing func _lock() {\n // Note: This is being TLS cached.\n let selfId = _swift_stdlib_gettid()\n\n let (exchanged, _) = storage.compareExchange(\n expected: 0,\n desired: selfId,\n successOrdering: .acquiring,\n failureOrdering: .relaxed\n )\n\n if _fastPath(exchanged) {\n // Locked!\n return\n }\n\n _lockSlow(selfId)\n }\n\n @available(SwiftStdlib 6.0, *)\n @usableFromInline\n internal borrowing func _lockSlow(_ selfId: UInt32) {\n // Before relinquishing control to the kernel to block this particular\n // thread, run a little spin lock to keep this thread busy in the scenario\n // where the current owner thread's critical section is somewhat quick. We\n // avoid a lot of the syscall overhead in these cases which allow both the\n // owner thread and this current thread to do the user-space atomic for\n // releasing and acquiring (assuming no existing waiters). 
The waiter bit is\n // typically unset when a call to 'FUTEX_UNLOCK_PI' has no other pi state,\n // meaning there is no one else waiting to acquire the lock.\n do {\n // This value is controlled on a per architecture bases defined in\n // 'SpinLoopHint.swift'.\n var tries = _tries\n\n repeat {\n // Do a relaxed load of the futex value to prevent introducing a memory\n // barrier on each iteration of this loop. We're already informing the\n // CPU that this is a spin loop via the '_spinLoopHint' call which\n // should hopefully slow down the loop a considerable amount to view an\n // actually change in the value potentially. An extra memory barrier\n // would make it even slower on top of the fact that we may not even be\n // able to attempt to acquire the lock.\n let state = storage.load(ordering: .relaxed)\n\n if state == 0, storage.compareExchange(\n expected: 0,\n desired: selfId,\n successOrdering: .acquiring,\n failureOrdering: .relaxed\n ).exchanged {\n // Locked!\n return\n }\n\n tries &-= 1\n\n // Inform the CPU that we're doing a spin loop which should have the\n // effect of slowing down this loop if only by a little to preserve\n // energy.\n _spinLoopHint()\n } while tries != 0\n }\n\n // We've exhausted our spins. Ask the kernel to block for us until the owner\n // releases the lock.\n //\n // Note: The kernel will attempt to acquire the lock for us as well which\n // could succeed if the owner releases in between finishing spinning the\n // futex syscall.\n while true {\n // Block until an equivalent '_futexUnlock' has been called by the owner.\n // This returns '0' on success which means the kernel has acquired the\n // lock for us.\n switch storage._futexLock() {\n case 0:\n // Locked!\n return\n\n // EINTR - "A FUTEX_WAIT or FUTEX_WAIT_BITSET operation was interrupted\n // by a signal (see signal(7)). 
Before Linux 2.6.22, this error\n // could also be returned for a spurious wakeup; since Linux\n // 2.6.22, this no longer happens."\n // EAGAIN - "The futex owner thread ID of uaddr is about to exit, but has\n // not yet handled the internal state cleanup. Try again."\n case 4, 11:\n continue\n\n // EDEADLK - "The futex word at uaddr is already locked by the caller."\n case 35:\n // TODO: Replace with a colder function / one that takes a StaticString\n fatalError("Recursive call to lock Mutex")\n\n // This handles all of the following errors which generally aren't\n // applicable to this implementation:\n //\n // EACCES - "No read access to the memory of a futex word."\n // EFAULT - "A required pointer argument did not point to a valid\n // user-space address."\n // EINVAL - "The operation in futex_op is one of those that employs a\n // timeout, but the supplied timeout argument was invalid\n // (tv_sec was less than zero, or tv_nsec was not less than\n // 1,000,000,000)."\n // OR\n // "The operation specified in futex_op employs one or both of\n // the pointers uaddr and uaddr2, but one of these does not\n // point to a valid object—that is, the address is not four-\n // byte-aligned."\n // OR\n // "The kernel detected an inconsistency between the user-space\n // state at uaddr and the kernel state. This indicates either\n // state corruption or that the kernel found a waiter on uaddr\n // which is waiting via FUTEX_WAIT or FUTEX_WAIT_BITSET."\n // OR\n // "Invalid argument."\n // ENOMEM - "The kernel could not allocate memory to hold state\n // information."\n // ENOSYS - "Invalid operation specified in futex_op."\n // OR\n // "A run-time check determined that the operation is not\n // available. 
The PI-futex operations are not implemented on all\n // architectures and are not supported on some CPU variants."\n // EPERM - "The caller is not allowed to attach itself to the futex at\n // uaddr (This may be caused by a state corruption in user\n // space.)"\n // ESRCH - "The thread ID in the futex word at uaddr does not exist."\n default:\n // TODO: Replace with a colder function / one that takes a StaticString\n fatalError("Unknown error occurred while attempting to acquire a Mutex")\n }\n }\n }\n\n @available(SwiftStdlib 6.0, *)\n @_alwaysEmitIntoClient\n @_transparent\n internal borrowing func _tryLock() -> Bool {\n // Do a user space cmpxchg to see if we can easily acquire the lock.\n if storage.compareExchange(\n expected: 0,\n\n // Note: This is being TLS cached.\n desired: _swift_stdlib_gettid(),\n successOrdering: .acquiring,\n failureOrdering: .relaxed\n ).exchanged {\n // Locked!\n return true\n }\n\n // The quick atomic op failed, ask the kernel to see if it can acquire the\n // lock for us.\n return _tryLockSlow()\n }\n\n @available(SwiftStdlib 6.0, *)\n @usableFromInline\n internal borrowing func _tryLockSlow() -> Bool {\n // Note: "Because the kernel has access to more state information than user\n // space, acquisition of the lock might succeed if performed by the\n // kernel in cases where the futex word (i.e., the state information\n // accessible to use-space) contains stale state (FUTEX_WAITERS\n // and/or FUTEX_OWNER_DIED). This can happen when the owner of the\n // futex died. 
User space cannot handle this condition in a race-free\n // manner, but the kernel can fix this up and acquire the futex."\n switch storage._futexTryLock() {\n case 0:\n // Locked!\n return true\n\n // EDEADLK - "The futex word at uaddr is already locked by the caller."\n case 35:\n // TODO: Replace with a colder function / one that takes a StaticString\n fatalError("Attempt to try to lock Mutex in already acquired thread")\n\n // This handles all of the following errors which generally aren't\n // applicable to this implementation:\n //\n // EACCES - "No read access to the memory of a futex word."\n // EAGAIN - "The futex owner thread ID of uaddr is about to exit, but has\n // not yet handled the internal state cleanup. Try again."\n // EFAULT - "A required pointer argument did not point to a valid\n // user-space address."\n // EINVAL - "The operation in futex_op is one of those that employs a\n // timeout, but the supplied timeout argument was invalid\n // (tv_sec was less than zero, or tv_nsec was not less than\n // 1,000,000,000)."\n // OR\n // "The operation specified in futex_op employs one or both of\n // the pointers uaddr and uaddr2, but one of these does not\n // point to a valid object—that is, the address is not four-\n // byte-aligned."\n // OR\n // "The kernel detected an inconsistency between the user-space\n // state at uaddr and the kernel state. This indicates either\n // state corruption or that the kernel found a waiter on uaddr\n // which is waiting via FUTEX_WAIT or FUTEX_WAIT_BITSET."\n // OR\n // "Invalid argument."\n // ENOMEM - "The kernel could not allocate memory to hold state\n // information."\n // ENOSYS - "Invalid operation specified in futex_op."\n // OR\n // "A run-time check determined that the operation is not\n // available. 
The PI-futex operations are not implemented on all\n // architectures and are not supported on some CPU variants."\n // EPERM - "The caller is not allowed to attach itself to the futex at\n // uaddr (This may be caused by a state corruption in user\n // space.)"\n // ESRCH - "The thread ID in the futex word at uaddr does not exist."\n default:\n // Note: We could maybe retry this operation when given EAGAIN, but this\n // is more or less supposed to be a quick yes/no.\n return false\n }\n }\n\n @available(SwiftStdlib 6.0, *)\n @_alwaysEmitIntoClient\n @_transparent\n internal borrowing func _unlock() {\n // Note: This is being TLS cached.\n let selfId = _swift_stdlib_gettid()\n\n // Attempt to release the lock. We can only atomically release the lock in\n // user-space when there are no other waiters. If there are waiters, the\n // waiter bit is set and we need to inform the kernel that we're unlocking.\n let (exchanged, _) = storage.compareExchange(\n expected: selfId,\n desired: 0,\n successOrdering: .releasing,\n failureOrdering: .relaxed\n )\n\n if _fastPath(exchanged) {\n // No waiters, unlocked!\n return\n }\n\n _unlockSlow()\n }\n\n @available(SwiftStdlib 6.0, *)\n @usableFromInline\n internal borrowing func _unlockSlow() {\n while true {\n switch storage._futexUnlock() {\n case 0:\n // Unlocked!\n return\n\n // EINTR - "A FUTEX_WAIT or FUTEX_WAIT_BITSET operation was interrupted\n // by a signal (see signal(7)). 
Before Linux 2.6.22, this error\n // could also be returned for a spurious wakeup; since Linux\n // 2.6.22, this no longer happens."\n case 4:\n continue\n\n // EPERM - "The caller does not own the lock represented by the futex\n // word."\n case 1:\n // TODO: Replace with a colder function / one that takes a StaticString\n fatalError(\n "Call to unlock Mutex on a thread which hasn't acquired the lock"\n )\n\n // This handles all of the following errors which generally aren't\n // applicable to this implementation:\n //\n // EACCES - "No read access to the memory of a futex word."\n // EFAULT - "A required pointer argument did not point to a valid\n // user-space address."\n // EINVAL - "The operation in futex_op is one of those that employs a\n // timeout, but the supplied timeout argument was invalid\n // (tv_sec was less than zero, or tv_nsec was not less than\n // 1,000,000,000)."\n // OR\n // "The operation specified in futex_op employs one or both of\n // the pointers uaddr and uaddr2, but one of these does not\n // point to a valid object—that is, the address is not four-\n // byte-aligned."\n // OR\n // "The kernel detected an inconsistency between the user-space\n // state at uaddr and the kernel state. This indicates either\n // state corruption or that the kernel found a waiter on uaddr\n // which is waiting via FUTEX_WAIT or FUTEX_WAIT_BITSET."\n // OR\n // "Invalid argument."\n // ENOSYS - "Invalid operation specified in futex_op."\n // OR\n // "A run-time check determined that the operation is not\n // available. The PI-futex operations are not implemented on all\n // architectures and are not supported on some CPU variants."\n // EPERM - "The caller is not allowed to attach itself to the futex at\n // uaddr (This may be caused by a state corruption in user\n // space.)"\n default:\n // TODO: Replace with a colder function / one that takes a StaticString\n fatalError("Unknown error occurred while attempting to release a Mutex")\n }\n }\n }\n}\n
dataset_sample\swift\swift\cpp_apple_swift_stdlib_public_Synchronization_Mutex_LinuxImpl.swift
cpp_apple_swift_stdlib_public_Synchronization_Mutex_LinuxImpl.swift
Swift
14,877
0.95
0.097826
0.585075
vue-tools
911
2025-03-26T18:56:13.595233
GPL-3.0
false
f59126ad102c94c93a55367353fab65e
//===----------------------------------------------------------------------===//\n//\n// This source file is part of the Swift Atomics open source project\n//\n// Copyright (c) 2024 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n\n/// A synchronization primitive that protects shared mutable state via\n/// mutual exclusion.\n///\n/// The `Mutex` type offers non-recursive exclusive access to the state\n/// it is protecting by blocking threads attempting to acquire the lock.\n/// Only one execution context at a time has access to the value stored\n/// within the `Mutex` allowing for exclusive access.\n///\n/// An example use of `Mutex` in a class used simultaneously by many\n/// threads protecting a `Dictionary` value:\n///\n/// class Manager {\n/// let cache = Mutex<[Key: Resource]>([:])\n///\n/// func saveResource(_ resource: Resource, as key: Key) {\n/// cache.withLock {\n/// $0[key] = resource\n/// }\n/// }\n/// }\n///\n@available(SwiftStdlib 6.0, *)\n@frozen\n@_staticExclusiveOnly\npublic struct Mutex<Value: ~Copyable>: ~Copyable {\n @usableFromInline\n let handle = _MutexHandle()\n\n @usableFromInline\n let value: _Cell<Value>\n\n /// Initializes a value of this mutex with the given initial state.\n ///\n /// - Parameter initialValue: The initial value to give to the mutex.\n @available(SwiftStdlib 6.0, *)\n @_alwaysEmitIntoClient\n @_transparent\n public init(_ initialValue: consuming sending Value) {\n value = _Cell(initialValue)\n }\n}\n\n@available(SwiftStdlib 6.0, *)\nextension Mutex: @unchecked Sendable where Value: ~Copyable {}\n\n@available(SwiftStdlib 6.0, *)\nextension Mutex where Value: ~Copyable {\n /// Calls the given closure after acquiring the lock and 
then releases\n /// ownership.\n ///\n /// This method is equivalent to the following sequence of code:\n ///\n /// mutex.lock()\n /// defer {\n /// mutex.unlock()\n /// }\n /// return try body(&value)\n ///\n /// - Warning: Recursive calls to `withLock` within the\n /// closure parameter has behavior that is platform dependent.\n /// Some platforms may choose to panic the process, deadlock,\n /// or leave this behavior unspecified. This will never\n /// reacquire the lock however.\n ///\n /// - Parameter body: A closure with a parameter of `Value`\n /// that has exclusive access to the value being stored within\n /// this mutex. This closure is considered the critical section\n /// as it will only be executed once the calling thread has\n /// acquired the lock.\n ///\n /// - Returns: The return value, if any, of the `body` closure parameter.\n @available(SwiftStdlib 6.0, *)\n @_alwaysEmitIntoClient\n @_transparent\n public borrowing func withLock<Result: ~Copyable, E: Error>(\n _ body: (inout sending Value) throws(E) -> sending Result\n ) throws(E) -> sending Result {\n handle._lock()\n\n defer {\n handle._unlock()\n }\n\n return try unsafe body(&value._address.pointee)\n }\n\n /// Attempts to acquire the lock and then calls the given closure if\n /// successful.\n ///\n /// If the calling thread was successful in acquiring the lock, the\n /// closure will be executed and then immediately after it will\n /// release ownership of the lock. If we were unable to acquire the\n /// lock, this will return `nil`.\n ///\n /// This method is equivalent to the following sequence of code:\n ///\n /// guard mutex.tryLock() else {\n /// return nil\n /// }\n /// defer {\n /// mutex.unlock()\n /// }\n /// return try body(&value)\n ///\n /// - Warning: Recursive calls to `withLockIfAvailable` within the\n /// closure parameter has behavior that is platform dependent.\n /// Some platforms may choose to panic the process, deadlock,\n /// or leave this behavior unspecified. 
This will never\n /// reacquire the lock however.\n ///\n /// - Parameter body: A closure with a parameter of `Value`\n /// that has exclusive access to the value being stored within\n /// this mutex. This closure is considered the critical section\n /// as it will only be executed if the calling thread acquires\n /// the lock.\n ///\n /// - Returns: The return value, if any, of the `body` closure parameter\n /// or nil if the lock couldn't be acquired.\n @available(SwiftStdlib 6.0, *)\n @_alwaysEmitIntoClient\n @_transparent\n public borrowing func withLockIfAvailable<Result: ~Copyable, E: Error>(\n _ body: (inout sending Value) throws(E) -> sending Result\n ) throws(E) -> sending Result? {\n guard handle._tryLock() else {\n return nil\n }\n\n defer {\n handle._unlock()\n }\n\n return unsafe try body(&value._address.pointee)\n }\n}\n\n@available(SwiftStdlib 6.0, *)\nextension Mutex where Value == Void {\n @available(SwiftStdlib 6.0, *)\n @_alwaysEmitIntoClient\n @_transparent\n public borrowing func _unsafeLock() {\n handle._lock()\n }\n\n @available(SwiftStdlib 6.0, *)\n @_alwaysEmitIntoClient\n @_transparent\n public borrowing func _unsafeTryLock() -> Bool {\n handle._tryLock()\n }\n\n @available(SwiftStdlib 6.0, *)\n @_alwaysEmitIntoClient\n @_transparent\n public borrowing func _unsafeUnlock() {\n handle._unlock()\n }\n}\n\n@available(SwiftStdlib 6.0, *)\nextension _MutexHandle {\n @available(SwiftStdlib 6.0, *)\n @_alwaysEmitIntoClient\n @_transparent\n @unsafe\n public borrowing func unsafeLock() {\n _lock()\n }\n\n @available(SwiftStdlib 6.0, *)\n @_alwaysEmitIntoClient\n @_transparent\n @unsafe\n public borrowing func unsafeTryLock() -> Bool {\n _tryLock()\n }\n\n @available(SwiftStdlib 6.0, *)\n @_alwaysEmitIntoClient\n @_transparent\n @unsafe\n public borrowing func unsafeUnlock() {\n _unlock()\n }\n}\n
dataset_sample\swift\swift\cpp_apple_swift_stdlib_public_Synchronization_Mutex_Mutex.swift
cpp_apple_swift_stdlib_public_Synchronization_Mutex_Mutex.swift
Swift
5,957
0.95
0.070707
0.5
node-utils
372
2024-02-13T10:47:50.618382
Apache-2.0
false
4f5e7716793ad745eae0fdfa8d80ce16
//===----------------------------------------------------------------------===//\n//\n// This source file is part of the Swift Atomics open source project\n//\n// Copyright (c) 2024 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n\n/// A synchronization primitive that protects shared mutable state via\n/// mutual exclusion.\n///\n/// The `Mutex` type offers non-recursive exclusive access to the state\n/// it is protecting by blocking threads attempting to acquire the lock.\n/// Only one execution context at a time has access to the value stored\n/// within the `Mutex` allowing for exclusive access.\n///\n/// An example use of `Mutex` in a class used simultaneously by many\n/// threads protecting a `Dictionary` value:\n///\n/// class Manager {\n/// let cache = Mutex<[Key: Resource]>([:])\n///\n/// func saveResource(_ resource: Resource, as key: Key) {\n/// cache.withLock {\n/// $0[key] = resource\n/// }\n/// }\n/// }\n///\n@available(*, unavailable, message: "Mutex is not available on this platform")\n@frozen\n@_staticExclusiveOnly\npublic struct Mutex<Value: ~Copyable>: ~Copyable {}\n
dataset_sample\swift\swift\cpp_apple_swift_stdlib_public_Synchronization_Mutex_MutexUnavailable.swift
cpp_apple_swift_stdlib_public_Synchronization_Mutex_MutexUnavailable.swift
Swift
1,413
0.95
0.135135
0.888889
node-utils
906
2025-06-08T14:26:49.729570
Apache-2.0
false
6e5007bb02729153fb91f6953749e26d
//===----------------------------------------------------------------------===//\n//\n// This source file is part of the Swift Atomics open source project\n//\n// Copyright (c) 2024 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n\n#if arch(arm) || arch(arm64) || arch(arm64_32)\n\n@inline(__always)\nvar _tries: Int {\n 100\n}\n\n#if arch(arm)\n\n// The following are acceptable operands to the aarch64 hint intrinsic from\n// 'llvm-project/llvm/lib/Target/ARM/ARMInstrInfo.td':\n//\n// `nop` = 0\n// `yield` = 1\n// `wfe` = 2\n// `wfi` = 3\n// `sev` = 4\n// `sevl` = 5\n//\n// There are others, but for the sake of spin loops, we only care about 'wfe'.\n@_extern(c, "llvm.arm.hint")\nfunc _hint(_: UInt32)\n\n#else\n\n// The following are acceptable operands to the aarch64 hint intrinsic from\n// 'llvm-project/llvm/lib/Target/AArch64/AArch64InstrInfo.td':\n//\n// `nop` = 0\n// `yield` = 1\n// `wfe` = 2\n// `wfi` = 3\n// `sev` = 4\n// `sevl` = 5\n//\n// There are others, but for the sake of spin loops, we only care about 'wfe'.\n@_extern(c, "llvm.aarch64.hint")\nfunc _hint(_: UInt32)\n\n#endif\n\n@inline(__always)\nfunc _wfe() {\n _hint(2)\n}\n\n#elseif arch(i386) || arch(x86_64)\n\n@inline(__always)\nvar _tries: Int {\n 1000\n}\n\n@_extern(c, "llvm.x86.sse2.pause")\nfunc _pause()\n\n#else\n\n@inline(__always)\nvar _tries: Int {\n 100\n}\n\n#endif\n\n@inline(__always)\nfunc _spinLoopHint() {\n#if arch(arm) || arch(arm64) || arch(arm64_32)\n _wfe()\n#elseif arch(i386) || arch(x86_64)\n _pause()\n#else\n // Just do a nop on architectures we don't know about.\n#endif\n}\n
dataset_sample\swift\swift\cpp_apple_swift_stdlib_public_Synchronization_Mutex_SpinLoopHint.swift
cpp_apple_swift_stdlib_public_Synchronization_Mutex_SpinLoopHint.swift
Swift
1,821
0.8
0.08046
0.625
python-kit
29
2025-01-06T07:28:53.381410
BSD-3-Clause
false
e5499b97ac97a55159adfea904ba575e
//===----------------------------------------------------------------------===//\n//\n// This source file is part of the Swift Atomics open source project\n//\n// Copyright (c) 2024 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n\n// Note: All atomic accesses on Wasm are sequentially consistent regardless of\n// what ordering we tell LLVM to use.\n\n@_extern(c, "llvm.wasm.memory.atomic.wait32")\ninternal func _swift_stdlib_wait(\n on: UnsafePointer<UInt32>,\n expected: UInt32,\n timeout: Int64\n) -> UInt32\n\n@_extern(c, "llvm.wasm.memory.atomic.notify")\ninternal func _swift_stdlib_wake(on: UnsafePointer<UInt32>, count: UInt32) -> UInt32\n\nextension Atomic where Value == _MutexHandle.State {\n internal borrowing func _wait(expected: _MutexHandle.State) {\n #if _runtime(_multithreaded)\n _ = _swift_stdlib_wait(\n on: .init(_rawAddress),\n expected: expected.rawValue,\n\n // A timeout of < 0 means indefinitely.\n timeout: -1\n )\n #endif\n }\n\n internal borrowing func _wake() {\n #if _runtime(_multithreaded)\n // Only wake up 1 thread\n _ = _swift_stdlib_wake(on: .init(_rawAddress), count: 1)\n #endif\n }\n}\n\n@available(SwiftStdlib 6.0, *)\nextension _MutexHandle {\n @available(SwiftStdlib 6.0, *)\n @frozen\n @usableFromInline\n internal enum State: UInt32, AtomicRepresentable {\n case unlocked\n case locked\n case contended\n }\n}\n\n@available(SwiftStdlib 6.0, *)\n@frozen\n@_staticExclusiveOnly\npublic struct _MutexHandle: ~Copyable {\n @usableFromInline\n let storage: Atomic<State>\n\n @available(SwiftStdlib 6.0, *)\n @_alwaysEmitIntoClient\n @_transparent\n public init() {\n storage = Atomic(.unlocked)\n }\n\n @available(SwiftStdlib 6.0, *)\n @usableFromInline\n 
internal borrowing func _lock() {\n // Note: We could probably merge this cas into a do/while style loop, but we\n // really want to perform the strong variant before attempting to do weak\n // ones in the loop.\n\n var (exchanged, state) = storage.compareExchange(\n expected: .unlocked,\n desired: .locked,\n successOrdering: .acquiring,\n failureOrdering: .relaxed\n )\n\n if _fastPath(exchanged) {\n // Locked!\n return\n }\n\n while !exchanged {\n // If we're not already contended, go ahead and transition the mutex state\n // into being contended. If when we do this that the value stored there\n // was unlocked, then we know we unintentionally acquired the lock. A\n // weird quirk that occurs if this happens is that we go directly from\n // .unlocked -> .contended when in fact the lock may not be contended.\n // We may be able to do another atomic access and change it to .locked if\n // acquired it, but it may cause more problems than just potentially\n // calling wake with no waiters.\n if state != .contended, storage.exchange(\n .contended,\n ordering: .acquiring\n ) == .unlocked {\n // Locked!\n return\n }\n\n // Block until unlock has been called. 
This will return early if the call\n // to unlock happened between attempting to acquire and attempting to\n // wait while nobody else managed to acquire it yet.\n storage._wait(expected: .contended)\n\n (exchanged, state) = storage.weakCompareExchange(\n expected: .unlocked,\n desired: .locked,\n successOrdering: .acquiring,\n failureOrdering: .relaxed\n )\n }\n\n // Locked!\n }\n\n @available(SwiftStdlib 6.0, *)\n @usableFromInline\n internal borrowing func _tryLock() -> Bool {\n storage.compareExchange(\n expected: .unlocked,\n desired: .locked,\n successOrdering: .acquiring,\n failureOrdering: .relaxed\n ).exchanged\n }\n\n @available(SwiftStdlib 6.0, *)\n @usableFromInline\n internal borrowing func _unlock() {\n // Transition our state from being either .locked or .contended to .unlocked.\n // At this point the mutex is freely acquirable. If the value that was\n // stored in the mutex was .locked, then no one else was waiting on this\n // mutex so we can just skip trying to wake up a thread.\n guard storage.exchange(.unlocked, ordering: .releasing) == .contended else {\n // Unlocked!\n return\n }\n\n // Otherwise, wake up our next lucky random thread to acquire the mutex.\n // (Assuming no new thread acquires the lock before it does)\n storage._wake()\n\n // Unlocked!\n }\n}\n
dataset_sample\swift\swift\cpp_apple_swift_stdlib_public_Synchronization_Mutex_WasmImpl.swift
cpp_apple_swift_stdlib_public_Synchronization_Mutex_WasmImpl.swift
Swift
4,669
0.8
0.077922
0.328358
python-kit
480
2025-02-14T12:21:37.191308
Apache-2.0
false
8500a57adc1912b9260d39dc18cc45b0
//===----------------------------------------------------------------------===//\n//\n// This source file is part of the Swift Atomics open source project\n//\n// Copyright (c) 2024 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n\nimport WinSDK.core.synch\n\n@available(SwiftStdlib 6.0, *)\n@frozen\n@_staticExclusiveOnly\npublic struct _MutexHandle: ~Copyable {\n @usableFromInline\n let value: _Cell<SRWLOCK>\n\n @available(SwiftStdlib 6.0, *)\n @_alwaysEmitIntoClient\n @_transparent\n public init() {\n value = _Cell(SRWLOCK())\n }\n\n @available(SwiftStdlib 6.0, *)\n @_alwaysEmitIntoClient\n @_transparent\n internal borrowing func _lock() {\n AcquireSRWLockExclusive(value._address)\n }\n\n @available(SwiftStdlib 6.0, *)\n @_alwaysEmitIntoClient\n @_transparent\n internal borrowing func _tryLock() -> Bool {\n // Windows BOOLEAN gets imported as 'UInt8'...\n TryAcquireSRWLockExclusive(value._address) != 0\n }\n\n @available(SwiftStdlib 6.0, *)\n @_alwaysEmitIntoClient\n @_transparent\n internal borrowing func _unlock() {\n ReleaseSRWLockExclusive(value._address)\n }\n}\n
dataset_sample\swift\swift\cpp_apple_swift_stdlib_public_Synchronization_Mutex_WindowsImpl.swift
cpp_apple_swift_stdlib_public_Synchronization_Mutex_WindowsImpl.swift
Swift
1,369
0.95
0.04
0.272727
react-lib
582
2024-05-04T02:49:51.777775
Apache-2.0
false
9f5d6fd78727b7db08dac92b6c905743
//===----------------------------------------------------------------------===//\n//\n// This source file is part of the Swift.org open source project\n//\n// Copyright (c) 2020 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n\nimport Swift\n\n/// A pointer for accessing "volatile" memory, e.g. memory-mapped I/O registers.\n///\n/// Do not use for inter-thread synchronization. This is only meaningful for\n/// low-level operations on special memory addresses performed from OS kernels,\n/// embedded firmware, and similar environments.\n///\n/// The semantics of volatile load and volatile store operations match the LLVM\n/// volatile semantics. Notably, a volatile operation cannot be added, removed,\n/// or reordered with other volatile operations by the compiler. They may be\n/// reordered with non-volatile operations. 
For details, see\n/// <https://llvm.org/docs/LangRef.html#volatile-memory-accesses>.\n@frozen\npublic struct VolatileMappedRegister<Pointee> {\n @usableFromInline\n let _rawPointer: Builtin.RawPointer\n\n @_transparent\n @unsafe\n public init(unsafeBitPattern: UInt) {\n self._rawPointer = Builtin.inttoptr_Word(unsafeBitPattern._builtinWordValue)\n }\n}\n\nextension VolatileMappedRegister where Pointee == UInt8 {\n /// Perform an 8-bit volatile load operation from the target pointer.\n ///\n /// Do not use for inter-thread synchronization.\n @_transparent\n public func load() -> Pointee { \n UInt8(Builtin.atomicload_monotonic_volatile_Int8(_rawPointer))\n }\n\n /// Perform an 8-bit volatile store operation on the target pointer.\n ///\n /// Do not use for inter-thread synchronization.\n @_transparent\n public func store(_ value: Pointee) {\n Builtin.atomicstore_monotonic_volatile_Int8(_rawPointer, value._value)\n }\n}\n\nextension VolatileMappedRegister where Pointee == UInt16 {\n /// Perform a 16-bit volatile load operation from the target pointer.\n ///\n /// Do not use for inter-thread synchronization.\n @_transparent\n public func load() -> Pointee {\n UInt16(Builtin.atomicload_monotonic_volatile_Int16(_rawPointer))\n }\n \n /// Perform a 16-bit volatile store operation on the target pointer.\n ///\n /// Do not use for inter-thread synchronization.\n @_transparent\n public func store(_ value: Pointee) {\n Builtin.atomicstore_monotonic_volatile_Int16(_rawPointer, value._value)\n }\n}\n\nextension VolatileMappedRegister where Pointee == UInt32 {\n /// Perform a 32-bit volatile load operation from the target pointer.\n ///\n /// Do not use for inter-thread synchronization.\n @_transparent\n public func load() -> Pointee {\n UInt32(Builtin.atomicload_monotonic_volatile_Int32(_rawPointer))\n }\n \n /// Perform a 32-bit volatile store operation on the target pointer.\n ///\n /// Do not use for inter-thread synchronization.\n @_transparent\n public func store(_ value: 
Pointee) {\n Builtin.atomicstore_monotonic_volatile_Int32(_rawPointer, value._value)\n }\n}\n\nextension VolatileMappedRegister where Pointee == UInt64 {\n /// Perform a 64-bit volatile load operation from the target pointer.\n ///\n /// Do not use for inter-thread synchronization.\n @_transparent\n public func load() -> Pointee {\n UInt64(Builtin.atomicload_monotonic_volatile_Int64(_rawPointer))\n }\n \n /// Perform a 64-bit volatile store operation on the target pointer.\n ///\n /// Do not use for inter-thread synchronization.\n @_transparent\n public func store(_ value: Pointee) {\n Builtin.atomicstore_monotonic_volatile_Int64(_rawPointer, value._value)\n }\n}\n
dataset_sample\swift\swift\cpp_apple_swift_stdlib_public_Volatile_Volatile.swift
cpp_apple_swift_stdlib_public_Volatile_Volatile.swift
Swift
3,775
0.95
0.12037
0.474227
node-utils
581
2023-09-18T18:57:23.364633
GPL-3.0
false
4b0493f6477951edd1ee1ae7f6520c13
//===----------------------------------------------------------------------===//\n//\n// This source file is part of the Swift.org open source project\n//\n// Copyright (c) 2014 - 2018 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n\n@_exported import ucrt\n@_exported import WinSDK // Clang module\n\n// WinBase.h\n@inlinable\npublic var HANDLE_FLAG_INHERIT: DWORD {\n 0x00000001\n}\n\n// WinBase.h\n@inlinable\npublic var STARTF_USESTDHANDLES: DWORD {\n 0x00000100\n}\n\n// WinBase.h\n@inlinable\npublic var INFINITE: DWORD {\n DWORD(bitPattern: -1)\n}\n\n// WinBase.h\n@inlinable\npublic var WAIT_OBJECT_0: DWORD {\n 0\n}\n\n// WinBase.h\n@inlinable\npublic var STD_INPUT_HANDLE: DWORD {\n DWORD(bitPattern: -10)\n}\n\n@inlinable\npublic var STD_OUTPUT_HANDLE: DWORD {\n DWORD(bitPattern: -11)\n}\n\n@inlinable\npublic var STD_ERROR_HANDLE: DWORD {\n DWORD(bitPattern: -12)\n}\n\n// handleapi.h\n@inlinable\npublic var INVALID_HANDLE_VALUE: HANDLE {\n HANDLE(bitPattern: -1)!\n}\n\n// shellapi.h\n@inlinable\npublic var FOF_NO_UI: FILEOP_FLAGS {\n FILEOP_FLAGS(FOF_SILENT | FOF_NOCONFIRMATION | FOF_NOERRORUI | FOF_NOCONFIRMMKDIR)\n}\n\n// winioctl.h\n@inlinable\npublic var FSCTL_SET_REPARSE_POINT: DWORD {\n 0x900a4\n}\n\n@inlinable\npublic var FSCTL_GET_REPARSE_POINT: DWORD {\n 0x900a8\n}\n\n@inlinable\npublic var FSCTL_DELETE_REPARSE_POINT: DWORD {\n 0x900ac\n}\n\n// WinSock2.h\n@inlinable\npublic var INVALID_SOCKET: SOCKET {\n SOCKET(bitPattern: -1)\n}\n\n@inlinable\npublic var FIONBIO: Int32 {\n Int32(bitPattern: 0x8004667e)\n}\n\n// WinUser.h\n@inlinable\npublic var CW_USEDEFAULT: Int32 {\n Int32(bitPattern: 2147483648)\n}\n\n@inlinable\npublic var QS_MOUSE: UINT {\n 
UINT(QS_MOUSEMOVE | QS_MOUSEBUTTON)\n}\n\n@inlinable\npublic var QS_INPUT: UINT {\n QS_MOUSE | UINT(QS_KEY | QS_RAWINPUT | QS_TOUCH | QS_POINTER)\n}\n\n@inlinable\npublic var QS_ALLEVENTS: UINT {\n QS_INPUT | UINT(QS_POSTMESSAGE | QS_TIMER | QS_PAINT | QS_HOTKEY)\n}\n\n@inlinable\npublic var QS_ALLINPUT: UINT {\n QS_INPUT | UINT(QS_POSTMESSAGE | QS_TIMER | QS_PAINT | QS_HOTKEY | QS_SENDMESSAGE)\n}\n\n@inlinable\npublic var WS_OVERLAPPEDWINDOW: UINT {\n UINT(WS_OVERLAPPED | WS_CAPTION | WS_SYSMENU | WS_THICKFRAME | WS_MINIMIZEBOX | WS_MAXIMIZEBOX)\n}\n\n@inlinable\npublic var WS_POPUPWINDOW: UINT {\n UINT(numericCast(WS_POPUP) | WS_BORDER | WS_SYSMENU)\n}\n\n// fileapi.h\n@inlinable\npublic var INVALID_FILE_ATTRIBUTES: DWORD {\n DWORD(bitPattern: -1)\n}\n\n// CommCtrl.h\npublic let WC_BUTTONW: [WCHAR] = Array<WCHAR>("Button".utf16)\npublic let WC_COMBOBOXW: [WCHAR] = Array<WCHAR>("ComboBox".utf16)\npublic let WC_EDITW: [WCHAR] = Array<WCHAR>("Edit".utf16)\npublic let WC_HEADERW: [WCHAR] = Array<WCHAR>("SysHeader32".utf16)\npublic let WC_LISTBOXW: [WCHAR] = Array<WCHAR>("ListBox".utf16)\npublic let WC_LISTVIEWW: [WCHAR] = Array<WCHAR>("SysListView32".utf16)\npublic let WC_SCROLLBARW: [WCHAR] = Array<WCHAR>("ScrollBar".utf16)\npublic let WC_STATICW: [WCHAR] = Array<WCHAR>("Static".utf16)\npublic let WC_TABCONTROLW: [WCHAR] = Array<WCHAR>("SysTabControl32".utf16)\npublic let WC_TREEVIEWW: [WCHAR] = Array<WCHAR>("SysTreeView32".utf16)\n\npublic let ANIMATE_CLASSW: [WCHAR] = Array<WCHAR>("SysAnimate32".utf16)\npublic let HOTKEY_CLASSW: [WCHAR] = Array<WCHAR>("msctls_hotkey32".utf16)\npublic let PROGRESS_CLASSW: [WCHAR] = Array<WCHAR>("msctls_progress32".utf16)\npublic let STATUSCLASSNAMEW: [WCHAR] = Array<WCHAR>("msctls_statusbar32".utf16)\npublic let TOOLBARW_CLASSW: [WCHAR] = Array<WCHAR>("ToolbarWindow32".utf16)\npublic let TRACKBAR_CLASSW: [WCHAR] = Array<WCHAR>("msctls_trackbar32".utf16)\npublic let UPDOWN_CLASSW: [WCHAR] = 
Array<WCHAR>("msctls_updown32".utf16)\n\n// consoleapi.h\n@inlinable\npublic var PROC_THREAD_ATTRIBUTE_PSEUDOCONSOLE: DWORD_PTR {\n 0x00020016\n}\n\n// windef.h\n@inlinable\npublic var DPI_AWARENESS_CONTEXT_UNAWARE: DPI_AWARENESS_CONTEXT {\n DPI_AWARENESS_CONTEXT(bitPattern: -1)!\n}\n\n@inlinable\npublic var DPI_AWARENESS_CONTEXT_SYSTEM_AWARE: DPI_AWARENESS_CONTEXT {\n DPI_AWARENESS_CONTEXT(bitPattern: -2)!\n}\n\n@inlinable\npublic var DPI_AWARENESS_CONTEXT_PER_MONITOR_AWARE: DPI_AWARENESS_CONTEXT {\n DPI_AWARENESS_CONTEXT(bitPattern: -3)!\n}\n\n@inlinable\npublic var DPI_AWARENESS_CONTEXT_PER_MONITOR_AWARE_V2: DPI_AWARENESS_CONTEXT {\n DPI_AWARENESS_CONTEXT(bitPattern: -4)!\n}\n\n@inlinable\npublic var DPI_AWARENESS_CONTEXT_UNAWARE_GDISCALED: DPI_AWARENESS_CONTEXT {\n DPI_AWARENESS_CONTEXT(bitPattern: -5)!\n}\n\n// winreg.h\n@inlinable\npublic var HKEY_CLASSES_ROOT: HKEY {\n HKEY(bitPattern: UInt(0x80000000))!\n}\n\n@inlinable\npublic var HKEY_CURRENT_USER: HKEY {\n HKEY(bitPattern: UInt(0x80000001))!\n}\n\n@inlinable\npublic var HKEY_LOCAL_MACHINE: HKEY {\n HKEY(bitPattern: UInt(0x80000002))!\n}\n\n@inlinable\npublic var HKEY_USERS: HKEY {\n HKEY(bitPattern: UInt(0x80000003))!\n}\n\n@inlinable\npublic var HKEY_PERFORMANCE_DATA: HKEY {\n HKEY(bitPattern: UInt(0x80000004))!\n}\n\n@inlinable\npublic var HKEY_PERFORMANCE_TEXT: HKEY {\n HKEY(bitPattern: UInt(0x80000050))!\n}\n\n@inlinable\npublic var HKEY_PERFORMANCE_NLSTEXT: HKEY {\n HKEY(bitPattern: UInt(0x80000060))!\n}\n\n@inlinable\npublic var HKEY_CURRENT_CONFIG: HKEY {\n HKEY(bitPattern: UInt(0x80000005))!\n}\n\n@inlinable\npublic var HKEY_DYN_DATA: HKEY {\n HKEY(bitPattern: UInt(0x80000006))!\n}\n\n@inlinable\npublic var HKEY_CURRENT_USER_LOCAL_SETTINGS: HKEY {\n HKEY(bitPattern: UInt(0x80000007))!\n}\n\n// Richedit.h\npublic let MSFTEDIT_CLASS: [WCHAR] = Array<WCHAR>("RICHEDIT50W".utf16)\n\n// Swift Convenience\npublic extension FILETIME {\n var time_t: time_t {\n let NTTime: Int64 = Int64(self.dwLowDateTime) 
| (Int64(self.dwHighDateTime) << 32)\n return (NTTime - 116444736000000000) / 10000000\n }\n\n init(from time: time_t) {\n let UNIXTime: Int64 = ((time * 10000000) + 116444736000000000)\n self = FILETIME(dwLowDateTime: DWORD(UNIXTime & 0xffffffff),\n dwHighDateTime: DWORD((UNIXTime >> 32) & 0xffffffff))\n }\n}\n\n// WindowsBool\n\n/// The `BOOL` type declared in WinDefs.h and used throughout WinSDK\n///\n/// The C type is a typedef for `int`.\n@frozen\npublic struct WindowsBool: ExpressibleByBooleanLiteral {\n @usableFromInline\n var _value: Int32\n\n /// The value of `self`, expressed as a `Bool`.\n @_transparent\n public var boolValue: Bool {\n return !(_value == 0)\n }\n\n @_transparent\n public init(booleanLiteral value: Bool) {\n self.init(value)\n }\n\n /// Create an instance initialized to `value`.\n @_transparent\n public init(_ value: Bool) {\n self._value = value ? 1 : 0\n }\n}\n\n#if SWIFT_ENABLE_REFLECTION\nextension WindowsBool: CustomReflectable {\n /// Returns a mirror that reflects `self`.\n public var customMirror: Mirror {\n return Mirror(reflecting: boolValue)\n }\n}\n#endif\n\n@_unavailableInEmbedded\nextension WindowsBool: CustomStringConvertible {\n /// A textual representation of `self`.\n public var description: String {\n return self.boolValue.description\n }\n}\n\nextension WindowsBool: Equatable {\n @_transparent\n public static func ==(lhs: WindowsBool, rhs: WindowsBool) -> Bool {\n return lhs.boolValue == rhs.boolValue\n }\n}\n\n@_transparent\npublic // COMPILER_INTRINSIC\nfunc _convertBoolToWindowsBool(_ b: Bool) -> WindowsBool {\n return WindowsBool(b)\n}\n\n@_transparent\npublic // COMPILER_INTRINSIC\nfunc _convertWindowsBoolToBool(_ b: WindowsBool) -> Bool {\n return b.boolValue\n}\n\n
dataset_sample\swift\swift\cpp_apple_swift_stdlib_public_Windows_WinSDK.swift
cpp_apple_swift_stdlib_public_Windows_WinSDK.swift
Swift
7,483
0.95
0.0125
0.143396
node-utils
172
2023-10-01T09:45:37.924547
GPL-3.0
false
9c4a9de700f5c49b744d53c5497ddb7f
//===--- FakeStdlib.swift -------------------------------------------------===//\n//\n// This source file is part of the Swift.org open source project\n//\n// Copyright (c) 2025 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n//\n// A handful of standard library stubs to allow Span.swift and RawSpan.swift\n// to be compiled as part of the compatibility shim.\n//\n//===----------------------------------------------------------------------===//\n\nimport Swift\n\n@_alwaysEmitIntoClient @_transparent\ninternal func _precondition(\n _ condition: @autoclosure () -> Bool, _ message: StaticString = StaticString(),\n file: StaticString = #file, line: UInt = #line\n) {\n fatalError()\n}\n\n@_alwaysEmitIntoClient @_transparent\ninternal func _internalInvariantFailure(\n _ message: StaticString = StaticString(),\n file: StaticString = #file, line: UInt = #line\n) -> Never {\n fatalError()\n}\n\n@unsafe\n@_unsafeNonescapableResult\n@_alwaysEmitIntoClient\n@_transparent\n@lifetime(borrow source)\ninternal func _overrideLifetime<\n T: ~Copyable & ~Escapable, U: ~Copyable & ~Escapable\n>(\n _ dependent: consuming T, borrowing source: borrowing U\n) -> T {\n dependent\n}\n\n@unsafe\n@_unsafeNonescapableResult\n@_alwaysEmitIntoClient\n@_transparent\n@lifetime(copy source)\ninternal func _overrideLifetime<\n T: ~Copyable & ~Escapable, U: ~Copyable & ~Escapable\n>(\n _ dependent: consuming T, copying source: borrowing U\n) -> T {\n dependent\n}\n\n@unsafe\n@_unsafeNonescapableResult\n@_alwaysEmitIntoClient\n@_transparent\n@lifetime(&source)\ninternal func _overrideLifetime<\n T: ~Copyable & ~Escapable, U: ~Copyable & ~Escapable\n>(\n _ dependent: consuming T,\n mutating source: inout U\n) -> 
T {\n dependent\n}\n\nextension Range {\n @_alwaysEmitIntoClient\n internal init(_uncheckedBounds bounds: (lower: Bound, upper: Bound)) {\n self.init(uncheckedBounds: bounds)\n }\n}\n\nextension Optional {\n @_alwaysEmitIntoClient\n internal var _unsafelyUnwrappedUnchecked: Wrapped {\n get {\n if let x = self {\n return x\n }\n _internalInvariantFailure("_unsafelyUnwrappedUnchecked of nil optional")\n }\n }\n}
dataset_sample\swift\swift\cpp_apple_swift_stdlib_toolchain_CompatibilitySpan_FakeStdlib.swift
cpp_apple_swift_stdlib_toolchain_CompatibilitySpan_FakeStdlib.swift
Swift
2,372
0.95
0.032609
0.188235
vue-tools
118
2023-11-29T18:05:46.331991
MIT
false
10b06ea0526fc5ce5afa78aa23f7a0de
// swift-tools-version:5.9\n//===--- Package.swift.in - SwiftCompiler SwiftPM package -----------------===//\n//\n// This source file is part of the Swift.org open source project\n//\n// Copyright (c) 2021 - 2022 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n\n// To successfully build, you'll need to create a couple of symlinks to an\n// existing Ninja build:\n//\n// ln -s <project-root>/build/<Ninja-Build>/llvm-<os+arch> <project-root>/build/Default/llvm\n// ln -s <project-root>/build/<Ninja-Build>/swift-<os+arch> <project-root>/build/Default/swift\n//\n// where <project-root> is the parent directory of the swift repository.\n//\n// FIXME: We may want to consider generating Package.swift as a part of the\n// build.\n\nimport PackageDescription\n\nprivate extension Target {\n static func compilerModuleTarget(\n name: String,\n dependencies: [Dependency],\n path: String? = nil,\n sources: [String]? = nil,\n swiftSettings: [SwiftSetting] = []) -> Target {\n .target(\n name: name,\n dependencies: dependencies,\n path: path ?? 
"Sources/\(name)",\n exclude: ["CMakeLists.txt"],\n sources: sources,\n swiftSettings: [\n .interoperabilityMode(.Cxx),\n .unsafeFlags([\n "-static",\n "-Xcc", "-DCOMPILED_WITH_SWIFT", "-Xcc", "-DPURE_BRIDGING_MODE",\n "-Xcc", "-UIBOutlet", "-Xcc", "-UIBAction", "-Xcc", "-UIBInspectable",\n "-Xcc", "-I../include",\n "-Xcc", "-I../../llvm-project/llvm/include",\n "-Xcc", "-I../../llvm-project/clang/include",\n "-Xcc", "-I../../build/Default/swift/include",\n "-Xcc", "-I../../build/Default/llvm/include",\n "-Xcc", "-I../../build/Default/llvm/tools/clang/include",\n "-cross-module-optimization",\n ]),\n ] + swiftSettings)\n }\n}\n\nlet package = Package(\n name: "SwiftCompilerSources",\n platforms: [\n .macOS(.v13),\n ],\n products: [\n .library(\n name: "swiftCompilerModules",\n type: .static,\n targets: ["Basic", "AST", "SIL", "Optimizer"]),\n ],\n dependencies: [\n ],\n // Note that targets and their dependencies must align with\n // 'SwiftCompilerSources/Sources/CMakeLists.txt'\n targets: [\n .compilerModuleTarget(\n name: "Basic",\n dependencies: []),\n .compilerModuleTarget(\n name: "AST",\n dependencies: ["Basic"]),\n .compilerModuleTarget(\n name: "SIL",\n dependencies: ["Basic", "AST"]),\n .compilerModuleTarget(\n name: "Optimizer",\n dependencies: ["Basic", "AST", "SIL"]),\n ],\n cxxLanguageStandard: .cxx17\n)\n
dataset_sample\swift\swift\cpp_apple_swift_SwiftCompilerSources_Package.swift
cpp_apple_swift_SwiftCompilerSources_Package.swift
Swift
2,885
0.95
0.022727
0.285714
awesome-app
112
2024-07-03T18:14:38.592676
MIT
false
67324687d5663b1d1cda7ba5297eb048
//===--- Conformance.swift ------------------------------------------------===//\n//\n// This source file is part of the Swift.org open source project\n//\n// Copyright (c) 2014 - 2024 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n\nimport Basic\nimport ASTBridging\n\n/// Describes how a particular type conforms to a given protocol, providing the mapping from the protocol\n/// members to the type (or extension) members that provide the functionality for the concrete type.\n///\n/// TODO: Ideally, `Conformance` should be an enum\npublic struct Conformance: CustomStringConvertible, Hashable, NoReflectionChildren {\n public let bridged: BridgedConformance\n\n public init(bridged: BridgedConformance) {\n self.bridged = bridged\n }\n\n public var description: String {\n return String(taking: bridged.getDebugDescription())\n }\n\n public func hash(into hasher: inout Hasher) {\n hasher.combine(bridged.opaqueValue)\n }\n\n public static func ==(lhs: Conformance, rhs: Conformance) -> Bool {\n lhs.bridged.opaqueValue == rhs.bridged.opaqueValue\n }\n\n public var isConcrete: Bool { bridged.isConcrete() }\n\n public var isValid: Bool { bridged.isValid() }\n\n public var type: Type {\n assert(isConcrete)\n return Type(bridged: bridged.getType())\n }\n\n public var `protocol`: ProtocolDecl {\n return bridged.getRequirement().getAs(ProtocolDecl.self)\n }\n public var isSpecialized: Bool {\n assert(isConcrete)\n return bridged.isSpecializedConformance()\n }\n\n public var genericConformance: Conformance {\n assert(isSpecialized)\n return bridged.getGenericConformance().conformance\n }\n\n public var isInherited: Bool {\n assert(isConcrete)\n return bridged.isInheritedConformance()\n 
}\n\n public var inheritedConformance: Conformance {\n assert(isInherited)\n return bridged.getInheritedConformance().conformance\n }\n\n public var rootConformance: Conformance {\n if isInherited {\n return inheritedConformance.rootConformance\n }\n if isSpecialized {\n return genericConformance\n }\n return self\n }\n\n public var specializedSubstitutions: SubstitutionMap {\n assert(isSpecialized)\n return SubstitutionMap(bridged: bridged.getSpecializedSubstitutions())\n }\n\n public func getAssociatedConformance(ofAssociatedType assocType: Type, to proto: ProtocolDecl) -> Conformance {\n assert(isConcrete)\n return bridged.getAssociatedConformance(assocType.bridged, proto.bridged).conformance\n }\n}\n\npublic struct ConformanceArray : RandomAccessCollection, CustomReflectable {\n public let bridged: BridgedConformanceArray\n\n public var startIndex: Int { return 0 }\n public var endIndex: Int { return bridged.getCount() }\n\n public init(bridged: BridgedConformanceArray) {\n self.bridged = bridged\n }\n\n public subscript(_ index: Int) -> Conformance {\n bridged.getAt(index).conformance\n }\n\n public var customMirror: Mirror {\n let c: [Mirror.Child] = map { (label: nil, value: $0) }\n return Mirror(self, children: c)\n }\n}\n\nextension BridgedConformance {\n public var conformance: Conformance { Conformance(bridged: self) }\n}\n
dataset_sample\swift\swift\cpp_apple_swift_SwiftCompilerSources_Sources_AST_Conformance.swift
cpp_apple_swift_SwiftCompilerSources_Sources_AST_Conformance.swift
Swift
3,371
0.95
0.04386
0.163043
node-utils
829
2025-01-30T12:02:48.670775
BSD-3-Clause
false
6ba86c317b7bf9a29bf6510ad93e6b2f
//===--- Declarations.swift -----------------------------------------------===//\n//\n// This source file is part of the Swift.org open source project\n//\n// Copyright (c) 2014 - 2024 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n\nimport Basic\nimport ASTBridging\n\n/// The base class for all declarations in Swift.\n@_semantics("arc.immortal")\npublic class Decl: CustomStringConvertible, Hashable {\n public var bridged: BridgedDeclObj { BridgedDeclObj(SwiftObject(self)) }\n\n public var description: String { String(taking: bridged.getDebugDescription()) }\n\n /// The module in which this declaration resides.\n public var parentModule: ModuleDecl { bridged.getModuleContext().getAs(ModuleDecl.self) }\n\n /// The parent DeclContext if it is a Decl.\n public var parent: Decl? { bridged.getParent().decl }\n\n // True if this declaration is imported from C/C++/ObjC.\n public var hasClangNode: Bool { bridged.hasClangNode() }\n\n public static func ==(lhs: Decl, rhs: Decl) -> Bool { lhs === rhs }\n\n public func hash(into hasher: inout Hasher) {\n hasher.combine(ObjectIdentifier(self))\n }\n}\n\npublic class ValueDecl: Decl {\n final public var nameLoc: SourceLoc? 
{ SourceLoc(bridged: bridged.Value_getNameLoc()) }\n final public var userFacingName: StringRef { StringRef(bridged: bridged.Value_getUserFacingName()) }\n final public var isObjC: Bool { bridged.Value_isObjC() }\n}\n\npublic class TypeDecl: ValueDecl {\n final public var name: StringRef { StringRef(bridged: bridged.Type_getName()) }\n}\n\npublic class GenericTypeDecl: TypeDecl {\n final public var isGenericAtAnyLevel: Bool { bridged.GenericType_isGenericAtAnyLevel() }\n}\n\npublic class NominalTypeDecl: GenericTypeDecl {\n final public var isGlobalActor: Bool { bridged.NominalType_isGlobalActor() }\n\n final public var valueTypeDestructor: DestructorDecl? {\n bridged.NominalType_getValueTypeDestructor().getAs(DestructorDecl.self)\n }\n}\n\nfinal public class EnumDecl: NominalTypeDecl {}\n\nfinal public class StructDecl: NominalTypeDecl {\n public var hasUnreferenceableStorage: Bool { bridged.Struct_hasUnreferenceableStorage() }\n}\n\nfinal public class ClassDecl: NominalTypeDecl {\n public var superClass: Type? 
{ Type(bridgedOrNil: bridged.Class_getSuperclass()) }\n\n final public var destructor: DestructorDecl {\n bridged.Class_getDestructor().getAs(DestructorDecl.self)\n }\n}\n\nfinal public class ProtocolDecl: NominalTypeDecl {\n public var requiresClass: Bool { bridged.ProtocolDecl_requiresClass() }\n}\n\nfinal public class BuiltinTupleDecl: NominalTypeDecl {}\n\nfinal public class OpaqueTypeDecl: GenericTypeDecl {}\n\nfinal public class TypeAliasDecl: GenericTypeDecl {}\n\nfinal public class GenericTypeParamDecl: TypeDecl {}\n\nfinal public class AssociatedTypeDecl: TypeDecl {}\n\nfinal public class ModuleDecl: TypeDecl {}\n\npublic class AbstractStorageDecl: ValueDecl {\n final public var isConst: Bool { bridged.AbstractStorage_isConst() }\n}\n\npublic class VarDecl: AbstractStorageDecl {}\n\nfinal public class ParamDecl: VarDecl {}\n\nfinal public class SubscriptDecl: AbstractStorageDecl {}\n\npublic class AbstractFunctionDecl: ValueDecl {\n public var isOverridden: Bool { bridged.AbstractFunction_isOverridden() }\n}\n\nfinal public class ConstructorDecl: AbstractFunctionDecl {}\n\nfinal public class DestructorDecl: AbstractFunctionDecl {\n final public var isIsolated: Bool { bridged.Destructor_isIsolated() }\n}\n\npublic class FuncDecl: AbstractFunctionDecl {}\n\nfinal public class AccessorDecl: FuncDecl {}\n\nfinal public class MacroDecl: ValueDecl {}\n\nfinal public class EnumElementDecl: ValueDecl {}\n\nfinal public class ExtensionDecl: Decl {}\n\nfinal public class TopLevelCodeDecl: Decl {}\n\nfinal public class ImportDecl: Decl {}\n\nfinal public class PrecedenceGroupDecl: Decl {}\n\nfinal public class MissingDecl: Decl {}\n\nfinal public class MissingMemberDecl: Decl {}\n\nfinal public class PatternBindingDecl: Decl {}\n\nfinal public class EnumCaseDecl: Decl {}\n\npublic class OperatorDecl: Decl {}\n\nfinal public class InfixOperatorDecl: OperatorDecl {}\n\nfinal public class PrefixOperatorDecl: OperatorDecl {}\n\nfinal public class PostfixOperatorDecl: 
OperatorDecl {}\n\nfinal public class MacroExpansionDecl: Decl {}\n\n// Bridging utilities\n\nextension BridgedDeclObj {\n public var decl: Decl { obj.getAs(Decl.self) }\n public func getAs<T: Decl>(_ declType: T.Type) -> T { obj.getAs(T.self) }\n}\n\nextension OptionalBridgedDeclObj {\n public var decl: Decl? { obj.getAs(Decl.self) }\n public func getAs<T: Decl>(_ declType: T.Type) -> T? { obj.getAs(T.self) }\n}\n
dataset_sample\swift\swift\cpp_apple_swift_SwiftCompilerSources_Sources_AST_Declarations.swift
cpp_apple_swift_SwiftCompilerSources_Sources_AST_Declarations.swift
Swift
4,803
0.95
0.290323
0.153846
vue-tools
9
2023-11-19T16:48:08.531016
MIT
false
ac001b98ac60e5627ff9ac115f00e927
//===--- DiagnosticEngine.swift -------------------------------------------===//\n//\n// This source file is part of the Swift.org open source project\n//\n// Copyright (c) 2022 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n\nimport ASTBridging\n\nimport Basic\n\npublic typealias DiagID = BridgedDiagID\n\npublic protocol DiagnosticArgument {\n func _withBridgedDiagnosticArgument(_ fn: (BridgedDiagnosticArgument) -> Void)\n}\nextension String: DiagnosticArgument {\n public func _withBridgedDiagnosticArgument(_ fn: (BridgedDiagnosticArgument) -> Void) {\n _withBridgedStringRef { fn(BridgedDiagnosticArgument($0)) }\n }\n}\nextension StringRef: DiagnosticArgument {\n public func _withBridgedDiagnosticArgument(_ fn: (BridgedDiagnosticArgument) -> Void) {\n fn(BridgedDiagnosticArgument(_bridged))\n }\n}\nextension Int: DiagnosticArgument {\n public func _withBridgedDiagnosticArgument(_ fn: (BridgedDiagnosticArgument) -> Void) {\n fn(BridgedDiagnosticArgument(self))\n }\n}\nextension Type: DiagnosticArgument {\n public func _withBridgedDiagnosticArgument(_ fn: (BridgedDiagnosticArgument) -> Void) {\n fn(bridged.asDiagnosticArgument())\n }\n}\n\npublic struct DiagnosticFixIt {\n let start: SourceLoc\n let byteLength: Int\n let text: String\n\n init(start: SourceLoc, byteLength: Int, replacement text: String) {\n self.start = start\n self.byteLength = byteLength\n self.text = text\n }\n\n func withBridgedDiagnosticFixIt(_ fn: (BridgedDiagnosticFixIt) -> Void) {\n text._withBridgedStringRef { bridgedTextRef in\n let bridgedDiagnosticFixIt = BridgedDiagnosticFixIt(\n start.bridged, UInt32(byteLength),\n bridgedTextRef)\n fn(bridgedDiagnosticFixIt)\n }\n }\n}\n\npublic struct 
DiagnosticEngine {\n private let bridged: BridgedDiagnosticEngine\n\n public init(bridged: BridgedDiagnosticEngine) {\n self.bridged = bridged\n }\n init?(bridged: BridgedNullableDiagnosticEngine) {\n guard let raw = bridged.raw else {\n return nil\n }\n self.bridged = BridgedDiagnosticEngine(raw: raw)\n }\n\n public func diagnose(_ id: DiagID,\n _ args: [DiagnosticArgument],\n at position: SourceLoc?,\n highlight: CharSourceRange? = nil,\n fixIts: [DiagnosticFixIt] = []) {\n\n let bridgedSourceLoc: BridgedSourceLoc = position.bridged\n let highlightStart: BridgedSourceLoc\n let highlightLength: UInt32\n if let highlight = highlight {\n highlightStart = highlight.start.bridged\n highlightLength = highlight.byteLength\n } else {\n highlightStart = BridgedSourceLoc()\n highlightLength = 0\n }\n var bridgedArgs: [BridgedDiagnosticArgument] = []\n var bridgedFixIts: [BridgedDiagnosticFixIt] = []\n\n // Build a higher-order function to wrap every 'withBridgedXXX { ... }'\n // calls, so we don't escape anything from the closure. 'bridgedArgs' and\n // 'bridgedFixIts' are temporary storage to store bridged values. 
So they\n // should not be used after the closure is executed.\n \n var closure: () -> Void = {\n bridgedArgs.withBridgedArrayRef { bridgedArgsRef in\n bridgedFixIts.withBridgedArrayRef { bridgedFixItsRef in\n bridged.diagnose(at: bridgedSourceLoc, id, bridgedArgsRef,\n highlightAt: highlightStart,\n highlightLength: highlightLength,\n fixIts: bridgedFixItsRef)\n }\n }\n }\n // 'reversed()' because the closure should be wrapped in that order.\n for arg in args.reversed() {\n closure = { [closure, arg] in\n arg._withBridgedDiagnosticArgument { bridgedArg in\n bridgedArgs.append(bridgedArg)\n closure()\n }\n }\n }\n // 'reversed()' because the closure should be wrapped in that order.\n for fixIt in fixIts.reversed() {\n closure = { [closure, fixIt] in\n fixIt.withBridgedDiagnosticFixIt { bridgedFixIt in\n bridgedFixIts.append(bridgedFixIt)\n closure()\n }\n }\n }\n\n closure()\n }\n\n public func diagnose(_ id: DiagID,\n _ args: DiagnosticArgument...,\n at position: SourceLoc?,\n highlight: CharSourceRange? = nil,\n fixIts: DiagnosticFixIt...) {\n diagnose(id, args, at: position, highlight: highlight, fixIts: fixIts)\n }\n\n public func diagnose(_ diagnostic: Diagnostic) {\n diagnose(diagnostic.id, diagnostic.arguments, at: diagnostic.position)\n }\n}\n\n/// A utility struct which allows throwing a Diagnostic.\npublic struct Diagnostic : Error {\n public let id: DiagID\n public let arguments: [DiagnosticArgument]\n public let position: SourceLoc?\n\n public init(_ id: DiagID, _ arguments: DiagnosticArgument..., at position: SourceLoc?) {\n self.init(id, arguments, at: position)\n }\n\n public init(_ id: DiagID, _ arguments: [DiagnosticArgument], at position: SourceLoc?) {\n self.id = id\n self.arguments = arguments\n self.position = position\n }\n}\n
dataset_sample\swift\swift\cpp_apple_swift_SwiftCompilerSources_Sources_AST_DiagnosticEngine.swift
cpp_apple_swift_SwiftCompilerSources_Sources_AST_DiagnosticEngine.swift
Swift
5,308
0.95
0.037267
0.126761
react-lib
255
2025-05-02T03:23:43.085736
Apache-2.0
false
ea441ea34fbccc1a1e7512e796231a02
//===--- GenericSignature.swift -------------------------------------------===//\n//\n// This source file is part of the Swift.org open source project\n//\n// Copyright (c) 2014 - 2025 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n\nimport Basic\nimport ASTBridging\n\n/// Describes the generic signature of a particular declaration, including both the generic type\n/// parameters and the requirements placed on those generic parameters.\npublic struct GenericSignature: CustomStringConvertible, NoReflectionChildren {\n public let bridged: BridgedGenericSignature\n\n public init(bridged: BridgedGenericSignature) {\n self.bridged = bridged\n }\n \n public var description: String {\n return String(taking: bridged.getDebugDescription())\n }\n\n public var genericParameters: TypeArray {\n TypeArray(bridged: bridged.getGenericParams())\n }\n\n public func mapTypeIntoContext(_ type: Type) -> Type {\n Type(bridged: bridged.mapTypeIntoContext(type.bridged))\n }\n}\n
dataset_sample\swift\swift\cpp_apple_swift_SwiftCompilerSources_Sources_AST_GenericSignature.swift
cpp_apple_swift_SwiftCompilerSources_Sources_AST_GenericSignature.swift
Swift
1,253
0.95
0.055556
0.433333
awesome-app
555
2024-06-09T15:10:38.217078
BSD-3-Clause
false
a0dc8a374006f65b0cf66e9ce53a9898
//===--- Registration.swift -----------------------------------------------===//\n//\n// This source file is part of the Swift.org open source project\n//\n// Copyright (c) 2014 - 2024 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n\nimport Basic\nimport ASTBridging\n\npublic func registerAST() {\n registerDecl(EnumDecl.self)\n registerDecl(StructDecl.self)\n registerDecl(ClassDecl.self)\n registerDecl(ProtocolDecl.self)\n registerDecl(BuiltinTupleDecl.self)\n registerDecl(OpaqueTypeDecl.self)\n registerDecl(TypeAliasDecl.self)\n registerDecl(GenericTypeParamDecl.self)\n registerDecl(AssociatedTypeDecl.self)\n registerDecl(ModuleDecl.self)\n registerDecl(VarDecl.self)\n registerDecl(ParamDecl.self)\n registerDecl(SubscriptDecl.self)\n registerDecl(ConstructorDecl.self)\n registerDecl(DestructorDecl.self)\n registerDecl(FuncDecl.self)\n registerDecl(AccessorDecl.self)\n registerDecl(MacroDecl.self)\n registerDecl(EnumElementDecl.self)\n registerDecl(ExtensionDecl.self)\n registerDecl(TopLevelCodeDecl.self)\n registerDecl(ImportDecl.self)\n registerDecl(PrecedenceGroupDecl.self)\n registerDecl(MissingDecl.self)\n registerDecl(MissingMemberDecl.self)\n registerDecl(PatternBindingDecl.self)\n registerDecl(EnumCaseDecl.self)\n registerDecl(InfixOperatorDecl.self)\n registerDecl(PrefixOperatorDecl.self)\n registerDecl(PostfixOperatorDecl.self)\n registerDecl(MacroExpansionDecl.self)\n}\n\nprivate func registerDecl<T: AnyObject>(_ cl: T.Type) {\n "\(cl)"._withBridgedStringRef { nameStr in\n let metatype = unsafeBitCast(cl, to: SwiftMetatype.self)\n registerBridgedDecl(nameStr, metatype)\n }\n}\n
dataset_sample\swift\swift\cpp_apple_swift_SwiftCompilerSources_Sources_AST_Registration.swift
cpp_apple_swift_SwiftCompilerSources_Sources_AST_Registration.swift
Swift
1,897
0.95
0.036364
0.211538
react-lib
440
2025-03-15T16:56:09.660520
GPL-3.0
false
b8f8a339d6925f8e84625821991b60fa
//===--- SubstitutionMap.swift --------------------------------------------===//\n//\n// This source file is part of the Swift.org open source project\n//\n// Copyright (c) 2014 - 2024 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n\nimport Basic\nimport ASTBridging\n\n/// SubstitutionMap describes the mapping of abstract types to replacement types,\n/// together with associated conformances to use for deriving nested types and conformances.\n///\n/// Substitution maps are primarily used when performing substitutions into any entity that\n/// can reference type parameters and conformances.\npublic struct SubstitutionMap: CustomStringConvertible, NoReflectionChildren {\n public let bridged: BridgedSubstitutionMap\n\n public init(bridged: BridgedSubstitutionMap) {\n self.bridged = bridged\n }\n\n public init() {\n self.bridged = BridgedSubstitutionMap()\n }\n\n public init(genericSignature: GenericSignature, replacementTypes: [Type]) {\n let bridgedReplTypes = replacementTypes.map { $0.bridged }\n self.bridged = bridgedReplTypes.withBridgedArrayRef {\n return BridgedSubstitutionMap.get(genericSignature.bridged, $0)\n }\n }\n\n public var description: String {\n return String(taking: bridged.getDebugDescription())\n }\n\n public var isEmpty: Bool { bridged.isEmpty() }\n\n public var hasAnySubstitutableParams: Bool { bridged.hasAnySubstitutableParams() }\n\n public var conformances: ConformanceArray { ConformanceArray(substitutionMap: self) }\n\n public struct ConformanceArray : BridgedRandomAccessCollection {\n fileprivate let bridgedSubs: BridgedSubstitutionMap\n public let count: Int\n\n init(substitutionMap: SubstitutionMap) {\n self.bridgedSubs = substitutionMap.bridged\n 
self.count = substitutionMap.bridged.getNumConformances()\n }\n\n public var startIndex: Int { return 0 }\n public var endIndex: Int { return count }\n\n public subscript(_ index: Int) -> Conformance {\n assert(index >= startIndex && index < endIndex)\n return Conformance(bridged: bridgedSubs.getConformance(index))\n }\n }\n\n public var replacementTypes: TypeArray {\n TypeArray(bridged: bridged.getReplacementTypes())\n }\n\n /// The single replacement type if it's guarnateed that the substitution map has a single replacement type.\n public var replacementType: Type {\n assert(replacementTypes.count == 1)\n return replacementTypes[0]\n }\n\n public static func ==(lhs: SubstitutionMap, rhs: SubstitutionMap) -> Bool {\n lhs.bridged.isEqualTo(rhs.bridged)\n }\n}\n
dataset_sample\swift\swift\cpp_apple_swift_SwiftCompilerSources_Sources_AST_SubstitutionMap.swift
cpp_apple_swift_SwiftCompilerSources_Sources_AST_SubstitutionMap.swift
Swift
2,791
0.95
0.05
0.265625
node-utils
697
2023-08-14T10:42:49.549009
BSD-3-Clause
false
b71475157b06763e0723d0c5e27e1ae1
//===--- Type.swift -------------------------------------------------------===//\n//\n// This source file is part of the Swift.org open source project\n//\n// Copyright (c) 2014 - 2024 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n\nimport Basic\nimport ASTBridging\n\n/// A Swift type.\n/// It is not necessarily canoncial, e.g. typealiases are not resolved.\npublic struct Type: TypeProperties, CustomStringConvertible, NoReflectionChildren {\n public enum TraitResult {\n case isNot\n case canBe\n case `is`\n }\n\n public enum MetatypeRepresentation {\n case thin\n case thick\n case objC\n };\n\n public let bridged: BridgedASTType\n\n // Needed to conform to TypeProperties\n public var rawType: Type { self }\n\n public init?(bridgedOrNil: BridgedASTType) {\n if bridgedOrNil.type == nil {\n return nil\n }\n self.bridged = bridgedOrNil\n }\n\n public init(bridged: BridgedASTType) {\n self.bridged = bridged\n }\n\n public var canonical: CanonicalType { CanonicalType(bridged: bridged.getCanonicalType()) }\n\n public var instanceTypeOfMetatype: Type { Type(bridged: bridged.getInstanceTypeOfMetatype()) }\n\n public var staticTypeOfDynamicSelf: Type { Type(bridged: bridged.getStaticTypeOfDynamicSelf()) }\n\n public var superClassType: Type? 
{\n precondition(isClass)\n let bridgedSuperClassTy = bridged.getSuperClassType()\n if bridgedSuperClassTy.type != nil {\n return Type(bridged: bridgedSuperClassTy)\n }\n return nil\n }\n\n public var builtinVectorElementType: Type { Type(bridged: bridged.getBuiltinVectorElementType()) }\n\n public func subst(with substitutionMap: SubstitutionMap) -> Type {\n return Type(bridged: bridged.subst(substitutionMap.bridged))\n }\n\n public func subst(type: Type, with targetType: Type) -> Type {\n return Type(bridged: bridged.subst(type.bridged, targetType.bridged))\n }\n}\n\n/// A Type that is statically known to be canonical.\n/// For example, typealiases are resolved.\npublic struct CanonicalType: TypeProperties, CustomStringConvertible, NoReflectionChildren {\n public let bridged: BridgedCanType\n\n public init(bridged: BridgedCanType) { self.bridged = bridged }\n\n public var rawType: Type { Type(bridged: bridged.getRawType()) }\n\n public var instanceTypeOfMetatype: CanonicalType { rawType.instanceTypeOfMetatype.canonical }\n\n public var superClassType: CanonicalType? 
{ rawType.superClassType?.canonical }\n\n public var builtinVectorElementType: CanonicalType { rawType.builtinVectorElementType.canonical }\n\n public func subst(with substitutionMap: SubstitutionMap) -> CanonicalType {\n return rawType.subst(with: substitutionMap).canonical\n }\n\n public func subst(type: CanonicalType, with targetType: CanonicalType) -> CanonicalType {\n return self.rawType.subst(type: type.rawType, with: targetType.rawType).canonical\n }\n}\n\n/// Implements the common members of `AST.Type`, `AST.CanonicalType` and `SIL.Type`.\npublic protocol TypeProperties {\n var rawType: Type { get }\n}\n\nextension TypeProperties {\n public var description: String { String(taking: rawType.bridged.getDebugDescription()) }\n\n //===--------------------------------------------------------------------===//\n // Checks for different kinds of types\n //===--------------------------------------------------------------------===//\n\n public var isBuiltinInteger: Bool { rawType.bridged.isBuiltinInteger() }\n\n public func isBuiltinInteger(withFixedWidth width: Int) -> Bool {\n rawType.bridged.isBuiltinFixedWidthInteger(width)\n }\n\n public var isBuiltinFloat: Bool { rawType.bridged.isBuiltinFloat() }\n public var isBuiltinVector: Bool { rawType.bridged.isBuiltinVector() }\n\n public var isClass: Bool {\n if let nominal = nominal, nominal is ClassDecl {\n return true\n }\n return false\n }\n\n public var isStruct: Bool {\n if let nominal = nominal, nominal is StructDecl {\n return true\n }\n return false\n }\n\n public var isEnum: Bool {\n if let nominal = nominal, nominal is EnumDecl {\n return true\n }\n return false\n }\n\n public var isTuple: Bool { rawType.bridged.isTuple() }\n public var isFunction: Bool { rawType.bridged.isFunction() }\n public var isArchetype: Bool { rawType.bridged.isArchetype() }\n public var isExistentialArchetype: Bool { rawType.bridged.isExistentialArchetype() }\n public var isExistentialArchetypeWithError: Bool { 
rawType.bridged.isExistentialArchetypeWithError() }\n public var isExistential: Bool { rawType.bridged.isExistential() }\n public var isClassExistential: Bool { rawType.bridged.isClassExistential() }\n public var isGenericTypeParameter: Bool { rawType.bridged.isGenericTypeParam() }\n public var isUnownedStorageType: Bool { return rawType.bridged.isUnownedStorageType() }\n public var isMetatype: Bool { rawType.bridged.isMetatypeType() }\n public var isExistentialMetatype: Bool { rawType.bridged.isExistentialMetatypeType() }\n public var isDynamicSelf: Bool { rawType.bridged.isDynamicSelf()}\n\n /// True if this is the type which represents an integer literal used in a type position.\n /// For example `N` in `struct T<let N: Int> {}`\n public var isInteger: Bool { rawType.bridged.isInteger() }\n\n public var canBeClass: Type.TraitResult { rawType.bridged.canBeClass().result }\n\n /// True if this the nominal type `Swift.Optional`.\n public var isOptional: Bool { rawType.bridged.isOptional() }\n\n /// True if this type is a value type (struct/enum) that defines a `deinit`.\n public var isValueTypeWithDeinit: Bool {\n if let nominal = nominal, nominal.valueTypeDestructor != nil {\n return true\n }\n return false\n }\n\n //===--------------------------------------------------------------------===//\n // Properties of lowered `SILFunctionType`s\n //===--------------------------------------------------------------------===//\n\n public var isLoweredFunction: Bool { rawType.bridged.isLoweredFunction() }\n public var isNoEscapeFunction: Bool { rawType.bridged.isNoEscapeFunction() }\n public var isCalleeConsumedFunction: Bool { rawType.bridged.isCalleeConsumedFunction() }\n public var isThickFunction: Bool { rawType.bridged.isThickFunction() }\n public var isAsyncFunction: Bool { rawType.bridged.isAsyncFunction() }\n\n public var invocationGenericSignatureOfFunction: GenericSignature {\n GenericSignature(bridged: 
rawType.bridged.getInvocationGenericSignatureOfFunctionType())\n }\n\n //===--------------------------------------------------------------------===//\n // Type properties\n //===--------------------------------------------------------------------===//\n\n public var isLegalFormalType: Bool { rawType.bridged.isLegalFormalType() }\n public var hasArchetype: Bool { rawType.bridged.hasArchetype() }\n public var hasTypeParameter: Bool { rawType.bridged.hasTypeParameter() }\n public var hasLocalArchetype: Bool { rawType.bridged.hasLocalArchetype() }\n public var isEscapable: Bool { rawType.bridged.isEscapable() }\n public var isNoEscape: Bool { rawType.bridged.isNoEscape() }\n public var archetypeRequiresClass: Bool { rawType.bridged.archetypeRequiresClass() }\n\n public var representationOfMetatype: AST.`Type`.MetatypeRepresentation {\n rawType.bridged.getRepresentationOfMetatype().representation\n }\n\n /// Assumes this is a nominal type. Returns a substitution map that sends each\n /// generic parameter of the declaration's generic signature to the corresponding\n /// generic argument of this nominal type.\n ///\n /// Eg: Array<Int> ---> { Element := Int }\n public var contextSubstitutionMap: SubstitutionMap {\n SubstitutionMap(bridged: rawType.bridged.getContextSubstitutionMap())\n }\n\n // True if this type has generic parameters or it is in a context (e.g. an outer type) which has generic parameters.\n public var isGenericAtAnyLevel: Bool { rawType.bridged.isGenericAtAnyLevel() }\n\n public var nominal: NominalTypeDecl? {\n rawType.bridged.getNominalOrBoundGenericNominal().getAs(NominalTypeDecl.self)\n }\n\n /// Performas a global conformance lookup for this type for `protocol`.\n /// It checks conditional requirements.\n ///\n /// This type must be a contextualized type. 
It must not contain type parameters.\n ///\n /// The resulting conformance reference does not include "missing" conformances, which are synthesized for\n /// some protocols as an error recovery mechanism.\n ///\n /// Returns an invalid conformance if the search failed, otherwise an\n /// abstract, concrete or pack conformance, depending on the lookup type.\n public func checkConformance(to protocol: ProtocolDecl) -> Conformance {\n return Conformance(bridged: rawType.bridged.checkConformance(`protocol`.bridged))\n }\n}\n\npublic struct TypeArray : RandomAccessCollection, CustomReflectable {\n public let bridged: BridgedASTTypeArray\n\n public var startIndex: Int { return 0 }\n public var endIndex: Int { return bridged.getCount() }\n\n public init(bridged: BridgedASTTypeArray) {\n self.bridged = bridged\n }\n\n public subscript(_ index: Int) -> Type {\n Type(bridged: bridged.getAt(index))\n }\n\n public var customMirror: Mirror {\n let c: [Mirror.Child] = map { (label: nil, value: $0) }\n return Mirror(self, children: c)\n }\n}\n\nextension BridgedASTType.TraitResult {\n var result: Type.TraitResult {\n switch self {\n case .IsNot: return .isNot\n case .CanBe: return .canBe\n case .Is: return .is\n default:\n fatalError("wrong type TraitResult enum case")\n }\n }\n}\n\nextension BridgedASTType.MetatypeRepresentation {\n var representation: Type.MetatypeRepresentation {\n switch self {\n case .Thin: return .thin\n case .Thick: return .thick\n case .ObjC: return .objC\n default:\n fatalError("wrong type MetatypeRepresentation enum case")\n }\n }\n}\n\nextension Type: Equatable {\n public static func ==(lhs: Type, rhs: Type) -> Bool { \n lhs.bridged.type == rhs.bridged.type\n }\n}\n\nextension CanonicalType: Equatable {\n public static func ==(lhs: CanonicalType, rhs: CanonicalType) -> Bool { \n lhs.rawType == rhs.rawType\n }\n}\n
dataset_sample\swift\swift\cpp_apple_swift_SwiftCompilerSources_Sources_AST_Type.swift
cpp_apple_swift_SwiftCompilerSources_Sources_AST_Type.swift
Swift
10,332
0.95
0.066901
0.200873
react-lib
663
2023-07-27T04:17:14.634509
Apache-2.0
false
d9e80bd4d27708f848c539746b9cf557
//===--- SourceLoc.swift - SourceLoc bridging utilities ------------------===//\n//\n// This source file is part of the Swift.org open source project\n//\n// Copyright (c) 2022 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n\nimport BasicBridging\n\npublic struct SourceLoc {\n /// Points into a source file.\n public let bridged: BridgedSourceLoc\n\n public init?(bridged: BridgedSourceLoc) {\n guard bridged.isValid else {\n return nil\n }\n self.bridged = bridged\n }\n}\n\nextension SourceLoc {\n public func advanced(by n: Int) -> SourceLoc {\n SourceLoc(bridged: bridged.advanced(by: n))!\n }\n}\n\nextension Optional where Wrapped == SourceLoc {\n public var bridged: BridgedSourceLoc {\n self?.bridged ?? .init()\n }\n}\n\npublic struct CharSourceRange {\n public let start: SourceLoc\n public let byteLength: UInt32\n\n public init(start: SourceLoc, byteLength: UInt32) {\n self.start = start\n self.byteLength = byteLength\n }\n\n public init?(bridgedStart: BridgedSourceLoc, byteLength: UInt32) {\n guard let start = SourceLoc(bridged: bridgedStart) else {\n return nil\n }\n self.init(start: start, byteLength: byteLength)\n }\n}\n
dataset_sample\swift\swift\cpp_apple_swift_SwiftCompilerSources_Sources_Basic_SourceLoc.swift
cpp_apple_swift_SwiftCompilerSources_Sources_Basic_SourceLoc.swift
Swift
1,441
0.95
0.037037
0.26087
python-kit
88
2023-10-20T19:42:26.199703
BSD-3-Clause
false
6552c12533cecc3316400c59774021a9
//===--- StringParser.swift -----------------------------------------------===//\n//\n// This source file is part of the Swift.org open source project\n//\n// Copyright (c) 2014 - 2023 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n\n/// A simple utility to parse a string.\npublic struct StringParser {\n private var s: Substring\n private let originalLength: Int\n \n private mutating func consumeWhitespace() {\n s = s.drop { $0.isWhitespace }\n }\n\n public init(_ string: String) {\n s = Substring(string)\n originalLength = string.count\n }\n \n public mutating func isEmpty() -> Bool {\n consumeWhitespace()\n return s.isEmpty\n }\n\n public mutating func consume(_ str: String) -> Bool {\n consumeWhitespace()\n if !s.starts(with: str) { return false }\n s = s.dropFirst(str.count)\n return true\n }\n\n public mutating func consumeInt(withWhiteSpace: Bool = true) -> Int? {\n if withWhiteSpace {\n consumeWhitespace()\n }\n var intStr = ""\n s = s.drop {\n if $0.isNumber {\n intStr.append($0)\n return true\n }\n return false\n }\n return Int(intStr)\n }\n \n public mutating func consumeIdentifier() -> String? {\n consumeWhitespace()\n var name = ""\n s = s.drop {\n if $0.isLetter {\n name.append($0)\n return true\n }\n return false\n }\n return name.isEmpty ? nil : name\n }\n \n public func throwError(_ message: StaticString) throws -> Never {\n throw ParsingError(message: message, position: originalLength - s.count)\n }\n}\n\npublic struct ParsingError : Error {\n public let message: StaticString\n public let position: Int\n}\n
dataset_sample\swift\swift\cpp_apple_swift_SwiftCompilerSources_Sources_Basic_StringParser.swift
cpp_apple_swift_SwiftCompilerSources_Sources_Basic_StringParser.swift
Swift
1,920
0.8
0.08
0.181818
react-lib
863
2023-11-02T23:48:05.656674
BSD-3-Clause
false
62f99c1c09603fe5e853aa531b4dbe10
//===--- Utils.swift - Some bridging utilities ----------------------------===//\n//\n// This source file is part of the Swift.org open source project\n//\n// Copyright (c) 2022 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n\n@_exported import BasicBridging\n\n/// The assert function to be used in the compiler.\n///\n/// This overrides the standard Swift assert for two reasons:\n/// * We also like to check for assert failures in release builds. Although this could be\n/// achieved with `precondition`, it's easy to forget about it and use `assert` instead.\n/// * We need to see the error message in crashlogs of release builds. This is even not the\n/// case for `precondition`.\n@_transparent\npublic func assert(_ condition: Bool, _ message: @autoclosure () -> String,\n file: StaticString = #fileID, line: UInt = #line, function: StaticString = #function) {\n precondition(condition, message(), file: file, line: line, function: function)\n}\n\n/// The assert function (without a message) to be used in the compiler.\n///\n/// Unforuntately it's not possible to just add a default argument to `message` in the\n/// other `assert` function. 
We need to defined this overload.\n/// TODO: For some reason the compiler is not happy when adding a `function` argument.\n@_transparent\npublic func assert(_ condition: Bool, file: StaticString = #fileID, line: UInt = #line) {\n precondition(condition, "", file: file, line: line, function: "")\n}\n\n/// The assert function to be used in the compiler.\n///\n/// This overrides the standard Swift precondition and forwards an assertion failure\n/// to the assertion-handling in the C++ code base.\n@_transparent\npublic func precondition(_ condition: Bool, _ message: @autoclosure () -> String,\n file: StaticString = #fileID, line: UInt = #line, function: StaticString = #function) {\n if !_fastPath(condition) {\n let msg = message()\n msg.withCString { msgStr in\n file.withUTF8Buffer { fileBytes in\n function.withUTF8Buffer { functionBytes in\n assertFail(msgStr, fileBytes.baseAddress!, line, functionBytes.baseAddress!)\n }\n }\n }\n }\n}\n\n//===----------------------------------------------------------------------===//\n// Debugging Utilities\n//===----------------------------------------------------------------------===//\n\npublic func debugLog(prefix: Bool = true, _ message: @autoclosure () -> String) {\n let formatted = (prefix ? 
"### " : "") + message()\n formatted._withBridgedStringRef { ref in\n Bridged_dbgs().write(ref)\n }\n Bridged_dbgs().newLine()\n}\n\n/// Let's lldb's `po` command not print any "internal" properties of the conforming type.\n///\n/// This is useful if the `description` already contains all the information of a type instance.\npublic protocol NoReflectionChildren : CustomReflectable { }\n\npublic extension NoReflectionChildren {\n var customMirror: Mirror { Mirror(self, children: []) }\n}\n\n//===----------------------------------------------------------------------===//\n// StringRef\n//===----------------------------------------------------------------------===//\n\npublic struct StringRef : CustomStringConvertible, NoReflectionChildren {\n public let _bridged: BridgedStringRef\n\n public init(bridged: BridgedStringRef) { self._bridged = bridged }\n\n public var string: String { String(_bridged) }\n public var description: String { string }\n\n public var count: Int {\n _bridged.count\n }\n\n public subscript(index: Int) -> UInt8 {\n let buffer = UnsafeBufferPointer<UInt8>(start: _bridged.data, count: count)\n return buffer[index]\n }\n\n public func startsWith(_ prefix: StaticString) -> Bool {\n return prefix.withUTF8Buffer { (prefixBuffer: UnsafeBufferPointer<UInt8>) in\n if count < prefixBuffer.count {\n return false\n }\n let buffer = UnsafeBufferPointer<UInt8>(start: _bridged.data, count: prefixBuffer.count)\n return buffer.elementsEqual(prefixBuffer, by: ==)\n }\n }\n\n public static func ==(lhs: StringRef, rhs: StringRef) -> Bool {\n let lhsBuffer = UnsafeBufferPointer<UInt8>(start: lhs._bridged.data, count: lhs.count)\n let rhsBuffer = UnsafeBufferPointer<UInt8>(start: rhs._bridged.data, count: rhs.count)\n if lhsBuffer.count != rhsBuffer.count { return false }\n return lhsBuffer.elementsEqual(rhsBuffer, by: ==)\n }\n\n public static func ==(lhs: StringRef, rhs: StaticString) -> Bool {\n let lhsBuffer = UnsafeBufferPointer<UInt8>(start: lhs._bridged.data, 
count: lhs.count)\n return rhs.withUTF8Buffer { (rhsBuffer: UnsafeBufferPointer<UInt8>) in\n if lhsBuffer.count != rhsBuffer.count { return false }\n return lhsBuffer.elementsEqual(rhsBuffer, by: ==)\n }\n }\n \n public static func !=(lhs: StringRef, rhs: StaticString) -> Bool { !(lhs == rhs) }\n public static func !=(lhs: StringRef, rhs: StringRef) -> Bool { !(lhs == rhs) }\n\n public static func ~=(pattern: StaticString, value: StringRef) -> Bool { value == pattern }\n}\n\n//===----------------------------------------------------------------------===//\n// Bridging Utilities\n//===----------------------------------------------------------------------===//\n\nextension String {\n public func _withBridgedStringRef<T>(_ c: (BridgedStringRef) -> T) -> T {\n var str = self\n return str.withUTF8 { buffer in\n return c(BridgedStringRef(data: buffer.baseAddress, count: buffer.count))\n }\n }\n\n public init(_ s: BridgedStringRef) {\n let buffer = UnsafeBufferPointer<UInt8>(start: s.data, count: s.count)\n self.init(decoding: buffer, as: UTF8.self)\n }\n\n public init(taking s: BridgedOwnedString) {\n let buffer = UnsafeBufferPointer<UInt8>(start: s.data, count: s.count)\n self.init(decoding: buffer, as: UTF8.self)\n s.destroy()\n }\n}\n\nextension Array {\n public func withBridgedArrayRef<T>(_ c: (BridgedArrayRef) -> T) -> T {\n return withUnsafeBytes { buf in\n return c(BridgedArrayRef(data: buf.baseAddress!, count: count))\n }\n }\n}\n\npublic typealias SwiftObject = UnsafeMutablePointer<BridgedSwiftObject>\n\nextension UnsafeMutablePointer where Pointee == BridgedSwiftObject {\n public init<T: AnyObject>(_ object: T) {\n let ptr = unsafeBitCast(object, to: UnsafeMutableRawPointer.self)\n self = ptr.bindMemory(to: BridgedSwiftObject.self, capacity: 1)\n }\n\n public func getAs<T: AnyObject>(_ objectType: T.Type) -> T {\n return unsafeBitCast(self, to: T.self)\n }\n}\n\nextension Optional where Wrapped == UnsafeMutablePointer<BridgedSwiftObject> {\n public func getAs<T: 
AnyObject>(_ objectType: T.Type) -> T? {\n if let pointer = self {\n return pointer.getAs(objectType)\n }\n return nil\n }\n}\n\nextension BridgedArrayRef {\n public func withElements<T, R>(ofType ty: T.Type, _ c: (UnsafeBufferPointer<T>) -> R) -> R {\n let start = data?.assumingMemoryBound(to: ty)\n let buffer = UnsafeBufferPointer(start: start, count: count)\n return c(buffer)\n }\n}\n\n//===----------------------------------------------------------------------===//\n// Sequence Utilities\n//===----------------------------------------------------------------------===//\n\n/// RandomAccessCollection which bridges to some C++ array.\n///\n/// It fixes the default reflection for bridged random access collections, which usually have a\n/// `bridged` stored property.\n/// Conforming to this protocol displays the "real" children not just `bridged`.\npublic protocol BridgedRandomAccessCollection : RandomAccessCollection, CustomReflectable {\n}\n\nextension BridgedRandomAccessCollection {\n public var customMirror: Mirror {\n Mirror(self, children: self.map { (label: nil, value: $0 as Any) })\n }\n}\n
dataset_sample\swift\swift\cpp_apple_swift_SwiftCompilerSources_Sources_Basic_Utils.swift
cpp_apple_swift_SwiftCompilerSources_Sources_Basic_Utils.swift
Swift
7,996
0.95
0.120192
0.267045
vue-tools
489
2024-02-06T10:18:17.072914
Apache-2.0
false
98b16745b5e2997550fc2b250d4c975b
//===--- AliasAnalysis.swift - the alias analysis -------------------------===//\n//\n// This source file is part of the Swift.org open source project\n//\n// Copyright (c) 2014 - 2024 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n\nimport OptimizerBridging\nimport SIL\n\nextension FunctionPassContext {\n var aliasAnalysis: AliasAnalysis {\n let bridgedAA = _bridged.getAliasAnalysis()\n return AliasAnalysis(bridged: bridgedAA, context: self)\n }\n}\n\nextension Instruction {\n func mayRead(fromAddress: Value, _ aliasAnalysis: AliasAnalysis) -> Bool {\n aliasAnalysis.getMemoryEffect(of: self, on: fromAddress).read\n }\n\n func mayWrite(toAddress: Value, _ aliasAnalysis: AliasAnalysis) -> Bool {\n if toAddress.isImmutableAddress {\n // Take a shortcut for indirect-in arguments.\n return false\n }\n return aliasAnalysis.getMemoryEffect(of: self, on: toAddress).write\n }\n\n func mayReadOrWrite(address: Value, _ aliasAnalysis: AliasAnalysis) -> Bool {\n let effect = aliasAnalysis.getMemoryEffect(of: self, on: address)\n if address.isImmutableAddress {\n return effect.read\n }\n return effect.read || effect.write\n }\n}\n\n/// Alias analysis.\n///\n/// It's mainly used to check if an instruction may read or write from/to a specific address.\n///\nstruct AliasAnalysis {\n fileprivate let bridged: BridgedAliasAnalysis\n fileprivate let context: FunctionPassContext\n\n //===--------------------------------------------------------------------===//\n // Public interface\n //===--------------------------------------------------------------------===//\n\n /// Returns the effects of `inst`'s memory behavior on the memory pointed to by the `address`.\n func getMemoryEffect(of inst: 
Instruction, on address: Value) -> SideEffects.Memory {\n precondition(address.type.isAddress, "getMemoryEffects requires address value")\n var result = computeMemoryEffect(of: inst, on: MemoryLocation.memoryAddress(address))\n if result.write && isImmutable(instruction: inst, inScopeOf: address) {\n result.write = false\n }\n // In the past we cached the result per instruction-address pair. But it turned out that the hit-miss rate was\n // pretty high (~ 1:7) and the cache lookup took as long as recomputing.\n return result\n }\n\n /// Returns true if `v1` and `v2` do or may alias.\n ///\n /// Usually `v1` and `v2` are addresses and in this case the return value is true if both addresses\n /// do or may point to the same memory location.\n ///\n /// If `v1` or `v2` is not an address, the function checks if any "interior" pointer of the value may alias\n /// with the other value or address.\n /// If a value is a class, "interior" pointer means: an address of any stored property of the class instance.\n /// If a value is a struct or another value type, "interior" pointers refer to any stored propery addresses of any\n /// class references in the struct or value type. For example:\n ///\n /// class C { var x: Int; var y: Int }\n /// struct S { var c1: C; var c2: C }\n ///\n /// `mayAlias(s, someAddress)` checks if someAddress aliases with `s.c1.x`, `s.c1.y`, `s.c2.x` or `s.c2.y`\n ///\n func mayAlias(_ v1: Value, _ v2: Value) -> Bool {\n if v1.type.isAddress && v2.type.isAddress {\n // The projection-path based check and TBAA can only be done if both values are really addresses.\n // This is the common case.\n let accessPath1 = v1.accessPath\n let accessPath2 = v2.accessPath\n if accessPath1.isDistinct(from: accessPath2) {\n return false\n }\n // Type-based alias analysis is only of minor importance. It's only needed if unsafe pointers are in play.\n // There are some critical functions in the stdlib which use unsafe pointers. 
Therefore we cannot omit TBAA.\n if isTypeDistinct(v1, v2, accessPath1.base, accessPath2.base) {\n return false\n }\n }\n // Finaly use escape info to check if one address "escapes" to the other address.\n return v1.allContainedAddresss.canAddressAlias(with: v2.allContainedAddresss, context)\n }\n\n static func register() {\n BridgedAliasAnalysis.registerAnalysis(\n // initFn\n { (bridgedAliasAnalysis: BridgedAliasAnalysis, size: Int) in\n assert(MemoryLayout<Cache>.size <= size, "wrong AliasAnalysis.cache size")\n bridgedAliasAnalysis.mutableCachePointer.initializeMemory(as: Cache.self, repeating: Cache(), count: 1)\n },\n // destroyFn\n { (bridgedAliasAnalysis: BridgedAliasAnalysis) in\n bridgedAliasAnalysis.mutableCachePointer.assumingMemoryBound(to: Cache.self).deinitialize(count: 1)\n },\n // getMemEffectsFn\n { (bridgedCtxt: BridgedPassContext,\n bridgedAliasAnalysis: BridgedAliasAnalysis,\n bridgedAddr: BridgedValue,\n bridgedInst: BridgedInstruction) -> BridgedMemoryBehavior in\n let aa = AliasAnalysis(bridged: bridgedAliasAnalysis, context: FunctionPassContext(_bridged: bridgedCtxt))\n return aa.getMemoryEffect(of: bridgedInst.instruction, on: bridgedAddr.value).bridged\n },\n // isObjReleasedFn\n { (bridgedCtxt: BridgedPassContext,\n bridgedAliasAnalysis: BridgedAliasAnalysis,\n bridgedObj: BridgedValue,\n bridgedInst: BridgedInstruction) -> Bool in\n let context = FunctionPassContext(_bridged: bridgedCtxt)\n let aa = AliasAnalysis(bridged: bridgedAliasAnalysis, context: context)\n let inst = bridgedInst.instruction\n let obj = bridgedObj.value\n let path = SmallProjectionPath(.anyValueFields)\n let budget = aa.getComplexityBudget(for: inst.parentFunction)\n if let apply = inst as? 
FullApplySite {\n // Workaround for quadratic complexity in ARCSequenceOpts.\n // We need to use an ever lower budget to not get into noticeable compile time troubles.\n let effect = aa.getOwnershipEffect(of: apply, for: obj, path: path, complexityBudget: budget / 10)\n return effect.destroy\n }\n return obj.at(path).isEscaping(using: EscapesToInstructionVisitor(target: inst, isAddress: false),\n complexityBudget: budget, context)\n },\n\n // isAddrVisibleFromObj\n { (bridgedCtxt: BridgedPassContext,\n bridgedAliasAnalysis: BridgedAliasAnalysis,\n bridgedAddr: BridgedValue,\n bridgedObj: BridgedValue) -> Bool in\n let context = FunctionPassContext(_bridged: bridgedCtxt)\n let aa = AliasAnalysis(bridged: bridgedAliasAnalysis, context: context)\n let addr = bridgedAddr.value.allContainedAddresss\n\n // This is similar to `canReferenceSameFieldFn`, except that all addresses of all objects are\n // considered which are transitively visible from `bridgedObj`.\n let anythingReachableFromObj = bridgedObj.value.at(SmallProjectionPath(.anything))\n return addr.canAddressAlias(with: anythingReachableFromObj,\n complexityBudget: aa.getComplexityBudget(for: bridgedObj.value.parentFunction),\n context)\n },\n\n // mayAliasFn\n { (bridgedCtxt: BridgedPassContext,\n bridgedAliasAnalysis: BridgedAliasAnalysis,\n bridgedLhs: BridgedValue,\n bridgedRhs: BridgedValue) -> Bool in\n let context = FunctionPassContext(_bridged: bridgedCtxt)\n let aa = AliasAnalysis(bridged: bridgedAliasAnalysis, context: context)\n return aa.mayAlias(bridgedLhs.value, bridgedRhs.value)\n }\n )\n }\n\n //===--------------------------------------------------------------------===//\n // Internals\n //===--------------------------------------------------------------------===//\n\n private var cache: Cache {\n unsafeAddress {\n bridged.cachePointer.assumingMemoryBound(to: Cache.self)\n }\n nonmutating unsafeMutableAddress {\n bridged.mutableCachePointer.assumingMemoryBound(to: Cache.self)\n }\n }\n\n // The 
actual logic to compute the memory effect of an instruction.\n private func computeMemoryEffect(of inst: Instruction, on memLoc: MemoryLocation) -> SideEffects.Memory {\n switch inst {\n case let beginAccess as BeginAccessInst:\n // begin_access does not physically read or write memory. But we model it as a memory read and/or write\n // to prevent optimizations to move other aliased loads/stores across begin_access into the access scope.\n return getAccessScopeEffect(of: beginAccess, on: memLoc)\n\n case let endAccess as EndAccessInst:\n // Similar to begin_access, we model it as a memory read and/or write to prevent optimizations to move\n // other aliased loads/stores into the access scope.\n return getAccessScopeEffect(of: endAccess.beginAccess, on: memLoc)\n\n case is InjectEnumAddrInst,\n is UncheckedTakeEnumDataAddrInst,\n is InitExistentialAddrInst,\n is DeinitExistentialAddrInst,\n is FixLifetimeInst,\n is ClassifyBridgeObjectInst,\n is ValueToBridgeObjectInst,\n is DeallocStackInst:\n if memLoc.mayAlias(with: (inst as! 
UnaryInstruction).operand.value, self) {\n return inst.memoryEffects\n }\n return .noEffects\n\n case is CondFailInst,\n is StrongRetainInst,\n is UnownedRetainInst,\n is StrongRetainUnownedInst,\n is RetainValueInst,\n is UnmanagedRetainValueInst,\n is CopyValueInst,\n is StrongCopyUnownedValueInst,\n is StrongCopyUnmanagedValueInst,\n is StrongCopyWeakValueInst,\n is BeginBorrowInst,\n is BeginCOWMutationInst:\n return .noEffects\n\n case let load as LoadInst:\n if memLoc.mayAlias(with: load.address, self) {\n switch load.loadOwnership {\n case .unqualified, .copy, .trivial:\n return .init(read: true)\n case .take:\n // "take" is conceptually a write to the memory location.\n return .worstEffects\n }\n } else {\n return .noEffects\n }\n case let store as StoreInst:\n if memLoc.isLetValue && store.destination.accessBase != memLoc.address.accessBase {\n return .noEffects\n }\n if memLoc.mayAlias(with: store.destination, self) {\n return inst.memoryEffects\n } else {\n switch store.storeOwnership {\n case .unqualified, .initialize, .trivial:\n return .noEffects\n case .assign:\n // Consider side effects of the destructor\n return defaultEffects(of: store, on: memLoc)\n }\n }\n case let storeBorrow as StoreBorrowInst:\n return memLoc.mayAlias(with: storeBorrow.destination, self) ? 
.init(write: true) : .noEffects\n\n case let mdi as MarkDependenceInstruction:\n if mdi.base.type.isAddress && memLoc.mayAlias(with: mdi.base, self) {\n return .init(read: true)\n }\n return .noEffects\n\n case let copy as SourceDestAddrInstruction:\n let mayRead = memLoc.mayAlias(with: copy.source, self)\n let mayWrite = memLoc.mayAlias(with: copy.destination, self)\n var effects = SideEffects.Memory(read: mayRead, write: mayWrite || (mayRead && copy.isTakeOfSrc))\n if !copy.isInitializationOfDest {\n effects.merge(with: defaultEffects(of: copy, on: memLoc))\n }\n return effects\n\n case let apply as FullApplySite:\n return getApplyEffect(of: apply, on: memLoc)\n\n case let partialApply as PartialApplyInst:\n return getPartialApplyEffect(of: partialApply, on: memLoc)\n\n case let endApply as EndApplyInst:\n let beginApply = endApply.beginApply\n if case .yield(let addr) = memLoc.address.accessBase, addr.parentInstruction == beginApply {\n // The lifetime of yielded values always end at the end_apply. 
This is required because a yielded\n // address is non-aliasing inside the begin/end_apply scope, but might be aliasing after the end_apply.\n // For example, if the callee yields an `ref_element_addr` (which is encapsulated in a begin/end_access).\n // Therefore, even if the callee does not write anything, the effects must be "read" and "write".\n return .worstEffects\n }\n return getApplyEffect(of: beginApply, on: memLoc)\n\n case let abortApply as AbortApplyInst:\n let beginApply = abortApply.beginApply\n if case .yield(let addr) = memLoc.address.accessBase, addr.parentInstruction == beginApply {\n // See the comment for `end_apply` above.\n return .worstEffects\n }\n return getApplyEffect(of: beginApply, on: memLoc)\n\n case let builtin as BuiltinInst:\n return getBuiltinEffect(of: builtin, on: memLoc)\n\n case let endBorrow as EndBorrowInst:\n switch endBorrow.borrow {\n case let storeBorrow as StoreBorrowInst:\n precondition(endBorrow.borrow.type.isAddress)\n return memLoc.mayAlias(with: storeBorrow, self) ? .worstEffects : .noEffects\n case let beginBorrow as BeginBorrowInst where !beginBorrow.hasPointerEscape:\n return getBorrowEffects(of: endBorrow, on: memLoc)\n case let loadBorrow as LoadBorrowInst:\n let borrowEffects = getBorrowEffects(of: endBorrow, on: memLoc)\n // In addition to the "regular" borrow effects, a load_borrow also has effects on the memory location\n // from where it loads the value. 
This includes "write" to prevent any optimization to change the\n // memory location after the load_borrow.\n if borrowEffects != .worstEffects && memLoc.mayAlias(with: loadBorrow.address, self) {\n return .worstEffects\n }\n return borrowEffects\n default:\n break\n }\n return defaultEffects(of: endBorrow, on: memLoc)\n\n case let debugValue as DebugValueInst:\n if debugValue.operand.value.type.isAddress && memLoc.mayAlias(with: debugValue.operand.value, self) {\n return .init(read: true)\n } else {\n return .noEffects\n }\n\n case let destroy as DestroyValueInst:\n if destroy.destroyedValue.type.isNoEscapeFunction {\n return .noEffects\n }\n if destroy.isDeadEnd {\n // We don't have to take deinit effects into acount for a `destroy_value [dead_end]`.\n // Such destroys are lowered to no-ops and will not call any deinit.\n return .noEffects\n }\n return defaultEffects(of: destroy, on: memLoc)\n\n default:\n let effects = inst.memoryEffects\n if effects == .noEffects {\n return effects\n }\n return defaultEffects(of: inst, on: memLoc)\n }\n }\n\n /// Returns the memory effects which protect the interior pointers of a borrowed value.\n /// For example, an `end_borrow` of a class reference must alias with all field addresses (= the interior\n /// pointers) of the class instance.\n private func getBorrowEffects(of endBorrow: EndBorrowInst, on memLoc: MemoryLocation) -> SideEffects.Memory {\n let accessPath = memLoc.address.accessPath\n switch accessPath.base {\n case .stack, .global, .argument, .storeBorrow:\n // Those access bases cannot be interior pointers of a borrowed value\n return .noEffects\n case .pointer, .index, .unidentified, .yield:\n // We don't know anything about this address -> get the conservative effects\n return defaultEffects(of: endBorrow, on: memLoc)\n case .box, .class, .tail:\n // Check if the memLoc is "derived" from the begin_borrow, i.e. is an interior pointer.\n var walker = FindBeginBorrowWalker(beginBorrow: endBorrow.borrow as! 
BorrowIntroducingInstruction)\n return walker.visitAccessStorageRoots(of: accessPath) ? .noEffects : .worstEffects\n }\n }\n\n private func getAccessScopeEffect(of beginAccess: BeginAccessInst, on memLoc: MemoryLocation) -> SideEffects.Memory {\n if !memLoc.mayAlias(with: beginAccess.address, self) {\n return .noEffects\n }\n switch beginAccess.accessKind {\n case .`init`:\n return .init(read: false, write: true)\n case .read:\n return .init(read: true, write: false)\n case .modify:\n return memLoc.isLetValue ? .noEffects : .worstEffects\n case .deinit:\n // For the same reason we treat a `load [take]` or a `destroy_addr`\n // as a memory write, we do that for a `begin_access [deinit]` as well.\n return .worstEffects\n }\n }\n\n private func getApplyEffect(of apply: FullApplySite, on memLoc: MemoryLocation) -> SideEffects.Memory {\n let calleeAnalysis = context.calleeAnalysis\n let visitor = FullApplyEffectsVisitor(apply: apply, calleeAnalysis: calleeAnalysis, isAddress: true)\n let memoryEffects: SideEffects.Memory\n\n // First try to figure out to which argument(s) the address "escapes" to.\n if let result = memLoc.addressWithPath.visit(using: visitor,\n initialWalkingDirection: memLoc.walkingDirection,\n context)\n {\n // The resulting effects are the argument effects to which `address` escapes to.\n memoryEffects = result.memory\n } else {\n // The address has unknown escapes. 
So we have to take the global effects of the called function(s).\n memoryEffects = calleeAnalysis.getSideEffects(ofApply: apply).memory\n }\n return memoryEffects\n }\n\n private func getPartialApplyEffect(of partialApply: PartialApplyInst, on memLoc: MemoryLocation) -> SideEffects.Memory {\n let visitor = PartialApplyEffectsVisitor(partialApply: partialApply)\n\n // Figure out to which argument(s) the address "escapes" to.\n if let result = memLoc.addressWithPath.visit(using: visitor,\n initialWalkingDirection: memLoc.walkingDirection,\n context)\n {\n // The resulting effects are the argument effects to which the address escapes to.\n return result\n }\n return .worstEffects\n }\n\n private func getBuiltinEffect(of builtin: BuiltinInst, on memLoc: MemoryLocation) -> SideEffects.Memory {\n switch builtin.id {\n case .Once, .OnceWithContext:\n if !memLoc.addressWithPath.isEscaping(using: AddressVisibleByBuiltinOnceVisitor(),\n initialWalkingDirection: memLoc.walkingDirection,\n context)\n {\n return .noEffects\n }\n let callee = builtin.operands[1].value\n return context.calleeAnalysis.getSideEffects(ofCallee: callee).memory\n default:\n return defaultEffects(of: builtin, on: memLoc)\n }\n }\n\n private func getOwnershipEffect(of apply: FullApplySite, for value: Value,\n path: SmallProjectionPath,\n complexityBudget: Int) -> SideEffects.Ownership {\n let visitor = FullApplyEffectsVisitor(apply: apply, calleeAnalysis: context.calleeAnalysis, isAddress: false)\n if let result = value.at(path).visit(using: visitor, complexityBudget: complexityBudget, context) {\n // The resulting effects are the argument effects to which `value` escapes to.\n return result.ownership\n } else {\n // `value` has unknown escapes. 
So we have to take the global effects of the called function(s).\n return visitor.calleeAnalysis.getSideEffects(ofApply: apply).ownership\n }\n }\n\n /// Gets the default effects of an instruction.\n /// It just checks if `memLoc` can somehow be visible by `inst` at all.\n private func defaultEffects(of inst: Instruction, on memLoc: MemoryLocation) -> SideEffects.Memory {\n if memLoc.addressWithPath.isEscaping(using: EscapesToInstructionVisitor(target: inst, isAddress: true),\n initialWalkingDirection: memLoc.walkingDirection,\n complexityBudget: getComplexityBudget(for: inst.parentFunction), context)\n {\n return inst.memoryEffects\n }\n return .noEffects\n }\n\n // To avoid quadratic complexity for large functions, we limit the amount of work that the EscapeUtils are\n // allowed to to. This keeps the complexity linear.\n //\n // This arbitrary limit is good enough for almost all functions. It lets\n // the EscapeUtils do several hundred up/down walks which is much more than needed in most cases.\n private func getComplexityBudget(for function: Function) -> Int {\n if cache.estimatedFunctionSize == nil {\n var numInsts = 0\n for _ in function.instructions { numInsts += 1 }\n cache.estimatedFunctionSize = numInsts\n }\n return 1000000 / cache.estimatedFunctionSize!\n }\n\n /// Returns true if the `instruction` (which in general writes to memory) is immutable in a certain scope,\n /// defined by `address`.\n ///\n /// That means that even if we don't know anything about `instruction`, we can be sure\n /// that `instruction` cannot write to `address`, if it's inside the addresse's scope.\n /// An immutable scope is for example a read-only `begin_access`/`end_access` scope.\n /// Another example is a borrow scope of an immutable copy-on-write buffer.\n private func isImmutable(instruction: Instruction, inScopeOf address: Value) -> Bool {\n guard let immutableScope = ImmutableScope(for: address, context) else {\n return false\n }\n if case .wholeFunction = 
immutableScope {\n // No need to check if the instruction is inside the scope if the scope is the whole function.\n return true\n }\n\n if !isImmutableCacheComputed(for: immutableScope) {\n computeImmutableCache(for: immutableScope)\n }\n let key = Cache.ScopeKey(beginScope: immutableScope.beginScopeInstruction, instInScope: instruction)\n return cache.immutableInstructionsInScopes.contains(key)\n }\n\n private func isImmutableCacheComputed(for immutableScope: ImmutableScope) -> Bool {\n let beginScopeInst = immutableScope.beginScopeInstruction\n\n // The special key of (beginScopeInst, beginScopeInst) is used as a marker to check if the immutable scope\n // is already computed at all.\n let key = Cache.ScopeKey(beginScope: beginScopeInst, instInScope: beginScopeInst)\n return !cache.immutableInstructionsInScopes.insert(key).inserted\n }\n\n private func computeImmutableCache(for immutableScope: ImmutableScope) {\n let beginScopeInst = immutableScope.beginScopeInstruction\n var worklist = InstructionWorklist(context)\n defer { worklist.deinitialize() }\n\n immutableScope.pushEndScopeInstructions(to: &worklist)\n\n while let inst = worklist.pop() {\n if inst.mayWriteToMemory {\n if case .modifyAccess(let beginAccessInst) = immutableScope,\n computeMemoryEffect(of: inst, on: .modifyAccessScope(beginAccessInst)).write\n {\n } else {\n cache.immutableInstructionsInScopes.insert(Cache.ScopeKey(beginScope: beginScopeInst, instInScope: inst))\n }\n }\n worklist.pushPredecessors(of: inst, ignoring: beginScopeInst)\n }\n }\n}\n\n//===--------------------------------------------------------------------===//\n// Internal data structures\n//===--------------------------------------------------------------------===//\n\nprivate struct Cache {\n struct ScopeKey: Hashable {\n let beginScope: Instruction\n let instInScope: Instruction\n }\n\n // Caches immutable instructions inside specific scopes.\n var immutableInstructionsInScopes = Set<ScopeKey>()\n\n // Used to limit 
complexity. The size is computed lazily.\n var estimatedFunctionSize: Int? = nil\n}\n\n// A simple abstraction for the kind of address the memory effect is computed.\nprivate enum MemoryLocation {\n // The usual case: an arbitrary address\n case memoryAddress(Value)\n\n // The address of an modify-access, within the access scope.\n // The difference to an arbitrary address is that we know that there are no other reads or writes to the\n // access-address within the access scope.\n // This is used when computing the immutable-scope of a `begin_access [modify]`\n case modifyAccessScope(BeginAccessInst)\n\n var addressWithPath: ProjectedValue {\n let addrValue = self.address\n return addrValue.at(SmallProjectionPath(.anyValueFields))\n }\n\n var address: Value {\n switch self {\n case .memoryAddress(let value):\n precondition(value.type.isAddress, "expected address value")\n return value\n case .modifyAccessScope(let beginAccess):\n return beginAccess\n }\n }\n\n var walkingDirection: EscapeUtilityTypes.WalkingDirection {\n switch self {\n case .memoryAddress:\n // We need to consider where the address comes from\n return .up\n case .modifyAccessScope:\n // We don't care where the access-address comes from because we know that all accesses to the address\n // (in the access scope) must be derived from the `begin_access`.\n return .down\n }\n }\n\n var isLetValue: Bool {\n switch self {\n case .memoryAddress(let addr):\n return addr.accessBase.isLet\n case .modifyAccessScope:\n return false\n }\n }\n\n func mayAlias(with otherAddr: Value, _ aliasAnalysis: AliasAnalysis) -> Bool {\n return aliasAnalysis.mayAlias(address, otherAddr)\n }\n}\n\n/// A scope in which certain instructions can be assumed to be immutable,\n/// i.e. 
don't write to the scope's based address.\nprivate enum ImmutableScope {\n // If the based address is or is derived from an indirect-in or guaranteed function argument.\n // The scope spans over the whole function and we don't need to do any scope computation.\n case wholeFunction\n\n // If the based address is or is derived from a begin_access with access kind "read".\n case readAccess(BeginAccessInst)\n\n // If the based address is or is derived from a begin_access with access kind "modify".\n case modifyAccess(BeginAccessInst)\n\n // If the based address is an interior pointer (e.g. the address of a class field) of a borrowed object.\n case borrow(BeginBorrowValue)\n\n init?(for basedAddress: Value, _ context: FunctionPassContext) {\n switch basedAddress.enclosingAccessScope {\n case .access(let beginAccess):\n if beginAccess.isUnsafe {\n return nil\n }\n switch beginAccess.accessKind {\n case .read:\n self = .readAccess(beginAccess)\n case .modify:\n self = .modifyAccess(beginAccess)\n case .`init`, .deinit:\n return nil\n }\n case .base(let accessBase):\n let object: Value\n switch accessBase {\n case .class(let elementAddr):\n if !elementAddr.isImmutable {\n return nil\n }\n object = elementAddr.instance\n case .tail(let tailAddr):\n if !tailAddr.isImmutable {\n return nil\n }\n object = tailAddr.instance\n case .global(let global):\n if global.isLet && !basedAddress.parentFunction.canInitializeGlobal {\n self = .wholeFunction\n return\n }\n return nil\n default:\n return nil\n }\n if !object.parentFunction.hasOwnership {\n // Special handling for non-OSSA: we can only reason about guaranteed function arguments.\n var walker = IsGuaranteedFunctionArgumentWalker()\n if walker.walkUp(value: object, path: SmallProjectionPath()) != .continueWalk {\n return nil\n }\n self = .wholeFunction\n } else {\n guard let singleBorrowIntroducer = object.getBorrowIntroducers(context).singleElement else {\n return nil\n }\n\n switch singleBorrowIntroducer {\n case 
.beginBorrow, .loadBorrow, .reborrow:\n self = .borrow(singleBorrowIntroducer)\n case .functionArgument:\n self = .wholeFunction\n case .beginApply, .uncheckOwnershipConversion:\n return nil\n }\n }\n case .dependence(let markDep):\n // ignore mark_dependence for the purpose of alias analysis.\n self.init(for: markDep.value, context)\n }\n }\n\n var beginScopeInstruction: SingleValueInstruction {\n switch self {\n case .wholeFunction:\n fatalError("should not request the beginScopeInstruction of a whole function")\n case .readAccess(let beginAccess), .modifyAccess(let beginAccess):\n return beginAccess\n case .borrow(let beginBorrowValue):\n switch beginBorrowValue {\n case .beginBorrow(let bbi): return bbi\n case .loadBorrow(let lbi): return lbi\n case .reborrow(let phi): return phi.borrowedFrom!\n default: fatalError("unsupported BeginBorrowValue")\n }\n }\n }\n\n func pushEndScopeInstructions(to worklist: inout InstructionWorklist) {\n switch self {\n case .wholeFunction:\n fatalError("should not pushEndScopeInstructions of a whole function")\n case .readAccess(let beginAccess), .modifyAccess(let beginAccess):\n for endAccess in beginAccess.endAccessInstructions {\n worklist.pushPredecessors(of: endAccess, ignoring: beginAccess)\n }\n case .borrow(let beginBorrowValue):\n let beginScopeInst = beginScopeInstruction\n for endBorrowOp in beginBorrowValue.scopeEndingOperands {\n worklist.pushPredecessors(of: endBorrowOp.instruction, ignoring: beginScopeInst)\n }\n }\n }\n}\n\nprivate struct FindBeginBorrowWalker : ValueUseDefWalker {\n let beginBorrow: BorrowIntroducingInstruction\n var walkUpCache = WalkerCache<Path>()\n\n mutating func walkUp(value: Value, path: SmallProjectionPath) -> WalkResult {\n if value == beginBorrow {\n return .abortWalk\n }\n if value.ownership != .guaranteed {\n // If value is owned then it cannot be the borrowed value.\n return .continueWalk\n }\n return walkUpDefault(value: value, path: path)\n }\n\n mutating func rootDef(value: Value, 
path: SmallProjectionPath) -> WalkResult {\n switch value {\n case is FunctionArgument,\n // Loading a value from memory cannot be the borrowed value.\n // Note that we exclude the "regular" `load` by checking for guaranteed ownership in `walkUp`.\n is LoadBorrowInst:\n return .continueWalk\n default:\n return .abortWalk\n }\n }\n}\n\nprivate struct IsGuaranteedFunctionArgumentWalker : ValueUseDefWalker {\n var walkUpCache = WalkerCache<Path>()\n\n mutating func rootDef(value: Value, path: SmallProjectionPath) -> WalkResult {\n if let funcArg = value as? FunctionArgument, funcArg.convention.isGuaranteed {\n return .continueWalk\n }\n return .abortWalk\n }\n}\n\n// Computes the effects which a called function (potentially) has on an address.\nprivate struct FullApplyEffectsVisitor : EscapeVisitorWithResult {\n let apply: FullApplySite\n let calleeAnalysis: CalleeAnalysis\n let isAddress: Bool\n var result = SideEffects.GlobalEffects()\n\n mutating func visitUse(operand: Operand, path: EscapePath) -> UseResult {\n let user = operand.instruction\n if user is ReturnInst {\n // Anything which is returned cannot escape to an instruction inside the function.\n return .ignore\n }\n if user == apply {\n if apply.isCallee(operand: operand) {\n // If the address "escapes" to the callee of the apply it means that the address was captured\n // by an inout_aliasable operand of an partial_apply.\n // Therefore assume that the called function will both, read and write, to the address.\n return .abort\n }\n let e = calleeAnalysis.getSideEffects(of: apply, operand: operand, path: path.projectionPath)\n result.merge(with: e)\n }\n return .continueWalk\n }\n\n var followTrivialTypes: Bool { isAddress }\n var followLoads: Bool { !isAddress }\n}\n\n// In contrast to a full apply, the effects of a partial_apply don't depend on the callee\n// (a partial_apply doesn't call anything, it just creates a thick function pointer).\n// The only effects come from capturing the arguments (either 
consuming or guaranteeed).\nprivate struct PartialApplyEffectsVisitor : EscapeVisitorWithResult {\n let partialApply: PartialApplyInst\n var result = SideEffects.Memory.noEffects\n\n mutating func visitUse(operand: Operand, path: EscapePath) -> UseResult {\n let user = operand.instruction\n if user is ReturnInst {\n // Anything which is returned cannot escape to an instruction inside the function.\n return .ignore\n }\n if user == partialApply,\n let convention = partialApply.convention(of: operand)\n {\n switch convention {\n case .indirectIn, .indirectInGuaranteed:\n result.read = true\n if !partialApply.isOnStack {\n result.write = true\n }\n case .indirectInout, .indirectInoutAliasable, .packInout:\n break\n case .directOwned, .directUnowned, .directGuaranteed, .packOwned, .packGuaranteed:\n break\n case .indirectOut, .packOut, .indirectInCXX:\n fatalError("invalid convention for partial_apply")\n }\n }\n return .continueWalk\n }\n\n var followTrivialTypes: Bool { true }\n var followLoads: Bool { false }\n}\n\nprivate struct AddressVisibleByBuiltinOnceVisitor : EscapeVisitor {\n var followTrivialTypes: Bool { true }\n var followLoads: Bool { false }\n}\n\n/// Checks if a value is "escaping" to the `target` instruction.\nprivate struct EscapesToInstructionVisitor : EscapeVisitor {\n let target: Instruction\n let isAddress: Bool\n\n mutating func visitUse(operand: Operand, path: EscapePath) -> UseResult {\n let user = operand.instruction\n if user == target {\n return .abort\n }\n if user is ReturnInst {\n // Anything which is returned cannot escape to an instruction inside the function.\n return .ignore\n }\n return .continueWalk\n }\n\n var followTrivialTypes: Bool { isAddress }\n var followLoads: Bool { !isAddress }\n}\n\nprivate extension Value {\n var isImmutableAddress: Bool {\n switch accessBase {\n case .argument(let arg):\n return arg.convention == .indirectInGuaranteed\n default:\n return false\n }\n 
}\n}\n\n//===--------------------------------------------------------------------===//\n// Type-based alias analysis (TBAA)\n//===--------------------------------------------------------------------===//\n\n/// Perform type-based alias analysis (TBAA).\nprivate func isTypeDistinct(_ address1: Value, _ address2: Value,\n _ accessBase1: AccessBase, _ accessBase2: AccessBase\n) -> Bool {\n let type1 = address1.type\n let type2 = address2.type\n if type1 == type2 {\n return false\n }\n if !accessBase1.isEligibleForTBAA || !accessBase2.isEligibleForTBAA {\n return false\n }\n if !type1.isEligibleForTBAA || !type2.isEligibleForTBAA {\n return false\n }\n let function = address1.parentFunction\n\n // Even if the types are different, one type can contain the other type, e.g.\n //\n // struct S { var i: Int }\n // isTypeDistinct(addressOfS, addressOfInt) -> false\n //\n if type1.aggregateIsOrContains(type2, in: function) || type2.aggregateIsOrContains(type1, in: function) {\n return false\n }\n if type1.isClass && type2.isClass {\n return false\n }\n return true\n}\n\nprivate extension AccessBase {\n func isIndirectResult(of apply: FullApplySite) -> Bool {\n return apply.indirectResultOperands.contains { $0.value.accessBase == self }\n }\n\n var isEligibleForTBAA: Bool {\n // Only access bases which cannot be the result of an not-strict pointer conversion are eligible.\n switch self {\n case .box, .class, .tail, .global:\n return true\n case .pointer(let pointerToAddress):\n return pointerToAddress.isStrict\n default:\n return false\n }\n }\n}\n\nprivate extension Type {\n var isEligibleForTBAA: Bool {\n if hasArchetype {\n // Two distinct types which contain archetypes can be actually the same, e.g.:\n // SomeGenericStruct<T> // T is a type parameter, which can potentially also be Int\n // SomeGenericStruct<Int>\n return false\n }\n if isClass || isStruct || isEnum || isTuple {\n return true\n }\n // Only support the most important builtin types to be on the safe side.\n 
// Historically we assumed that Builtin.RawPointer can alias everything (but why?).\n if isBuiltinInteger || isBuiltinFloat {\n return true\n }\n return false\n }\n}\n\nprivate extension Function {\n var canInitializeGlobal: Bool {\n return isGlobalInitOnceFunction ||\n // In non -parse-as-library mode globals are initialized in the `main` function.\n name == "main"\n }\n}\n\n//===--------------------------------------------------------------------===//\n// Bridging\n//===--------------------------------------------------------------------===//\n\nprivate extension SideEffects.Memory {\n var bridged: BridgedMemoryBehavior {\n switch (read, write) {\n case (false, false): return .None\n case (true, false): return .MayRead\n case (false, true): return .MayWrite\n case (true, true): return .MayReadWrite\n }\n }\n}\n\nprivate extension BridgedAliasAnalysis {\n var cachePointer: UnsafeRawPointer {\n UnsafeRawPointer(aa)\n }\n\n var mutableCachePointer: UnsafeMutableRawPointer {\n UnsafeMutableRawPointer(aa)\n }\n}\n
dataset_sample\swift\swift\cpp_apple_swift_SwiftCompilerSources_Sources_Optimizer_Analysis_AliasAnalysis.swift
cpp_apple_swift_SwiftCompilerSources_Sources_Optimizer_Analysis_AliasAnalysis.swift
Swift
37,181
0.95
0.16999
0.187427
node-utils
15
2025-01-28T07:56:36.020862
GPL-3.0
false
ef574166ff00916a9dd8c6236578e5d6
//===--- CalleeAnalysis.swift - the callee analysis -----------------------===//\n//\n// This source file is part of the Swift.org open source project\n//\n// Copyright (c) 2014 - 2021 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n\nimport OptimizerBridging\nimport SIL\n\nstruct CalleeAnalysis {\n let bridged: BridgedCalleeAnalysis\n\n static func register() {\n BridgedCalleeAnalysis.registerAnalysis(\n // isDeinitBarrierFn:\n { (inst : BridgedInstruction, bca: BridgedCalleeAnalysis) -> Bool in\n return inst.instruction.isDeinitBarrier(bca.analysis)\n },\n // getMemBehaviorFn\n { (bridgedApply: BridgedInstruction, observeRetains: Bool, bca: BridgedCalleeAnalysis) -> BridgedMemoryBehavior in\n let apply = bridgedApply.instruction as! FullApplySite\n let e = bca.analysis.getSideEffects(ofApply: apply)\n return e.getMemBehavior(observeRetains: observeRetains)\n }\n )\n }\n\n func getCallees(callee: Value) -> FunctionArray? {\n let bridgedFuncs = bridged.getCallees(callee.bridged)\n if bridgedFuncs.isIncomplete() {\n return nil\n }\n return FunctionArray(bridged: bridgedFuncs)\n }\n\n func getIncompleteCallees(callee: Value) -> FunctionArray {\n return FunctionArray(bridged: bridged.getCallees(callee.bridged))\n }\n\n func getDestructor(ofExactType type: Type) -> Function? {\n let destructors = FunctionArray(bridged: bridged.getDestructors(type.bridged, /*isExactType*/ true))\n if destructors.count == 1 {\n return destructors[0]\n }\n return nil\n }\n\n func getDestructors(of type: Type) -> FunctionArray? 
{\n let bridgedDtors = bridged.getDestructors(type.bridged, /*isExactType*/ false)\n if bridgedDtors.isIncomplete() {\n return nil\n }\n return FunctionArray(bridged: bridgedDtors)\n }\n\n /// Returns the global (i.e. not argument specific) side effects of an apply.\n func getSideEffects(ofApply apply: FullApplySite) -> SideEffects.GlobalEffects {\n return getSideEffects(ofCallee: apply.callee)\n }\n\n func getSideEffects(ofCallee callee: Value) -> SideEffects.GlobalEffects {\n guard let callees = getCallees(callee: callee) else {\n return .worstEffects\n }\n\n var result = SideEffects.GlobalEffects()\n for callee in callees {\n let calleeEffects = callee.getSideEffects()\n result.merge(with: calleeEffects)\n }\n return result\n }\n\n /// Returns the argument specific side effects of an apply.\n func getSideEffects(of apply: FullApplySite, operand: Operand, path: SmallProjectionPath) -> SideEffects.GlobalEffects {\n var result = SideEffects.GlobalEffects()\n guard let calleeArgIdx = apply.calleeArgumentIndex(of: operand) else {\n return result\n }\n let convention = apply.convention(of: operand)!\n let argument = operand.value.at(path)\n\n guard let callees = getCallees(callee: apply.callee) else {\n return .worstEffects.restrictedTo(argument: argument, withConvention: convention)\n }\n \n for callee in callees {\n let calleeEffects = callee.getSideEffects(forArgument: argument,\n atIndex: calleeArgIdx,\n withConvention: convention)\n result.merge(with: calleeEffects)\n }\n return result.restrictedTo(argument: argument, withConvention: convention)\n }\n}\n\nextension Value {\n fileprivate func isBarrier(_ analysis: CalleeAnalysis) -> Bool {\n guard let callees = analysis.getCallees(callee: self) else {\n return true\n }\n return callees.contains { $0.isDeinitBarrier }\n }\n}\n\nextension FullApplySite {\n fileprivate func isBarrier(_ analysis: CalleeAnalysis) -> Bool {\n return callee.isBarrier(analysis)\n }\n}\n\nextension EndApplyInst {\n fileprivate func 
isBarrier(_ analysis: CalleeAnalysis) -> Bool {\n return (operand.value.definingInstruction as! FullApplySite).isBarrier(analysis)\n }\n}\n\nextension AbortApplyInst {\n fileprivate func isBarrier(_ analysis: CalleeAnalysis) -> Bool {\n return (operand.value.definingInstruction as! FullApplySite).isBarrier(analysis)\n }\n}\n\nextension Instruction {\n /// Whether lifetime ends of lexical values may safely be hoisted over this\n /// instruction.\n ///\n /// Deinitialization barriers constrain variable lifetimes. Lexical\n /// end_borrow, destroy_value, and destroy_addr cannot be hoisted above them.\n final func isDeinitBarrier(_ analysis: CalleeAnalysis) -> Bool {\n if let site = self as? FullApplySite {\n return site.isBarrier(analysis)\n }\n if let eai = self as? EndApplyInst {\n return eai.isBarrier(analysis)\n }\n if let aai = self as? AbortApplyInst {\n return aai.isBarrier(analysis)\n }\n return mayAccessPointer || mayLoadWeakOrUnowned || maySynchronize\n }\n}\n\nstruct FunctionArray : RandomAccessCollection, FormattedLikeArray {\n fileprivate let bridged: BridgedCalleeAnalysis.CalleeList\n\n var startIndex: Int { 0 }\n var endIndex: Int { bridged.getCount() }\n\n subscript(_ index: Int) -> Function {\n return bridged.getCallee(index).function\n }\n}\n// Bridging utilities\n\nextension BridgedCalleeAnalysis {\n var analysis: CalleeAnalysis { .init(bridged: self) }\n}\n\n
dataset_sample\swift\swift\cpp_apple_swift_SwiftCompilerSources_Sources_Optimizer_Analysis_CalleeAnalysis.swift
cpp_apple_swift_SwiftCompilerSources_Sources_Optimizer_Analysis_CalleeAnalysis.swift
Swift
5,452
0.95
0.066667
0.147887
vue-tools
127
2025-05-01T04:44:01.070484
Apache-2.0
false
4dda33be41a81607f7c8bf390154af1c
//===--- DeadEndBlocksAnalysis.swift - the dead-end blocks analysis -------===//\n//\n// This source file is part of the Swift.org open source project\n//\n// Copyright (c) 2014 - 2021 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n\nimport OptimizerBridging\nimport SIL\n\nstruct DeadEndBlocksAnalysis {\n let bridged: BridgedDeadEndBlocksAnalysis\n\n func isDeadEnd(_ block: BasicBlock) -> Bool {\n return bridged.isDeadEnd(block.bridged)\n }\n}\n
dataset_sample\swift\swift\cpp_apple_swift_SwiftCompilerSources_Sources_Optimizer_Analysis_DeadEndBlocksAnalysis.swift
cpp_apple_swift_SwiftCompilerSources_Sources_Optimizer_Analysis_DeadEndBlocksAnalysis.swift
Swift
731
0.95
0.090909
0.578947
react-lib
192
2025-03-22T23:27:52.716238
Apache-2.0
false
a1b5a5aaf8c1944dbd83ed0d9c67b0eb
//===--- DominatorTree.swift - the dominator tree -------------------------===//\n//\n// This source file is part of the Swift.org open source project\n//\n// Copyright (c) 2014 - 2022 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n\nimport SIL\nimport OptimizerBridging\n\nstruct DominatorTree {\n let bridged: BridgedDomTree\n}\n\nextension BasicBlock {\n func dominates(_ other: BasicBlock, _ domTree: DominatorTree) -> Bool {\n domTree.bridged.dominates(self.bridged, other.bridged)\n }\n \n func strictlyDominates(_ other: BasicBlock, _ domTree: DominatorTree) -> Bool {\n dominates(other, domTree) && self != other\n }\n}\n
dataset_sample\swift\swift\cpp_apple_swift_SwiftCompilerSources_Sources_Optimizer_Analysis_DominatorTree.swift
cpp_apple_swift_SwiftCompilerSources_Sources_Optimizer_Analysis_DominatorTree.swift
Swift
911
0.95
0.071429
0.458333
vue-tools
537
2024-07-06T22:14:21.443525
GPL-3.0
false
636f3056e7b9c526cba8328c4291fce0
//===--- PostDominatorTree.swift - the post dominator tree ----------------===//\n//\n// This source file is part of the Swift.org open source project\n//\n// Copyright (c) 2014 - 2022 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n\nimport SIL\nimport OptimizerBridging\n\nstruct PostDominatorTree {\n let bridged: BridgedPostDomTree\n}\n\nextension BasicBlock {\n func postDominates(_ other: BasicBlock, _ pdomTree: PostDominatorTree) -> Bool {\n pdomTree.bridged.postDominates(self.bridged, other.bridged)\n }\n \n func strictlyPostDominates(_ other: BasicBlock, _ pdomTree: PostDominatorTree) -> Bool {\n postDominates(other, pdomTree) && self != other\n }\n}\n
dataset_sample\swift\swift\cpp_apple_swift_SwiftCompilerSources_Sources_Optimizer_Analysis_PostDominatorTree.swift
cpp_apple_swift_SwiftCompilerSources_Sources_Optimizer_Analysis_PostDominatorTree.swift
Swift
947
0.95
0.071429
0.458333
vue-tools
818
2024-08-16T05:49:37.263257
BSD-3-Clause
false
13812b70bc5c5af516b8ea7fb96024cb
//===--- BasicBlockRange.swift - a range of basic blocks ------------------===//\n//\n// This source file is part of the Swift.org open source project\n//\n// Copyright (c) 2014 - 2022 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n\nimport SIL\n\n/// A range of basic blocks.\n///\n/// The `BasicBlockRange` defines a range from a dominating "begin" block to one or more "end" blocks.\n/// The range is "exclusive", which means that the "end" blocks are not part of the range.\n///\n/// The `BasicBlockRange` is in the same spirit as a linear range, but as the control flow is a graph\n/// and not a linear list, there can be "exit" blocks from within the range.\n///\n/// One or more "potential" end blocks can be inserted.\n/// Though, not all inserted blocks end up as "end" blocks.\n///\n/// There are several kind of blocks:\n/// * begin: it is a single block which dominates all blocks of the range\n/// * range: all blocks from which there is a path from the begin block to any of the end blocks\n/// * ends: all inserted blocks which are at the end of the range\n/// * exits: all successor blocks of range blocks which are not in the range themselves\n/// * interiors: all inserted blocks which are not end blocks.\n///\n/// In the following example, let's assume `B` is the begin block and `I1`, `I2` and `I3`\n/// were inserted as potential end blocks:\n///\n/// B\n/// / \\n/// I1 I2\n/// / \\n/// I3 X\n///\n/// Then `I1` and `I3` are "end" blocks. `I2` is an interior block and `X` is an exit block.\n/// The range consists of `B` and `I2`. 
Note that the range does not include `I1` and `I3`\n/// because it's an _exclusive_ range.\n///\n/// This type should be a move-only type, but unfortunately we don't have move-only\n/// types yet. Therefore it's needed to call `deinitialize()` explicitly to\n/// destruct this data structure, e.g. in a `defer {}` block.\nstruct BasicBlockRange : CustomStringConvertible, NoReflectionChildren {\n\n /// The dominating begin block.\n let begin: BasicBlock\n\n /// The inclusive range, i.e. the exclusive range plus the end blocks.\n private(set) var inclusiveRange: Stack<BasicBlock>\n \n /// The exclusive range, i.e. not containing the end blocks.\n var range: LazyFilterSequence<Stack<BasicBlock>> {\n inclusiveRange.lazy.filter { contains($0) }\n }\n\n /// All inserted blocks.\n private(set) var inserted: Stack<BasicBlock>\n\n private var wasInserted: BasicBlockSet\n private var inExclusiveRange: BasicBlockSet\n private var worklist: BasicBlockWorklist\n \n init(begin: BasicBlock, _ context: some Context) {\n self.begin = begin\n self.inclusiveRange = Stack(context)\n self.inserted = Stack(context)\n self.wasInserted = BasicBlockSet(context)\n self.inExclusiveRange = BasicBlockSet(context)\n self.worklist = BasicBlockWorklist(context)\n worklist.pushIfNotVisited(begin)\n }\n\n /// Insert a potential end block.\n mutating func insert(_ block: BasicBlock) {\n if wasInserted.insert(block) {\n inserted.append(block)\n }\n worklist.pushIfNotVisited(block)\n while let b = worklist.pop() {\n inclusiveRange.append(b)\n if b != begin {\n for pred in b.predecessors {\n worklist.pushIfNotVisited(pred)\n inExclusiveRange.insert(pred)\n }\n }\n }\n }\n\n /// Insert a sequence of potential end blocks.\n mutating func insert<S: Sequence>(contentsOf other: S) where S.Element == BasicBlock {\n for block in other {\n insert(block)\n }\n }\n\n /// Returns true if the exclusive range contains `block`.\n func contains(_ block: BasicBlock) -> Bool { inExclusiveRange.contains(block) }\n \n /// 
Returns true if the inclusive range contains `block`.\n func inclusiveRangeContains (_ block: BasicBlock) -> Bool {\n worklist.hasBeenPushed(block)\n }\n\n /// Returns true if the range is valid and that's iff the begin block dominates all blocks of the range.\n var isValid: Bool {\n let entry = begin.parentFunction.entryBlock\n return begin == entry ||\n // If any block in the range is not dominated by `begin`, the range propagates back to the entry block.\n !inclusiveRangeContains(entry)\n }\n\n /// Returns the end blocks.\n var ends: LazyFilterSequence<Stack<BasicBlock>> {\n inserted.lazy.filter { !contains($0) }\n }\n\n /// Returns the exit blocks.\n var exits: LazySequence<FlattenSequence<\n LazyMapSequence<LazyFilterSequence<Stack<BasicBlock>>,\n LazyFilterSequence<SuccessorArray>>>> {\n range.flatMap {\n $0.successors.lazy.filter {\n !inclusiveRangeContains($0) || $0 == begin\n }\n }\n }\n\n /// Returns the interior blocks.\n var interiors: LazyFilterSequence<Stack<BasicBlock>> {\n inserted.lazy.filter { contains($0) && $0 != begin }\n }\n \n var description: String {\n return (isValid ? "" : "<invalid>\n") +\n """\n begin: \(begin.name)\n range: \(range)\n inclrange: \(inclusiveRange)\n ends: \(ends)\n exits: \(exits)\n interiors: \(interiors)\n """\n }\n\n /// TODO: once we have move-only types, make this a real deinit.\n mutating func deinitialize() {\n worklist.deinitialize()\n inExclusiveRange.deinitialize()\n wasInserted.deinitialize()\n inserted.deinitialize()\n inclusiveRange.deinitialize()\n }\n}\n
dataset_sample\swift\swift\cpp_apple_swift_SwiftCompilerSources_Sources_Optimizer_DataStructures_BasicBlockRange.swift
cpp_apple_swift_SwiftCompilerSources_Sources_Optimizer_DataStructures_BasicBlockRange.swift
Swift
5,576
0.95
0.0625
0.415493
node-utils
204
2024-02-16T03:23:27.519210
Apache-2.0
false
9a6e40c06670de1294fffca4180b62ca
//===--- DeadEndBlocks.swift ----------------------------------------------===//\n//\n// This source file is part of the Swift.org open source project\n//\n// Copyright (c) 2014 - 2022 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n\nimport SIL\n\n/// A utility for finding dead-end blocks.\n///\n/// Dead-end blocks are blocks from which there is no path to the function exit\n/// (`return`, `throw` or unwind). These are blocks which end with an unreachable\n/// instruction and blocks from which all paths end in "unreachable" blocks.\nstruct DeadEndBlocks : CustomStringConvertible, NoReflectionChildren {\n private var worklist: BasicBlockWorklist\n private var function: Function\n \n init(function: Function, _ context: FunctionPassContext) {\n self.function = function\n self.worklist = BasicBlockWorklist(context)\n \n // Initialize the worklist with all function-exiting blocks.\n for block in function.blocks where block.terminator.isFunctionExiting {\n worklist.pushIfNotVisited(block)\n }\n \n // Propagate lifeness up the control flow.\n while let block = worklist.pop() {\n worklist.pushIfNotVisited(contentsOf: block.predecessors)\n }\n }\n \n /// Returns true if `block` is a dead-end block.\n func isDeadEnd(block: BasicBlock) -> Bool {\n return !worklist.hasBeenPushed(block)\n }\n\n var description: String {\n let blockNames = function.blocks.filter(isDeadEnd).map(\.name)\n return "[" + blockNames.joined(separator: ",") + "]"\n }\n\n /// TODO: once we have move-only types, make this a real deinit.\n mutating func deinitialize() {\n worklist.deinitialize()\n }\n}\n
dataset_sample\swift\swift\cpp_apple_swift_SwiftCompilerSources_Sources_Optimizer_DataStructures_DeadEndBlocks.swift
cpp_apple_swift_SwiftCompilerSources_Sources_Optimizer_DataStructures_DeadEndBlocks.swift
Swift
1,900
0.95
0.264151
0.444444
node-utils
12
2023-08-05T15:30:30.347554
GPL-3.0
false
872dac9dd29f4922bea21f8b796da4e4
//===--- FunctionUses.swift -----------------------------------------------===//\n//\n// This source file is part of the Swift.org open source project\n//\n// Copyright (c) 2014 - 2022 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n\nimport SIL\n\n/// Provides a list of instructions, which reference a function.\n///\n/// A function "use" is an instruction in another (or the same) function which\n/// references the function. In most cases those are `function_ref` instructions,\n/// but can also be e.g. `keypath` instructions.\n///\n/// 'FunctionUses' performs an analysis of all functions in the module and collects\n/// instructions which reference other functions. This utility can be used to do\n/// inter-procedural caller-analysis.\n///\n/// In order to use `FunctionUses`, first call `collect()` and then get use-lists of\n/// functions with `getUses(of:)`.\nstruct FunctionUses {\n\n // Function uses are stored in a single linked list, whereas the "next" is not a pointer\n // but an index into `FunctionUses.useStorage`.\n fileprivate struct Use {\n // The index of the next use in `FunctionUses.useStorage`.\n let next: Int?\n\n // The instruction which references the function.\n let usingInstruction: Instruction\n }\n\n // The head of the single-linked list of function uses.\n fileprivate struct FirstUse {\n // The head of the use-list.\n var first: Int?\n \n // True if the function has unknown uses\n var hasUnknownUses: Bool\n\n init(of function: Function) {\n self.hasUnknownUses = function.isPossiblyUsedExternally || function.isDefinedExternally\n }\n\n mutating func insert(_ inst: Instruction, _ uses: inout [Use]) {\n let newFirst = uses.count\n uses.append(Use(next: 
first, usingInstruction: inst))\n first = newFirst\n }\n }\n\n /// The list of uses of a function.\n struct UseList : CollectionLikeSequence, CustomStringConvertible {\n struct Iterator : IteratorProtocol {\n fileprivate let useStorage: [Use]\n fileprivate var currentUseIdx: Int?\n \n mutating func next() -> Instruction? {\n if let useIdx = currentUseIdx {\n let use = useStorage[useIdx]\n currentUseIdx = use.next\n return use.usingInstruction\n }\n return nil\n }\n }\n\n // The "storage" for all function uses.\n fileprivate let useStorage: [Use]\n\n // The head of the single-linked use list.\n fileprivate let firstUse: FirstUse\n\n /// True if the function has unknown uses in addition to the list of referencing instructions.\n ///\n /// This is the case, e.g. if the function has public linkage or if the function\n /// is referenced from a vtable or witness table.\n var hasUnknownUses: Bool { firstUse.hasUnknownUses }\n\n func makeIterator() -> Iterator {\n return Iterator(useStorage: useStorage, currentUseIdx: firstUse.first)\n }\n \n var description: String {\n var result = "[\n"\n if hasUnknownUses {\n result += "<unknown uses>\n"\n }\n for inst in self {\n result += "@\(inst.parentFunction.name): \(inst)\n"\n \n }\n result += "]"\n return result\n }\n \n var customMirror: Mirror { Mirror(self, children: []) }\n }\n\n // The "storage" for all function uses.\n private var useStorage: [Use] = []\n\n // The use-list head for each function.\n private var uses: [Function: FirstUse] = [:]\n \n /// Returns the use-list of `function`.\n ///\n /// Note that `collect` must be called before `getUses` can be used.\n func getUses(of function: Function) -> UseList {\n UseList(useStorage: useStorage, firstUse: uses[function, default: FirstUse(of: function)])\n }\n\n /// Collects all uses of all function in the module.\n mutating func collect(context: ModulePassContext) {\n\n // Already start with a reasonable big capacity to reduce the number of\n // re-allocations when 
appending to the data structures.\n useStorage.reserveCapacity(128)\n uses.reserveCapacity(64)\n\n // Mark all functions, which are referenced from tables, to have "unknown" uses.\n\n for vTable in context.vTables {\n for entry in vTable.entries {\n markUnknown(entry.implementation)\n }\n }\n\n for witnessTable in context.witnessTables {\n for entry in witnessTable.entries {\n if case .method(_, let witness) = entry, let witness {\n markUnknown(witness)\n }\n }\n }\n\n for witnessTable in context.defaultWitnessTables {\n for entry in witnessTable.entries {\n if case .method(_, let witness) = entry, let witness {\n markUnknown(witness)\n }\n }\n }\n\n // Collect all instructions, which reference functions, in the module.\n for function in context.functions {\n for inst in function.instructions {\n inst.visitReferencedFunctions { referencedFunc in\n uses[referencedFunc, default: FirstUse(of: referencedFunc)].insert(inst, &useStorage)\n }\n }\n }\n }\n\n private mutating func markUnknown(_ function: Function) {\n uses[function, default: FirstUse(of: function)].hasUnknownUses = true\n }\n}\n
dataset_sample\swift\swift\cpp_apple_swift_SwiftCompilerSources_Sources_Optimizer_DataStructures_FunctionUses.swift
cpp_apple_swift_SwiftCompilerSources_Sources_Optimizer_DataStructures_FunctionUses.swift
Swift
5,357
0.95
0.29878
0.345588
react-lib
767
2024-12-02T12:30:07.807598
BSD-3-Clause
false
bdd8666ff24a9de8ae874a56df8f15f9
//===--- InstructionRange.swift - a range of instructions -----------------===//\n//\n// This source file is part of the Swift.org open source project\n//\n// Copyright (c) 2014 - 2022 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n\nimport SIL\n\n/// A range of instructions.\n///\n/// The `InstructionRange` defines a range from a dominating "begin" instruction to one or more "end" instructions.\n/// The range is "exclusive", which means that the "end" instructions are not part of the range.\n///\n/// One or more "potential" end instructions can be inserted.\n/// Though, not all inserted instructions end up as "end" instructions.\n///\n/// `InstructionRange` is useful for calculating the liverange of values.\n///\n/// The `InstructionRange` is similar to a `BasicBlockRange`, but defines the range\n/// in a "finer" granularity, i.e. on instructions instead of blocks.\n/// `InstructionRange` uses an underlying `BasicBlockRange` to compute the\n/// involved blocks of the instruction range.\n///\n/// There are several kind of instructions:\n/// * begin: it is a single instruction which dominates all instructions of the range\n/// * ends: all inserted instruction which are at the end of the range\n/// * exits: the first instructions of the exit blocks\n/// * interiors: all inserted instructions which are not end instructions.\n///\n/// See also `BasicBlockRange` for more information.\n///\n/// This type should be a move-only type, but unfortunately we don't have move-only\n/// types yet. Therefore it's needed to call `deinitialize()` explicitly to\n/// destruct this data structure, e.g. 
in a `defer {}` block.\nstruct InstructionRange : CustomStringConvertible, NoReflectionChildren {\n \n /// The underlying block range.\n private(set) var blockRange: BasicBlockRange\n\n private var insertedInsts: InstructionSet\n\n // For efficiency, this set does not include instructions in blocks which are not the begin or any end block.\n private var inExclusiveRange: InstructionSet\n\n init(begin beginInst: Instruction, _ context: some Context) {\n self = InstructionRange(beginBlock: beginInst.parentBlock, context)\n self.inExclusiveRange.insert(beginInst)\n }\n\n // Note: 'ends' are simply the instructions to insert in the range. 'self.ends' might not return the same sequence\n // as this 'ends' argument because 'self.ends' will not include block exits.\n init<S: Sequence>(begin beginInst: Instruction, ends: S, _ context: some Context) where S.Element: Instruction {\n self = InstructionRange(begin: beginInst, context)\n insert(contentsOf: ends)\n }\n\n init(for value: Value, _ context: some Context) {\n if let inst = value.definingInstruction {\n self = InstructionRange(begin: inst, context)\n } else if let arg = value as? 
Argument {\n self = InstructionRange(beginBlock: arg.parentBlock, context)\n } else {\n fatalError("cannot build an instruction range for \(value)")\n }\n }\n\n private init(beginBlock: BasicBlock, _ context: some Context) {\n self.blockRange = BasicBlockRange(begin: beginBlock, context)\n self.insertedInsts = InstructionSet(context)\n self.inExclusiveRange = InstructionSet(context)\n }\n\n /// Insert a potential end instruction.\n mutating func insert(_ inst: Instruction) {\n insertedInsts.insert(inst)\n insertIntoRange(instructions: ReverseInstructionList(first: inst.previous))\n blockRange.insert(inst.parentBlock)\n if inst.parentBlock != blockRange.begin {\n // The first time an instruction is inserted in another block than the begin-block we need to insert\n // instructions from the begin instruction to the end of the begin block.\n // For subsequent insertions this is a no-op: `insertIntoRange` will return immediately because those\n // instruction are already inserted.\n insertIntoRange(instructions: blockRange.begin.instructions.reversed())\n }\n }\n\n /// Insert a sequence of potential end instructions.\n mutating func insert<S: Sequence>(contentsOf other: S) where S.Element: Instruction {\n for inst in other {\n insert(inst)\n }\n }\n\n /// Returns true if the exclusive range contains `inst`.\n func contains(_ inst: Instruction) -> Bool {\n if inExclusiveRange.contains(inst) {\n return true\n }\n let block = inst.parentBlock\n return block != blockRange.begin && blockRange.contains(block)\n }\n\n /// Returns true if the inclusive range contains `inst`.\n func inclusiveRangeContains (_ inst: Instruction) -> Bool {\n contains(inst) || insertedInsts.contains(inst)\n }\n\n /// Returns the end instructions.\n ///\n /// Warning: this returns `begin` if no instructions were inserted.\n var ends: LazyMapSequence<LazyFilterSequence<Stack<BasicBlock>>, Instruction> {\n blockRange.ends.map {\n $0.instructions.reversed().first(where: { insertedInsts.contains($0)})!\n 
}\n }\n\n // Returns the exit blocks.\n var exitBlocks: LazySequence<FlattenSequence<\n LazyMapSequence<LazyFilterSequence<Stack<BasicBlock>>,\n LazyFilterSequence<SuccessorArray>>>> {\n blockRange.exits\n }\n\n /// Returns the exit instructions.\n var exits: LazyMapSequence<LazySequence<FlattenSequence<\n LazyMapSequence<LazyFilterSequence<Stack<BasicBlock>>,\n LazyFilterSequence<SuccessorArray>>>>,\n Instruction> {\n blockRange.exits.lazy.map { $0.instructions.first! }\n }\n\n /// Returns the interior instructions.\n var interiors: LazySequence<FlattenSequence<\n LazyMapSequence<Stack<BasicBlock>,\n LazyFilterSequence<ReverseInstructionList>>>> {\n blockRange.inserted.lazy.flatMap {\n var include = blockRange.contains($0)\n return $0.instructions.reversed().lazy.filter {\n if insertedInsts.contains($0) {\n let isInterior = include\n include = true\n return isInterior\n }\n return false\n }\n }\n }\n\n var begin: Instruction? {\n blockRange.begin.instructions.first(where: inExclusiveRange.contains)\n }\n\n private mutating func insertIntoRange(instructions: ReverseInstructionList) {\n for inst in instructions {\n if !inExclusiveRange.insert(inst) {\n return\n }\n }\n }\n\n var description: String {\n return (blockRange.isValid ? "" : "<invalid>\n") +\n """\n begin: \(begin?.description ?? 
blockRange.begin.name)\n ends: \(ends.map { $0.description }.joined(separator: "\n "))\n exits: \(exits.map { $0.description }.joined(separator: "\n "))\n interiors:\(interiors.map { $0.description }.joined(separator: "\n "))\n """\n }\n\n /// TODO: once we have move-only types, make this a real deinit.\n mutating func deinitialize() {\n inExclusiveRange.deinitialize()\n insertedInsts.deinitialize()\n blockRange.deinitialize()\n }\n}\n\nextension InstructionRange {\n enum PathOverlap {\n // range: ---\n // | pathBegin\n // | |\n // | pathEnd\n // ---\n case containsPath\n\n // range: ---\n // | pathBegin\n // --- |\n // pathEnd\n case containsBegin\n\n // pathBegin\n // range: --- |\n // | pathEnd\n // ---\n case containsEnd\n\n // pathBegin\n // range: --- |\n // | |\n // --- |\n // pathEnd\n case overlappedByPath\n\n // either: pathBegin\n // |\n // pathEnd\n // range: ---\n // |\n // ---\n // or: pathBegin\n // |\n // pathEnd\n case disjoint\n }\n\n /// Return true if any exclusive path from `begin` to `end` includes an instruction in this exclusive range.\n ///\n /// Returns .containsBegin, if this range has the same begin and end as the path.\n ///\n /// Precondition: `begin` dominates `end`.\n func overlaps(pathBegin: Instruction, pathEnd: Instruction, _ context: some Context) -> PathOverlap {\n assert(pathBegin != pathEnd, "expect an exclusive path")\n if contains(pathBegin) {\n // Note: pathEnd != self.begin here since self.contains(pathBegin)\n if contains(pathEnd) { return .containsPath }\n return .containsBegin\n }\n if contains(pathEnd) {\n if let rangeBegin = self.begin, rangeBegin == pathEnd {\n return .disjoint\n }\n return .containsEnd\n }\n // Neither end-point is contained. If a backward path walk encouters this range, then it must overlap this\n // range. 
Otherwise, it is disjoint.\n var backwardBlocks = BasicBlockWorklist(context)\n defer { backwardBlocks.deinitialize() }\n backwardBlocks.pushIfNotVisited(pathEnd.parentBlock)\n while let block = backwardBlocks.pop() {\n if blockRange.inclusiveRangeContains(block) {\n // This range overlaps with this block, but there are still three possibilities:\n // (1) range, pathBegin, pathEnd = disjoint (range might not begin in this block)\n // (2) pathBegin, pathEnd, range = disjoint (pathBegin might not be in this block)\n // (3) pathBegin, range, pathEnd = overlappedByPath (range or pathBegin might not be in this block)\n //\n // Walk backward from pathEnd to find either pathBegin or an instruction in this range.\n // Both this range and the path may or may not begin in this block.\n let endInBlock = block == pathEnd.parentBlock ? pathEnd : block.terminator\n for inst in ReverseInstructionList(first: endInBlock) {\n // Check pathBegin first because the range is exclusive.\n if inst == pathBegin {\n break\n }\n // Check inclusiveRangeContains() in case the range end is the first instruction in this block.\n if inclusiveRangeContains(inst) {\n return .overlappedByPath\n }\n }\n // No instructions in this range occur between pathBegin and pathEnd.\n return .disjoint\n }\n // No range blocks have been reached.\n if block == pathBegin.parentBlock {\n return .disjoint\n }\n backwardBlocks.pushIfNotVisited(contentsOf: block.predecessors)\n }\n fatalError("begin: \(pathBegin)\n must dominate end: \(pathEnd)")\n }\n}\n\nlet rangeOverlapsPathTest = FunctionTest("range_overlaps_path") {\n function, arguments, context in\n let rangeValue = arguments.takeValue()\n print("Range of: \(rangeValue)")\n var range = computeLinearLiveness(for: rangeValue, context)\n defer { range.deinitialize() }\n let pathInst = arguments.takeInstruction()\n print("Path begin: \(pathInst)")\n if let pathBegin = pathInst as? 
ScopedInstruction {\n for end in pathBegin.endInstructions {\n print("Overlap kind:", range.overlaps(pathBegin: pathInst, pathEnd: end, context))\n }\n return\n }\n if let pathValue = pathInst as? SingleValueInstruction, pathValue.ownership == .owned {\n for end in pathValue.uses.endingLifetime {\n print("Overlap kind:", range.overlaps(pathBegin: pathInst, pathEnd: end.instruction, context))\n }\n return\n }\n print("Test specification error: not a scoped or owned instruction: \(pathInst)")\n}\n
dataset_sample\swift\swift\cpp_apple_swift_SwiftCompilerSources_Sources_Optimizer_DataStructures_InstructionRange.swift
cpp_apple_swift_SwiftCompilerSources_Sources_Optimizer_DataStructures_InstructionRange.swift
Swift
11,587
0.95
0.115894
0.372263
python-kit
569
2024-12-19T03:44:21.081230
Apache-2.0
false
544206dd6c17980f1362bd45ba570e78
//===--- ReachableBlocks.swift --------------------------------------------===//\n//\n// This source file is part of the Swift.org open source project\n//\n// Copyright (c) 2014 - 2023 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n\nimport SIL\n\n/// A utility for finding reachable and unreachable blocks.\n///\n/// Reachable blocks are all blocks which are reachable from the entry block of\n/// the function. All other blocks are dead blocks.\nstruct ReachableBlocks : CustomStringConvertible, NoReflectionChildren {\n private var worklist: BasicBlockWorklist\n private var function: Function\n\n init(function: Function, _ context: FunctionPassContext) {\n self.function = function\n self.worklist = BasicBlockWorklist(context)\n\n worklist.pushIfNotVisited(function.entryBlock)\n\n // Propagate lifeness down the control flow.\n while let block = worklist.pop() {\n worklist.pushIfNotVisited(contentsOf: block.successors)\n }\n }\n\n func isReachable(block: BasicBlock) -> Bool {\n return worklist.hasBeenPushed(block)\n }\n\n var description: String {\n let blockNames = function.blocks.filter(isReachable).map(\.name)\n return "[" + blockNames.joined(separator: ",") + "]"\n }\n\n /// TODO: once we have move-only types, make this a real deinit.\n mutating func deinitialize() {\n worklist.deinitialize()\n }\n}\n
dataset_sample\swift\swift\cpp_apple_swift_SwiftCompilerSources_Sources_Optimizer_DataStructures_ReachableBlocks.swift
cpp_apple_swift_SwiftCompilerSources_Sources_Optimizer_DataStructures_ReachableBlocks.swift
Swift
1,617
0.95
0.229167
0.425
node-utils
899
2024-04-19T02:20:55.701619
MIT
false
1ded6045523ec55c80dfbc5d28fdf60b
//===--- Set.swift - sets for basic blocks, values and instructions -------===//\n//\n// This source file is part of the Swift.org open source project\n//\n// Copyright (c) 2014 - 2022 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n\nimport SIL\nimport OptimizerBridging\n\nprotocol IntrusiveSet : CustomStringConvertible, NoReflectionChildren {\n associatedtype Element\n\n init(_ context: some Context)\n mutating func insert(_ element: Element) -> Bool\n mutating func erase(_ element: Element)\n func contains(_ element: Element) -> Bool\n mutating func deinitialize()\n}\n\n/// A set of basic blocks.\n///\n/// This is an extremely efficient implementation which does not need memory\n/// allocations or hash lookups.\n///\n/// This type should be a move-only type, but unfortunately we don't have move-only\n/// types yet. Therefore it's needed to call `deinitialize()` explicitly to\n/// destruct this data structure, e.g. 
in a `defer {}` block.\nstruct BasicBlockSet : IntrusiveSet {\n\n private let context: BridgedPassContext\n private let bridged: BridgedBasicBlockSet\n \n init(_ context: some Context) {\n self.context = context._bridged\n self.bridged = self.context.allocBasicBlockSet()\n }\n\n func contains(_ block: BasicBlock) -> Bool {\n bridged.contains(block.bridged)\n }\n\n /// Returns true if `block` was not contained in the set before inserting.\n @discardableResult\n mutating func insert(_ block: BasicBlock) -> Bool {\n bridged.insert(block.bridged)\n }\n\n mutating func erase(_ block: BasicBlock) {\n bridged.erase(block.bridged)\n }\n\n var description: String {\n let function = bridged.getFunction().function\n let blockNames = function.blocks.enumerated().filter { contains($0.1) }\n .map { "bb\($0.0)"}\n return "{" + blockNames.joined(separator: ", ") + "}"\n }\n\n /// TODO: once we have move-only types, make this a real deinit.\n mutating func deinitialize() {\n context.freeBasicBlockSet(bridged)\n }\n}\n\n/// A set of values.\n///\n/// This is an extremely efficient implementation which does not need memory\n/// allocations or hash lookups.\n///\n/// This type should be a move-only type, but unfortunately we don't have move-only\n/// types yet. Therefore it's needed to call `deinitialize()` explicitly to\n/// destruct this data structure, e.g. 
in a `defer {}` block.\nstruct ValueSet : IntrusiveSet {\n\n private let context: BridgedPassContext\n private let bridged: BridgedNodeSet\n \n init(_ context: some Context) {\n self.context = context._bridged\n self.bridged = self.context.allocNodeSet()\n }\n\n func contains(_ value: Value) -> Bool {\n bridged.containsValue(value.bridged)\n }\n\n /// Returns true if `value` was not contained in the set before inserting.\n @discardableResult\n mutating func insert(_ value: Value) -> Bool {\n bridged.insertValue(value.bridged)\n }\n\n mutating func erase(_ value: Value) {\n bridged.eraseValue(value.bridged)\n }\n\n var description: String {\n let function = bridged.getFunction().function\n var d = "{\n"\n for block in function.blocks {\n for arg in block.arguments {\n if contains(arg) {\n d += arg.description\n }\n }\n for inst in block.instructions {\n for result in inst.results {\n if contains(result) {\n d += result.description\n }\n }\n }\n }\n d += "}\n"\n return d\n }\n\n /// TODO: once we have move-only types, make this a real deinit.\n mutating func deinitialize() {\n context.freeNodeSet(bridged)\n }\n}\n\n/// A set of instructions.\n///\n/// This is an extremely efficient implementation which does not need memory\n/// allocations or hash lookups.\n///\n/// This type should be a move-only type, but unfortunately we don't have move-only\n/// types yet. Therefore it's needed to call `deinitialize()` explicitly to\n/// destruct this data structure, e.g. 
in a `defer {}` block.\nstruct SpecificInstructionSet<InstType: Instruction> : IntrusiveSet {\n\n private let context: BridgedPassContext\n private let bridged: BridgedNodeSet\n \n init(_ context: some Context) {\n self.context = context._bridged\n self.bridged = self.context.allocNodeSet()\n }\n\n func contains(_ inst: InstType) -> Bool {\n bridged.containsInstruction(inst.bridged)\n }\n\n /// Returns true if `inst` was not contained in the set before inserting.\n @discardableResult\n mutating func insert(_ inst: InstType) -> Bool {\n bridged.insertInstruction(inst.bridged)\n }\n\n mutating func erase(_ inst: InstType) {\n bridged.eraseInstruction(inst.bridged)\n }\n\n var description: String {\n let function = bridged.getFunction().function\n var d = "{\n"\n for i in function.instructions {\n if let inst = i as? InstType, contains(inst) {\n d += inst.description + "\n"\n }\n }\n d += "}\n"\n return d\n }\n\n /// TODO: once we have move-only types, make this a real deinit.\n mutating func deinitialize() {\n context.freeNodeSet(bridged)\n }\n}\n\ntypealias InstructionSet = SpecificInstructionSet<Instruction>\n\n/// A set of operands.\n///\n/// This is an extremely efficient implementation which does not need memory\n/// allocations or hash lookups.\n///\n/// This type should be a move-only type, but unfortunately we don't have move-only\n/// types yet. Therefore it's needed to call `deinitialize()` explicitly to\n/// destruct this data structure, e.g. 
in a `defer {}` block.\nstruct OperandSet : IntrusiveSet {\n\n private let context: BridgedPassContext\n private let bridged: BridgedOperandSet\n\n init(_ context: some Context) {\n self.context = context._bridged\n self.bridged = self.context.allocOperandSet()\n }\n\n func contains(_ operand: Operand) -> Bool {\n bridged.contains(operand.bridged)\n }\n\n /// Returns true if `inst` was not contained in the set before inserting.\n @discardableResult\n mutating func insert(_ operand: Operand) -> Bool {\n bridged.insert(operand.bridged)\n }\n\n mutating func erase(_ operand: Operand) {\n bridged.erase(operand.bridged)\n }\n\n var description: String {\n let function = bridged.getFunction().function\n var d = "{\n"\n for inst in function.instructions {\n for op in inst.operands {\n if contains(op) {\n d += op.description\n }\n }\n }\n d += "}\n"\n return d\n }\n\n /// TODO: once we have move-only types, make this a real deinit.\n mutating func deinitialize() {\n context.freeOperandSet(bridged)\n }\n}\n\nextension IntrusiveSet {\n mutating func insert(contentsOf source: some Sequence<Element>) {\n for element in source {\n _ = insert(element)\n }\n }\n\n init(insertContentsOf source: some Sequence<Element>, _ context: some Context) {\n self.init(context)\n insert(contentsOf: source)\n }\n}\n
dataset_sample\swift\swift\cpp_apple_swift_SwiftCompilerSources_Sources_Optimizer_DataStructures_Set.swift
cpp_apple_swift_SwiftCompilerSources_Sources_Optimizer_DataStructures_Set.swift
Swift
7,034
0.95
0.126531
0.246377
vue-tools
298
2025-07-06T22:52:11.402582
GPL-3.0
false
6132c9daa1fdee6cba3c137a50f0aba3
//===--- Stack.swift - defines the Stack data structure -------------------===//\n//\n// This source file is part of the Swift.org open source project\n//\n// Copyright (c) 2014 - 2022 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n\nimport OptimizerBridging\nimport SIL\n\n/// A very efficient implementation of a stack, which can also be iterated over.\n///\n/// A Stack is the best choice for things like worklists, etc., if no random\n/// access is needed.\n/// Compared to Array, it does not require any memory allocations, because it\n/// uses a recycling bump pointer allocator for allocating the slabs.\n/// All operations have (almost) zero cost.\n///\n/// This type should be a move-only type, but unfortunately we don't have move-only\n/// types yet. Therefore it's needed to call `deinitialize()` explicitly to\n/// destruct this data structure, e.g. in a `defer {}` block.\nstruct Stack<Element> : CollectionLikeSequence {\n\n private let bridgedContext: BridgedPassContext\n private var firstSlab = BridgedPassContext.Slab(nil)\n private var lastSlab = BridgedPassContext.Slab(nil)\n private var endIndex: Int = 0\n\n private static var slabCapacity: Int {\n BridgedPassContext.Slab.getCapacity() / MemoryLayout<Element>.stride\n }\n\n private func allocate(after lastSlab: BridgedPassContext.Slab? = nil) -> BridgedPassContext.Slab {\n let lastSlab = lastSlab ?? 
BridgedPassContext.Slab(nil)\n let newSlab = bridgedContext.allocSlab(lastSlab)\n UnsafeMutableRawPointer(newSlab.data!).bindMemory(to: Element.self, capacity: Stack.slabCapacity)\n return newSlab\n }\n\n private static func element(in slab: BridgedPassContext.Slab, at index: Int) -> Element {\n return pointer(in: slab, at: index).pointee\n }\n\n private static func pointer(in slab: BridgedPassContext.Slab, at index: Int) -> UnsafeMutablePointer<Element> {\n return UnsafeMutableRawPointer(slab.data!).assumingMemoryBound(to: Element.self) + index\n }\n\n struct Iterator : IteratorProtocol {\n var slab: BridgedPassContext.Slab\n var index: Int\n let lastSlab: BridgedPassContext.Slab\n let endIndex: Int\n \n mutating func next() -> Element? {\n let end = (slab.data == lastSlab.data ? endIndex : slabCapacity)\n \n guard index < end else { return nil }\n \n let elem = Stack.element(in: slab, at: index)\n index += 1\n \n if index >= end && slab.data != lastSlab.data {\n slab = slab.getNext()\n index = 0\n }\n return elem\n }\n }\n \n init(_ context: some Context) { self.bridgedContext = context._bridged }\n\n func makeIterator() -> Iterator {\n return Iterator(slab: firstSlab, index: 0, lastSlab: lastSlab, endIndex: endIndex)\n }\n\n var first: Element? {\n isEmpty ? nil : Stack.element(in: firstSlab, at: 0)\n }\n\n var last: Element? {\n isEmpty ? 
nil : Stack.element(in: lastSlab, at: endIndex &- 1)\n }\n\n mutating func push(_ element: Element) {\n if endIndex >= Stack.slabCapacity {\n lastSlab = allocate(after: lastSlab)\n endIndex = 0\n } else if firstSlab.data == nil {\n assert(endIndex == 0)\n firstSlab = allocate()\n lastSlab = firstSlab\n }\n Stack.pointer(in: lastSlab, at: endIndex).initialize(to: element)\n endIndex += 1\n }\n\n /// The same as `push` to provide an Array-like append API.\n mutating func append(_ element: Element) { push(element) }\n\n mutating func append<S: Sequence>(contentsOf other: S) where S.Element == Element {\n for elem in other {\n append(elem)\n }\n }\n\n var isEmpty: Bool { return endIndex == 0 }\n \n mutating func pop() -> Element? {\n if isEmpty {\n return nil\n }\n assert(endIndex > 0)\n endIndex -= 1\n let elem = Stack.pointer(in: lastSlab, at: endIndex).move()\n \n if endIndex == 0 {\n if lastSlab.data == firstSlab.data {\n _ = bridgedContext.freeSlab(lastSlab)\n firstSlab.data = nil\n lastSlab.data = nil\n endIndex = 0\n } else {\n lastSlab = bridgedContext.freeSlab(lastSlab)\n endIndex = Stack.slabCapacity\n }\n }\n\n return elem\n }\n \n mutating func removeAll() {\n while pop() != nil { }\n }\n\n /// TODO: once we have move-only types, make this a real deinit.\n mutating func deinitialize() { removeAll() }\n}\n\nextension Stack {\n /// Mark a stack location for future iteration.\n ///\n /// TODO: Marker should be ~Escapable.\n struct Marker {\n let slab: BridgedPassContext.Slab\n let index: Int\n }\n\n var top: Marker { Marker(slab: lastSlab, index: endIndex) }\n\n struct Segment : CollectionLikeSequence {\n let low: Marker\n let high: Marker\n\n init(in stack: Stack, low: Marker, high: Marker) {\n if low.slab.data == nil {\n assert(low.index == 0, "invalid empty stack marker")\n // `low == nil` and `high == nil` is a valid empty segment,\n // even though `assertValid(marker:)` would return false.\n if high.slab.data != nil {\n stack.assertValid(marker: high)\n 
}\n self.low = Marker(slab: stack.firstSlab, index: 0)\n self.high = high\n return\n }\n stack.assertValid(marker: low)\n stack.assertValid(marker: high)\n self.low = low\n self.high = high\n }\n\n func makeIterator() -> Stack.Iterator {\n return Iterator(slab: low.slab, index: low.index,\n lastSlab: high.slab, endIndex: high.index)\n }\n }\n\n /// Assert that `marker` is valid based on the current `top`.\n ///\n /// This is an assert rather than a query because slabs can reuse\n /// memory leading to a stale marker that appears valid.\n func assertValid(marker: Marker) {\n var currentSlab = lastSlab\n var currentIndex = endIndex\n while currentSlab.data != marker.slab.data {\n assert(currentSlab.data != firstSlab.data, "Invalid stack marker")\n currentSlab = currentSlab.getPrevious()\n currentIndex = Stack.slabCapacity\n }\n assert(marker.index <= currentIndex, "Invalid stack marker")\n }\n\n /// Execute the `body` closure, passing it `self` for further\n /// mutation of the stack and passing `marker` to mark the stack\n /// position prior to executing `body`. `marker` must not escape the\n /// `body` closure.\n mutating func withMarker<R>(\n _ body: (inout Stack<Element>, Marker) throws -> R) rethrows -> R {\n return try body(&self, top)\n }\n\n /// Record a stack marker, execute a `body` closure, then execute a\n /// `handleNewElements` closure with the Segment that contains all\n /// elements that remain on the stack after being pushed on the\n /// stack while executing `body`. `body` must push more elements\n /// than it pops.\n mutating func withMarker<R>(\n pushElements body: (inout Stack) throws -> R,\n withNewElements handleNewElements: ((Segment) -> ())\n ) rethrows -> R {\n return try withMarker { (stack: inout Stack<Element>, marker: Marker) in\n let result = try body(&stack)\n handleNewElements(Segment(in: stack, low: marker, high: stack.top))\n return result\n }\n }\n}\n
dataset_sample\swift\swift\cpp_apple_swift_SwiftCompilerSources_Sources_Optimizer_DataStructures_Stack.swift
cpp_apple_swift_SwiftCompilerSources_Sources_Optimizer_DataStructures_Stack.swift
Swift
7,263
0.95
0.099099
0.222222
python-kit
147
2023-12-07T23:23:17.336161
Apache-2.0
false
72b65765e2ba945b2d55ba85396776da
//===--- Worklist.swift ---------------------------------------------------===//\n//\n// This source file is part of the Swift.org open source project\n//\n// Copyright (c) 2014 - 2023 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n\nimport SIL\n\n/// A utility for processing entities in a worklist.\n///\n/// A `Worklist` is basically a combination of a stack and a set.\n/// It can be used for typical worklist-processing algorithms.\n///\n/// This type should be a move-only type, but unfortunately we don't have move-only\n/// types yet. Therefore it's needed to call `deinitialize()` explicitly to\n/// destruct this data structure, e.g. in a `defer {}` block.\nstruct Worklist<Set: IntrusiveSet> : CustomStringConvertible, NoReflectionChildren {\n typealias Element = Set.Element\n private var worklist: Stack<Element>\n private var pushedElements: Set\n\n init(_ context: some Context) {\n self.worklist = Stack(context)\n self.pushedElements = Set(context)\n }\n \n mutating func pop() -> Element? { return worklist.pop() }\n\n /// Pop and allow the popped element to be pushed again to the worklist.\n mutating func popAndForget() -> Element? 
{\n if let element = worklist.pop() {\n pushedElements.erase(element)\n return element\n }\n return nil\n }\n\n mutating func pushIfNotVisited(_ element: Element) {\n if pushedElements.insert(element) {\n worklist.append(element)\n }\n }\n\n mutating func pushIfNotVisited<S: Sequence>(contentsOf other: S) where S.Element == Element {\n for element in other {\n pushIfNotVisited(element)\n }\n }\n\n /// Returns true if \p element was pushed to the worklist, regardless if it's already popped or not.\n func hasBeenPushed(_ element: Element) -> Bool { pushedElements.contains(element) }\n\n var isEmpty: Bool { worklist.isEmpty }\n\n var description: String {\n """\n worklist: \(worklist)\n pushed: \(pushedElements)\n """\n }\n\n /// TODO: once we have move-only types, make this a real deinit.\n mutating func deinitialize() {\n pushedElements.deinitialize()\n worklist.deinitialize()\n }\n}\n\ntypealias BasicBlockWorklist = Worklist<BasicBlockSet>\ntypealias InstructionWorklist = Worklist<InstructionSet>\ntypealias SpecificInstructionWorklist<InstType: Instruction> = Worklist<SpecificInstructionSet<InstType>>\ntypealias ValueWorklist = Worklist<ValueSet>\ntypealias OperandWorklist = Worklist<OperandSet>\n\nextension InstructionWorklist {\n mutating func pushPredecessors(of inst: Instruction, ignoring ignoreInst: Instruction) {\n if let prev = inst.previous {\n if prev != ignoreInst {\n pushIfNotVisited(prev)\n }\n } else {\n for predBlock in inst.parentBlock.predecessors {\n let termInst = predBlock.terminator\n if termInst != ignoreInst {\n pushIfNotVisited(termInst)\n }\n }\n }\n }\n}\n\n
dataset_sample\swift\swift\cpp_apple_swift_SwiftCompilerSources_Sources_Optimizer_DataStructures_Worklist.swift
cpp_apple_swift_SwiftCompilerSources_Sources_Optimizer_DataStructures_Worklist.swift
Swift
3,120
0.95
0.134021
0.26506
awesome-app
665
2024-05-18T23:13:24.406875
Apache-2.0
false
e23af3af2da3550c903a6da918e6ab97
//===--- AssumeSingleThreaded.swift - Assume single-threaded execution ----===//\n//\n// This source file is part of the Swift.org open source project\n//\n// Copyright (c) 2014 - 2022 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n//\n// Assume that user code is single-threaded.\n//\n// Convert all reference counting operations into non-atomic ones.\n//\n// To get rid of most atomic reference counting operations, the standard\n// library should be compiled in this mode as well .\n//\n// This pass affects only reference counting operations resulting from SIL\n// instructions. It wouldn't affect places in the runtime C++ code which\n// hard-code calls to retain/release. We could take advantage of the Instruments\n// instrumentation stubs to redirect calls from the runtime if it was\n// significant, or else just build a single-threaded variant of the runtime.\n//\n//===----------------------------------------------------------------------===//\n\nimport SIL\n\nlet assumeSingleThreadedPass = FunctionPass(name: "sil-assume-single-threaded") {\n (function: Function, context: FunctionPassContext) in\n\n for inst in function.instructions {\n guard let rcInst = inst as? RefCountingInst else { continue }\n\n rcInst.setAtomicity(isAtomic: false, context)\n }\n}\n
dataset_sample\swift\swift\cpp_apple_swift_SwiftCompilerSources_Sources_Optimizer_FunctionPasses_AssumeSingleThreaded.swift
cpp_apple_swift_SwiftCompilerSources_Sources_Optimizer_FunctionPasses_AssumeSingleThreaded.swift
Swift
1,542
0.95
0.157895
0.764706
python-kit
238
2024-06-29T00:27:17.459155
Apache-2.0
false
4c3a8ddd5242b038d1d61a5593473ae3
//===--- SimplificationPasses.swift ----------------------------------------==//\n//\n// This source file is part of the Swift.org open source project\n//\n// Copyright (c) 2014 - 2023 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n\nimport SIL\n\n/// Removes redundant `debug_step` instructions.\n/// If a `debug_step` has the same debug location as a previous or succeeding instruction\n/// it is removed. It's just important that there is at least one instruction for a\n/// certain debug location so that single stepping on that location will work.\nlet cleanupDebugStepsPass = FunctionPass(name: "cleanup-debug-steps") {\n (function: Function, context: FunctionPassContext) in\n\n for block in function.blocks {\n cleanupDebugSteps(in: block, context)\n }\n}\n\nprivate func cleanupDebugSteps(in block: BasicBlock, _ context: FunctionPassContext) {\n var lastInstWithSameLocation: Instruction?\n\n for inst in block.instructions {\n if !inst.location.isDebugSteppable {\n if inst is DebugStepInst && !inst.location.isDebugSteppable {\n // First case: the instruction which is replaced by the debug_step didn't have a valid\n // location itself. 
Then we don't need the debug_step either.\n context.erase(instruction: inst)\n }\n continue\n }\n\n if let li = lastInstWithSameLocation,\n !inst.location.hasSameSourceLocation(as: li.location) {\n lastInstWithSameLocation = nil\n }\n\n // Only instructions which are really compiled down to some machine instructions can be\n // single stepped on.\n if !inst.producesMachineCode {\n continue\n }\n\n if let li = lastInstWithSameLocation {\n if inst is DebugStepInst {\n\n // Second case:\n // %li = some_instruction, loc "l"\n // debug_step, loc "l" // current inst -> erase\n context.erase(instruction: inst)\n continue\n } else if li is DebugStepInst {\n\n // Third case:\n // debug_step, loc "l" // li -> erase\n // %inst = some_instruction, loc "l" // current inst\n context.erase(instruction: li)\n }\n }\n lastInstWithSameLocation = inst\n }\n}\n\nprivate extension Instruction {\n var producesMachineCode: Bool {\n switch self {\n // We could include more instructions here.\n // In worst case a debug_step instruction remains in the code although it's not needed.\n // This is harmless.\n case is DebugStepInst, is ApplySite, is LoadInst, is StoreInst, is TermInst:\n return location.isDebugSteppable\n default:\n return false\n }\n }\n}\n
dataset_sample\swift\swift\cpp_apple_swift_SwiftCompilerSources_Sources_Optimizer_FunctionPasses_CleanupDebugSteps.swift
cpp_apple_swift_SwiftCompilerSources_Sources_Optimizer_FunctionPasses_CleanupDebugSteps.swift
Swift
2,857
0.95
0.180723
0.388889
vue-tools
734
2024-10-04T05:44:07.845664
Apache-2.0
false
c773fc85bc78c02808e49a3f13f865f1
//===--- ClosureSpecialization.swift ---------------------------===//\n//\n// This source file is part of the Swift.org open source project\n//\n// Copyright (c) 2014 - 2024 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===-----------------------------------------------------------------------===//\n\n/// This file contains the closure-specialization optimizations for general and differentiable Swift.\n\n/// General Closure Specialization\n/// ------------------------------------\n/// TODO: Add description when the functionality is added.\n\n/// AutoDiff Closure Specialization\n/// -------------------------------\n/// This optimization performs closure specialization tailored for the patterns seen in Swift Autodiff. In principle,\n/// the optimization does the same thing as the existing closure specialization pass. However, it is tailored to the\n/// patterns of Swift Autodiff.\n///\n/// The compiler performs reverse-mode differentiation on functions marked with `@differentiable(reverse)`. In doing so,\n/// it generates corresponding VJP and Pullback functions, which perform the forward and reverse pass respectively. You\n/// can think of VJPs as functions that "differentiate" an original function and Pullbacks as the calculated\n/// "derivative" of the original function. \n/// \n/// VJPs always return a tuple of 2 values -- the original result and the Pullback. Pullbacks are essentially a chain \n/// of closures, where the closure-contexts are implicitly used as the so-called "tape" during the reverse\n/// differentiation process. 
It is this chain of closures contained within the Pullbacks that this optimization aims\n/// to optimize via closure specialization.\n///\n/// The code patterns that this optimization targets, look similar to the one below:\n/// ``` swift\n/// \n/// // Since `foo` is marked with the `differentiable(reverse)` attribute the compiler\n/// // will generate corresponding VJP and Pullback functions in SIL. Let's assume that\n/// // these functions are called `vjp_foo` and `pb_foo` respectively.\n/// @differentiable(reverse) \n/// func foo(_ x: Float) -> Float { \n/// return sin(x)\n/// }\n///\n/// //============== Before closure specialization ==============// \n/// // VJP of `foo`. Returns the original result and the Pullback of `foo`.\n/// sil @vjp_foo: $(Float) -> (originalResult: Float, pullback: (Float) -> Float) { \n/// bb0(%0: $Float): \n/// // __Inlined__ `vjp_sin`: It is important for all intermediate VJPs to have \n/// // been inlined in `vjp_foo`, otherwise `vjp_foo` will not be able to determine\n/// // that `pb_foo` is closing over other closures and no specialization will happen.\n/// \ \n/// %originalResult = apply @sin(%0): $(Float) -> Float \__ Inlined `vjp_sin`\n/// %partially_applied_pb_sin = partial_apply pb_sin(%0): $(Float) -> Float /\n/// / \n///\n/// %pb_foo = function_ref @pb_foo: $@convention(thin) (Float, (Float) -> Float) -> Float\n/// %partially_applied_pb_foo = partial_apply %pb_foo(%partially_applied_pb_sin): $(Float, (Float) -> Float) -> Float\n/// \n/// return (%originalResult, %partially_applied_pb_foo)\n/// }\n///\n/// // Pullback of `foo`. 
\n/// //\n/// // It receives what are called as intermediate closures that represent\n/// // the calculations that the Pullback needs to perform to calculate a function's\n/// // derivative.\n/// //\n/// // The intermediate closures may themselves contain intermediate closures and\n/// // that is why the Pullback for a function differentiated at the "top" level\n/// // may end up being a "chain" of closures.\n/// sil @pb_foo: $(Float, (Float) -> Float) -> Float { \n/// bb0(%0: $Float, %pb_sin: $(Float) -> Float): \n/// %derivative_of_sin = apply %pb_sin(%0): $(Float) -> Float \n/// return %derivative_of_sin: Float\n/// }\n///\n/// //============== After closure specialization ==============// \n/// sil @vjp_foo: $(Float) -> (originalResult: Float, pullback: (Float) -> Float) { \n/// bb0(%0: $Float): \n/// %originalResult = apply @sin(%0): $(Float) -> Float \n/// \n/// // Before the optimization, pullback of `foo` used to take a closure for computing\n/// // pullback of `sin`. Now, the specialized pullback of `foo` takes the arguments that\n/// // pullback of `sin` used to close over and pullback of `sin` is instead copied over\n/// // inside pullback of `foo`.\n/// %specialized_pb_foo = function_ref @specialized_pb_foo: $@convention(thin) (Float, Float) -> Float\n/// %partially_applied_pb_foo = partial_apply %specialized_pb_foo(%0): $(Float, Float) -> Float \n/// \n/// return (%originalResult, %partially_applied_pb_foo)\n/// }\n/// \n/// sil @specialized_pb_foo: $(Float, Float) -> Float { \n/// bb0(%0: $Float, %1: $Float): \n/// %2 = partial_apply @pb_sin(%1): $(Float) -> Float \n/// %3 = apply %2(): $() -> Float \n/// return %3: $Float\n/// }\n/// ```\n\nimport AST\nimport SIL\nimport SILBridging\n\nprivate let verbose = false\n\nprivate func log(prefix: Bool = true, _ message: @autoclosure () -> String) {\n if verbose {\n debugLog(prefix: prefix, message())\n }\n}\n\n// =========== Entry point =========== //\nlet generalClosureSpecialization = FunctionPass(name: 
"experimental-swift-based-closure-specialization") {\n (function: Function, context: FunctionPassContext) in\n // TODO: Implement general closure specialization optimization\n print("NOT IMPLEMENTED")\n}\n\nlet autodiffClosureSpecialization = FunctionPass(name: "autodiff-closure-specialization") {\n (function: Function, context: FunctionPassContext) in\n\n guard !function.isDefinedExternally,\n function.isAutodiffVJP,\n function.blocks.singleElement != nil else {\n return\n }\n \n var remainingSpecializationRounds = 5\n\n repeat {\n // TODO: Names here are pretty misleading. We are looking for a place where\n // the pullback closure is created (so for `partial_apply` instruction).\n var callSites = gatherCallSites(in: function, context)\n guard !callSites.isEmpty else {\n return\n }\n\n for callSite in callSites {\n var (specializedFunction, alreadyExists) = getOrCreateSpecializedFunction(basedOn: callSite, context)\n\n if !alreadyExists {\n context.notifyNewFunction(function: specializedFunction, derivedFrom: callSite.applyCallee)\n }\n\n rewriteApplyInstruction(using: specializedFunction, callSite: callSite, context)\n }\n\n var deadClosures: InstructionWorklist = callSites.reduce(into: InstructionWorklist(context)) { deadClosures, callSite in\n callSite.closureArgDescriptors\n .map { $0.closure }\n .forEach { deadClosures.pushIfNotVisited($0) }\n }\n\n defer {\n deadClosures.deinitialize()\n }\n\n while let deadClosure = deadClosures.pop() {\n let isDeleted = context.tryDeleteDeadClosure(closure: deadClosure as! 
SingleValueInstruction)\n if isDeleted {\n context.notifyInvalidatedStackNesting()\n }\n }\n\n if context.needFixStackNesting {\n function.fixStackNesting(context)\n }\n\n remainingSpecializationRounds -= 1\n } while remainingSpecializationRounds > 0\n}\n\n// =========== Top-level functions ========== //\n\nprivate let specializationLevelLimit = 2\n\nprivate func gatherCallSites(in caller: Function, _ context: FunctionPassContext) -> [CallSite] {\n /// __Root__ closures created via `partial_apply` or `thin_to_thick_function` may be converted and reabstracted\n /// before finally being used at an apply site. We do not want to handle these intermediate closures separately\n /// as they are handled and cloned into the specialized function as part of the root closures. Therefore, we keep \n /// track of these intermediate closures in a set. \n /// \n /// This set is populated via the `markConvertedAndReabstractedClosuresAsUsed` function which is called when we're\n /// handling the different uses of our root closures.\n ///\n /// Below SIL example illustrates the above point.\n /// ``` \n /// // The below set of a "root" closure and its reabstractions/conversions\n /// // will be handled as a unit and the entire set will be copied over\n /// // in the specialized version of `takesClosure` if we determine that we \n /// // can specialize `takesClosure` against its closure argument.\n /// __ \n /// %someFunction = function_ref @someFunction: $@convention(thin) (Int, Int) -> Int \ \n /// %rootClosure = partial_apply [callee_guaranteed] %someFunction (%someInt): $(Int, Int) -> Int \\n /// %thunk = function_ref @reabstractionThunk : $@convention(thin) (@callee_guaranteed (Int) -> Int) -> @out Int / \n /// %reabstractedClosure = partial_apply [callee_guaranteed] %thunk(%rootClosure) : / \n /// $@convention(thin) (@callee_guaranteed (Int) -> Int) -> @out Int __/ \n /// \n /// %takesClosure = function_ref @takesClosure : $@convention(thin) (@owned @callee_guaranteed (Int) -> 
@out Int) -> Int\n /// %result = partial_apply %takesClosure(%reabstractedClosure) : $@convention(thin) (@owned @callee_guaranteed () -> @out Int) -> Int\n /// ret %result\n /// ```\n var convertedAndReabstractedClosures = InstructionSet(context)\n\n defer {\n convertedAndReabstractedClosures.deinitialize()\n }\n\n var callSiteMap = CallSiteMap()\n\n for inst in caller.instructions {\n if !convertedAndReabstractedClosures.contains(inst),\n let rootClosure = inst.asSupportedClosure\n {\n updateCallSites(for: rootClosure, in: &callSiteMap, \n convertedAndReabstractedClosures: &convertedAndReabstractedClosures, context)\n }\n }\n\n return callSiteMap.callSites\n}\n\nprivate func getOrCreateSpecializedFunction(basedOn callSite: CallSite, _ context: FunctionPassContext)\n -> (function: Function, alreadyExists: Bool)\n{\n let specializedFunctionName = callSite.specializedCalleeName(context)\n if let specializedFunction = context.lookupFunction(name: specializedFunctionName) {\n return (specializedFunction, true)\n }\n\n let applySiteCallee = callSite.applyCallee\n let specializedParameters = applySiteCallee.convention.getSpecializedParameters(basedOn: callSite)\n\n let specializedFunction = \n context.createFunctionForClosureSpecialization(from: applySiteCallee, withName: specializedFunctionName, \n withParams: specializedParameters, \n withSerialization: applySiteCallee.isSerialized)\n\n context.buildSpecializedFunction(specializedFunction: specializedFunction,\n buildFn: { (emptySpecializedFunction, functionPassContext) in \n let closureSpecCloner = SpecializationCloner(emptySpecializedFunction: emptySpecializedFunction, functionPassContext)\n closureSpecCloner.cloneAndSpecializeFunctionBody(using: callSite)\n })\n\n return (specializedFunction, false)\n}\n\nprivate func rewriteApplyInstruction(using specializedCallee: Function, callSite: CallSite, \n _ context: FunctionPassContext) {\n let newApplyArgs = callSite.getArgumentsForSpecializedApply(of: 
specializedCallee)\n\n for newApplyArg in newApplyArgs {\n if case let .PreviouslyCaptured(capturedArg, needsRetain, parentClosureArgIndex) = newApplyArg,\n needsRetain \n {\n let closureArgDesc = callSite.closureArgDesc(at: parentClosureArgIndex)!\n var builder = Builder(before: closureArgDesc.closure, context)\n\n // TODO: Support only OSSA instructions once the OSSA elimination pass is moved after all function optimization \n // passes.\n if callSite.applySite.parentBlock != closureArgDesc.closure.parentBlock {\n // Emit the retain and release that keeps the argument live across the callee using the closure.\n builder.createRetainValue(operand: capturedArg)\n\n for instr in closureArgDesc.lifetimeFrontier {\n builder = Builder(before: instr, context)\n builder.createReleaseValue(operand: capturedArg)\n }\n\n // Emit the retain that matches the captured argument by the partial_apply in the callee that is consumed by\n // the partial_apply.\n builder = Builder(before: callSite.applySite, context)\n builder.createRetainValue(operand: capturedArg)\n } else {\n builder.createRetainValue(operand: capturedArg)\n }\n }\n }\n\n // Rewrite apply instruction\n var builder = Builder(before: callSite.applySite, context)\n let oldApply = callSite.applySite as! 
PartialApplyInst\n let funcRef = builder.createFunctionRef(specializedCallee)\n let capturedArgs = Array(newApplyArgs.map { $0.value })\n\n let newApply = builder.createPartialApply(function: funcRef, substitutionMap: SubstitutionMap(), \n capturedArguments: capturedArgs, calleeConvention: oldApply.calleeConvention,\n hasUnknownResultIsolation: oldApply.hasUnknownResultIsolation,\n isOnStack: oldApply.isOnStack)\n\n builder = Builder(before: callSite.applySite.next!, context)\n // TODO: Support only OSSA instructions once the OSSA elimination pass is moved after all function optimization \n // passes.\n for closureArgDesc in callSite.closureArgDescriptors {\n if closureArgDesc.isClosureConsumed,\n !closureArgDesc.isPartialApplyOnStack,\n !closureArgDesc.parameterInfo.isTrivialNoescapeClosure\n {\n builder.createReleaseValue(operand: closureArgDesc.closure)\n }\n }\n\n oldApply.replace(with: newApply, context)\n}\n\n// ===================== Utility functions and extensions ===================== //\n\nprivate func updateCallSites(for rootClosure: SingleValueInstruction, in callSiteMap: inout CallSiteMap, \n convertedAndReabstractedClosures: inout InstructionSet, _ context: FunctionPassContext) {\n var rootClosurePossibleLiveRange = InstructionRange(begin: rootClosure, context)\n defer {\n rootClosurePossibleLiveRange.deinitialize()\n }\n\n var rootClosureApplies = OperandWorklist(context) \n defer {\n rootClosureApplies.deinitialize()\n }\n\n // A "root" closure undergoing conversions and/or reabstractions has additional restrictions placed upon it, in order\n // for a call site to be specialized against it. 
We handle conversion/reabstraction uses before we handle apply uses\n // to gather the parameters required to evaluate these restrictions or to skip call site uses of "unsupported" \n // closures altogether.\n //\n // There are currently 2 restrictions that are evaluated prior to specializing a callsite against a converted and/or \n // reabstracted closure -\n // 1. A reabstracted root closure can only be specialized against, if the reabstracted closure is ultimately passed\n // trivially (as a noescape+thick function) into the call site.\n //\n // 2. A root closure may be a partial_apply [stack], in which case we need to make sure that all mark_dependence \n // bases for it will be available in the specialized callee in case the call site is specialized against this root\n // closure.\n\n let (foundUnexpectedUse, haveUsedReabstraction) = \n handleNonApplies(for: rootClosure, rootClosureApplies: &rootClosureApplies,\n rootClosurePossibleLiveRange: &rootClosurePossibleLiveRange, context);\n\n\n if foundUnexpectedUse {\n return\n }\n\n let intermediateClosureArgDescriptorData = \n handleApplies(for: rootClosure, callSiteMap: &callSiteMap, rootClosureApplies: &rootClosureApplies, \n rootClosurePossibleLiveRange: &rootClosurePossibleLiveRange, \n convertedAndReabstractedClosures: &convertedAndReabstractedClosures,\n haveUsedReabstraction: haveUsedReabstraction, context)\n\n finalizeCallSites(for: rootClosure, in: &callSiteMap, \n rootClosurePossibleLiveRange: rootClosurePossibleLiveRange,\n intermediateClosureArgDescriptorData: intermediateClosureArgDescriptorData, context)\n}\n\n/// Handles all non-apply direct and transitive uses of `rootClosure`.\n///\n/// Returns: \n/// haveUsedReabstraction - whether the root closure is reabstracted via a thunk \n/// foundUnexpectedUse - whether the root closure is directly or transitively used in an instruction that we don't know\n/// how to handle. 
If true, then `rootClosure` should not be specialized against.\nprivate func handleNonApplies(for rootClosure: SingleValueInstruction, \n rootClosureApplies: inout OperandWorklist,\n rootClosurePossibleLiveRange: inout InstructionRange, \n _ context: FunctionPassContext) \n -> (foundUnexpectedUse: Bool, haveUsedReabstraction: Bool)\n{\n var foundUnexpectedUse = false\n var haveUsedReabstraction = false\n\n /// The root closure or an intermediate closure created by reabstracting the root closure may be a `partial_apply\n /// [stack]` and we need to make sure that all `mark_dependence` bases for this `onStack` closure will be available in\n /// the specialized callee, in case the call site is specialized against this root closure.\n ///\n /// `possibleMarkDependenceBases` keeps track of all potential values that may be used as bases for creating\n /// `mark_dependence`s for our `onStack` root/reabstracted closures. For root closures these values are non-trivial\n /// closure captures (which are always available as function arguments in the specialized callee). 
For reabstracted\n /// closures these values may be the root closure or its conversions (below is a short SIL example representing this\n /// case).\n /// ```\n /// %someFunction = function_ref @someFunction : $@convention(thin) (Int) -> Int\n /// %rootClosure = partial_apply [callee_guaranteed] %someFunction(%someInt) : $@convention(thin) (Int) -> Int\n /// %noescapeRootClosure = convert_escape_to_noescape %rootClosure : $@callee_guaranteed () -> Int to $@noescape @callee_guaranteed () -> Int\n /// %thunk = function_ref @reabstractionThunk : $@convention(thin) (@noescape @callee_guaranteed () -> Int) -> @out Int\n /// %thunkedRootClosure = partial_apply [callee_guaranteed] [on_stack] %thunk(%noescapeRootClosure) : $@convention(thin) (@noescape @callee_guaranteed () -> Int) -> @out Int\n /// %dependency = mark_dependence %thunkedRootClosure : $@noescape @callee_guaranteed () -> @out Int on %noescapeClosure : $@noescape @callee_guaranteed () -> Int\n /// %takesClosure = function_ref @takesClosure : $@convention(thin) (@owned @noescape @callee_guaranteed () -> @out Int)\n /// %ret = apply %takesClosure(%dependency) : $@convention(thin) (@owned @noescape @callee_guaranteed () -> @out Int)\n /// ```\n ///\n /// Any value outside of the aforementioned values is not going to be available in the specialized callee and a\n /// `mark_dependence` of the root closure on such a value means that we cannot specialize the call site against it.\n var possibleMarkDependenceBases = ValueSet(context)\n defer {\n possibleMarkDependenceBases.deinitialize()\n }\n\n var rootClosureConversionsAndReabstractions = OperandWorklist(context) \n rootClosureConversionsAndReabstractions.pushIfNotVisited(contentsOf: rootClosure.uses)\n defer {\n rootClosureConversionsAndReabstractions.deinitialize()\n }\n\n if let pai = rootClosure as? 
PartialApplyInst {\n for arg in pai.arguments {\n possibleMarkDependenceBases.insert(arg)\n }\n }\n \n while let use = rootClosureConversionsAndReabstractions.pop() {\n switch use.instruction {\n case let cfi as ConvertFunctionInst:\n rootClosureConversionsAndReabstractions.pushIfNotVisited(contentsOf: cfi.uses)\n possibleMarkDependenceBases.insert(cfi)\n rootClosurePossibleLiveRange.insert(use.instruction)\n\n case let cvt as ConvertEscapeToNoEscapeInst:\n rootClosureConversionsAndReabstractions.pushIfNotVisited(contentsOf: cvt.uses)\n possibleMarkDependenceBases.insert(cvt)\n rootClosurePossibleLiveRange.insert(use.instruction)\n\n case let pai as PartialApplyInst:\n if !pai.isPullbackInResultOfAutodiffVJP,\n pai.isSupportedClosure,\n pai.isPartialApplyOfThunk,\n // Argument must be a closure\n pai.arguments[0].type.isThickFunction \n {\n rootClosureConversionsAndReabstractions.pushIfNotVisited(contentsOf: pai.uses)\n possibleMarkDependenceBases.insert(pai)\n rootClosurePossibleLiveRange.insert(use.instruction)\n haveUsedReabstraction = true\n } else if pai.isPullbackInResultOfAutodiffVJP {\n rootClosureApplies.pushIfNotVisited(use)\n }\n\n case let mv as MoveValueInst:\n rootClosureConversionsAndReabstractions.pushIfNotVisited(contentsOf: mv.uses)\n possibleMarkDependenceBases.insert(mv)\n rootClosurePossibleLiveRange.insert(use.instruction)\n\n case let mdi as MarkDependenceInst:\n if possibleMarkDependenceBases.contains(mdi.base), \n mdi.value == use.value,\n mdi.value.type.isNoEscapeFunction,\n mdi.value.type.isThickFunction\n {\n rootClosureConversionsAndReabstractions.pushIfNotVisited(contentsOf: mdi.uses)\n rootClosurePossibleLiveRange.insert(use.instruction)\n }\n \n case is CopyValueInst,\n is DestroyValueInst,\n is RetainValueInst,\n is ReleaseValueInst,\n is StrongRetainInst,\n is StrongReleaseInst:\n rootClosurePossibleLiveRange.insert(use.instruction)\n\n case let ti as TupleInst:\n if ti.parentFunction.isAutodiffVJP,\n let returnInst = 
ti.parentFunction.returnInstruction,\n ti == returnInst.returnedValue\n {\n // This is the pullback closure returned from an Autodiff VJP and we don't need to handle it.\n } else {\n fallthrough\n }\n\n default:\n foundUnexpectedUse = true\n log("Found unexpected direct or transitive user of root closure: \(use.instruction)")\n return (foundUnexpectedUse, haveUsedReabstraction) \n }\n }\n\n return (foundUnexpectedUse, haveUsedReabstraction)\n}\n\nprivate typealias IntermediateClosureArgDescriptorDatum = (applySite: SingleValueInstruction, closureArgIndex: Int, paramInfo: ParameterInfo)\n\nprivate func handleApplies(for rootClosure: SingleValueInstruction, callSiteMap: inout CallSiteMap, \n rootClosureApplies: inout OperandWorklist, \n rootClosurePossibleLiveRange: inout InstructionRange, \n convertedAndReabstractedClosures: inout InstructionSet, haveUsedReabstraction: Bool, \n _ context: FunctionPassContext) -> [IntermediateClosureArgDescriptorDatum] \n{\n var intermediateClosureArgDescriptorData: [IntermediateClosureArgDescriptorDatum] = []\n \n while let use = rootClosureApplies.pop() {\n rootClosurePossibleLiveRange.insert(use.instruction)\n\n // TODO [extend to general swift]: Handle full apply sites\n guard let pai = use.instruction as? 
PartialApplyInst else {\n continue\n }\n\n // TODO: Handling generic closures may be possible but is not yet implemented\n if pai.hasSubstitutions || !pai.calleeIsDynamicFunctionRef || !pai.isPullbackInResultOfAutodiffVJP {\n continue\n }\n\n guard let callee = pai.referencedFunction else {\n continue\n }\n\n if callee.isDefinedExternally {\n continue\n }\n\n // Don't specialize non-fragile (read as non-serialized) callees if the caller is fragile; the specialized callee\n // will have shared linkage, and thus cannot be referenced from the fragile caller.\n let caller = rootClosure.parentFunction\n if caller.isSerialized && !callee.isSerialized {\n continue\n }\n\n // If the callee uses a dynamic Self, we cannot specialize it, since the resulting specialization might no longer\n // have 'self' as the last parameter.\n //\n // TODO: We could fix this by inserting new arguments more carefully, or changing how we model dynamic Self\n // altogether.\n if callee.mayBindDynamicSelf {\n continue\n }\n\n // Proceed if the closure is passed as an argument (and not called). If it is called we have nothing to do.\n //\n // `closureArgumentIndex` is the index of the closure in the callee's argument list.\n guard let closureArgumentIndex = pai.calleeArgumentIndex(of: use) else {\n continue\n }\n\n // Ok, we know that we can perform the optimization but not whether or not the optimization is profitable. 
Check if\n // the closure is actually called in the callee (or in a function called by the callee).\n if !isClosureApplied(in: callee, closureArgIndex: closureArgumentIndex) {\n continue\n }\n\n let onlyHaveThinToThickClosure = rootClosure is ThinToThickFunctionInst && !haveUsedReabstraction\n\n guard let closureParamInfo = pai.operandConventions[parameter: use.index] else {\n fatalError("While handling apply uses, parameter info not found for operand: \(use)!")\n }\n\n // If we are going to need to release the copied over closure, we must make sure that we understand all the exit\n // blocks, i.e., they terminate with an instruction that clearly indicates whether to release the copied over \n // closure or leak it.\n if closureParamInfo.convention.isGuaranteed,\n !onlyHaveThinToThickClosure,\n !callee.blocks.allSatisfy({ $0.isReachableExitBlock || $0.terminator is UnreachableInst })\n {\n continue\n }\n\n // Functions with a readnone, readonly or releasenone effect and a nontrivial context cannot be specialized.\n // Inserting a release in such a function results in miscompilation after other optimizations. For now, the\n // specialization is disabled.\n //\n // TODO: A @noescape closure should never be converted to an @owned argument regardless of the function's effect\n // attribute.\n if !callee.effectAllowsSpecialization && !onlyHaveThinToThickClosure {\n continue\n }\n\n // Avoid an infinite specialization loop caused by repeated runs of ClosureSpecializer and CapturePropagation.\n // CapturePropagation propagates constant function-literals. Such function specializations can then be optimized\n // again by the ClosureSpecializer and so on. This happens if a closure argument is called _and_ referenced in\n // another closure, which is passed to a recursive call. 
E.g.\n //\n // func foo(_ c: @escaping () -> ()) { \n // c() foo({ c() })\n // }\n //\n // A limit of 2 is good enough and will not be exceed in "regular" optimization scenarios.\n let closureCallee = rootClosure is PartialApplyInst \n ? (rootClosure as! PartialApplyInst).referencedFunction!\n : (rootClosure as! ThinToThickFunctionInst).referencedFunction!\n\n if closureCallee.specializationLevel > specializationLevelLimit {\n continue\n }\n\n if haveUsedReabstraction {\n markConvertedAndReabstractedClosuresAsUsed(rootClosure: rootClosure, convertedAndReabstractedClosure: use.value, \n convertedAndReabstractedClosures: &convertedAndReabstractedClosures)\n }\n \n if callSiteMap[pai] == nil {\n callSiteMap.insert(key: pai, value: CallSite(applySite: pai))\n }\n\n intermediateClosureArgDescriptorData\n .append((applySite: pai, closureArgIndex: closureArgumentIndex, paramInfo: closureParamInfo))\n }\n\n return intermediateClosureArgDescriptorData\n}\n\n/// Finalizes the call sites for a given root closure by adding a corresponding `ClosureArgDescriptor`\n/// to all call sites where the closure is ultimately passed as an argument.\nprivate func finalizeCallSites(for rootClosure: SingleValueInstruction, in callSiteMap: inout CallSiteMap, \n rootClosurePossibleLiveRange: InstructionRange, \n intermediateClosureArgDescriptorData: [IntermediateClosureArgDescriptorDatum], \n _ context: FunctionPassContext) \n{\n let closureInfo = ClosureInfo(closure: rootClosure, lifetimeFrontier: Array(rootClosurePossibleLiveRange.ends))\n\n for (applySite, closureArgumentIndex, parameterInfo) in intermediateClosureArgDescriptorData {\n guard var callSite = callSiteMap[applySite] else {\n fatalError("While finalizing call sites, call site descriptor not found for call site: \(applySite)!")\n }\n let closureArgDesc = ClosureArgDescriptor(closureInfo: closureInfo, closureArgumentIndex: closureArgumentIndex, \n parameterInfo: parameterInfo)\n 
callSite.appendClosureArgDescriptor(closureArgDesc)\n callSiteMap.update(key: applySite, value: callSite)\n }\n}\n\nprivate func isClosureApplied(in callee: Function, closureArgIndex index: Int) -> Bool {\n func inner(_ callee: Function, _ index: Int, _ handledFuncs: inout Set<Function>) -> Bool {\n let closureArg = callee.argument(at: index)\n\n for use in closureArg.uses {\n if let fai = use.instruction as? ApplySite {\n if fai.callee == closureArg {\n return true\n }\n\n if let faiCallee = fai.referencedFunction,\n !faiCallee.blocks.isEmpty,\n handledFuncs.insert(faiCallee).inserted,\n handledFuncs.count <= recursionBudget\n {\n if inner(faiCallee, fai.calleeArgumentIndex(of: use)!, &handledFuncs) {\n return true\n }\n }\n }\n }\n\n return false\n }\n\n // Limit the number of recursive calls to not go into exponential behavior in corner cases.\n let recursionBudget = 8\n var handledFuncs: Set<Function> = []\n return inner(callee, index, &handledFuncs)\n}\n\n/// Marks any converted/reabstracted closures, corresponding to a given root closure as used. We do not want to \n/// look at such closures separately as during function specialization they will be handled as part of the root closure. 
\nprivate func markConvertedAndReabstractedClosuresAsUsed(rootClosure: Value, convertedAndReabstractedClosure: Value, \n convertedAndReabstractedClosures: inout InstructionSet) \n{\n if convertedAndReabstractedClosure != rootClosure {\n switch convertedAndReabstractedClosure {\n case let pai as PartialApplyInst:\n convertedAndReabstractedClosures.insert(pai)\n return \n markConvertedAndReabstractedClosuresAsUsed(rootClosure: rootClosure, \n convertedAndReabstractedClosure: pai.arguments[0], \n convertedAndReabstractedClosures: &convertedAndReabstractedClosures)\n case let cvt as ConvertFunctionInst:\n convertedAndReabstractedClosures.insert(cvt)\n return \n markConvertedAndReabstractedClosuresAsUsed(rootClosure: rootClosure, \n convertedAndReabstractedClosure: cvt.fromFunction,\n convertedAndReabstractedClosures: &convertedAndReabstractedClosures)\n case let cvt as ConvertEscapeToNoEscapeInst:\n convertedAndReabstractedClosures.insert(cvt)\n return \n markConvertedAndReabstractedClosuresAsUsed(rootClosure: rootClosure, \n convertedAndReabstractedClosure: cvt.fromFunction,\n convertedAndReabstractedClosures: &convertedAndReabstractedClosures)\n case let mdi as MarkDependenceInst:\n convertedAndReabstractedClosures.insert(mdi)\n return \n markConvertedAndReabstractedClosuresAsUsed(rootClosure: rootClosure, convertedAndReabstractedClosure: mdi.value,\n convertedAndReabstractedClosures: &convertedAndReabstractedClosures)\n default:\n log("Parent function of callSite: \(rootClosure.parentFunction)")\n log("Root closure: \(rootClosure)")\n log("Converted/reabstracted closure: \(convertedAndReabstractedClosure)")\n fatalError("While marking converted/reabstracted closures as used, found unexpected instruction: \(convertedAndReabstractedClosure)")\n }\n }\n}\n\nprivate extension SpecializationCloner {\n func cloneAndSpecializeFunctionBody(using callSite: CallSite) {\n self.cloneEntryBlockArgsWithoutOrigClosures(usingOrigCalleeAt: callSite)\n\n let 
(allSpecializedEntryBlockArgs, closureArgIndexToAllClonedReleasableClosures) = cloneAllClosures(at: callSite)\n\n self.cloneFunctionBody(from: callSite.applyCallee, entryBlockArguments: allSpecializedEntryBlockArgs)\n\n self.insertCleanupCodeForClonedReleasableClosures(\n from: callSite, closureArgIndexToAllClonedReleasableClosures: closureArgIndexToAllClonedReleasableClosures)\n }\n\n private func cloneEntryBlockArgsWithoutOrigClosures(usingOrigCalleeAt callSite: CallSite) {\n let originalEntryBlock = callSite.applyCallee.entryBlock\n let clonedFunction = self.cloned\n let clonedEntryBlock = self.entryBlock\n\n originalEntryBlock.arguments\n .enumerated()\n .filter { index, _ in !callSite.hasClosureArg(at: index) }\n .forEach { _, arg in\n let clonedEntryBlockArgType = arg.type.getLoweredType(in: clonedFunction)\n let clonedEntryBlockArg = clonedEntryBlock.addFunctionArgument(type: clonedEntryBlockArgType, self.context)\n clonedEntryBlockArg.copyFlags(from: arg as! FunctionArgument)\n }\n }\n\n /// Clones all closures, originally passed to the callee at the given callSite, into the specialized function.\n ///\n /// Returns the following -\n /// - allSpecializedEntryBlockArgs: Complete list of entry block arguments for the specialized function. This includes\n /// the original arguments to the function (minus the closure arguments) and the arguments representing the values\n /// originally captured by the skipped closure arguments.\n ///\n /// - closureArgIndexToAllClonedReleasableClosures: Mapping from a closure's argument index at `callSite` to the list\n /// of corresponding releasable closures cloned into the specialized function. We have a "list" because we clone\n /// "closure chains", which consist of a "root" closure and its conversions/reabstractions. 
This map is used to\n /// generate cleanup code for the cloned closures in the specialized function.\n private func cloneAllClosures(at callSite: CallSite) \n -> (allSpecializedEntryBlockArgs: [Value], \n closureArgIndexToAllClonedReleasableClosures: [Int: [SingleValueInstruction]]) \n {\n func entryBlockArgsWithOrigClosuresSkipped() -> [Value?] {\n var clonedNonClosureEntryBlockArgs = self.entryBlock.arguments.makeIterator()\n\n return callSite.applyCallee\n .entryBlock\n .arguments\n .enumerated()\n .reduce(into: []) { result, origArgTuple in\n let (index, _) = origArgTuple\n if !callSite.hasClosureArg(at: index) {\n result.append(clonedNonClosureEntryBlockArgs.next())\n } else {\n result.append(Optional.none)\n }\n }\n }\n\n var entryBlockArgs: [Value?] = entryBlockArgsWithOrigClosuresSkipped()\n var closureArgIndexToAllClonedReleasableClosures: [Int: [SingleValueInstruction]] = [:]\n\n for closureArgDesc in callSite.closureArgDescriptors {\n let (finalClonedReabstractedClosure, allClonedReleasableClosures) =\n self.cloneClosureChain(representedBy: closureArgDesc, at: callSite)\n\n entryBlockArgs[closureArgDesc.closureArgIndex] = finalClonedReabstractedClosure\n closureArgIndexToAllClonedReleasableClosures[closureArgDesc.closureArgIndex] = allClonedReleasableClosures\n }\n\n return (entryBlockArgs.map { $0! }, closureArgIndexToAllClonedReleasableClosures)\n }\n\n private func cloneClosureChain(representedBy closureArgDesc: ClosureArgDescriptor, at callSite: CallSite) \n -> (finalClonedReabstractedClosure: SingleValueInstruction, allClonedReleasableClosures: [SingleValueInstruction]) \n {\n let (origToClonedValueMap, capturedArgRange) = self.addEntryBlockArgs(forValuesCapturedBy: closureArgDesc)\n let clonedFunction = self.cloned\n let clonedEntryBlock = self.entryBlock\n let clonedClosureArgs = Array(clonedEntryBlock.arguments[capturedArgRange])\n\n let builder = clonedEntryBlock.instructions.isEmpty\n ? 
Builder(atStartOf: clonedFunction, self.context)\n : Builder(atEndOf: clonedEntryBlock, location: clonedEntryBlock.instructions.last!.location, self.context)\n\n let clonedRootClosure = builder.cloneRootClosure(representedBy: closureArgDesc, capturedArguments: clonedClosureArgs)\n\n let finalClonedReabstractedClosure =\n builder.cloneRootClosureReabstractions(rootClosure: closureArgDesc.closure, clonedRootClosure: clonedRootClosure,\n reabstractedClosure: callSite.appliedArgForClosure(at: closureArgDesc.closureArgIndex)!,\n origToClonedValueMap: origToClonedValueMap,\n self.context)\n\n let allClonedReleasableClosures = [ finalClonedReabstractedClosure ];\n return (finalClonedReabstractedClosure, allClonedReleasableClosures)\n }\n\n private func addEntryBlockArgs(forValuesCapturedBy closureArgDesc: ClosureArgDescriptor) \n -> (origToClonedValueMap: [HashableValue: Value], capturedArgRange: Range<Int>) \n {\n var origToClonedValueMap: [HashableValue: Value] = [:]\n let clonedFunction = self.cloned\n let clonedEntryBlock = self.entryBlock\n\n let capturedArgRangeStart = clonedEntryBlock.arguments.count\n \n for arg in closureArgDesc.arguments {\n let capturedArg = clonedEntryBlock.addFunctionArgument(type: arg.type.getLoweredType(in: clonedFunction), \n self.context)\n origToClonedValueMap[arg] = capturedArg\n }\n\n let capturedArgRangeEnd = clonedEntryBlock.arguments.count\n let capturedArgRange = capturedArgRangeStart == capturedArgRangeEnd \n ? 
0..<0 \n : capturedArgRangeStart..<capturedArgRangeEnd\n\n return (origToClonedValueMap, capturedArgRange)\n }\n\n private func insertCleanupCodeForClonedReleasableClosures(from callSite: CallSite, \n closureArgIndexToAllClonedReleasableClosures: [Int: [SingleValueInstruction]])\n {\n for closureArgDesc in callSite.closureArgDescriptors {\n let allClonedReleasableClosures = closureArgIndexToAllClonedReleasableClosures[closureArgDesc.closureArgIndex]!\n\n // Insert a `destroy_value`, for all releasable closures, in all reachable exit BBs if the closure was passed as a\n // guaranteed parameter or its type was noescape+thick. This is b/c the closure was passed at +0 originally and we\n // need to balance the initial increment of the newly created closure(s).\n if closureArgDesc.isClosureGuaranteed || closureArgDesc.parameterInfo.isTrivialNoescapeClosure,\n !allClonedReleasableClosures.isEmpty\n {\n for exitBlock in callSite.reachableExitBBsInCallee {\n let clonedExitBlock = self.getClonedBlock(for: exitBlock)\n \n let terminator = clonedExitBlock.terminator is UnreachableInst\n ? clonedExitBlock.terminator.previous!\n : clonedExitBlock.terminator\n\n let builder = Builder(before: terminator, self.context)\n\n for closure in allClonedReleasableClosures {\n if let pai = closure as? PartialApplyInst {\n builder.destroyPartialApply(pai: pai, self.context) \n }\n }\n }\n }\n }\n\n if (self.context.needFixStackNesting) {\n self.cloned.fixStackNesting(self.context)\n }\n }\n}\n\nprivate extension [HashableValue: Value] {\n subscript(key: Value) -> Value? {\n get {\n self[key.hashable]\n }\n set {\n self[key.hashable] = newValue\n }\n }\n}\n\nprivate extension CallSite {\n enum NewApplyArg {\n case Original(Value)\n // TODO: This can be simplified in OSSA. 
We can just do a copy_value for everything - except for addresses???\n case PreviouslyCaptured(\n value: Value, needsRetain: Bool, parentClosureArgIndex: Int)\n\n var value: Value {\n switch self {\n case let .Original(originalArg):\n return originalArg\n case let .PreviouslyCaptured(capturedArg, _, _):\n return capturedArg\n }\n }\n }\n\n func getArgumentsForSpecializedApply(of specializedCallee: Function) -> [NewApplyArg]\n {\n var newApplyArgs: [NewApplyArg] = []\n\n // Original arguments\n for (applySiteIndex, arg) in self.applySite.arguments.enumerated() {\n let calleeArgIndex = self.applySite.unappliedArgumentCount + applySiteIndex\n if !self.hasClosureArg(at: calleeArgIndex) {\n newApplyArgs.append(.Original(arg))\n }\n }\n\n // Previously captured arguments\n for closureArgDesc in self.closureArgDescriptors {\n for (applySiteIndex, capturedArg) in closureArgDesc.arguments.enumerated() {\n let needsRetain = closureArgDesc.isCapturedArgNonTrivialObjectType(applySiteIndex: applySiteIndex, \n specializedCallee: specializedCallee)\n\n newApplyArgs.append(.PreviouslyCaptured(value: capturedArg, needsRetain: needsRetain, \n parentClosureArgIndex: closureArgDesc.closureArgIndex))\n }\n }\n\n return newApplyArgs\n }\n}\n\nprivate extension ClosureArgDescriptor {\n func isCapturedArgNonTrivialObjectType(applySiteIndex: Int, specializedCallee: Function) -> Bool {\n precondition(self.closure is PartialApplyInst, "ClosureArgDescriptor is not for a partial_apply closure!")\n\n let capturedArg = self.arguments[applySiteIndex]\n let pai = self.closure as! 
PartialApplyInst\n let capturedArgIndexInCallee = applySiteIndex + pai.unappliedArgumentCount\n let capturedArgConvention = self.callee.argumentConventions[capturedArgIndexInCallee]\n\n return !capturedArg.type.isTrivial(in: specializedCallee) && \n !capturedArgConvention.isAllowedIndirectConvForClosureSpec\n }\n}\n\nprivate extension Builder {\n func cloneRootClosure(representedBy closureArgDesc: ClosureArgDescriptor, capturedArguments: [Value]) \n -> SingleValueInstruction \n {\n let function = self.createFunctionRef(closureArgDesc.callee)\n\n if let pai = closureArgDesc.closure as? PartialApplyInst {\n return self.createPartialApply(function: function, substitutionMap: SubstitutionMap(), \n capturedArguments: capturedArguments, calleeConvention: pai.calleeConvention,\n hasUnknownResultIsolation: pai.hasUnknownResultIsolation, \n isOnStack: pai.isOnStack)\n } else {\n return self.createThinToThickFunction(thinFunction: function, resultType: closureArgDesc.closure.type)\n }\n }\n\n func cloneRootClosureReabstractions(rootClosure: Value, clonedRootClosure: Value, reabstractedClosure: Value,\n origToClonedValueMap: [HashableValue: Value], _ context: FunctionPassContext) \n -> SingleValueInstruction\n {\n func inner(_ rootClosure: Value, _ clonedRootClosure: Value, _ reabstractedClosure: Value, \n _ origToClonedValueMap: inout [HashableValue: Value]) -> Value {\n switch reabstractedClosure {\n case let reabstractedClosure where reabstractedClosure == rootClosure:\n origToClonedValueMap[reabstractedClosure] = clonedRootClosure\n return clonedRootClosure\n \n case let cvt as ConvertFunctionInst:\n let toBeReabstracted = inner(rootClosure, clonedRootClosure, cvt.fromFunction, \n &origToClonedValueMap)\n let reabstracted = self.createConvertFunction(originalFunction: toBeReabstracted, resultType: cvt.type, \n withoutActuallyEscaping: cvt.withoutActuallyEscaping)\n origToClonedValueMap[cvt] = reabstracted\n return reabstracted\n \n case let cvt as 
ConvertEscapeToNoEscapeInst:\n let toBeReabstracted = inner(rootClosure, clonedRootClosure, cvt.fromFunction, \n &origToClonedValueMap)\n let reabstracted = self.createConvertEscapeToNoEscape(originalFunction: toBeReabstracted, resultType: cvt.type,\n isLifetimeGuaranteed: true)\n origToClonedValueMap[cvt] = reabstracted\n return reabstracted\n\n case let pai as PartialApplyInst:\n let toBeReabstracted = inner(rootClosure, clonedRootClosure, pai.arguments[0], \n &origToClonedValueMap)\n \n guard let function = pai.referencedFunction else {\n log("Parent function of callSite: \(rootClosure.parentFunction)")\n log("Root closure: \(rootClosure)")\n log("Unsupported reabstraction closure: \(pai)")\n fatalError("Encountered unsupported reabstraction (via partial_apply) of root closure!")\n }\n\n let fri = self.createFunctionRef(function)\n let reabstracted = self.createPartialApply(function: fri, substitutionMap: SubstitutionMap(), \n capturedArguments: [toBeReabstracted], \n calleeConvention: pai.calleeConvention, \n hasUnknownResultIsolation: pai.hasUnknownResultIsolation, \n isOnStack: pai.isOnStack)\n origToClonedValueMap[pai] = reabstracted\n return reabstracted\n \n case let mdi as MarkDependenceInst:\n let toBeReabstracted = inner(rootClosure, clonedRootClosure, mdi.value, &origToClonedValueMap)\n let base = origToClonedValueMap[mdi.base]!\n let reabstracted = self.createMarkDependence(value: toBeReabstracted, base: base, kind: .Escaping)\n origToClonedValueMap[mdi] = reabstracted\n return reabstracted\n \n default:\n log("Parent function of callSite: \(rootClosure.parentFunction)")\n log("Root closure: \(rootClosure)")\n log("Converted/reabstracted closure: \(reabstractedClosure)")\n fatalError("Encountered unsupported reabstraction of root closure: \(reabstractedClosure)")\n }\n }\n\n var origToClonedValueMap = origToClonedValueMap\n let finalClonedReabstractedClosure = inner(rootClosure, clonedRootClosure, reabstractedClosure, \n &origToClonedValueMap)\n 
return (finalClonedReabstractedClosure as! SingleValueInstruction)\n }\n\n func destroyPartialApply(pai: PartialApplyInst, _ context: FunctionPassContext){\n // TODO: Support only OSSA instructions once the OSSA elimination pass is moved after all function optimization \n // passes.\n\n if pai.isOnStack {\n // for arg in pai.arguments {\n // self.createDestroyValue(operand: arg)\n // }\n // self.createDestroyValue(operand: pai)\n\n if pai.parentFunction.hasOwnership {\n // Under OSSA, the closure acts as an owned value whose lifetime is a borrow scope for the captures, so we need to\n // end the borrow scope before ending the lifetimes of the captures themselves.\n self.createDestroyValue(operand: pai)\n self.destroyCapturedArgs(for: pai)\n } else {\n self.destroyCapturedArgs(for: pai)\n self.createDeallocStack(pai)\n context.notifyInvalidatedStackNesting()\n }\n } else {\n if pai.parentFunction.hasOwnership {\n self.createDestroyValue(operand: pai)\n } else {\n self.createReleaseValue(operand: pai)\n }\n }\n }\n}\n\nprivate extension FunctionConvention {\n func getSpecializedParameters(basedOn callSite: CallSite) -> [ParameterInfo] {\n let applySiteCallee = callSite.applyCallee\n var specializedParamInfoList: [ParameterInfo] = []\n\n // Start by adding all original parameters except for the closure parameters.\n let firstParamIndex = applySiteCallee.argumentConventions.firstParameterIndex\n for (index, paramInfo) in applySiteCallee.convention.parameters.enumerated() {\n let argIndex = index + firstParamIndex\n if !callSite.hasClosureArg(at: argIndex) {\n specializedParamInfoList.append(paramInfo)\n }\n }\n\n // Now, append parameters captured by each of the original closure parameter.\n //\n // Captured parameters are always appended to the function signature. 
If the argument type of the captured \n // parameter in the callee is:\n // - direct and trivial, pass the new parameter as Direct_Unowned.\n // - direct and non-trivial, pass the new parameter as Direct_Owned.\n // - indirect, pass the new parameter using the same parameter convention as in\n // the original closure.\n for closureArgDesc in callSite.closureArgDescriptors {\n if let closure = closureArgDesc.closure as? PartialApplyInst {\n let closureCallee = closureArgDesc.callee\n let closureCalleeConvention = closureCallee.convention\n let unappliedArgumentCount = closure.unappliedArgumentCount - closureCalleeConvention.indirectSILResultCount\n\n let prevCapturedParameters =\n closureCalleeConvention\n .parameters[unappliedArgumentCount...]\n .enumerated()\n .map { index, paramInfo in\n let argIndexOfParam = closureCallee.argumentConventions.firstParameterIndex + unappliedArgumentCount + index\n let argType = closureCallee.argumentTypes[argIndexOfParam]\n return paramInfo.withSpecializedConvention(isArgTypeTrivial: argType.isTrivial(in: closureCallee))\n }\n\n specializedParamInfoList.append(contentsOf: prevCapturedParameters)\n }\n }\n\n return specializedParamInfoList\n }\n}\n\nprivate extension ParameterInfo {\n func withSpecializedConvention(isArgTypeTrivial: Bool) -> Self {\n let specializedParamConvention = self.convention.isAllowedIndirectConvForClosureSpec\n ? self.convention\n : isArgTypeTrivial ? 
ArgumentConvention.directUnowned : ArgumentConvention.directOwned\n\n return ParameterInfo(type: self.type, convention: specializedParamConvention, options: self.options, \n hasLoweredAddresses: self.hasLoweredAddresses)\n }\n\n var isTrivialNoescapeClosure: Bool {\n SILFunctionType_isTrivialNoescape(type.bridged)\n }\n}\n\nprivate extension ArgumentConvention {\n var isAllowedIndirectConvForClosureSpec: Bool {\n switch self {\n case .indirectInout, .indirectInoutAliasable:\n return true\n default:\n return false\n }\n }\n}\n\nprivate extension PartialApplyInst {\n /// True, if the closure obtained from this partial_apply is the\n /// pullback returned from an autodiff VJP\n var isPullbackInResultOfAutodiffVJP: Bool {\n if self.parentFunction.isAutodiffVJP,\n let use = self.uses.singleUse,\n let tupleInst = use.instruction as? TupleInst,\n let returnInst = self.parentFunction.returnInstruction,\n tupleInst == returnInst.returnedValue\n {\n return true\n }\n\n return false\n }\n\n var isPartialApplyOfThunk: Bool {\n if self.numArguments == 1, \n let fun = self.referencedFunction,\n fun.thunkKind == .reabstractionThunk || fun.thunkKind == .thunk,\n self.arguments[0].type.isLoweredFunction,\n self.arguments[0].type.isReferenceCounted(in: self.parentFunction) || self.callee.type.isThickFunction\n {\n return true\n }\n \n return false\n }\n\n var hasOnlyInoutIndirectArguments: Bool {\n self.argumentOperands\n .filter { !$0.value.type.isObject }\n .allSatisfy { self.convention(of: $0)!.isInout } \n }\n}\n\nprivate extension Instruction {\n var asSupportedClosure: SingleValueInstruction? 
{\n switch self {\n case let tttf as ThinToThickFunctionInst where tttf.callee is FunctionRefInst:\n return tttf\n // TODO: figure out what to do with non-inout indirect arguments\n // https://forums.swift.org/t/non-inout-indirect-types-not-supported-in-closure-specialization-optimization/70826\n case let pai as PartialApplyInst where pai.callee is FunctionRefInst && pai.hasOnlyInoutIndirectArguments:\n return pai\n default:\n return nil\n }\n }\n\n var isSupportedClosure: Bool {\n asSupportedClosure != nil\n }\n}\n\nprivate extension ApplySite {\n var calleeIsDynamicFunctionRef: Bool {\n return !(callee is DynamicFunctionRefInst || callee is PreviousDynamicFunctionRefInst)\n }\n}\n\nprivate extension Function {\n var effectAllowsSpecialization: Bool {\n switch self.effectAttribute {\n case .readNone, .readOnly, .releaseNone: return false\n default: return true\n }\n }\n}\n\n// ===================== Utility Types ===================== //\nprivate struct OrderedDict<Key: Hashable, Value> {\n private var valueIndexDict: [Key: Int] = [:]\n private var entryList: [(Key, Value)] = []\n\n subscript(key: Key) -> Value? 
{\n if let index = valueIndexDict[key] {\n return entryList[index].1\n }\n return nil\n }\n\n mutating func insert(key: Key, value: Value) {\n if valueIndexDict[key] == nil {\n valueIndexDict[key] = entryList.count\n entryList.append((key, value))\n }\n }\n\n mutating func update(key: Key, value: Value) {\n if let index = valueIndexDict[key] {\n entryList[index].1 = value\n }\n }\n\n var keys: LazyMapSequence<Array<(Key, Value)>, Key> {\n entryList.lazy.map { $0.0 }\n }\n\n var values: LazyMapSequence<Array<(Key, Value)>, Value> {\n entryList.lazy.map { $0.1 }\n }\n}\n\nprivate typealias CallSiteMap = OrderedDict<SingleValueInstruction, CallSite>\n\nprivate extension CallSiteMap {\n var callSites: [CallSite] {\n Array(self.values)\n }\n}\n\n/// Represents all the information required to represent a closure in isolation, i.e., outside of a callsite context\n/// where the closure may be getting passed as an argument.\n///\n/// Composed with other information inside a `ClosureArgDescriptor` to represent a closure as an argument at a callsite.\nprivate struct ClosureInfo {\n let closure: SingleValueInstruction\n let lifetimeFrontier: [Instruction]\n\n init(closure: SingleValueInstruction, lifetimeFrontier: [Instruction]) {\n self.closure = closure\n self.lifetimeFrontier = lifetimeFrontier\n }\n\n}\n\n/// Represents a closure as an argument at a callsite.\nprivate struct ClosureArgDescriptor {\n let closureInfo: ClosureInfo\n /// The index of the closure in the callsite's argument list.\n let closureArgumentIndex: Int\n let parameterInfo: ParameterInfo\n\n var closure: SingleValueInstruction {\n closureInfo.closure\n }\n var lifetimeFrontier: [Instruction] {\n closureInfo.lifetimeFrontier\n }\n\n var isPartialApplyOnStack: Bool {\n if let pai = closure as? PartialApplyInst {\n return pai.isOnStack\n }\n return false\n }\n\n var callee: Function {\n if let pai = closure as? PartialApplyInst {\n return pai.referencedFunction!\n } else {\n return (closure as! 
ThinToThickFunctionInst).referencedFunction!\n }\n }\n\n var location: Location {\n closure.location\n }\n\n var closureArgIndex: Int {\n closureArgumentIndex\n }\n\n var closureParamInfo: ParameterInfo {\n parameterInfo\n }\n\n var numArguments: Int {\n if let pai = closure as? PartialApplyInst {\n return pai.numArguments\n } else {\n return 0\n }\n }\n\n var arguments: LazyMapSequence<OperandArray, Value> {\n if let pai = closure as? PartialApplyInst {\n return pai.arguments\n }\n\n return OperandArray.empty.lazy.map { $0.value } as LazyMapSequence<OperandArray, Value>\n }\n\n var isClosureGuaranteed: Bool {\n closureParamInfo.convention.isGuaranteed\n }\n\n var isClosureConsumed: Bool {\n closureParamInfo.convention.isConsumed\n }\n}\n\n/// Represents a callsite containing one or more closure arguments.\nprivate struct CallSite {\n let applySite: ApplySite\n var closureArgDescriptors: [ClosureArgDescriptor] = []\n\n init(applySite: ApplySite) {\n self.applySite = applySite\n }\n\n mutating func appendClosureArgDescriptor(_ descriptor: ClosureArgDescriptor) {\n self.closureArgDescriptors.append(descriptor)\n }\n\n var applyCallee: Function {\n applySite.referencedFunction!\n }\n\n var reachableExitBBsInCallee: [BasicBlock] {\n applyCallee.blocks.filter { $0.isReachableExitBlock }\n }\n\n func hasClosureArg(at index: Int) -> Bool {\n closureArgDescriptors.contains { $0.closureArgumentIndex == index }\n }\n\n func closureArgDesc(at index: Int) -> ClosureArgDescriptor? {\n closureArgDescriptors.first { $0.closureArgumentIndex == index }\n }\n\n func appliedArgForClosure(at index: Int) -> Value? 
{\n if let closureArgDesc = closureArgDesc(at: index) {\n return applySite.arguments[closureArgDesc.closureArgIndex - applySite.unappliedArgumentCount]\n }\n\n return nil\n }\n\n func specializedCalleeName(_ context: FunctionPassContext) -> String {\n let closureArgs = Array(self.closureArgDescriptors.map { $0.closure })\n let closureIndices = Array(self.closureArgDescriptors.map { $0.closureArgIndex })\n\n return context.mangle(withClosureArguments: closureArgs, closureArgIndices: closureIndices, \n from: applyCallee)\n }\n}\n\n// ===================== Unit tests ===================== //\n\nlet gatherCallSitesTest = FunctionTest("closure_specialize_gather_call_sites") { function, arguments, context in\n print("Specializing closures in function: \(function.name)")\n print("===============================================")\n var callSites = gatherCallSites(in: function, context)\n\n callSites.forEach { callSite in\n print("PartialApply call site: \(callSite.applySite)")\n print("Passed in closures: ")\n for index in callSite.closureArgDescriptors.indices {\n var closureArgDescriptor = callSite.closureArgDescriptors[index]\n print("\(index+1). 
\(closureArgDescriptor.closureInfo.closure)")\n }\n }\n print("\n")\n}\n\nlet specializedFunctionSignatureAndBodyTest = FunctionTest(\n "closure_specialize_specialized_function_signature_and_body") { function, arguments, context in\n\n var callSites = gatherCallSites(in: function, context)\n\n for callSite in callSites {\n let (specializedFunction, _) = getOrCreateSpecializedFunction(basedOn: callSite, context)\n print("Generated specialized function: \(specializedFunction.name)")\n print("\(specializedFunction)\n")\n }\n}\n\nlet rewrittenCallerBodyTest = FunctionTest("closure_specialize_rewritten_caller_body") { function, arguments, context in\n var callSites = gatherCallSites(in: function, context)\n\n for callSite in callSites {\n let (specializedFunction, _) = getOrCreateSpecializedFunction(basedOn: callSite, context)\n rewriteApplyInstruction(using: specializedFunction, callSite: callSite, context)\n\n print("Rewritten caller body for: \(function.name):")\n print("\(function)\n")\n }\n}\n
dataset_sample\swift\swift\cpp_apple_swift_SwiftCompilerSources_Sources_Optimizer_FunctionPasses_ClosureSpecialization.swift
cpp_apple_swift_SwiftCompilerSources_Sources_Optimizer_FunctionPasses_ClosureSpecialization.swift
Swift
59,705
0.75
0.132609
0.224872
node-utils
207
2023-11-19T09:31:59.643701
BSD-3-Clause
false
12304130f990bf5659aacc8f9e3a259d
//===--- ComputeEscapeEffects.swift ----------------------------------------==//\n//\n// This source file is part of the Swift.org open source project\n//\n// Copyright (c) 2014 - 2022 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n\nimport SIL\n\n/// Computes escape effects for function arguments.\n///\n/// For example, if an argument does not escape, adds a non-escaping effect,\n/// ```\n/// sil @foo : $@convention(thin) (@guaranteed X) -> () {\n/// [%0: noecape **]\n/// bb0(%0 : $X):\n/// %1 = tuple ()\n/// return %1 : $()\n/// }\n/// ```\n/// The pass does not try to change or re-compute _defined_ effects.\n///\nlet computeEscapeEffects = FunctionPass(name: "compute-escape-effects") {\n (function: Function, context: FunctionPassContext) in\n\n var newEffects = function.effects.escapeEffects.arguments.filter {!$0.isDerived }\n\n let returnInst = function.returnInstruction\n let argsWithDefinedEffects = getArgIndicesWithDefinedEscapingEffects(of: function)\n\n for arg in function.arguments {\n // We are not interested in arguments with trivial types.\n if arg.hasTrivialNonPointerType { continue }\n \n // Also, we don't want to override defined effects.\n if argsWithDefinedEffects.contains(arg.index) { continue }\n\n struct IgnoreRecursiveCallVisitor : EscapeVisitor {\n func visitUse(operand: Operand, path: EscapePath) -> UseResult {\n return isOperandOfRecursiveCall(operand) ? 
.ignore : .continueWalk\n }\n }\n\n // First check: is the argument (or a projected value of it) escaping at all?\n if !arg.at(.anything).isEscaping(using: IgnoreRecursiveCallVisitor(),\n initialWalkingDirection: .down,\n context)\n {\n let effect = EscapeEffects.ArgumentEffect(.notEscaping, argumentIndex: arg.index,\n pathPattern: SmallProjectionPath(.anything))\n newEffects.append(effect)\n continue\n }\n \n // Now compute effects for two important cases:\n // * the argument itself + any value projections, and...\n if addArgEffects(arg, argPath: SmallProjectionPath(), to: &newEffects, returnInst, context) {\n // * single class indirections\n _ = addArgEffects(arg, argPath: SmallProjectionPath(.anyValueFields).push(.anyClassField),\n to: &newEffects, returnInst, context)\n }\n }\n\n // Don't modify the effects if they didn't change. This avoids sending a change notification\n // which can trigger unnecessary other invalidations.\n if newEffects == function.effects.escapeEffects.arguments {\n return\n }\n\n context.modifyEffects(in: function) { (effects: inout FunctionEffects) in\n effects.escapeEffects.arguments = newEffects\n }\n}\n\n/// Returns true if an argument effect was added.\nprivate\nfunc addArgEffects(_ arg: FunctionArgument, argPath ap: SmallProjectionPath,\n to newEffects: inout [EscapeEffects.ArgumentEffect],\n _ returnInst: ReturnInst?, _ context: FunctionPassContext) -> Bool {\n // Correct the path if the argument is not a class reference itself, but a value type\n // containing one or more references.\n let argPath = arg.type.isClass ? 
ap : ap.push(.anyValueFields)\n \n guard let result = arg.at(argPath).visit(using: ArgEffectsVisitor(), initialWalkingDirection: .down, context) else {\n return false\n }\n \n // If the function never returns, the argument can not escape to another arg/return.\n guard let returnInst = arg.parentFunction.returnInstruction else {\n return false\n }\n\n let effect: EscapeEffects.ArgumentEffect\n switch result {\n case .notSet:\n effect = EscapeEffects.ArgumentEffect(.notEscaping, argumentIndex: arg.index, pathPattern: argPath)\n\n case .toReturn(let toPath):\n let visitor = IsExclusiveReturnEscapeVisitor(argument: arg, argumentPath: argPath, returnPath: toPath)\n let exclusive = visitor.isExclusiveEscape(returnInst: returnInst, context)\n effect = EscapeEffects.ArgumentEffect(.escapingToReturn(toPath: toPath, isExclusive: exclusive),\n argumentIndex: arg.index, pathPattern: argPath)\n\n case .toArgument(let toArgIdx, let toPath):\n // Exclusive argument -> argument effects cannot appear because such an effect would\n // involve a store which is not permitted for exclusive escapes.\n effect = EscapeEffects.ArgumentEffect(.escapingToArgument(toArgumentIndex: toArgIdx, toPath: toPath),\n argumentIndex: arg.index, pathPattern: argPath)\n }\n newEffects.append(effect)\n return true\n}\n\n/// Returns a set of argument indices for which there are "defined" effects (as opposed to derived effects).\nprivate func getArgIndicesWithDefinedEscapingEffects(of function: Function) -> Set<Int> {\n var argsWithDefinedEffects = Set<Int>()\n\n for effect in function.effects.escapeEffects.arguments {\n if effect.isDerived { continue }\n\n argsWithDefinedEffects.insert(effect.argumentIndex)\n switch effect.kind {\n case .notEscaping, .escapingToReturn:\n break\n case .escapingToArgument(let toArgIdx, _):\n argsWithDefinedEffects.insert(toArgIdx)\n }\n }\n return argsWithDefinedEffects\n}\n\n/// Returns true if `op` is passed to a recursive call to the current function -\n/// at the same 
argument index.\nprivate func isOperandOfRecursiveCall(_ op: Operand) -> Bool {\n let inst = op.instruction\n if let applySite = inst as? FullApplySite,\n let callee = applySite.referencedFunction,\n callee == inst.parentFunction,\n let argIdx = applySite.calleeArgumentIndex(of: op),\n op.value == callee.arguments[argIdx] {\n return true\n }\n return false\n}\n\nprivate struct ArgEffectsVisitor : EscapeVisitorWithResult {\n enum EscapeDestination {\n case notSet\n case toReturn(SmallProjectionPath)\n case toArgument(Int, SmallProjectionPath) // argument index, path\n }\n var result = EscapeDestination.notSet\n\n mutating func visitUse(operand: Operand, path: EscapePath) -> UseResult {\n if operand.instruction is ReturnInst {\n // The argument escapes to the function return\n if path.followStores {\n // The escaping path must not introduce a followStores.\n return .abort\n }\n switch result {\n case .notSet:\n result = .toReturn(path.projectionPath)\n case .toReturn(let oldPath):\n result = .toReturn(oldPath.merge(with: path.projectionPath))\n case .toArgument:\n return .abort\n }\n return .ignore\n }\n if isOperandOfRecursiveCall(operand) {\n return .ignore\n }\n return .continueWalk\n }\n\n mutating func visitDef(def: Value, path: EscapePath) -> DefResult {\n guard let destArg = def as? FunctionArgument else {\n return .continueWalkUp\n }\n // The argument escapes to another argument (e.g. 
an out or inout argument)\n if path.followStores {\n // The escaping path must not introduce a followStores.\n return .abort\n }\n let argIdx = destArg.index\n switch result {\n case .notSet:\n result = .toArgument(argIdx, path.projectionPath)\n case .toArgument(let oldArgIdx, let oldPath) where oldArgIdx == argIdx:\n result = .toArgument(argIdx, oldPath.merge(with: path.projectionPath))\n default:\n return .abort\n }\n return .walkDown\n }\n}\n\n/// Returns true if when walking up from the return instruction, the `fromArgument`\n/// is the one and only argument which is reached - with a matching `fromPath`.\nprivate struct IsExclusiveReturnEscapeVisitor : EscapeVisitorWithResult {\n let argument: Argument\n let argumentPath: SmallProjectionPath\n let returnPath: SmallProjectionPath\n var result = false\n\n func isExclusiveEscape(returnInst: ReturnInst, _ context: FunctionPassContext) -> Bool {\n return returnInst.returnedValue.at(returnPath).visit(using: self, context) ?? false\n }\n\n mutating func visitUse(operand: Operand, path: EscapePath) -> UseResult {\n switch operand.instruction {\n case is ReturnInst:\n if path.followStores { return .abort }\n if path.projectionPath.matches(pattern: returnPath) {\n return .ignore\n }\n return .abort\n case let si as StoringInstruction:\n // Don't allow store instructions because this allows the EscapeUtils to walk up\n // an apply result with `followStores`.\n if operand == si.destinationOperand {\n return .abort\n }\n case let ca as CopyAddrInst:\n // `copy_addr` is like a store.\n if operand == ca.destinationOperand {\n return .abort\n }\n default:\n break\n }\n return .continueWalk\n }\n\n mutating func visitDef(def: Value, path: EscapePath) -> DefResult {\n guard let arg = def as? FunctionArgument else {\n return .continueWalkUp\n }\n if path.followStores { return .abort }\n if arg == argument && path.projectionPath.matches(pattern: argumentPath) {\n result = true\n return .walkDown\n }\n return .abort\n }\n}\n
dataset_sample\swift\swift\cpp_apple_swift_SwiftCompilerSources_Sources_Optimizer_FunctionPasses_ComputeEscapeEffects.swift
cpp_apple_swift_SwiftCompilerSources_Sources_Optimizer_FunctionPasses_ComputeEscapeEffects.swift
Swift
9,271
0.95
0.222222
0.223214
react-lib
533
2023-09-12T19:27:48.202696
MIT
false
9ee65ea1ff57cb3b1eabd8938011a468
//===--- ComputeSideEffects.swift ------------------------------------------==//\n//\n// This source file is part of the Swift.org open source project\n//\n// Copyright (c) 2014 - 2022 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n\nimport SIL\n\n/// Computes function side effects.\n///\n/// Computes the `SideEffects` for a function, which consists of argument- and global\n/// effects.\n/// For example, if a function writes to the first argument and reads from a global variable,\n/// the side effects\n/// ```\n/// [%0: write v**]\n/// [global: read]\n/// ```\n/// are computed.\n///\nlet computeSideEffects = FunctionPass(name: "compute-side-effects") {\n (function: Function, context: FunctionPassContext) in\n\n if function.isDefinedExternally {\n // We cannot assume anything about function, which are defined in another module,\n // even if the serialized SIL of its body is available in the current module.\n // If the other module was compiled with library evolution, the implementation\n // (and it's effects) might change in future versions of the other module/library.\n //\n // TODO: only do this for functions which are de-serialized from library-evolution modules.\n return\n }\n\n if function.effectAttribute != .none {\n // Don't try to infer side effects if there are defined effect attributes.\n return\n }\n\n var collectedEffects = CollectedEffects(function: function, context)\n\n // First step: collect effects from all instructions.\n //\n for block in function.blocks {\n for inst in block.instructions {\n collectedEffects.addInstructionEffects(inst)\n }\n }\n\n // Second step: If an argument has unknown uses, we must add all previously collected\n // global effects to 
the argument, because we don't know to which "global" side-effect\n // instruction the argument might have escaped.\n for argument in function.arguments {\n collectedEffects.addEffectsForEscapingArgument(argument: argument)\n collectedEffects.addEffectsForConsumingArgument(argument: argument)\n }\n\n // Don't modify the effects if they didn't change. This avoids sending a change notification\n // which can trigger unnecessary other invalidations.\n if let existingEffects = function.effects.sideEffects,\n existingEffects.arguments == collectedEffects.argumentEffects,\n existingEffects.global == collectedEffects.globalEffects {\n return\n }\n\n // Finally replace the function's side effects.\n context.modifyEffects(in: function) { (effects: inout FunctionEffects) in\n effects.sideEffects = SideEffects(arguments: collectedEffects.argumentEffects, global: collectedEffects.globalEffects)\n }\n}\n\n/// The collected argument and global side effects of the function.\nprivate struct CollectedEffects {\n\n private let context: FunctionPassContext\n private let calleeAnalysis: CalleeAnalysis\n\n private(set) var argumentEffects: [SideEffects.ArgumentEffects]\n private(set) var globalEffects = SideEffects.GlobalEffects()\n\n init(function: Function, _ context: FunctionPassContext) {\n self.context = context\n self.calleeAnalysis = context.calleeAnalysis\n self.argumentEffects = Array(repeating: SideEffects.ArgumentEffects(), count: function.entryBlock.arguments.count)\n }\n\n mutating func addInstructionEffects(_ inst: Instruction) {\n var checkedIfDeinitBarrier = false\n switch inst {\n case is CopyValueInst, is RetainValueInst, is StrongRetainInst:\n addEffects(.copy, to: inst.operands[0].value, fromInitialPath: SmallProjectionPath(.anyValueFields))\n\n case is DestroyValueInst, is DestroyNotEscapedClosureInst, is ReleaseValueInst, is StrongReleaseInst:\n addDestroyEffects(ofValue: inst.operands[0].value)\n\n case let da as DestroyAddrInst:\n addDestroyEffects(ofAddress: 
da.destroyedAddress)\n\n case let copy as CopyAddrInst:\n addEffects(.read, to: copy.source)\n addEffects(.write, to: copy.destination)\n\n if !copy.isTakeOfSrc {\n addEffects(.copy, to: copy.source)\n }\n if !copy.isInitializationOfDest {\n addDestroyEffects(ofAddress: copy.destination)\n }\n\n case let store as StoreInst:\n addEffects(.write, to: store.destination)\n if store.storeOwnership == .assign {\n addDestroyEffects(ofAddress: store.destination)\n }\n\n case let store as StoreWeakInst:\n addEffects(.write, to: store.destination)\n\n case let store as StoreUnownedInst:\n addEffects(.write, to: store.destination)\n\n case is LoadInst, is LoadWeakInst, is LoadUnownedInst, is LoadBorrowInst:\n let addr = inst.operands[0].value\n addEffects(.read, to: addr)\n\n case let apply as FullApplySite:\n if apply.callee.type.isCalleeConsumedFunction {\n addEffects(.destroy, to: apply.callee)\n globalEffects = .worstEffects\n }\n handleApply(apply)\n checkedIfDeinitBarrier = true\n\n case let pa as PartialApplyInst:\n if pa.canBeAppliedInFunction(context) {\n // Only if the created closure can actually be called in the function\n // we have to consider side-effects within the closure.\n handleApply(pa)\n checkedIfDeinitBarrier = true\n }\n // In addition to the effects of the apply, also consider the\n // effects of the capture, which reads the captured value in\n // order to move it into the context. This only applies to\n // addressable values, because capturing does not dereference\n // any class objects.\n //\n // Ignore captures for on-stack partial applies. They only\n // bitwise-move or capture by address, so the call to\n // handleApply above is sufficient. 
And, if they are not applied\n // in this function, then they are never applied.\n if !pa.isOnStack {\n // callee is never an address.\n for argument in pa.arguments {\n if argument.type.isAddress {\n addEffects(.read, to: argument)\n }\n }\n }\n\n case let fl as FixLifetimeInst:\n // A fix_lifetime instruction acts like a read on the operand to prevent\n // releases moving above the fix_lifetime.\n addEffects(.read, to: fl.operand.value)\n\n // Instructions which have effects defined in SILNodes.def, but those effects are\n // not relevant for our purpose.\n // In most cases these conservative effects are there to prevent code re-scheduling within\n // the function. But this is not relevant for side effect summaries which we compute here.\n case is DeallocStackInst, is DeallocStackRefInst,\n is BeginAccessInst, is EndAccessInst,\n is BeginBorrowInst, is EndBorrowInst,\n is DebugValueInst, is KeyPathInst, is FixLifetimeInst,\n is EndApplyInst, is AbortApplyInst,\n is EndCOWMutationInst, is UnconditionalCheckedCastInst,\n is CondFailInst:\n break\n\n case is BeginCOWMutationInst, is IsUniqueInst:\n // Model reference count reading as "destroy" for now. Although we could introduce a "read-refcount"\n // effect, it would not give any significant benefit in any of our current optimizations.\n addEffects(.destroy, to: inst.operands[0].value, fromInitialPath: SmallProjectionPath(.anyValueFields))\n\n default:\n if inst.mayRelease {\n globalEffects = .worstEffects\n }\n if inst.mayReadFromMemory {\n globalEffects.memory.read = true\n }\n if inst.mayWriteToMemory {\n globalEffects.memory.write = true\n }\n if inst.hasUnspecifiedSideEffects {\n globalEffects.ownership.copy = true\n }\n\n // Ignore "local" allocations, which don't escape. They cannot be observed\n // from outside the function.\n if let alloc = inst as? 
Allocation, !(inst is AllocStackInst),\n alloc.isEscaping(context) {\n globalEffects.allocates = true\n }\n }\n // If we didn't already, check whether the instruction could be a deinit\n // barrier. If it's an apply of some sort, that was already done in\n // handleApply.\n if !checkedIfDeinitBarrier,\n inst.mayBeDeinitBarrierNotConsideringSideEffects {\n globalEffects.isDeinitBarrier = true\n }\n }\n \n mutating func addEffectsForEscapingArgument(argument: FunctionArgument) {\n var escapeWalker = ArgumentEscapingWalker(context)\n\n if escapeWalker.hasUnknownUses(argument: argument) {\n // Worst case: we don't know anything about how the argument escapes.\n addEffects(globalEffects.restrictedTo(argument: argument.at(SmallProjectionPath(.anything)),\n withConvention: argument.convention), to: argument)\n\n } else if escapeWalker.foundTakingLoad {\n // In most cases we can just ignore loads. But if the load is actually "taking" the\n // underlying memory allocation, we must consider this as a "destroy", because we don't\n // know what's happening with the loaded value. 
If there is any destroying instruction in the\n // function, it might be the destroy of the loaded value.\n let effects = SideEffects.GlobalEffects(ownership: globalEffects.ownership)\n addEffects(effects.restrictedTo(argument: argument.at(SmallProjectionPath(.anything)),\n withConvention: argument.convention), to: argument)\n\n } else if escapeWalker.foundConsumingPartialApply && globalEffects.ownership.destroy {\n // Similar situation with apply instructions which consume the callee closure.\n addEffects(.destroy, to: argument)\n }\n }\n\n mutating func addEffectsForConsumingArgument(argument: FunctionArgument) {\n if argument.convention == .indirectIn {\n // Usually there _must_ be a read from a consuming in-argument, because the function has to consume the argument.\n // But in the special case if all control paths end up in an `unreachable`, the consuming read might have been\n // dead-code eliminated. Therefore make sure to add the read-effect in any case. Otherwise it can result\n // in memory lifetime failures at a call site.\n addEffects(.read, to: argument)\n }\n }\n\n private mutating func handleApply(_ apply: ApplySite) {\n let callees = calleeAnalysis.getCallees(callee: apply.callee)\n let args = apply.argumentOperands.lazy.map {\n (calleeArgumentIndex: apply.calleeArgumentIndex(of: $0)!,\n callerArgument: $0.value)\n }\n addEffects(ofFunctions: callees, withArguments: args)\n }\n\n private mutating func addDestroyEffects(ofValue value: Value) {\n // First thing: add the destroy effect itself.\n addEffects(.destroy, to: value)\n\n if value.type.isClass {\n // Treat destroying a class value just like a call to it's destructor(s).\n let destructors = calleeAnalysis.getDestructors(of: value.type)\n let theSelfArgument = CollectionOfOne((calleeArgumentIndex: 0, callerArgument: value))\n addEffects(ofFunctions: destructors, withArguments: theSelfArgument)\n } else {\n // TODO: dig into the type and check for destructors of individual class fields\n 
addEffects(.worstEffects, to: value)\n globalEffects = .worstEffects\n }\n }\n\n private mutating func addDestroyEffects(ofAddress address: Value) {\n // First thing: add the destroy effect itself.\n addEffects(.destroy, to: address)\n\n // A destroy also involves a read from the address.\n // E.g. a `destroy_addr` is equivalent to a `%x = load [take]` and `destroy_value %x`.\n addEffects(.read, to: address)\n // Conceptually, it's also a write, because the stored value is not available anymore after the destroy\n addEffects(.write, to: address)\n\n // Second: add all effects of (potential) destructors which might be called if the destroy deallocates an object.\n // Note that we don't need to add any effects specific to the `address`, because the memory location is not\n // affected by a destructor of the stored value (and effects don't include anything which is loaded from memory).\n if let destructors = calleeAnalysis.getDestructors(of: address.type) {\n for destructor in destructors {\n globalEffects.merge(with: destructor.getSideEffects())\n }\n } else {\n globalEffects = .worstEffects\n }\n }\n\n private mutating func addEffects<Arguments: Sequence>(ofFunctions callees: FunctionArray?,\n withArguments arguments: Arguments)\n where Arguments.Element == (calleeArgumentIndex: Int, callerArgument: Value) {\n // The argument summary for @in_cxx is insufficient in OSSA because the function body does not contain the\n // destroy. But the call is still effectively a release from the caller's perspective.\n guard let callees = callees else {\n // We don't know which function(s) are called.\n globalEffects = .worstEffects\n for (_, argument) in arguments {\n addEffects(.worstEffects, to: argument)\n }\n return\n }\n for callee in callees {\n if let sideEffects = callee.effects.sideEffects {\n globalEffects.merge(with: sideEffects.global)\n } else {\n // The callee doesn't have any computed effects. 
At least we can do better\n // if it has any defined effect attribute (like e.g. `[readnone]`).\n globalEffects.merge(with: callee.definedGlobalEffects)\n }\n }\n\n for (calleeArgIdx, argument) in arguments {\n for callee in callees {\n if let sideEffects = callee.effects.sideEffects {\n let calleeEffect = sideEffects.getArgumentEffects(for: calleeArgIdx)\n\n // Merge the callee effects into this function's effects\n if let calleePath = calleeEffect.read { addEffects(.read, to: argument, fromInitialPath: calleePath) }\n if let calleePath = calleeEffect.write { addEffects(.write, to: argument, fromInitialPath: calleePath) }\n if let calleePath = calleeEffect.copy { addEffects(.copy, to: argument, fromInitialPath: calleePath) }\n if let calleePath = calleeEffect.destroy { addEffects(.destroy, to: argument, fromInitialPath: calleePath) }\n } else {\n let convention = callee.argumentConventions[calleeArgIdx]\n let wholeArgument = argument.at(defaultPath(for: argument))\n let calleeEffects = callee.getSideEffects(forArgument: wholeArgument,\n atIndex: calleeArgIdx,\n withConvention: convention)\n addEffects(calleeEffects.restrictedTo(argument: wholeArgument, withConvention: convention), to: argument)\n }\n }\n }\n }\n\n /// Adds effects to a specific value.\n ///\n /// If the value comes from an argument (or multiple arguments), then the effects are added\n /// to the corresponding `argumentEffects`. 
Otherwise they are added to the `global` effects.\n private mutating func addEffects(_ effects: SideEffects.GlobalEffects, to value: Value) {\n addEffects(effects, to: value, fromInitialPath: defaultPath(for: value))\n }\n\n private mutating func addEffects(_ effects: SideEffects.GlobalEffects, to value: Value,\n fromInitialPath: SmallProjectionPath) {\n\n /// Collects the (non-address) roots of a value.\n struct GetRootsWalker : ValueUseDefWalker {\n // All function-argument roots of the value, including the path from the arguments to the values.\n var roots: Stack<(FunctionArgument, SmallProjectionPath)>\n\n // True, if the value has at least one non function-argument root.\n var nonArgumentRootsFound = false\n\n var walkUpCache = WalkerCache<SmallProjectionPath>()\n\n init(_ context: FunctionPassContext) {\n self.roots = Stack(context)\n }\n\n mutating func rootDef(value: Value, path: SmallProjectionPath) -> WalkResult {\n if let arg = value as? FunctionArgument {\n roots.push((arg, path))\n } else if value is Allocation {\n // Ignore effects on local allocations - even if those allocations escape.\n // Effects on local (potentially escaping) allocations cannot be relevant in the caller.\n return .continueWalk\n } else {\n nonArgumentRootsFound = true\n }\n return .continueWalk\n }\n }\n\n var findRoots = GetRootsWalker(context)\n if value.type.isAddress {\n let accessPath = value.getAccessPath(fromInitialPath: fromInitialPath)\n switch accessPath.base {\n case .stack:\n // We don't care about read and writes from/to stack locations (because they are\n // not observable from outside the function). 
But we need to consider copies and destroys.\n // For example, an argument could be "moved" to a stack location, which is eventually destroyed.\n // In this case it's in fact the original argument value which is destroyed.\n globalEffects.ownership.merge(with: effects.ownership)\n return\n case .argument(let arg):\n // The `value` is an address projection of an indirect argument.\n argumentEffects[arg.index].merge(effects, with: accessPath.projectionPath)\n return\n default:\n // Handle address `value`s which are are field projections from class references in direct arguments.\n if !findRoots.visitAccessStorageRoots(of: accessPath) {\n findRoots.nonArgumentRootsFound = true\n }\n }\n } else {\n _ = findRoots.walkUp(value: value, path: fromInitialPath)\n }\n // Because of phi-arguments, a single (non-address) `value` can come from multiple arguments.\n while let (arg, path) = findRoots.roots.pop() {\n argumentEffects[arg.index].merge(effects, with: path)\n }\n if findRoots.nonArgumentRootsFound {\n // The `value` comes from some non-argument root, e.g. 
a load instruction.\n globalEffects.merge(with: effects)\n }\n }\n}\n\nprivate func defaultPath(for value: Value) -> SmallProjectionPath {\n if value.type.isAddress {\n return SmallProjectionPath(.anyValueFields)\n }\n if value.type.isClass {\n return SmallProjectionPath(.anyValueFields).push(.anyClassField)\n }\n return SmallProjectionPath(.anyValueFields).push(.anyClassField).push(.anyValueFields)\n}\n\n/// Checks if an argument escapes to some unknown user.\nprivate struct ArgumentEscapingWalker : ValueDefUseWalker, AddressDefUseWalker {\n var walkDownCache = WalkerCache<UnusedWalkingPath>()\n private let calleeAnalysis: CalleeAnalysis\n\n /// True if the argument escapes to a load which (potentially) "takes" the memory location.\n private(set) var foundTakingLoad = false\n\n /// True, if the argument escapes to a closure context which might be destroyed when called.\n private(set) var foundConsumingPartialApply = false\n\n init(_ context: FunctionPassContext) {\n self.calleeAnalysis = context.calleeAnalysis\n }\n\n mutating func hasUnknownUses(argument: FunctionArgument) -> Bool {\n if argument.type.isAddress {\n return walkDownUses(ofAddress: argument, path: UnusedWalkingPath()) == .abortWalk\n } else if argument.hasTrivialNonPointerType {\n return false\n } else {\n return walkDownUses(ofValue: argument, path: UnusedWalkingPath()) == .abortWalk\n }\n }\n\n mutating func leafUse(value: Operand, path: UnusedWalkingPath) -> WalkResult {\n switch value.instruction {\n case is RefTailAddrInst, is RefElementAddrInst, is ProjectBoxInst:\n return walkDownUses(ofAddress: value.instruction as! 
SingleValueInstruction, path: path)\n\n // Warning: all instruction listed here, must also be handled in `CollectedEffects.addInstructionEffects`\n case is CopyValueInst, is RetainValueInst, is StrongRetainInst,\n is DestroyValueInst, is ReleaseValueInst, is StrongReleaseInst,\n is DebugValueInst, is UnconditionalCheckedCastInst,\n is ReturnInst:\n return .continueWalk\n\n case let apply as ApplySite:\n if let pa = apply as? PartialApplyInst, !pa.isOnStack {\n foundConsumingPartialApply = true\n }\n // `CollectedEffects.handleApply` only handles argument operands of an apply, but not the callee operand.\n if let calleeArgIdx = apply.calleeArgumentIndex(of: value),\n let callees = calleeAnalysis.getCallees(callee: apply.callee)\n {\n // If an argument escapes in a called function, we don't know anything about the argument's side effects.\n // For example, it could escape to the return value and effects might occur in the caller.\n for callee in callees {\n if callee.effects.escapeEffects.canEscape(argumentIndex: calleeArgIdx, path: SmallProjectionPath.init(.anyValueFields)) {\n return .abortWalk\n }\n }\n return .continueWalk\n }\n return .abortWalk\n default:\n return .abortWalk\n }\n }\n\n mutating func leafUse(address: Operand, path: UnusedWalkingPath) -> WalkResult {\n let inst = address.instruction\n let function = inst.parentFunction\n switch inst {\n case let copy as CopyAddrInst:\n if address == copy.sourceOperand &&\n !address.value.hasTrivialType &&\n (!function.hasOwnership || copy.isTakeOfSrc) {\n foundTakingLoad = true\n }\n return .continueWalk\n \n case let load as LoadInst:\n if !address.value.hasTrivialType &&\n // In non-ossa SIL we don't know if a load is taking.\n (!function.hasOwnership || load.loadOwnership == .take) {\n foundTakingLoad = true\n }\n return .continueWalk\n\n case is LoadWeakInst, is LoadUnownedInst, is LoadBorrowInst:\n if !function.hasOwnership && !address.value.hasTrivialType {\n foundTakingLoad = true\n }\n return 
.continueWalk\n\n // Warning: all instruction listed here, must also be handled in `CollectedEffects.addInstructionEffects`\n case is StoreInst, is StoreWeakInst, is StoreUnownedInst, is ApplySite, is DestroyAddrInst,\n is DebugValueInst:\n return .continueWalk\n\n default:\n return .abortWalk\n }\n }\n}\n\nprivate extension SideEffects.GlobalEffects {\n static var read: Self { Self(memory: SideEffects.Memory(read: true)) }\n static var write: Self { Self(memory: SideEffects.Memory(write: true)) }\n static var copy: Self { Self(ownership: SideEffects.Ownership(copy: true)) }\n static var destroy: Self { Self(ownership: SideEffects.Ownership(destroy: true)) }\n}\n\nprivate extension SideEffects.ArgumentEffects {\n mutating func merge(_ effects: SideEffects.GlobalEffects, with path: SmallProjectionPath) {\n if effects.memory.read { read.merge(with: path) }\n if effects.memory.write { write.merge(with: path) }\n if effects.ownership.copy { copy.merge(with: path) }\n if effects.ownership.destroy { destroy.merge(with: path) }\n }\n}\n\nprivate extension PartialApplyInst {\n func canBeAppliedInFunction(_ context: FunctionPassContext) -> Bool {\n struct EscapesToApply : EscapeVisitor {\n func visitUse(operand: Operand, path: EscapePath) -> UseResult {\n switch operand.instruction {\n case is FullApplySite:\n // Any escape to apply - regardless if it's an argument or the callee operand - might cause\n // the closure to be called.\n return .abort\n case is ReturnInst:\n return .ignore\n default:\n return .continueWalk\n }\n }\n var followTrivialTypes: Bool { true }\n }\n\n return self.isEscaping(using: EscapesToApply(), initialWalkingDirection: .down, context)\n }\n}\n
dataset_sample\swift\swift\cpp_apple_swift_SwiftCompilerSources_Sources_Optimizer_FunctionPasses_ComputeSideEffects.swift
cpp_apple_swift_SwiftCompilerSources_Sources_Optimizer_FunctionPasses_ComputeSideEffects.swift
Swift
23,689
0.95
0.239286
0.242363
react-lib
659
2025-01-23T22:53:49.915794
GPL-3.0
false
58414af0d265ce405dc8e427f680c0aa
//===--- CopyToBorrowOptimization.swift ------------------------------------==//\n//\n// This source file is part of the Swift.org open source project\n//\n// Copyright (c) 2014 - 2024 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n\nimport SIL\n\n/// 1. replaces a `load [copy]` with a `load_borrow` if possible:\n///\n/// ```\n/// %1 = load [copy] %0\n/// // no writes to %0\n/// destroy_value %1\n/// ```\n/// ->\n/// ```\n/// %1 = load_borrow %0\n/// // no writes to %0\n/// end_borrow %1\n/// ```\n///\n/// 2. removes a `copy_value` where the source is a guaranteed value, if possible:\n///\n/// ```\n/// %1 = copy_value %0 // %0 = a guaranteed value\n/// // uses of %1\n/// destroy_value %1 // borrow scope of %0 is still valid here\n/// ```\n/// ->\n/// ```\n/// // uses of %0\n/// ```\n\n/// The optimization can be done if:\n/// * In caseof a `load`: during the (forward-extended) lifetime of the loaded value the\n/// memory location is not changed.\n/// * In case of a `copy_value`: the (guaranteed) lifetime of the source operand extends\n/// the lifetime of the copied value.\n/// * All (forward-extended) uses of the load or copy support guaranteed ownership.\n/// * The (forward-extended) lifetime of the load or copy ends with `destroy_value`(s).\n///\nlet copyToBorrowOptimization = FunctionPass(name: "copy-to-borrow-optimization") {\n (function: Function, context: FunctionPassContext) in\n\n if !function.hasOwnership {\n return\n }\n\n for inst in function.instructions {\n switch inst {\n case let load as LoadInst:\n optimize(load: load, context)\n case let copy as CopyValueInst:\n optimize(copy: copy, context)\n default:\n break\n }\n }\n}\n\nprivate func optimize(load: 
LoadInst, _ context: FunctionPassContext) {\n if load.loadOwnership != .copy {\n return\n }\n\n var collectedUses = Uses(context)\n defer { collectedUses.deinitialize() }\n if !collectedUses.collectUses(of: load) {\n return\n }\n\n if mayWrite(toAddressOf: load,\n within: collectedUses.destroys,\n usersInDeadEndBlocks: collectedUses.usersInDeadEndBlocks,\n context)\n {\n return\n }\n\n load.replaceWithLoadBorrow(collectedUses: collectedUses)\n}\n\nprivate func optimize(copy: CopyValueInst, _ context: FunctionPassContext) {\n if copy.fromValue.ownership != .guaranteed {\n return\n }\n\n var collectedUses = Uses(context)\n defer { collectedUses.deinitialize() }\n if !collectedUses.collectUses(of: copy) {\n return\n }\n\n var liverange = InstructionRange(begin: copy, context)\n defer { liverange.deinitialize() }\n liverange.insert(contentsOf: collectedUses.destroys)\n liverange.insert(contentsOf: collectedUses.usersInDeadEndBlocks)\n\n if !liverange.isFullyContainedIn(borrowScopeOf: copy.fromValue.lookThroughForwardingInstructions) {\n return\n }\n\n remove(copy: copy, collectedUses: collectedUses, liverange: liverange)\n}\n\nprivate struct Uses {\n let context: FunctionPassContext\n\n // Operand of all forwarding instructions, which - if possible - are converted from "owned" to "guaranteed"\n private(set) var forwardingUses: Stack<Operand>\n\n // All destroys of the load/copy_value and its forwarded values.\n private(set) var destroys: Stack<DestroyValueInst>\n\n // Exit blocks of the load/copy_value's liverange which don't have a destroy.\n // Those are successor blocks of terminators, like `switch_enum`, which do _not_ forward the value.\n // E.g. 
the none-case of a switch_enum of an Optional.\n private(set) var nonDestroyingLiverangeExits: Stack<Instruction>\n\n private(set) var usersInDeadEndBlocks: Stack<Instruction>\n\n init(_ context: FunctionPassContext) {\n self.context = context\n self.forwardingUses = Stack(context)\n self.destroys = Stack(context)\n self.nonDestroyingLiverangeExits = Stack(context)\n self.usersInDeadEndBlocks = Stack(context)\n }\n\n mutating func collectUses(of initialValue: SingleValueInstruction) -> Bool {\n var worklist = ValueWorklist(context)\n defer { worklist.deinitialize() }\n\n // If the load/copy_value is immediately followed by a single `move_value`, use the moved value.\n // Note that `move_value` is _not_ a forwarding instruction.\n worklist.pushIfNotVisited(initialValue.singleMoveValueUser ?? initialValue)\n\n while let value = worklist.pop() {\n for use in value.uses.endingLifetime {\n switch use.instruction {\n case let destroy as DestroyValueInst:\n destroys.append(destroy)\n\n case let forwardingInst as ForwardingInstruction where forwardingInst.canChangeToGuaranteedOwnership:\n forwardingUses.append(use)\n findNonDestroyingLiverangeExits(of: forwardingInst)\n worklist.pushIfNotVisited(contentsOf: forwardingInst.forwardedResults.lazy.filter { $0.ownership == .owned})\n default:\n return false\n }\n }\n // Get potential additional uses in dead-end blocks for which a final destroy is missing.\n // In such a case the dataflow would _not_ visit potential writes to the load's memory location.\n // In the following example, the `load [copy]` must not be converted to a `load_borrow`:\n //\n // %1 = load [copy] %0\n // ...\n // store %2 to %0\n // ...\n // use of %1 // additional use: the lifetime of %1 ends here\n // ... 
// no destroy of %1!\n // unreachable\n //\n // TODO: we can remove this once with have completed OSSA lifetimes throughout the SIL pipeline.\n findAdditionalUsesInDeadEndBlocks(of: value)\n }\n return true\n }\n\n private mutating func findNonDestroyingLiverangeExits(of forwardingInst: ForwardingInstruction) {\n if let termInst = forwardingInst as? TermInst {\n // A terminator instruction can implicitly end the lifetime of its operand in a success block,\n // e.g. a `switch_enum` with a non-payload case block. Such success blocks need an `end_borrow`, though.\n for succ in termInst.successors where !succ.arguments.contains(where: {$0.ownership == .owned}) {\n nonDestroyingLiverangeExits.append(succ.instructions.first!)\n }\n } else if !forwardingInst.forwardedResults.contains(where: { $0.ownership == .owned }) {\n // The forwarding instruction has no owned result, which means it ends the lifetime of its owned operand.\n // This can happen with an `unchecked_enum_data` which extracts a trivial payload out of a\n // non-trivial enum.\n nonDestroyingLiverangeExits.append(forwardingInst.next!)\n }\n }\n\n private mutating func findAdditionalUsesInDeadEndBlocks(of value: Value) {\n var users = Stack<Instruction>(context)\n defer { users.deinitialize() }\n\n // Finds all uses except destroy_value.\n var visitor = InteriorUseWalker(definingValue: value, ignoreEscape: true, visitInnerUses: true, context) {\n let user = $0.instruction\n if !(user is DestroyValueInst) {\n users.append(user)\n }\n return .continueWalk\n }\n defer { visitor.deinitialize() }\n\n _ = visitor.visitUses()\n usersInDeadEndBlocks.append(contentsOf: users)\n }\n\n mutating func deinitialize() {\n forwardingUses.deinitialize()\n destroys.deinitialize()\n nonDestroyingLiverangeExits.deinitialize()\n usersInDeadEndBlocks.deinitialize()\n }\n}\n\nprivate func mayWrite(\n toAddressOf load: LoadInst,\n within destroys: Stack<DestroyValueInst>,\n usersInDeadEndBlocks: Stack<Instruction>,\n _ context: 
FunctionPassContext\n) -> Bool {\n let aliasAnalysis = context.aliasAnalysis\n var worklist = InstructionWorklist(context)\n defer { worklist.deinitialize() }\n\n for destroy in destroys {\n worklist.pushPredecessors(of: destroy, ignoring: load)\n }\n worklist.pushIfNotVisited(contentsOf: usersInDeadEndBlocks)\n\n // Visit all instructions starting from the destroys in backward order.\n while let inst = worklist.pop() {\n if inst.mayWrite(toAddress: load.address, aliasAnalysis) {\n return true\n }\n worklist.pushPredecessors(of: inst, ignoring: load)\n }\n return false\n}\n\nprivate extension LoadInst {\n func replaceWithLoadBorrow(collectedUses: Uses) {\n let context = collectedUses.context\n let builder = Builder(before: self, context)\n let loadBorrow = builder.createLoadBorrow(fromAddress: address)\n\n var liverange = InstructionRange(begin: self, ends: collectedUses.destroys, context)\n defer { liverange.deinitialize() }\n\n replaceMoveWithBorrow(of: self, replacedBy: loadBorrow, liverange: liverange, collectedUses: collectedUses)\n createEndBorrows(for: loadBorrow, atEndOf: liverange, collectedUses: collectedUses)\n\n uses.replaceAll(with: loadBorrow, context)\n context.erase(instruction: self)\n\n for forwardingUse in collectedUses.forwardingUses {\n forwardingUse.changeOwnership(from: .owned, to: .guaranteed, context)\n }\n context.erase(instructions: collectedUses.destroys)\n }\n}\n\nprivate func remove(copy: CopyValueInst, collectedUses: Uses, liverange: InstructionRange) {\n let context = collectedUses.context\n replaceMoveWithBorrow(of: copy, replacedBy: copy.fromValue, liverange: liverange, collectedUses: collectedUses)\n copy.replace(with: copy.fromValue, context)\n\n for forwardingUse in collectedUses.forwardingUses {\n forwardingUse.changeOwnership(from: .owned, to: .guaranteed, context)\n }\n context.erase(instructions: collectedUses.destroys)\n}\n\n// Handle the special case if the `load` or `copy_value` is immediately followed by a single 
`move_value`.\n// In this case we have to preserve the move's flags by inserting a `begin_borrow` with the same flags.\n// For example:\n//\n// %1 = load [copy] %0\n// %2 = move_value [lexical] %1\n// ...\n// destroy_value %2\n// ->\n// %1 = load_borrow %0\n// %2 = begin_borrow [lexical] %1\n// ...\n// end_borrow %2\n// end_borrow %1\n//\nprivate func replaceMoveWithBorrow(\n of value: Value,\n replacedBy newValue: Value,\n liverange: InstructionRange,\n collectedUses: Uses\n) {\n guard let moveInst = value.singleMoveValueUser else {\n return\n }\n let context = collectedUses.context\n\n // An inner borrow is needed to keep the flags of the `move_value`.\n let builder = Builder(before: moveInst, context)\n let bbi = builder.createBeginBorrow(of: newValue,\n isLexical: moveInst.isLexical,\n hasPointerEscape: moveInst.hasPointerEscape,\n isFromVarDecl: moveInst.isFromVarDecl)\n moveInst.replace(with: bbi, context)\n createEndBorrows(for: bbi, atEndOf: liverange, collectedUses: collectedUses)\n}\n\nprivate func createEndBorrows(for beginBorrow: Value, atEndOf liverange: InstructionRange, collectedUses: Uses) {\n let context = collectedUses.context\n\n // There can be multiple destroys in a row in case of decomposing an aggregate, e.g.\n // %1 = load [copy] %0\n // ...\n // (%2, %3) = destructure_struct %1\n // destroy_value %2\n // destroy_value %3 // The final destroy. 
Here we need to create the `end_borrow`(s)\n //\n\n var allLifetimeEndingInstructions = InstructionWorklist(context)\n allLifetimeEndingInstructions.pushIfNotVisited(contentsOf: collectedUses.destroys.lazy.map { $0 })\n allLifetimeEndingInstructions.pushIfNotVisited(contentsOf: collectedUses.nonDestroyingLiverangeExits)\n\n defer {\n allLifetimeEndingInstructions.deinitialize()\n }\n\n while let endInst = allLifetimeEndingInstructions.pop() {\n if !liverange.contains(endInst) {\n let builder = Builder(before: endInst, context)\n builder.createEndBorrow(of: beginBorrow)\n }\n }\n}\n\nprivate extension InstructionRange {\n func isFullyContainedIn(borrowScopeOf value: Value) -> Bool {\n guard let beginBorrow = BeginBorrowValue(value.lookThroughForwardingInstructions) else {\n return false\n }\n if case .functionArgument = beginBorrow {\n // The lifetime of a guaranteed function argument spans over the whole function.\n return true\n }\n for endOp in beginBorrow.scopeEndingOperands {\n if self.contains(endOp.instruction) {\n return false\n }\n }\n return true\n }\n}\n\nprivate extension Value {\n var singleMoveValueUser: MoveValueInst? {\n uses.ignoreDebugUses.singleUse?.instruction as? MoveValueInst\n }\n\n var lookThroughForwardingInstructions: Value {\n if let bfi = definingInstruction as? BorrowedFromInst,\n !bfi.borrowedPhi.isReborrow,\n bfi.enclosingValues.count == 1\n {\n // Return the single forwarded enclosingValue\n return bfi.enclosingValues[0]\n }\n if let fi = definingInstruction as? ForwardingInstruction,\n let forwardedOp = fi.singleForwardedOperand\n {\n return forwardedOp.value.lookThroughForwardingInstructions\n } else if let termResult = TerminatorResult(self),\n let fi = termResult.terminator as? 
ForwardingInstruction,\n let forwardedOp = fi.singleForwardedOperand\n {\n return forwardedOp.value.lookThroughForwardingInstructions\n }\n return self\n }\n}\n\nprivate extension ForwardingInstruction {\n var canChangeToGuaranteedOwnership: Bool {\n if !preservesReferenceCounts {\n return false\n }\n if !canForwardGuaranteedValues {\n return false\n }\n // For simplicity only support a single owned operand. Otherwise we would have to check if the other\n // owned operands stem from `load_borrow`s, too, which we can convert, etc.\n let numOwnedOperands = operands.lazy.filter({ $0.value.ownership == .owned }).count\n if numOwnedOperands > 1 {\n return false\n }\n return true\n }\n}\n
dataset_sample\swift\swift\cpp_apple_swift_SwiftCompilerSources_Sources_Optimizer_FunctionPasses_CopyToBorrowOptimization.swift
cpp_apple_swift_SwiftCompilerSources_Sources_Optimizer_FunctionPasses_CopyToBorrowOptimization.swift
Swift
13,864
0.95
0.1225
0.28
vue-tools
700
2024-02-06T15:21:46.892288
GPL-3.0
false
f5113b04528da14372c881649a496ba8
//===--- DeadStoreElimination.swift ----------------------------------------==//\n//\n// This source file is part of the Swift.org open source project\n//\n// Copyright (c) 2014 - 2023 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n\nimport SIL\n\n/// Eliminates dead store instructions.\n///\n/// A store is dead if, after the store has occurred:\n///\n/// 1. The value in memory is not read until the memory object is deallocated:\n///\n/// %1 = alloc_stack\n/// ...\n/// store %2 to %1\n/// ... // no reads from %1\n/// dealloc_stack %1\n///\n/// 2. The value in memory is overwritten by another store before any potential read:\n///\n/// store %2 to %1\n/// ... // no reads from %1\n/// store %3 to %1\n///\n/// In case of a partial dead store, the store is split so that some of the new\n/// individual stores can be eliminated in the next round of the optimization:\n///\n/// store %2 to %1 // partially dead\n/// ... // no reads from %1\n/// %3 = struct_element_addr %1, #field1\n/// store %7 to %3\n/// ->\n/// %3 = struct_extract %2, #field1\n/// %4 = struct_element_addr %1, #field1\n/// store %3 to %4 // this store is dead now\n/// %5 = struct_extract %2, #field2\n/// %6 = struct_element_addr %1, #field2\n/// store %5 to %6\n/// ... 
// no reads from %1\n/// store %7 to %3\n///\n/// The algorithm is a data flow analysis which starts at the original store and searches\n/// for successive stores by following the control flow in forward direction.\n///\nlet deadStoreElimination = FunctionPass(name: "dead-store-elimination") {\n (function: Function, context: FunctionPassContext) in\n\n // Avoid quadratic complexity by limiting the number of visited instructions.\n // This limit is sufficient for most "real-world" functions, by far.\n var complexityBudget = 10_000\n\n for block in function.blocks {\n\n // We cannot use for-in iteration here because if the store is split, the new\n // individual stores are inserted right afterwards and they would be ignored by a for-in iteration.\n var inst = block.instructions.first\n while let i = inst {\n if let store = i as? StoreInst {\n if !context.continueWithNextSubpassRun(for: store) {\n return\n }\n tryEliminate(store: store, complexityBudget: &complexityBudget, context)\n }\n inst = i.next\n }\n }\n}\n\nprivate func tryEliminate(store: StoreInst, complexityBudget: inout Int, _ context: FunctionPassContext) {\n // Check if the type can be expanded without a significant increase to code\n // size. 
This pass splits values into its consitutent parts which effectively\n // expands the value into projections which can increase code size.\n if !store.hasValidOwnershipForDeadStoreElimination || !store.source.type.shouldExpand(context) {\n return\n }\n\n switch store.isDead(complexityBudget: &complexityBudget, context) {\n case .alive:\n break\n case .dead:\n context.erase(instruction: store)\n case .maybePartiallyDead(let subPath):\n // Check if the a partial store would really be dead to avoid unnecessary splitting.\n switch store.isDead(at: subPath, complexityBudget: &complexityBudget, context) {\n case .alive, .maybePartiallyDead:\n break\n case .dead:\n // The new individual stores are inserted right after the current store and\n // will be optimized in the following loop iterations.\n store.trySplit(context)\n }\n }\n}\n\nprivate extension StoreInst {\n\n enum DataflowResult {\n case alive\n case dead\n case maybePartiallyDead(AccessPath)\n\n init(aliveWith subPath: AccessPath?) {\n if let subPath = subPath {\n self = .maybePartiallyDead(subPath)\n } else {\n self = .alive\n }\n }\n }\n\n func isDead(complexityBudget: inout Int, _ context: FunctionPassContext) -> DataflowResult {\n return isDead(at: destination.accessPath, complexityBudget: &complexityBudget, context)\n }\n\n func isDead(at accessPath: AccessPath, complexityBudget: inout Int, _ context: FunctionPassContext) -> DataflowResult {\n var scanner = InstructionScanner(storePath: accessPath, storeAddress: self.destination, context.aliasAnalysis)\n let storageDefBlock = accessPath.base.reference?.referenceRoot.parentBlock\n\n switch scanner.scan(instructions: InstructionList(first: self.next), complexityBudget: &complexityBudget) {\n case .dead:\n return .dead\n\n case .alive:\n return DataflowResult(aliveWith: scanner.potentiallyDeadSubpath)\n\n case .transparent:\n // Continue with iterative data flow analysis starting at the block's successors.\n var worklist = BasicBlockWorklist(context)\n defer { 
worklist.deinitialize() }\n worklist.pushIfNotVisited(contentsOf: self.parentBlock.successors)\n\n while let block = worklist.pop() {\n\n // Abort if we find the storage definition of the access in case of a loop, e.g.\n //\n // bb1:\n // %storage_root = apply\n // %2 = ref_element_addr %storage_root\n // store %3 to %2\n // cond_br %c, bb1, bb2\n //\n // The storage root is different in each loop iteration. Therefore the store of a\n // successive loop iteration does not overwrite the store of the previous iteration.\n if let storageDefBlock = storageDefBlock, block == storageDefBlock {\n return DataflowResult(aliveWith: scanner.potentiallyDeadSubpath)\n }\n switch scanner.scan(instructions: block.instructions, complexityBudget: &complexityBudget) {\n case .transparent:\n worklist.pushIfNotVisited(contentsOf: block.successors)\n case .dead:\n break\n case .alive:\n return DataflowResult(aliveWith: scanner.potentiallyDeadSubpath)\n }\n }\n return .dead\n }\n }\n\n var hasValidOwnershipForDeadStoreElimination: Bool {\n switch storeOwnership {\n case .unqualified, .trivial:\n return true\n case .initialize, .assign:\n // In OSSA, non-trivial values cannot be dead-store eliminated because that could shrink\n // the lifetime of the original stored value (because it's not kept in memory anymore).\n return false\n }\n }\n}\n\nprivate struct InstructionScanner {\n private let storePath: AccessPath\n private let storeAddress: Value\n private let aliasAnalysis: AliasAnalysis\n\n private(set) var potentiallyDeadSubpath: AccessPath? 
= nil\n\n init(storePath: AccessPath, storeAddress: Value, _ aliasAnalysis: AliasAnalysis) {\n self.storePath = storePath\n self.storeAddress = storeAddress\n self.aliasAnalysis = aliasAnalysis\n }\n\n enum Result {\n case alive\n case dead\n case transparent\n }\n\n mutating func scan(instructions: InstructionList, complexityBudget: inout Int) -> Result {\n for inst in instructions {\n switch inst {\n case let successiveStore as StoreInst:\n let successivePath = successiveStore.destination.accessPath\n if successivePath.isEqualOrContains(storePath) {\n return .dead\n }\n if potentiallyDeadSubpath == nil,\n storePath.getMaterializableProjection(to: successivePath) != nil {\n // Storing to a sub-field of the original store doesn't make the original store dead.\n // But when we split the original store, then one of the new individual stores might be\n // overwritten by this store.\n // Requiring that the projection to the partial store path is materializable guarantees\n // that we can split the store.\n potentiallyDeadSubpath = successivePath\n }\n case is DeallocRefInst, is DeallocStackRefInst, is DeallocBoxInst:\n if (inst as! 
Deallocation).isDeallocation(of: storePath.base) {\n return .dead\n }\n case let ds as DeallocStackInst:\n if ds.isStackDeallocation(of: storePath.base) {\n return .dead\n }\n case is FixLifetimeInst, is EndAccessInst:\n break\n case let term as TermInst:\n if term.isFunctionExiting {\n return .alive\n }\n fallthrough\n default:\n complexityBudget -= 1\n if complexityBudget <= 0 {\n return .alive\n }\n if inst.mayRead(fromAddress: storeAddress, aliasAnalysis) {\n return .alive\n }\n // TODO: We might detect that this is a partial read of the original store which potentially\n // enables partial dead store elimination.\n }\n }\n return .transparent\n }\n}\n\nprivate extension Deallocation {\n func isDeallocation(of base: AccessBase) -> Bool {\n if let accessReference = base.reference,\n accessReference.referenceRoot == self.allocatedValue.referenceRoot {\n return true\n }\n return false\n }\n}\n\nprivate extension DeallocStackInst {\n func isStackDeallocation(of base: AccessBase) -> Bool {\n if case .stack(let allocStack) = base, allocstack == allocStack {\n return true\n }\n return false\n }\n}\n
dataset_sample\swift\swift\cpp_apple_swift_SwiftCompilerSources_Sources_Optimizer_FunctionPasses_DeadStoreElimination.swift
cpp_apple_swift_SwiftCompilerSources_Sources_Optimizer_FunctionPasses_DeadStoreElimination.swift
Swift
9,362
0.95
0.143939
0.330544
awesome-app
753
2024-09-02T13:29:42.026208
MIT
false
473ed92a8fe55e64f4305e7b4dc2950a
//===--- DeinitDevirtualizer.swift ----------------------------------------===//\n//\n// This source file is part of the Swift.org open source project\n//\n// Copyright (c) 2014 - 2023 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n\nimport SIL\n\n/// Devirtualizes destroys of non-copyable values.\n///\nlet deinitDevirtualizer = FunctionPass(name: "deinit-devirtualizer") {\n (function: Function, context: FunctionPassContext) in\n\n guard function.hasOwnership else {\n return\n }\n\n for inst in function.instructions {\n switch inst {\n case let destroyValue as DestroyValueInst:\n _ = devirtualizeDeinits(of: destroyValue, context)\n case let destroyAddr as DestroyAddrInst:\n _ = devirtualizeDeinits(of: destroyAddr, context)\n default:\n break\n }\n }\n}\n
dataset_sample\swift\swift\cpp_apple_swift_SwiftCompilerSources_Sources_Optimizer_FunctionPasses_DeinitDevirtualizer.swift
cpp_apple_swift_SwiftCompilerSources_Sources_Optimizer_FunctionPasses_DeinitDevirtualizer.swift
Swift
1,066
0.95
0.205882
0.433333
vue-tools
59
2023-08-12T20:35:13.761159
Apache-2.0
false
6ef14378ad2985e4c95c8141b7a88dc7
//===--- DestroyHoisting.swift ---------------------------------------------==//\n//\n// This source file is part of the Swift.org open source project\n//\n// Copyright (c) 2014 - 2024 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n\nimport SIL\n\n/// Hoists `destroy_value` instructions without shrinking an object's lifetime.\n/// This is done if it can be proved that another copy of a value (either in an SSA value or in memory) keeps\n/// the referenced object(s) alive until the original position of the `destroy_value`.\n///\n/// ```\n/// %1 = copy_value %0\n/// ...\n/// last_use_of %0\n/// // other instructions\n/// destroy_value %0 // %1 is still alive here\n/// ```\n/// ->\n/// ```\n/// %1 = copy_value %0\n/// ...\n/// last_use_of %0\n/// destroy_value %0\n/// // other instructions\n/// ```\n///\n/// This also works if a copy of the value is kept alive in memory:\n///\n/// ```\n/// %1 = copy_value %0\n/// store %1 to [assign] %a\n/// ...\n/// last_use_of %0\n/// // other instructions\n/// destroy_value %0 // memory location %a is not modified since the store\n/// ```\n/// ->\n/// ```\n/// %1 = copy_value %0\n/// store %0 to [assign] %a\n/// ...\n/// last_use_of %0\n/// destroy_value %0\n/// // other instructions\n/// ```\n///\n/// The benefit of this optimization is that it can enable copy-propagation by moving\n/// destroys above deinit barries and access scopes.\n///\nlet destroyHoisting = FunctionPass(name: "destroy-hoisting") {\n (function: Function, context: FunctionPassContext) in\n\n if !function.hasOwnership {\n return\n }\n\n for block in function.blocks {\n for arg in block.arguments {\n optimize(value: arg, context)\n if !context.continueWithNextSubpassRun() 
{\n return\n }\n }\n for inst in block.instructions {\n for result in inst.results {\n optimize(value: result, context)\n if !context.continueWithNextSubpassRun(for: inst) {\n return\n }\n }\n }\n }\n}\n\nprivate func optimize(value: Value, _ context: FunctionPassContext) {\n guard value.ownership == .owned,\n // Avoid all the analysis effort if there are no destroys to hoist.\n !value.uses.filterUsers(ofType: DestroyValueInst.self).isEmpty\n else {\n return\n }\n\n var (foundDestroys, hoistableDestroys) = selectHoistableDestroys(of: value, context)\n defer { hoistableDestroys.deinitialize() }\n\n guard foundDestroys else {\n return\n }\n\n guard var minimalLiverange = InstructionRange(withLiverangeOf: value, ignoring: hoistableDestroys, context) else {\n return\n }\n defer { minimalLiverange.deinitialize() }\n\n hoistDestroys(of: value, toEndOf: minimalLiverange, restrictingTo: &hoistableDestroys, context)\n}\n\nprivate func selectHoistableDestroys(of value: Value, _ context: FunctionPassContext) -> (Bool, InstructionSet) {\n // Also includes liveranges of copied values and values stored to memory.\n var forwardExtendedLiverange = InstructionRange(withForwardExtendedLiverangeOf: value, context)\n defer { forwardExtendedLiverange.deinitialize() }\n\n let deadEndBlocks = context.deadEndBlocks\n var foundDestroys = false\n var hoistableDestroys = InstructionSet(context)\n\n for use in value.uses {\n if let destroy = use.instruction as? 
DestroyValueInst,\n // We can hoist all destroys for which another copy of the value is alive at the destroy.\n forwardExtendedLiverange.contains(destroy),\n // TODO: once we have complete OSSA lifetimes we don't need to handle dead-end blocks.\n !deadEndBlocks.isDeadEnd(destroy.parentBlock)\n {\n foundDestroys = true\n hoistableDestroys.insert(destroy)\n }\n }\n return (foundDestroys, hoistableDestroys)\n}\n\nprivate func hoistDestroys(of value: Value,\n toEndOf minimalLiverange: InstructionRange,\n restrictingTo hoistableDestroys: inout InstructionSet,\n _ context: FunctionPassContext)\n{\n createNewDestroys(for: value, atEndPointsOf: minimalLiverange, reusing: &hoistableDestroys, context)\n\n createNewDestroys(for: value, atExitPointsOf: minimalLiverange, reusing: &hoistableDestroys, context)\n\n removeDestroys(of: value, restrictingTo: hoistableDestroys, context)\n}\n\nprivate func createNewDestroys(\n for value: Value,\n atEndPointsOf liverange: InstructionRange,\n reusing hoistableDestroys: inout InstructionSet,\n _ context: FunctionPassContext\n) {\n let deadEndBlocks = context.deadEndBlocks\n\n for endInst in liverange.ends {\n if !endInst.endsLifetime(of: value) {\n Builder.insert(after: endInst, context) { builder in\n builder.createDestroy(of: value, reusing: &hoistableDestroys, notIn: deadEndBlocks)\n }\n }\n }\n}\n\nprivate func createNewDestroys(\n for value: Value,\n atExitPointsOf liverange: InstructionRange,\n reusing hoistableDestroys: inout InstructionSet,\n _ context: FunctionPassContext\n) {\n let deadEndBlocks = context.deadEndBlocks\n\n for exitBlock in liverange.exitBlocks {\n let builder = Builder(atBeginOf: exitBlock, context)\n builder.createDestroy(of: value, reusing: &hoistableDestroys, notIn: deadEndBlocks)\n }\n}\n\nprivate func removeDestroys(\n of value: Value,\n restrictingTo hoistableDestroys: InstructionSet,\n _ context: FunctionPassContext\n) {\n for use in value.uses {\n if let destroy = use.instruction as? 
DestroyValueInst,\n hoistableDestroys.contains(destroy)\n {\n context.erase(instruction: destroy)\n }\n }\n}\n\nprivate extension InstructionRange {\n\n init?(withLiverangeOf initialDef: Value, ignoring ignoreDestroys: InstructionSet, _ context: FunctionPassContext)\n {\n var liverange = InstructionRange(for: initialDef, context)\n var visitor = InteriorUseWalker(definingValue: initialDef, ignoreEscape: false, visitInnerUses: true, context) {\n if !ignoreDestroys.contains($0.instruction) {\n liverange.insert($0.instruction)\n }\n return .continueWalk\n }\n defer { visitor.deinitialize() }\n\n // This is important to visit begin_borrows which don't have an end_borrow in dead-end blocks.\n // TODO: we can remove this once we have complete lifetimes.\n visitor.innerScopeHandler = {\n if let inst = $0.definingInstruction {\n liverange.insert(inst)\n }\n return .continueWalk\n }\n\n guard visitor.visitUses() == .continueWalk else {\n liverange.deinitialize()\n return nil\n }\n self = liverange\n }\n\n // In addition to the forward-extended liverange, also follows copy_value's transitively.\n init(withForwardExtendedLiverangeOf initialDef: Value, _ context: FunctionPassContext) {\n self.init(for: initialDef, context)\n\n var worklist = ValueWorklist(context)\n defer { worklist.deinitialize() }\n\n worklist.pushIfNotVisited(initialDef)\n while let value = worklist.pop() {\n assert(value.ownership == .owned)\n\n for use in value.uses {\n let user = use.instruction\n if !use.endsLifetime {\n if let copy = user as? 
CopyValueInst {\n worklist.pushIfNotVisited(copy)\n }\n continue\n }\n\n switch user {\n case let store as StoreInst:\n extendLiverangeInMemory(of: initialDef, with: store, context)\n\n case let termInst as TermInst & ForwardingInstruction:\n worklist.pushIfNotVisited(contentsOf: termInst.forwardedResults.lazy.filter({ $0.ownership != .none }))\n\n case is ForwardingInstruction, is MoveValueInst:\n if let result = user.results.lazy.filter({ $0.ownership != .none }).singleElement {\n worklist.pushIfNotVisited(result)\n }\n\n default:\n // We cannot extend a lexical liverange with a non-lexical liverange, because afterwards the\n // non-lexical liverange could be shrunk over a deinit barrier which would let the original\n // lexical liverange to be shrunk, too.\n if !initialDef.isInLexicalLiverange(context) || value.isInLexicalLiverange(context) {\n self.insert(user)\n }\n }\n }\n }\n }\n\n private mutating func extendLiverangeInMemory(\n of initialDef: Value,\n with store: StoreInst,\n _ context: FunctionPassContext\n ) {\n let domTree = context.dominatorTree\n\n if initialDef.destroyUsers(dominatedBy: store.parentBlock, domTree).isEmpty {\n return\n }\n\n // We have to take care of lexical lifetimes. See comment above.\n if initialDef.isInLexicalLiverange(context) &&\n !store.destination.accessBase.isInLexicalOrGlobalLiverange(context)\n {\n return\n }\n\n if isTakeOrDestroy(ofAddress: store.destination, after: store, beforeDestroysOf: initialDef, context) {\n return\n }\n\n self.insert(contentsOf: initialDef.destroyUsers(dominatedBy: store.parentBlock, domTree).map { $0.next! 
})\n }\n}\n\nprivate func isTakeOrDestroy(\n ofAddress address: Value,\n after store: StoreInst,\n beforeDestroysOf initialDef: Value,\n _ context: FunctionPassContext\n) -> Bool {\n let aliasAnalysis = context.aliasAnalysis\n let domTree = context.dominatorTree\n var worklist = InstructionWorklist(context)\n defer { worklist.deinitialize() }\n\n worklist.pushIfNotVisited(store.next!)\n while let inst = worklist.pop() {\n if inst.endsLifetime(of: initialDef) {\n continue\n }\n if inst.mayTakeOrDestroy(address: address, aliasAnalysis) {\n return true\n }\n if let next = inst.next {\n worklist.pushIfNotVisited(next)\n } else {\n for succ in inst.parentBlock.successors where store.parentBlock.dominates(succ, domTree) {\n worklist.pushIfNotVisited(succ.instructions.first!)\n }\n }\n }\n return false\n}\n\nprivate extension Builder {\n func createDestroy(of value: Value,\n reusing hoistableDestroys: inout InstructionSet,\n notIn deadEndBlocks: DeadEndBlocksAnalysis) {\n guard case .before(let insertionPoint) = insertionPoint else {\n fatalError("unexpected kind of insertion point")\n }\n if deadEndBlocks.isDeadEnd(insertionPoint.parentBlock) {\n return\n }\n if hoistableDestroys.contains(insertionPoint) {\n hoistableDestroys.erase(insertionPoint)\n } else {\n createDestroyValue(operand: value)\n }\n }\n}\n\nprivate extension Value {\n func destroyUsers(dominatedBy domBlock: BasicBlock, _ domTree: DominatorTree) ->\n LazyMapSequence<LazyFilterSequence<LazyMapSequence<UseList, DestroyValueInst?>>, DestroyValueInst> {\n return uses.lazy.compactMap { use in\n if let destroy = use.instruction as? 
DestroyValueInst,\n domBlock.dominates(destroy.parentBlock, domTree)\n {\n return destroy\n }\n return nil\n }\n }\n}\n\nprivate extension Instruction {\n func endsLifetime(of value: Value) -> Bool {\n return operands.contains { $0.value == value && $0.endsLifetime }\n }\n\n func mayTakeOrDestroy(address: Value, _ aliasAnalysis: AliasAnalysis) -> Bool {\n switch self {\n case is BeginAccessInst, is EndAccessInst, is EndBorrowInst:\n return false\n default:\n return mayWrite(toAddress: address, aliasAnalysis)\n }\n }\n}\n\nprivate extension AccessBase {\n func isInLexicalOrGlobalLiverange(_ context: FunctionPassContext) -> Bool {\n switch self {\n case .box(let pbi): return pbi.box.isInLexicalLiverange(context)\n case .class(let rea): return rea.instance.isInLexicalLiverange(context)\n case .tail(let rta): return rta.instance.isInLexicalLiverange(context)\n case .stack(let asi): return asi.isLexical\n case .global: return true\n case .argument(let arg):\n switch arg.convention {\n case .indirectIn, .indirectInGuaranteed, .indirectInout, .indirectInoutAliasable:\n return arg.isLexical\n default:\n return false\n }\n case .yield, .storeBorrow, .pointer, .index, .unidentified:\n return false\n }\n }\n}\n
dataset_sample\swift\swift\cpp_apple_swift_SwiftCompilerSources_Sources_Optimizer_FunctionPasses_DestroyHoisting.swift
cpp_apple_swift_SwiftCompilerSources_Sources_Optimizer_FunctionPasses_DestroyHoisting.swift
Swift
12,114
0.95
0.141732
0.192878
awesome-app
347
2024-03-23T09:08:13.369657
MIT
false
626e5a8c6f1b1ef444ed7d04bb4a9af1
//===--- DiagnoseInfiniteRecursion.swift -----------------------------------==//\n//\n// This source file is part of the Swift.org open source project\n//\n// Copyright (c) 2014 - 2025 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n\nimport AST\nimport SIL\n\n/// A diagnostic pass that detects infinite recursive function calls.\n///\n/// It detects simple forms of infinite recursions, like\n///\n/// func f() {\n/// f()\n/// }\n///\n/// and can also deal with invariant conditions, like availability checks\n///\n/// func f() {\n/// if #available(macOS 10.4.4, *) {\n/// f()\n/// }\n/// }\n///\n/// or invariant conditions due to forwarded arguments:\n///\n/// func f(_ x: Int) {\n/// if x > 0 {\n/// f(x)\n/// }\n/// }\n///\nlet diagnoseInfiniteRecursion = FunctionPass(name: "diagnose-infinite-recursion") {\n (function: Function, context: FunctionPassContext) in\n\n // Don't rerun diagnostics on deserialized functions.\n if function.wasDeserializedCanonical {\n return\n }\n\n // Try with different sets of invariants. To catch all cases we would need to try all\n // parameter/memory permutations. But in practice, it's good enough to collect a reasonable set by\n // finding all recursive calls and see what arguments they are forwarding.\n guard let invariantsToTry = collectInvariantsToTry(in: function, context) else {\n // There are no recursive calls in the function at all. 
This is the case for most functions.\n return\n }\n\n for invariants in invariantsToTry {\n if analizeAndDiagnose(function, with: invariants, context) {\n return\n }\n // Try again, assuming that memory is invariant.\n if analizeAndDiagnose(function, with: invariants.withInvariantMemory, context) {\n return\n }\n }\n}\n\n/// Collect invariants with which we should try the analysis and return true if\n/// there is at least one recursive call in the function.\nprivate func collectInvariantsToTry(in function: Function, _ context: FunctionPassContext) -> [Invariants]? {\n var invariants = [Invariants]()\n\n // Try with no invariants.\n invariants.append(Invariants());\n\n var recursiveCallsFound = false\n\n // Scan the function for recursive calls.\n for inst in function.instructions {\n if let applySite = inst as? FullApplySite, applySite.isRecursiveCall(context) {\n recursiveCallsFound = true\n\n // See what parameters the recursive call is forwarding and use that as invariants.\n let inv = Invariants(fromForwardingArgumentsOf: applySite)\n if !invariants.contains(inv) {\n invariants.append(inv)\n }\n\n // Limit the size of the set to avoid quadratic complexity in corner\n // cases. 
Usually 4 invariants are more than enough.\n if invariants.count >= 4 {\n return invariants;\n }\n }\n }\n if !recursiveCallsFound {\n return nil\n }\n return invariants;\n}\n\n\n/// Performs the analysis and issues a warnings for recursive calls.\n/// Returns true, if at least one recursive call is found.\nprivate func analizeAndDiagnose(_ function: Function,\n with invariants: Invariants,\n _ context: FunctionPassContext) -> Bool\n{\n var analysis = Analysis(function: function, with: invariants, context)\n defer { analysis.deinitialize() }\n\n analysis.compute()\n\n if analysis.isInfiniteRecursiveFunction {\n analysis.printWarningsForInfiniteRecursiveCalls()\n return true\n }\n return false\n}\n\n/// Describes what is expected to be invariant in an infinite recursion loop.\n///\n/// The dataflow analysis is done with a given set of `Invariants`. The correctness of the result (i.e.\n/// no false infinite recursion reported) does _not_ depend on the chosen invariants. But it's a trade-off:\n/// The more invariants we include, the more conditions might become invariant (which is good).\n/// On the other hand, we have to ignore recursive calls which don't forward all invariant arguments.\n///\n/// We don't know in advance which invariants will yield the best result, i.e. let us detect an\n/// infinite recursion. 
For example, in `f()` we can only detect the infinite recursion if we expect\n/// that the parameter `x` is invariant.\n/// ```\n/// func f(_ x: Int) {\n/// if x > 0 { // an invariant condition!\n/// f(x) // the call is forwarding the argument\n/// }\n/// }\n/// ```\n/// But in `g()` we can only detect the infinite recursion if we _don't_ expect that the parameter\n/// is invariant.\n/// ```\n/// func g(_ x: Int) {\n/// if x > 0 { // no invariant condition\n/// g(x - 1) // argument is not forwarded\n/// } else {\n/// g(x - 2) // argument is not forwarded\n/// }\n/// }\n/// ```\nprivate struct Invariants: Equatable {\n\n // Support up to 32 arguments, which should be enough in real world code.\n // As the definition of invariants does not need to be accurate for correctness, it's fine to only support\n // the common cases.\n typealias ArgumentBits = UInt32\n\n /// A bit mask of indices of arguments which are expected to be invariant.\n /// An argument is invariant if a recursive call forwards the incoming argument.\n /// For example:\n /// ```\n /// func f(_ x: Int, _ y: Int) {\n /// f(x, y - 1) // The first argument is invariant, the second is not\n /// }\n /// ```\n let arguments: ArgumentBits\n\n /// True, if all type arguments are invariant.\n /// In contrast to `arguments` we don't distinguish between individual type arguments but have a single\n /// flag for all type arguments.\n /// For example:\n /// ```\n /// func f<T: P>(_ t: T.Type) {\n /// f(T.self) // The type argument is invariant\n /// f(T.V.self) // The type argument is not invariant\n /// }\n /// ```\n let typeArguments: Bool\n\n /// True if memory content is invariant.\n /// Like `typeArguments`, it's all or nothing. Either all memory is expected to be invariant (= never\n /// written) or not. 
We could use AliasAnalysis to do a more fine-grained analysis, but in mandatory\n /// optimizations we want to keep things simple.\n let memory: Bool\n\n // Nothing is invariant.\n init() {\n self.memory = false\n self.typeArguments = false\n self.arguments = 0\n }\n\n init(fromForwardingArgumentsOf recursiveApply: FullApplySite) {\n let function = recursiveApply.parentFunction\n\n // Check which parameters are exactly passed 1:1 to the recursive call.\n var argMask: ArgumentBits = 0\n for (argIndex, arg) in recursiveApply.arguments.enumerated() {\n if argIndex >= MemoryLayout<ArgumentBits>.size * 8 {\n break\n }\n if arg.rootValue == function.arguments[argIndex] {\n argMask |= 1 << argIndex\n }\n }\n self.arguments = argMask\n\n // Check if the generic type parameters are exactly passed 1:1 to the recursive call.\n self.typeArguments = recursiveApply.substitutionMap == function.forwardingSubstitutionMap\n\n // Assume memory is not invariant\n self.memory = false\n }\n\n private init(arguments: ArgumentBits, genericArguments: Bool, memory: Bool) {\n self.arguments = arguments\n self.typeArguments = genericArguments\n self.memory = memory\n }\n\n var withInvariantMemory: Invariants {\n Invariants(arguments: arguments, genericArguments: typeArguments, memory: true)\n }\n\n func isArgumentInvariant(at index: Int) -> Bool {\n if index >= MemoryLayout<ArgumentBits>.size * 8 {\n return false\n }\n return (arguments & (1 << index)) != 0\n }\n}\n\n/// Performs the analysis to detect infinite recursion loops.\n///\n/// The basic idea is to see if there is a path from the entry block to a function return without\n/// going through an infinite recursive call.\n///\nprivate struct Analysis {\n\n /// All blocks which contain a recursive call.\n var haveRecursiveCall: BasicBlockSet\n\n /// All blocks which have a terminator with an invariant condition.\n ///\n /// Note: "invariant" means: invariant with respect to the expected invariants,\n /// which are passed to the 
initializer.\n var haveInvariantCondition: BasicBlockSet\n\n /// All blocks from which there is a path to a function exit, without going through a recursive call.\n ///\n /// Note that if memory is expected to be invariant, all memory-writing instructions are also\n /// considered as a "function exit".\n var reachingFunctionExit: BasicBlockSet\n\n /// All blocks from which there is a path to a recursive call.\n var reachingRecursiveCall: BasicBlockSet\n\n private let function: Function\n private let invariants: Invariants\n private let context: FunctionPassContext\n\n init(function: Function, with invariants: Invariants, _ context: FunctionPassContext) {\n self.haveRecursiveCall = BasicBlockSet(context)\n self.haveInvariantCondition = BasicBlockSet(context)\n self.reachingFunctionExit = BasicBlockSet(context)\n self.reachingRecursiveCall = BasicBlockSet(context)\n self.function = function\n self.context = context\n self.invariants = invariants\n }\n\n mutating func compute() {\n computeInitialSets()\n propagateReachingRecursiveCall()\n propagateReachingFunctionExit()\n }\n\n mutating func deinitialize() {\n haveRecursiveCall.deinitialize()\n haveInvariantCondition.deinitialize()\n reachingFunctionExit.deinitialize()\n reachingRecursiveCall.deinitialize()\n }\n\n var isInfiniteRecursiveFunction: Bool { isInInfiniteRecursionLoop(function.entryBlock) }\n\n func printWarningsForInfiniteRecursiveCalls() {\n var worklist = BasicBlockWorklist(context)\n defer { worklist.deinitialize() }\n\n // Print warnings for the first recursive call(s) we reach from the entry block.\n worklist.pushIfNotVisited(function.entryBlock)\n while let block = worklist.pop() {\n if case .recursive(let apply) = block.getKind(for: invariants, context) {\n context.diagnosticEngine.diagnose(.warn_infinite_recursive_call, at: apply.location)\n } else {\n for succ in block.successors where isInInfiniteRecursionLoop(succ) {\n worklist.pushIfNotVisited(succ)\n }\n }\n }\n }\n\n private mutating func 
computeInitialSets() {\n for block in function.blocks {\n switch block.getKind(for: invariants, context) {\n case .transparent:\n break\n case .functionExiting:\n reachingFunctionExit.insert(block)\n case .recursive:\n haveRecursiveCall.insert(block)\n reachingRecursiveCall.insert(block)\n case .invariantCondition:\n haveInvariantCondition.insert(block)\n }\n }\n }\n\n private mutating func propagateReachingRecursiveCall() {\n var worklist = Stack<BasicBlock>(context)\n defer { worklist.deinitialize() }\n\n worklist.append(contentsOf: function.blocks.filter { reachingRecursiveCall.contains($0) })\n\n while let block = worklist.pop() {\n for pred in block.predecessors {\n if reachingRecursiveCall.insert(pred) {\n worklist.push(pred)\n }\n }\n }\n }\n\n private mutating func propagateReachingFunctionExit() {\n var worklist = Stack<BasicBlock>(context)\n defer { worklist.deinitialize() }\n\n worklist.append(contentsOf: function.blocks.filter { reachingFunctionExit.contains($0) })\n\n while let block = worklist.pop() {\n for pred in block.predecessors where !reachingFunctionExit.contains(pred) {\n // Recursive calls block the propagation.\n if haveRecursiveCall.contains(pred) {\n continue\n }\n\n // This is the trick for handling invariant conditions: usually `reachingFunctionExit` is propagated\n // if _any_ of the successors reach a function exit.\n // For invariant conditions, it's only propagated if _all_ successors reach a function exit.\n // If at least one of the successors is in an infinite recursion loop and this successor is\n // taken once, it will be taken forever (because the condition is invariant).\n if haveInvariantCondition.contains(pred),\n pred.successors.contains(where: isInInfiniteRecursionLoop)\n {\n continue\n }\n\n reachingFunctionExit.insert(pred)\n worklist.push(pred)\n }\n }\n }\n\n private func isInInfiniteRecursionLoop(_ block: BasicBlock) -> Bool {\n return reachingRecursiveCall.contains(block) && !reachingFunctionExit.contains(block)\n 
}\n}\n\nprivate enum BlockKind {\n case functionExiting // the block is exiting the function (e.g. via a `return`)\n case recursive(FullApplySite) // the block contains a recursive call\n case invariantCondition // the block's terminator has an invariant condition\n case transparent // all other blocks\n}\n\nprivate extension BasicBlock {\n func getKind(for invariants: Invariants, _ context: FunctionPassContext) -> BlockKind {\n for inst in instructions {\n if let apply = inst as? FullApplySite {\n // Ignore blocks which call a @_semantics("programtermination_point").\n // This is an assert-like program termination and we explicitly don't\n // want this call to disqualify the warning for infinite recursion,\n // because they're reserved for exceptional circumstances.\n if let callee = apply.referencedFunction, callee.hasSemanticsAttribute("programtermination_point") {\n return .transparent\n }\n\n if apply.isRecursiveCall(context), apply.hasInvariantArguments(with: invariants) {\n return .recursive(apply)\n }\n }\n if invariants.memory, inst.mayReallyWriteToMemory {\n // If we are assuming that all memory is invariant, a memory-writing\n // instruction potentially breaks the infinite recursion loop. 
For the\n // sake of the analysis, it's like a function exit.\n return .functionExiting\n }\n }\n if terminator.isFunctionExiting ||\n // Also treat non-assert-like unreachables as returns, like "exit()".\n terminator is UnreachableInst\n {\n return .functionExiting\n }\n if terminator.isInvariant(accordingTo: invariants, context) {\n return .invariantCondition\n }\n return .transparent\n }\n}\n\nprivate extension FullApplySite {\n /// True if this apply calls its parent function.\n func isRecursiveCall(_ context: FunctionPassContext) -> Bool {\n if let calledFn = referencedFunction {\n return calledFn == parentFunction\n }\n\n switch callee {\n case let cmi as ClassMethodInst:\n let classType = cmi.operand.value.type.canonicalType.lookThroughMetatype\n guard let nominal = classType.nominal,\n let classDecl = nominal as? ClassDecl,\n // It's sufficient to handle class methods in the module where they are defined.\n // This aovids the need to de-serialized vtables from other modules.\n classDecl.parentModule == context.currentModuleContext,\n let vtable = context.lookupVTable(for: classDecl),\n let entry = vtable.lookup(method: cmi.member),\n entry.implementation == parentFunction\n else {\n return false\n }\n\n if cmi.member.calleesAreStaticallyKnowable(context),\n // The "statically knowable" check just means that we have all the\n // callee candidates available for analysis. We still need to check\n // if the current function has a known override point.\n !(cmi.member.decl as! 
AbstractFunctionDecl).isOverridden\n {\n return true\n }\n\n // Even if the method is (or could be) overridden, it's a recursive call if\n // it's called on the self argument:\n // ```\n // class X {\n // // Even if foo() is overridden in a derived class, it'll end up in an\n // // infinite recursion if initially called on an instance of `X`.\n // func foo() { foo() }\n // }\n // ```\n if let selfArgument = parentFunction.selfArgument, cmi.operand.value == selfArgument {\n return true\n }\n return false\n\n case let wmi as WitnessMethodInst:\n if wmi.conformance.isConcrete,\n let wTable = context.lookupWitnessTable(for: wmi.conformance.rootConformance),\n let method = wTable.lookup(method: wmi.member),\n method == parentFunction\n {\n return true\n }\n return false\n\n default:\n return false\n }\n }\n\n func hasInvariantArguments(with invariants: Invariants) -> Bool {\n return arguments.enumerated().allSatisfy { (argIndex, arg) in\n !invariants.isArgumentInvariant(at: argIndex) ||\n arg.rootValue == parentFunction.arguments[argIndex]\n }\n }\n}\n\nprivate extension CanonicalType {\n var lookThroughMetatype: CanonicalType {\n if self.isMetatype {\n return self.instanceTypeOfMetatype\n }\n return self\n }\n}\n\nprivate extension Value {\n /// Recursively walks the use-def chain starting at this value and returns\n /// true if all visited values are invariant.\n func isInvariant(accordingTo invariants: Invariants, visited: inout InstructionSet) -> Bool {\n if let inst = definingInstruction {\n // Avoid exponential complexity in case a value is used by multiple\n // operands.\n if !visited.insert(inst) {\n return true\n }\n\n if !invariants.memory, inst.mayReadFromMemory {\n return false\n }\n\n if !invariants.typeArguments, inst.mayDependOnTypeParameters {\n return false\n }\n\n for op in inst.operands {\n if !op.value.isInvariant(accordingTo: invariants, visited: &visited) {\n return false\n }\n }\n return true\n }\n\n if let funcArg = self as? 
FunctionArgument {\n return invariants.isArgumentInvariant(at: funcArg.index)\n }\n return false\n }\n\n var rootValue: Value {\n switch self {\n case let ba as BeginAccessInst:\n return ba.operand.value.rootValue\n default:\n return self\n }\n }\n}\n\nprivate extension Instruction {\n var mayReallyWriteToMemory: Bool {\n switch self {\n case is BeginAccessInst, is EndAccessInst,\n // A `load` is defined to write memory or have side effects in two cases:\n // * We don't care about retain instructions of a `load [copy]`.\n // * We don't care about a `load [take]` because it cannot occur in an\n // infinite recursion loop without another write (which re-initializes\n // the memory).\n is LoadInst:\n return false\n default:\n return mayWriteToMemory\n }\n }\n\n var mayDependOnTypeParameters: Bool {\n switch self {\n case let bi as BuiltinInst:\n return bi.substitutionMap.replacementTypes.contains { $0.hasArchetype }\n case let mt as MetatypeInst:\n return mt.type.hasArchetype\n default:\n return false\n }\n }\n}\n\nprivate extension TermInst {\n func isInvariant(accordingTo invariants: Invariants, _ context: FunctionPassContext) -> Bool {\n switch self {\n case is SwitchEnumAddrInst,\n is CheckedCastAddrBranchInst:\n if !invariants.memory {\n return false\n }\n fallthrough\n case is CondBranchInst,\n is SwitchValueInst,\n is SwitchEnumInst,\n is CheckedCastBranchInst:\n var visited = InstructionSet(context)\n defer { visited.deinitialize() }\n return operands[0].value.isInvariant(accordingTo: invariants, visited: &visited)\n default:\n return false\n }\n }\n}\n
dataset_sample\swift\swift\cpp_apple_swift_SwiftCompilerSources_Sources_Optimizer_FunctionPasses_DiagnoseInfiniteRecursion.swift
cpp_apple_swift_SwiftCompilerSources_Sources_Optimizer_FunctionPasses_DiagnoseInfiniteRecursion.swift
Swift
19,488
0.95
0.230104
0.316929
vue-tools
759
2025-02-08T12:08:51.328496
Apache-2.0
false
f00a65a32c898e6c11d460b483f17a46
//===--- InitializeStaticGlobals.swift -------------------------------------==//\n//\n// This source file is part of the Swift.org open source project\n//\n// Copyright (c) 2014 - 2023 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n\nimport AST\nimport SIL\n\n/// Converts a lazily initialized global to a statically initialized global variable.\n///\n/// When this pass runs on a global initializer `[global_init_once_fn]` it tries to\n/// create a static initializer for the initialized global.\n///\n/// ```\n/// sil [global_init_once_fn] @globalinit {\n/// alloc_global @the_global\n/// %a = global_addr @the_global\n/// %i = some_const_initializer_insts\n/// store %i to %a\n/// }\n/// ```\n/// The pass creates a static initializer for the global:\n/// ```\n/// sil_global @the_global = {\n/// %initval = some_const_initializer_insts\n/// }\n/// ```\n/// and removes the allocation and store instructions from the initializer function:\n/// ```\n/// sil [global_init_once_fn] @globalinit {\n/// %a = global_addr @the_global\n/// %i = some_const_initializer_insts\n/// }\n/// ```\n/// The initializer then becomes a side-effect free function which let's the builtin-\n/// simplification remove the `builtin "once"` which calls the initializer.\n///\nlet initializeStaticGlobalsPass = FunctionPass(name: "initialize-static-globals") {\n (function: Function, context: FunctionPassContext) in\n\n if context.hadError {\n // In case of a preceding error, there is no guarantee that the SIL is valid.\n return\n }\n\n if !function.isGlobalInitOnceFunction {\n return\n }\n\n // Sometimes structs are not stored in one piece, but as individual elements.\n // Merge such individual stores to a single 
store of the whole struct.\n mergeStores(in: function, context)\n\n guard let (allocInst, storeToGlobal, inlineArrays) = getGlobalInitializerInfo(of: function, context) else {\n return\n }\n\n if !allocInst.global.canBeInitializedStatically {\n return\n }\n\n /// Replace inline arrays, which are allocated in stack locations with `vector` instructions.\n /// Note that `vector` instructions are only allowed in global initializers. Therefore it's important\n /// that the code in this global initializer is eventually completely removed after copying it to the global.\n for array in inlineArrays {\n lowerInlineArray(array: array, context)\n }\n \n var cloner = StaticInitCloner(cloneTo: allocInst.global, context)\n defer { cloner.deinitialize() }\n\n _ = cloner.clone(storeToGlobal.source)\n\n // The initial value can contain a `begin_access` if it references another global variable by address, e.g.\n // var p = Point(x: 10, y: 20)\n // let o = UnsafePointer(&p)\n //\n allocInst.global.stripAccessInstructionFromInitializer(context)\n\n context.erase(instruction: allocInst)\n context.erase(instruction: storeToGlobal)\n context.removeTriviallyDeadInstructionsIgnoringDebugUses(in: function)\n}\n\n/// Gets all info about a global initializer function if it can be converted to a statically initialized global.\nprivate func getGlobalInitializerInfo(\n of function: Function,\n _ context: FunctionPassContext\n) -> (allocInst: AllocGlobalInst, storeToGlobal: StoreInst, inlineArrays: [InlineArray])? {\n\n var arrayInitInstructions = InstructionSet(context)\n defer { arrayInitInstructions.deinitialize() }\n \n var inlineArrays = [InlineArray]()\n \n guard let (allocInst, storeToGlobal) = getGlobalInitialization(of: function, context,\n handleUnknownInstruction: { inst in\n if let asi = inst as? 
AllocStackInst {\n if let array = getInlineArrayInfo(of: asi) {\n inlineArrays.append(array)\n arrayInitInstructions.insertAllAddressUses(of: asi)\n return true\n }\n return false\n }\n // Accept all instructions which are part of inline array initialization, because we'll remove them anyway.\n return arrayInitInstructions.contains(inst)\n })\n else {\n return nil\n }\n\n return (allocInst, storeToGlobal, inlineArrays) \n}\n\n/// Represents an inline array which is initialized by a literal.\nprivate struct InlineArray {\n let elementType: Type\n \n /// In case the `elementType` is a tuple, the element values are flattened,\n /// i.e. `elements` contains elementcount * tupleelements values.\n let elements: [Value]\n \n /// The final load instruction which loads the initialized array from a temporary stack location.\n let finalArrayLoad: LoadInst\n \n /// The stack location which contains the initialized array.\n var stackLoocation: AllocStackInst { finalArrayLoad.address as! AllocStackInst }\n}\n\n/// Replaces an initialized inline array (which is allocated in a temporary stack location) with a\n/// `vector` instruction.\n/// The stack location of the array is removed.\nprivate func lowerInlineArray(array: InlineArray, _ context: FunctionPassContext) {\n let vector: VectorInst\n let builder = Builder(after: array.finalArrayLoad, context)\n if array.elementType.isTuple {\n let numTupleElements = array.elementType.tupleElements.count\n assert(array.elements.count % numTupleElements == 0)\n var tuples: [TupleInst] = []\n for tupleIdx in 0..<(array.elements.count / numTupleElements) {\n let range = (tupleIdx * numTupleElements) ..< ((tupleIdx + 1) * numTupleElements) \n let tuple = builder.createTuple(type: array.elementType, elements: Array(array.elements[range]))\n tuples.append(tuple)\n }\n vector = builder.createVector(type: array.elementType, arguments: tuples)\n } else {\n vector = builder.createVector(type: array.elementType, arguments: array.elements) \n }\n 
array.finalArrayLoad.uses.replaceAll(with: vector, context)\n context.erase(instructionIncludingAllUsers: array.stackLoocation)\n}\n\n/// An alloc_stack could be a temporary object which holds an initialized inline-array literal.\n/// It looks like:\n///\n/// %1 = alloc_stack $InlineArray<Count, ElementType>\n/// %2 = unchecked_addr_cast %1 to $*ElementType // the elementStorage\n/// store %firstElement to [trivial] %2\n/// %4 = integer_literal $Builtin.Word, 1\n/// %5 = index_addr %2, %4\n/// store %secondElement to [trivial] %5\n/// ...\n/// %10 = load [trivial] %1 // the final arrayLoad\n/// dealloc_stack %1\n///\n/// Returns nil if `allocStack` is not a properly initialized inline array.\n///\nprivate func getInlineArrayInfo(of allocStack: AllocStackInst) -> InlineArray? {\n var arrayLoad: LoadInst? = nil\n var elementStorage: UncheckedAddrCastInst? = nil\n\n for use in allocStack.uses {\n switch use.instruction {\n case let load as LoadInst:\n if arrayLoad != nil {\n return nil\n }\n // It's guaranteed that the array load is located after all element stores.\n // Otherwise it would load uninitialized memory.\n arrayLoad = load\n case is DeallocStackInst:\n break\n case let addrCastToElement as UncheckedAddrCastInst:\n if elementStorage != nil {\n return nil\n }\n elementStorage = addrCastToElement\n default:\n return nil\n }\n }\n guard let arrayLoad, let elementStorage else {\n return nil\n } \n \n var stores = Array<StoreInst?>()\n if !findArrayElementStores(toElementAddress: elementStorage, elementIndex: 0, stores: &stores) {\n return nil\n }\n if stores.isEmpty {\n // We cannot create an empty `vector` instruction, therefore we don't support empty inline arrays.\n return nil\n }\n // Usually there must be a store for each element. 
Otherwise the `arrayLoad` would load uninitialized memory.\n // We still check this to not crash in some weird corner cases, like the element type is an empty tuple.\n if stores.contains(nil) {\n return nil\n }\n\n return InlineArray(elementType: elementStorage.type.objectType,\n elements: stores.map { $0!.source },\n finalArrayLoad: arrayLoad)\n}\n\n/// Recursively traverses all uses of `elementAddr` and finds all stores to an inline array storage.\n/// The element store instructions are put into `stores` - one store for each element.\n/// In case the element type is a tuple, the tuples are flattened. See `InlineArray.elements`. \nprivate func findArrayElementStores(\n toElementAddress elementAddr: Value,\n elementIndex: Int,\n stores: inout [StoreInst?]\n) -> Bool {\n for use in elementAddr.uses {\n switch use.instruction {\n case let indexAddr as IndexAddrInst:\n guard let indexLiteral = indexAddr.index as? IntegerLiteralInst,\n let tailIdx = indexLiteral.value else\n {\n return false\n }\n if !findArrayElementStores(toElementAddress: indexAddr, elementIndex: elementIndex + tailIdx, stores: &stores) {\n return false\n }\n case let tea as TupleElementAddrInst:\n // The array elements are tuples. 
There is a separate store for each tuple element.\n let numTupleElements = tea.tuple.type.tupleElements.count\n let tupleIdx = tea.fieldIndex\n if !findArrayElementStores(toElementAddress: tea,\n elementIndex: elementIndex * numTupleElements + tupleIdx,\n stores: &stores) {\n return false\n }\n case let store as StoreInst:\n if store.source.type.isTuple {\n // This kind of SIL is never generated because tuples are stored with separated stores to tuple_element_addr.\n // Just to be on the safe side..\n return false\n }\n if elementIndex >= stores.count {\n stores += Array(repeating: nil, count: elementIndex - stores.count + 1)\n }\n if stores[elementIndex] != nil {\n // An element is stored twice.\n return false\n }\n stores[elementIndex] = store\n default:\n return false\n }\n }\n return true\n}\n\n/// Merges stores to individual struct fields to a single store of the whole struct.\n///\n/// store %element1 to %element1Addr\n/// store %element2 to %element2Addr\n/// ->\n/// %s = struct $S (%element1, %element2)\n/// store %s to @structAddr\nprivate func mergeStores(in function: Function, _ context: FunctionPassContext) {\n for inst in function.instructions {\n if let store = inst as? StoreInst {\n if let (elementStores, lastStore) = getSequenceOfElementStores(firstStore: store) {\n merge(elementStores: elementStores, lastStore: lastStore, context)\n }\n }\n }\n}\n\n/// Returns a sequence of individual stores to elements of a struct.\n///\n/// %addr1 = struct_element_addr %structAddr, #field1\n/// store %element1 to %addr1\n/// // ...\n/// %addr_n = struct_element_addr %structAddr, #field_n\n/// store %element_n to %addr_n\n///\nprivate func getSequenceOfElementStores(firstStore: StoreInst) -> ([StoreInst], lastStore: StoreInst)? {\n guard let elementAddr = firstStore.destination as? 
StructElementAddrInst else {\n return nil\n }\n let structAddr = elementAddr.struct\n let structType = structAddr.type\n if structType.isMoveOnly {\n return nil\n }\n if (structType.nominal as! StructDecl).hasUnreferenceableStorage {\n return nil\n }\n guard let fields = structType.getNominalFields(in: firstStore.parentFunction) else {\n return nil\n }\n let numElements = fields.count\n var elementStores = Array<StoreInst?>(repeating: nil, count: numElements)\n var numStoresFound = 0\n\n for inst in InstructionList(first: firstStore) {\n switch inst {\n case let store as StoreInst:\n guard store.storeOwnership == .trivial,\n let sea = store.destination as? StructElementAddrInst,\n sea.struct == structAddr,\n // Multiple stores to the same element?\n elementStores[sea.fieldIndex] == nil else {\n return nil\n }\n\n elementStores[sea.fieldIndex] = store\n numStoresFound += 1\n if numStoresFound == numElements {\n // If we saw `numElements` distinct stores, it implies that all elements in `elementStores` are not nil.\n return (elementStores.map { $0! }, lastStore: store)\n }\n default:\n if inst.mayReadOrWriteMemory {\n return nil\n }\n }\n }\n return nil\n}\n\nprivate func merge(elementStores: [StoreInst], lastStore: StoreInst, _ context: FunctionPassContext) {\n let builder = Builder(after: lastStore, context)\n\n let structAddr = (lastStore.destination as! StructElementAddrInst).struct\n let str = builder.createStruct(type: structAddr.type.objectType, elements: elementStores.map { $0.source })\n builder.createStore(source: str, destination: structAddr, ownership: lastStore.storeOwnership)\n\n for store in elementStores {\n let destAddr = store.destination as! 
StructElementAddrInst\n context.erase(instruction: store)\n if destAddr.uses.isEmpty {\n context.erase(instruction: destAddr)\n }\n }\n}\n\nprivate extension InstructionSet {\n mutating func insertAllAddressUses(of value: Value) {\n for use in value.uses {\n if insert(use.instruction) {\n for result in use.instruction.results where result.type.isAddress {\n insertAllAddressUses(of: result)\n }\n }\n }\n }\n}\n
dataset_sample\swift\swift\cpp_apple_swift_SwiftCompilerSources_Sources_Optimizer_FunctionPasses_InitializeStaticGlobals.swift
cpp_apple_swift_SwiftCompilerSources_Sources_Optimizer_FunctionPasses_InitializeStaticGlobals.swift
Swift
13,302
0.95
0.157609
0.312312
python-kit
16
2025-02-20T04:01:05.188469
BSD-3-Clause
false
3513cec5a7411b3ff43a01f2198b8071
//===--- LetPropertyLowering.swift -----------------------------------------==//\n//\n// This source file is part of the Swift.org open source project\n//\n// Copyright (c) 2014 - 2023 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n\nimport SIL\n\n/// Lowers let property accesses of classes.\n///\n/// Lowering consists of two tasks:\n///\n/// * In class initializers, insert `end_init_let_ref` instructions at places where all let-fields are initialized.\n/// This strictly separates the life-range of the class into a region where let fields are still written during\n/// initialization and a region where let fields are truly immutable.\n///\n/// * Add the `[immutable]` flag to all `ref_element_addr` instructions (for let-fields) which are in the "immutable"\n/// region. This includes the region after an inserted `end_init_let_ref` in an class initializer, but also all\n/// let-field accesses in other functions than the initializer and the destructor.\n///\n/// This pass should run after DefiniteInitialization but before RawSILInstLowering (because it relies on\n/// `mark_uninitialized` still present in the class initializer).\n///\n/// Note that it's not mandatory to run this pass. 
If it doesn't run, SIL is still correct.\n///\n/// Simplified example (after lowering):\n///\n/// bb0(%0 : @owned C): // = self of the class initializer\n/// %1 = mark_uninitialized %0\n/// %2 = ref_element_addr %1, #C.l // a let-field\n/// store %init_value to %2\n/// %3 = end_init_let_ref %1 // inserted by lowering\n/// %4 = ref_element_addr [immutable] %3, #C.l // set to immutable by lowering\n/// %5 = load %4\n///\nlet letPropertyLowering = FunctionPass(name: "let-property-lowering") {\n (function: Function, context: FunctionPassContext) in\n\n assert(context.silStage == .raw, "let-property-lowering must run before RawSILInstLowering")\n\n if context.hadError {\n // If DefiniteInitialization (or other passes) already reported an error, we cannot assume valid SIL anymore.\n return\n }\n\n if function.isDestructor {\n // Let-fields are not immutable in the class destructor.\n return\n }\n\n for inst in function.instructions {\n switch inst {\n\n // First task of lowering: insert `end_init_let_ref` instructions in class initializers.\n case let markUninitialized as MarkUninitializedInst\n where markUninitialized.type.isClass &&\n // TODO: support move-only classes\n !markUninitialized.type.isMoveOnly &&\n // We only have to do that for root classes because derived classes call the super-initializer\n // _after_ all fields in the derived class are already initialized.\n markUninitialized.kind == .rootSelf:\n\n insertEndInitInstructions(for: markUninitialized, context)\n\n // Second task of lowering: set the `immutable` flags.\n case let rea as RefElementAddrInst\n where rea.fieldIsLet && !rea.isInUninitializedRegion &&\n // TODO: support move-only classes\n !rea.instance.type.isMoveOnly:\n rea.set(isImmutable: true, context)\n\n default:\n break\n }\n }\n}\n\nprivate func insertEndInitInstructions(for markUninitialized: MarkUninitializedInst, _ context: FunctionPassContext) {\n assert(!markUninitialized.type.isAddress, "self of class should not be an address")\n\n 
// The region which contains all let-field initializations, including any partial\n // let-field de-initializations (in case of a fail-able or throwing initializer).\n var initRegion = InstructionRange(begin: markUninitialized, context)\n defer { initRegion.deinitialize() }\n\n constructLetInitRegion(of: markUninitialized, result: &initRegion, context)\n\n insertEndInitInstructions(for: markUninitialized, atEndOf: initRegion, context)\n}\n\nprivate func insertEndInitInstructions(\n for markUninitialized: MarkUninitializedInst,\n atEndOf initRegion: InstructionRange,\n _ context: FunctionPassContext\n) {\n var ssaUpdater = SSAUpdater(function: markUninitialized.parentFunction,\n type: markUninitialized.type, ownership: .owned, context)\n ssaUpdater.addAvailableValue(markUninitialized, in: markUninitialized.parentBlock)\n\n for endInst in initRegion.ends {\n let builder = Builder(after: endInst, context)\n let newValue = builder.createEndInitLetRef(operand: markUninitialized)\n ssaUpdater.addAvailableValue(newValue, in: endInst.parentBlock)\n }\n\n for exitInst in initRegion.exits {\n let builder = Builder(before: exitInst, context)\n let newValue = builder.createEndInitLetRef(operand: markUninitialized)\n ssaUpdater.addAvailableValue(newValue, in: exitInst.parentBlock)\n }\n\n for use in markUninitialized.uses {\n if !initRegion.inclusiveRangeContains(use.instruction) &&\n !(use.instruction is EndInitLetRefInst)\n {\n use.set(to: ssaUpdater.getValue(atEndOf: use.instruction.parentBlock), context)\n }\n }\n}\n\nprivate func constructLetInitRegion(\n of markUninitialized: MarkUninitializedInst,\n result initRegion: inout InstructionRange,\n _ context: FunctionPassContext\n) {\n // Adding the initial `mark_uninitialized` ensures that a single `end_init_let_ref` is inserted (after the\n // `mark_uninitialized`) in case there are no let-field accesses at all.\n // Note that we have to insert an `end_init_let_ref` even if there are no let-field initializations, because\n 
// derived classes could have let-field initializations in their initializers (which eventually call the\n // root-class initializer).\n initRegion.insert(markUninitialized)\n\n var borrows = Stack<BorrowIntroducingInstruction>(context)\n defer { borrows.deinitialize() }\n\n for inst in markUninitialized.parentFunction.instructions {\n switch inst {\n case let assign as AssignInst\n where assign.destination.isLetFieldAddress(of: markUninitialized):\n assert(assign.assignOwnership == .initialize)\n initRegion.insert(inst)\n\n case let store as StoreInst\n where store.destination.isLetFieldAddress(of: markUninitialized):\n assert(store.storeOwnership != .assign)\n initRegion.insert(inst)\n\n case let copy as CopyAddrInst\n where copy.destination.isLetFieldAddress(of: markUninitialized):\n assert(copy.isInitializationOfDest)\n initRegion.insert(inst)\n\n case let beginAccess as BeginAccessInst\n where beginAccess.accessKind == .deinit &&\n beginAccess.address.isLetFieldAddress(of: markUninitialized):\n // Include let-field partial de-initializations in the region.\n initRegion.insert(inst)\n\n case let beginBorrow as BeginBorrowInst\n where beginBorrow.borrowedValue.isReferenceDerived(from: markUninitialized):\n borrows.append(beginBorrow)\n\n case let storeBorrow as StoreBorrowInst\n where storeBorrow.source.isReferenceDerived(from: markUninitialized):\n borrows.append(storeBorrow)\n\n default:\n break\n }\n }\n\n // Extend the region to whole borrow scopes to avoid that we insert an `end_init_let_ref` in the\n // middle of a borrow scope.\n for borrow in borrows where initRegion.contains(borrow) {\n initRegion.insert(borrowScopeOf: borrow, context)\n }\n}\n\nprivate extension RefElementAddrInst {\n var isInUninitializedRegion: Bool {\n var root = self.instance\n while true {\n switch root {\n case let beginBorrow as BeginBorrowInst:\n root = beginBorrow.borrowedValue\n case let loadBorrow as LoadBorrowInst:\n // Initializers of derived classes store `self` into a 
stack location from where\n // it's loaded via a `load_borrow`.\n root = loadBorrow.address\n case is MarkUninitializedInst:\n return true\n default:\n return false\n }\n }\n }\n}\n\nprivate extension Value {\n func isReferenceDerived(from root: Value) -> Bool {\n var parent: Value = self\n while true {\n if parent == root {\n return true\n }\n if let operand = parent.forwardingInstruction?.singleForwardedOperand {\n parent = operand.value\n continue\n }\n if let transition = parent.definingInstruction as? OwnershipTransitionInstruction {\n parent = transition.operand.value\n continue\n }\n return false\n }\n }\n\n func isLetFieldAddress(of markUninitialized: MarkUninitializedInst) -> Bool {\n if case .class(let rea) = self.accessBase,\n rea.fieldIsLet,\n rea.instance.isReferenceDerived(from: markUninitialized)\n {\n return true\n }\n return false\n }\n}\n
dataset_sample\swift\swift\cpp_apple_swift_SwiftCompilerSources_Sources_Optimizer_FunctionPasses_LetPropertyLowering.swift
cpp_apple_swift_SwiftCompilerSources_Sources_Optimizer_FunctionPasses_LetPropertyLowering.swift
Swift
8,837
0.95
0.180258
0.287129
react-lib
255
2024-04-06T20:21:11.430004
Apache-2.0
false
8cdad540436c704f7ecfe286cf91638c
//===--- LifetimeDependenceDiagnostics.swift - Lifetime dependence --------===//\n//\n// This source file is part of the Swift.org open source project\n//\n// Copyright (c) 2014 - 2025 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n///\n/// Pass dependencies:\n///\n/// - After MoveOnly checking fixes non-Copyable lifetimes.\n///\n/// - Before MoveOnlyTypeEliminator removes ownership operations on trivial types, which loses variable information\n/// required for diagnostics.\n///\n//===----------------------------------------------------------------------===//\n\nimport AST\nimport SIL\n\nprivate let verbose = false\n\nprivate func log(prefix: Bool = true, _ message: @autoclosure () -> String) {\n if verbose {\n debugLog(prefix: prefix, message())\n }\n}\n\n/// Diagnostic pass.\n///\n/// Find the roots of all non-escapable values in this function. All\n/// non-escapable values either depend on a NonEscapingScope, or they\n/// are produced by a LifetimeDependentInstruction that has no\n/// dependence on a parent value (@_unsafeNonEscapableResult).\nlet lifetimeDependenceDiagnosticsPass = FunctionPass(\n name: "lifetime-dependence-diagnostics")\n{ (function: Function, context: FunctionPassContext) in\n log(prefix: false, "\n--- Diagnosing lifetime dependence in \(function.name)")\n log("\(function)")\n log("\(function.convention)")\n\n for argument in function.arguments\n where !argument.type.isEscapable(in: function)\n {\n // Indirect results are not checked here. 
Type checking ensures\n // that they have a lifetime dependence.\n if let lifetimeDep = LifetimeDependence(argument, context) {\n _ = analyze(dependence: lifetimeDep, context)\n }\n }\n for instruction in function.instructions {\n if let markDep = instruction as? MarkDependenceInst, markDep.isUnresolved {\n if let lifetimeDep = LifetimeDependence(markDep, context) {\n if analyze(dependence: lifetimeDep, context) {\n // Note: This promotes the mark_dependence flag but does not invalidate analyses; preserving analyses is good,\n // although the change won't appear in -sil-print-function. Ideally, we could notify context of a flag change\n // without invalidating analyses.\n lifetimeDep.resolve(context)\n continue\n }\n }\n // For now, if the mark_dependence wasn't recognized as a lifetime dependency, or if the dependencies uses are not\n // in scope, conservatively settle it as escaping. For example, it is not uncommon for the pointer value returned\n // by `unsafeAddress` to outlive its `self` argument. This will not be diagnosed as an error, but the\n // mark_dependence will hanceforth be treated as an unknown use by the optimizer. In the future, we should not\n // need to set this flag during diagnostics because, for escapable types, mark_dependence [unresolved] will all be\n // settled during an early LifetimeNormalization pass.\n markDep.settleToEscaping()\n continue\n }\n if let apply = instruction as? FullApplySite {\n // Handle ~Escapable results that do not have a lifetime dependence. This includes implicit initializers and\n // @_unsafeNonescapableResult.\n apply.resultOrYields.forEach {\n if let lifetimeDep = LifetimeDependence(unsafeApplyResult: $0,\n context) {\n _ = analyze(dependence: lifetimeDep, context)\n }\n }\n continue\n }\n }\n}\n\n/// Analyze a single Lifetime dependence and trigger diagnostics.\n///\n/// 1. Compute the LifetimeDependence scope.\n///\n/// 2. 
Walk down all dependent values checking that they are within range.\n///\n/// Return true on success.\nprivate func analyze(dependence: LifetimeDependence, _ context: FunctionPassContext) -> Bool {\n log("Dependence scope:\n\(dependence)")\n\n if dependence.parentValue.type.objectType.isTrivial(in: dependence.function) {\n // Briefly, some versions of Span in the standard library violated trivial lifetimes; versions of the compiler built\n // at that time simply ignored dependencies on trivial values. For now, disable trivial dependencies to allow newer\n // compilers to build against those older standard libraries. This check is only relevant for ~6 mo (until July\n // 2025).\n if let sourceFileKind = dependence.function.sourceFileKind, sourceFileKind == .interface {\n return true\n }\n }\n\n // Compute this dependence scope.\n var range = dependence.computeRange(context)\n defer { range?.deinitialize() }\n\n var error = false\n let diagnostics =\n DiagnoseDependence(dependence: dependence, range: range,\n onError: { error = true }, context: context)\n\n // Check each lifetime-dependent use via a def-use visitor\n var walker = DiagnoseDependenceWalker(diagnostics, context)\n defer { walker.deinitialize() }\n let result = walker.walkDown(dependence: dependence)\n // The walk may abort without a diagnostic error.\n assert(!error || result == .abortWalk)\n return result == .continueWalk\n}\n\n/// Analyze and diagnose a single LifetimeDependence.\nprivate struct DiagnoseDependence {\n let dependence: LifetimeDependence\n let range: InstructionRange?\n let onError: ()->()\n let context: FunctionPassContext\n\n var function: Function { dependence.function }\n\n func diagnose(_ position: SourceLoc?, _ id: DiagID,\n _ args: DiagnosticArgument...) 
{\n context.diagnosticEngine.diagnose(id, args, at: position)\n }\n\n /// Check that this use is inside the dependence scope.\n func checkInScope(operand: Operand) -> WalkResult {\n if let range, !range.inclusiveRangeContains(operand.instruction) {\n log(" out-of-range: \(operand.instruction)")\n reportError(operand: operand, diagID: .lifetime_outside_scope_use)\n return .abortWalk\n }\n log(" contains: \(operand.instruction)")\n return .continueWalk\n }\n\n func reportEscaping(operand: Operand) {\n log(" escaping: \(operand.instruction)")\n reportError(operand: operand, diagID: .lifetime_outside_scope_escape)\n }\n\n func reportUnknown(operand: Operand) {\n log("Unknown use: \(operand)\n\(function)")\n reportEscaping(operand: operand)\n }\n\n func checkInoutResult(argument inoutArg: FunctionArgument) -> WalkResult {\n // Check that the parameter dependence for this inout argument is the same as the current dependence scope.\n if let sourceArg = dependence.scope.parentValue as? FunctionArgument {\n // If the inout result is also the inout source, then it's always ok.\n if inoutArg == sourceArg {\n return .continueWalk\n }\n if function.argumentConventions.getDependence(target: inoutArg.index, source: sourceArg.index) != nil {\n // The inout result depends on a lifetime that is inherited or borrowed in the caller.\n log(" has dependent inout argument: \(inoutArg)")\n return .continueWalk\n }\n }\n return .abortWalk\n }\n\n func checkStoreToYield(address: Value) -> WalkResult {\n var walker = DependentAddressUseDefWalker(context: context, diagnostics: self)\n return walker.walkUp(address: address)\n }\n\n func checkYield(operand: Operand) -> WalkResult {\n switch dependence.scope {\n case .caller:\n return checkFunctionResult(operand: operand)\n default:\n // local scopes can be yielded without escaping.\n return .continueWalk\n }\n }\n\n func checkFunctionResult(operand: Operand) -> WalkResult {\n\n if function.hasUnsafeNonEscapableResult {\n return .continueWalk\n 
}\n // If the dependence scope is global, then it has immortal lifetime.\n if case .global = dependence.scope {\n return .continueWalk\n }\n // Check that the parameter dependence for this result is the same\n // as the current dependence scope.\n if let arg = dependence.scope.parentValue as? FunctionArgument,\n function.argumentConventions[resultDependsOn: arg.index] != nil {\n // The returned value depends on a lifetime that is inherited or\n // borrowed in the caller. The lifetime of the argument value\n // itself is irrelevant here.\n log(" has dependent function result")\n return .continueWalk\n }\n return .abortWalk\n }\n\n func reportError(operand: Operand, diagID: DiagID) {\n // If the dependent value is Escapable, then mark_dependence resolution fails, but this is not a diagnostic error.\n if dependence.dependentValue.isEscapable {\n return\n }\n onError()\n\n // Identify the escaping variable.\n let escapingVar = LifetimeVariable(dependent: operand.value, context)\n let varName = escapingVar.name\n if let varName {\n diagnose(escapingVar.sourceLoc, .lifetime_variable_outside_scope,\n varName)\n } else {\n diagnose(escapingVar.sourceLoc, .lifetime_value_outside_scope)\n }\n reportScope()\n // Identify the use point.\n let userSourceLoc = operand.instruction.location.sourceLoc\n diagnose(userSourceLoc, diagID)\n }\n\n // Identify the dependence scope.\n func reportScope() {\n if case let .access(beginAccess) = dependence.scope {\n let parentVar = LifetimeVariable(dependent: beginAccess, context)\n if let sourceLoc = beginAccess.location.sourceLoc ?? parentVar.sourceLoc {\n diagnose(sourceLoc, .lifetime_outside_scope_access,\n parentVar.name ?? "")\n }\n return\n }\n if let arg = dependence.parentValue as? 
Argument,\n let varDecl = arg.varDecl,\n let sourceLoc = arg.sourceLoc {\n diagnose(sourceLoc, .lifetime_outside_scope_argument,\n varDecl.userFacingName)\n return\n }\n let parentVar = LifetimeVariable(dependent: dependence.parentValue, context)\n if let parentLoc = parentVar.sourceLoc {\n if let parentName = parentVar.name {\n diagnose(parentLoc, .lifetime_outside_scope_variable, parentName)\n } else {\n diagnose(parentLoc, .lifetime_outside_scope_value)\n }\n }\n }\n}\n\n// Identify a best-effort variable declaration based on a defining SIL\n// value or any lifetime dependent use of that SIL value.\nprivate struct LifetimeVariable {\n var varDecl: VarDecl?\n var sourceLoc: SourceLoc?\n \n var name: StringRef? {\n return varDecl?.userFacingName\n }\n\n init(dependent value: Value, _ context: some Context) {\n if value.type.isAddress {\n self = Self(accessBase: value.accessBase, context)\n return\n }\n if let firstIntroducer = getFirstVariableIntroducer(of: value, context) {\n self = Self(introducer: firstIntroducer)\n return\n }\n self.varDecl = nil\n self.sourceLoc = nil\n }\n\n private func getFirstVariableIntroducer(of value: Value, _ context: some Context) -> Value? {\n var introducer: Value?\n var useDefVisitor = VariableIntroducerUseDefWalker(context, scopedValue: value) {\n introducer = $0\n return .abortWalk\n }\n defer { useDefVisitor.deinitialize() }\n _ = useDefVisitor.walkUp(newLifetime: value)\n return introducer\n }\n\n private init(introducer: Value) {\n if let arg = introducer as? Argument {\n self.varDecl = arg.varDecl\n } else {\n self.sourceLoc = introducer.definingInstruction?.location.sourceLoc\n self.varDecl = introducer.definingInstruction?.findVarDecl()\n }\n if let varDecl {\n sourceLoc = varDecl.nameLoc\n }\n }\n\n // Record the source location of the variable decl if possible. 
The\n // caller will already have a source location for the formal access,\n // which is more relevant for diagnostics.\n private init(accessBase: AccessBase, _ context: some Context) {\n switch accessBase {\n case .box(let projectBox):\n // Note: referenceRoot looks through `begin_borrow [var_decl]` and `move_value [var_decl]`. But the box should\n // never be produced by one of these, except when it is redundant with the `alloc_box` VarDecl. It does not seem\n // possible for a box to be moved/borrowed directly into another variable's box. Reassignment always loads/stores\n // the value.\n self = Self(introducer: projectBox.box.referenceRoot)\n case .stack(let allocStack):\n self = Self(introducer: allocStack)\n case .global(let globalVar):\n self.varDecl = globalVar.varDecl\n self.sourceLoc = varDecl?.nameLoc\n case .class(let refAddr):\n self.varDecl = refAddr.varDecl\n self.sourceLoc = refAddr.location.sourceLoc\n case .tail(let refTail):\n self = Self(introducer: refTail.instance)\n case .argument(let arg):\n self.varDecl = arg.varDecl\n self.sourceLoc = arg.sourceLoc\n case .yield(let result):\n // TODO: bridge VarDecl for FunctionConvention.Yields\n self.varDecl = nil\n self.sourceLoc = result.parentInstruction.location.sourceLoc\n case .storeBorrow(let sb):\n self = .init(dependent: sb.source, context)\n case .pointer(let ptrToAddr):\n self.varDecl = nil\n self.sourceLoc = ptrToAddr.location.sourceLoc\n case .index, .unidentified:\n self.varDecl = nil\n self.sourceLoc = nil\n }\n }\n}\n\n/// Walk up an address into which a dependent value has been stored. If any address in the use-def chain is a\n/// mark_dependence, follow the dependence base rather than the forwarded value. If any of the dependence bases in\n/// within the current scope is with (either local checkInoutResult), then storing a value into that address is\n/// nonescaping.\n///\n/// This supports store-to-yield. 
Storing to a yield is an escape unless the yielded memory location depends on another\n/// lifetime that already depends on the current scope. When setter depends on 'newValue', 'newValue' is stored to the\n/// yielded address, and the yielded addrses depends on the lifetime of 'self'. A mark_dependence should have already\n/// been inserted for that lifetime depenence:\n///\n/// (%a, %t) = begin_apply %f(%self)\n/// : $@yield_once @convention(method) (@inout Self) -> _inherit(0) @yields @inout Self.field\n/// %dep = mark_dependence [nonescaping] %yield_addr on %self\n/// store %newValue to [assign] %dep : $*Self.field\n///\nprivate struct DependentAddressUseDefWalker {\n let context: Context\n var diagnostics: DiagnoseDependence\n}\n\nextension DependentAddressUseDefWalker: AddressUseDefWalker {\n // Follow the dependence base, not the forwarded value. Similar to the way LifetimeDependenceUseDefWalker handles\n // MarkDependenceInst.\n mutating func walkUp(address: Value, path: UnusedWalkingPath = UnusedWalkingPath()) -> WalkResult {\n if let markDep = address as? MarkDependenceInst, let addressDep = LifetimeDependence(markDep, context) {\n switch addressDep.scope {\n case let .caller(arg):\n return diagnostics.checkInoutResult(argument: arg)\n case .owned, .initialized:\n // Storing a nonescaping value to local memory cannot escape.\n return .abortWalk\n default:\n break\n }\n }\n return walkUpDefault(address: address, path: UnusedWalkingPath())\n }\n\n mutating func rootDef(address: Value, path: UnusedWalkingPath) -> WalkResult {\n // This only searches for mark_dependence scopes.\n return .continueWalk\n }\n}\n\n/// Walk down lifetime depenence uses. For each check that all dependent\n/// leaf uses are non-escaping and within the dependence scope. The walk\n/// starts with add address for .access dependencies. The walk can\n/// transition from an address to a value at a load. 
The walk can\n/// transition from a value to an address as follows:\n///\n/// %dependent_addr = mark_dependence [nonescaping] %base_addr on %value\n///\n/// TODO: handle stores to singly initialized temporaries like copies using a standard reaching-def analysis.\nprivate struct DiagnoseDependenceWalker {\n let context: Context\n var diagnostics: DiagnoseDependence\n let localReachabilityCache = LocalVariableReachabilityCache()\n var visitedValues: ValueSet\n\n var function: Function { diagnostics.function }\n \n init(_ diagnostics: DiagnoseDependence, _ context: Context) {\n self.context = context\n self.diagnostics = diagnostics\n self.visitedValues = ValueSet(context)\n }\n \n mutating func deinitialize() {\n visitedValues.deinitialize()\n }\n}\n\nextension DiagnoseDependenceWalker : LifetimeDependenceDefUseWalker {\n mutating func needWalk(for value: Value) -> Bool {\n visitedValues.insert(value)\n }\n\n mutating func leafUse(of operand: Operand) -> WalkResult {\n return diagnostics.checkInScope(operand: operand)\n }\n\n mutating func deadValue(_ value: Value, using operand: Operand?)\n -> WalkResult {\n // Ignore a dead root value. 
It never escapes.\n if let operand {\n return diagnostics.checkInScope(operand: operand)\n }\n return .continueWalk\n }\n\n mutating func escapingDependence(on operand: Operand) -> WalkResult {\n diagnostics.reportEscaping(operand: operand)\n return .abortWalk\n }\n\n mutating func inoutDependence(argument: FunctionArgument, on operand: Operand) -> WalkResult {\n if diagnostics.checkInoutResult(argument: argument) == .abortWalk {\n diagnostics.reportEscaping(operand: operand)\n return .abortWalk\n }\n return .continueWalk\n }\n\n mutating func returnedDependence(result: Operand) -> WalkResult {\n if diagnostics.checkFunctionResult(operand: result) == .abortWalk {\n diagnostics.reportEscaping(operand: result)\n return .abortWalk\n }\n return .continueWalk\n }\n\n mutating func returnedDependence(address: FunctionArgument,\n on operand: Operand) -> WalkResult {\n if diagnostics.checkFunctionResult(operand: operand) == .abortWalk {\n diagnostics.reportEscaping(operand: operand)\n return .abortWalk\n }\n return .continueWalk\n }\n\n mutating func yieldedDependence(result: Operand) -> WalkResult {\n if diagnostics.checkYield(operand: result) == .abortWalk {\n diagnostics.reportEscaping(operand: result)\n return .abortWalk\n }\n return .continueWalk\n }\n\n mutating func storeToYieldDependence(address: Value, of operand: Operand) -> WalkResult {\n if diagnostics.checkStoreToYield(address: address) == .abortWalk {\n diagnostics.reportEscaping(operand: operand)\n return .abortWalk\n }\n return .continueWalk\n }\n\n // Override AddressUseVisitor here because LifetimeDependenceDefUseWalker\n // returns .abortWalk, and we want a more useful crash report.\n mutating func unknownAddressUse(of operand: Operand) -> WalkResult {\n diagnostics.reportUnknown(operand: operand)\n return .continueWalk\n }\n}\n
dataset_sample\swift\swift\cpp_apple_swift_SwiftCompilerSources_Sources_Optimizer_FunctionPasses_LifetimeDependenceDiagnostics.swift
cpp_apple_swift_SwiftCompilerSources_Sources_Optimizer_FunctionPasses_LifetimeDependenceDiagnostics.swift
Swift
18,705
0.95
0.166329
0.246637
react-lib
609
2023-11-04T17:40:17.266222
BSD-3-Clause
false
46b2570480234756a211f69223102919
//===--- LifetimeDependenceInsertion.swift - insert lifetime dependence ---===//\n//\n// This source file is part of the Swift.org open source project\n//\n// Copyright (c) 2014 - 2025 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n///\n/// Insert mark_dependence [nonescaping] markers on the owned returned\n/// or yielded value of a call whose return type is non-escaping.\n///\n/// Pass dependencies: This must run as a SILGen cleanup pass before\n/// any lifetime canonicalization or optimization can be performed.\n///\n//===----------------------------------------------------------------------===//\n\nimport SIL\n\nprivate let verbose = false\n\nprivate func log(prefix: Bool = true, _ message: @autoclosure () -> String) {\n if verbose {\n debugLog(prefix: prefix, message())\n }\n}\n\nlet lifetimeDependenceInsertionPass = FunctionPass(\n name: "lifetime-dependence-insertion")\n{ (function: Function, context: FunctionPassContext) in\n log(prefix: false, "\n--- Inserting lifetime dependence markers in \(function.name)")\n\n for instruction in function.instructions {\n if let dependentApply = LifetimeDependentApply(instruction) {\n for operand in dependentApply.applySite.parameterOperands {\n insertParameterDependencies(apply: dependentApply, target: operand, context)\n }\n insertResultDependencies(for: dependentApply, context)\n }\n }\n}\n\n/// An apply that produces a non-escapable value, linking it to a parent value.\nprivate struct LifetimeDependentApply {\n let applySite: FullApplySite\n\n init?(_ instruction: Instruction) {\n guard let apply = instruction as? 
FullApplySite else {\n return nil\n }\n if !apply.hasLifetimeDependence {\n return nil\n }\n self.applySite = apply\n }\n\n init?(withResult value: Value) {\n switch value {\n case let apply as ApplyInst:\n if let dependentApply = LifetimeDependentApply(apply) {\n self = dependentApply\n }\n case let arg as Argument:\n guard let termResult = TerminatorResult(arg) else { return nil }\n switch termResult.terminator {\n case let ta as TryApplyInst:\n if termResult.successor == ta.errorBlock {\n if let dependentApply = LifetimeDependentApply(ta) {\n self = dependentApply\n }\n }\n default:\n break\n }\n default:\n break\n }\n return nil\n }\n}\n\nextension LifetimeDependentApply {\n enum TargetKind {\n case result\n case inParameter\n case inoutParameter\n case yield\n case yieldAddress\n }\n \n /// A lifetime argument that either inherits or creates a new scope for the lifetime of the argument value.\n struct LifetimeSource {\n let targetKind: TargetKind\n let convention: LifetimeDependenceConvention\n let value: Value\n }\n\n /// List of lifetime dependencies for a single target.\n struct LifetimeSourceInfo {\n var sources = SingleInlineArray<LifetimeSource>()\n var bases = [Value]()\n }\n\n func getResultDependenceSources() -> LifetimeSourceInfo? {\n guard applySite.hasResultDependence else {\n return nil\n }\n var info = LifetimeSourceInfo()\n if let beginApply = applySite as? BeginApplyInst {\n return getYieldDependenceSources(beginApply: beginApply)\n }\n for operand in applySite.parameterOperands {\n guard let dep = applySite.resultDependence(on: operand) else {\n continue\n }\n info.sources.push(LifetimeSource(targetKind: .result, convention: dep, value: operand.value))\n }\n return info\n }\n\n func getYieldDependenceSources(beginApply: BeginApplyInst) -> LifetimeSourceInfo? 
{\n var info = LifetimeSourceInfo()\n let hasScopedYield = applySite.parameterOperands.contains {\n if let dep = applySite.resultDependence(on: $0) {\n return dep.isScoped\n }\n return false\n }\n if hasScopedYield {\n // for consistency, we use yieldAddress if any yielded value is an address.\n let targetKind = beginApply.yieldedValues.contains(where: { $0.type.isAddress })\n ? TargetKind.yieldAddress : TargetKind.yield\n info.sources.push(LifetimeSource(targetKind: targetKind,\n convention: .scope(addressable: false, addressableForDeps: false),\n value: beginApply.token))\n }\n for operand in applySite.parameterOperands {\n guard let dep = applySite.resultDependence(on: operand) else {\n continue\n }\n switch dep {\n case .inherit:\n continue\n case .scope:\n for yieldedValue in beginApply.yieldedValues {\n let targetKind = yieldedValue.type.isAddress ? TargetKind.yieldAddress : TargetKind.yield\n info.sources.push(LifetimeSource(targetKind: targetKind, convention: .inherit, value: operand.value))\n }\n }\n }\n return info\n }\n\n func getParameterDependenceSources(target: Operand) -> LifetimeSourceInfo? 
{\n guard let deps = applySite.parameterDependencies(target: target) else {\n return nil\n }\n var info = LifetimeSourceInfo()\n let targetKind = {\n let convention = applySite.convention(of: target)!\n switch convention {\n case .indirectInout, .indirectInoutAliasable, .packInout:\n return TargetKind.inoutParameter\n case .indirectIn, .indirectInGuaranteed, .indirectInCXX, .directOwned, .directUnowned, .directGuaranteed,\n .packOwned, .packGuaranteed:\n return TargetKind.inParameter\n case .indirectOut, .packOut:\n debugLog("\(applySite)")\n fatalError("Lifetime dependencies cannot target \(convention) parameter")\n }\n }()\n for (dep, operand) in zip(deps, applySite.parameterOperands) {\n guard let dep = dep else {\n continue\n }\n info.sources.push(LifetimeSource(targetKind: targetKind, convention: dep, value: operand.value))\n }\n return info\n }\n}\n\nprivate extension LifetimeDependentApply.LifetimeSourceInfo {\n mutating func initializeBases(_ context: FunctionPassContext) {\n for source in sources {\n // Inherited dependencies do not require a mark_dependence if the target is a result or yielded value. The\n // inherited lifetime is nonescapable, so either\n //\n // (a) the result or yield is never returned from this function\n //\n // (b) the inherited lifetime has a dependence root within this function (it comes from a dependent function\n // argument or scoped dependence). In this case, when that depedence root is diagnosed, the analysis will find\n // transtive uses of this apply's result.\n //\n // (c) the dependent value is passed to another call with a dependent inout argument, or it is stored to a yielded\n // address of a coroutine that has a dependent inout argument. 
In this case, a mark_dependence will already be\n // created for that inout argument.\n switch source.convention {\n case .inherit:\n break\n case .scope:\n initializeScopedBases(source: source, context)\n }\n }\n }\n\n // Scoped dependencies require a mark_dependence for every variable that introduces this scope.\n mutating func initializeScopedBases(source: LifetimeDependentApply.LifetimeSource, _ context: FunctionPassContext) {\n switch source.targetKind {\n case .yield, .yieldAddress:\n // A coroutine creates its own borrow scope, nested within its borrowed operand.\n bases.append(source.value)\n case .result, .inParameter, .inoutParameter:\n // addressable dependencies directly depend on the incoming address.\n if context.options.enableAddressDependencies() && source.convention.isAddressable(for: source.value) {\n bases.append(source.value)\n return\n }\n // Create a new dependence on the apply's access to the argument.\n for varIntoducer in gatherVariableIntroducers(for: source.value, context) {\n let scope = LifetimeDependence.Scope(base: varIntoducer, context)\n log("Scoped lifetime from \(source.value)")\n log(" scope: \(scope)")\n bases.append(scope.parentValue)\n }\n }\n }\n}\n\n/// If the result of this apply depends on the scope of one or more\n/// arguments, then insert a mark_dependence [unresolved] from the\n/// result on each argument so that the result is recognized as a\n/// dependent value within each scope.\nprivate func insertResultDependencies(for apply: LifetimeDependentApply, _ context: FunctionPassContext ) {\n guard var sources = apply.getResultDependenceSources() else {\n return\n }\n log("Creating result dependencies for \(apply.applySite)")\n\n // Find the dependence base for each source.\n sources.initializeBases(context)\n\n for dependentValue in apply.applySite.resultOrYields {\n let builder = Builder(before: dependentValue.nextInstruction, context)\n insertMarkDependencies(value: dependentValue, initializer: nil, bases: 
sources.bases, builder: builder, context)\n }\n for resultOper in apply.applySite.indirectResultOperands {\n let accessBase = resultOper.value.accessBase\n guard case let .store(initializingStore, initialAddress) = accessBase.findSingleInitializer(context) else {\n continue\n }\n assert(initializingStore == resultOper.instruction, "an indirect result is a store")\n Builder.insert(after: apply.applySite, context) { builder in\n insertMarkDependencies(value: initialAddress, initializer: initializingStore, bases: sources.bases,\n builder: builder, context)\n }\n }\n}\n\nprivate func insertParameterDependencies(apply: LifetimeDependentApply, target: Operand,\n _ context: FunctionPassContext ) {\n guard var sources = apply.getParameterDependenceSources(target: target) else {\n return\n }\n log("Creating parameter dependencies for \(apply.applySite)")\n\n sources.initializeBases(context)\n\n assert(target.value.type.isAddress,\n "lifetime-dependent parameter must be 'inout'")\n\n Builder.insert(after: apply.applySite, context) {\n insertMarkDependencies(value: target.value, initializer: nil, bases: sources.bases, builder: $0, context)\n }\n}\n\nprivate func insertMarkDependencies(value: Value, initializer: Instruction?,\n bases: [Value], builder: Builder,\n _ context: FunctionPassContext) {\n var currentValue = value\n for base in bases {\n if value.type.isAddress {\n // Address dependencies cannot be represented as SSA values, so it does not make sense to replace any uses of the\n // dependent address.\n _ = builder.createMarkDependenceAddr(value: currentValue, base: base, kind: .Unresolved)\n continue\n }\n let markDep = builder.createMarkDependence(value: currentValue, base: base, kind: .Unresolved)\n let uses = currentValue.uses.lazy.filter {\n if $0.isScopeEndingUse {\n return false\n }\n let inst = $0.instruction\n return inst != markDep && inst != initializer && !(inst is Deallocation)\n }\n uses.replaceAll(with: markDep, context)\n currentValue = markDep\n 
}\n}\n\n/// Walk up the value dependence chain to find the best-effort variable declaration. Typically called while diagnosing\n/// an error.\n///\n/// Returns an array with at least one introducer value.\n///\n/// The walk stops at:\n/// - a variable declaration (begin_borrow [var_decl], move_value [var_decl])\n/// - a begin_access for a mutable variable access\n/// - the value or address "root" of the dependence chain\nfunc gatherVariableIntroducers(for value: Value, _ context: Context)\n -> SingleInlineArray<Value>\n{\n var introducers = SingleInlineArray<Value>()\n var useDefVisitor = VariableIntroducerUseDefWalker(context, scopedValue: value) {\n introducers.push($0)\n return .continueWalk\n }\n defer { useDefVisitor.deinitialize() }\n _ = useDefVisitor.walkUp(newLifetime: value)\n assert(!introducers.isEmpty, "missing variable introducer")\n return introducers\n}\n\n// =============================================================================\n// VariableIntroducerUseDefWalker - upward walk\n// =============================================================================\n\n/// Walk up lifetime dependencies to the first value associated with a variable declaration.\n///\n/// To start walking:\n/// walkUp(newLifetime: Value) -> WalkResult\n///\n/// This utility finds the value or address associated with the lvalue (variable declaration) that is passed as the\n/// source of a lifetime dependent argument. If no lvalue is found, then it finds the "root" of the chain of temporary\n/// rvalues.\n///\n/// This "looks through" projections: a property that is either visible as a stored property or access via\n/// unsafe[Mutable]Address.\n///\n/// dependsOn(lvalue.field) // finds 'lvalue' when 'field' is a stored property\n///\n/// dependsOn(lvalue.computed) // finds the temporary value directly returned by a getter.\n///\n/// SILGen emits temporary copies that violate lifetime dependence semantcs. 
This utility looks through such temporary\n/// copies, stopping at a value that introduces an immutable variable: move_value [var_decl] or begin_borrow [var_decl],\n/// or at an access of a mutable variable: begin_access [read] or begin_access [modify].\n///\n/// In this example, the dependence "root" is copied, borrowed, and forwarded before being used as the base operand of\n/// `mark_dependence`. The dependence "root" is the parent of the outer-most dependence scope.\n///\n/// %root = apply // lifetime dependence root\n/// %copy = copy_value %root\n/// %parent = begin_borrow %copy // lifetime dependence parent value\n/// %base = struct_extract %parent // lifetime dependence base value\n/// %dependent = mark_dependence [nonescaping] %value on %base\n///\n/// VariableIntroducerUseDefWalker extends the ForwardingUseDefWalker to follow copies, moves, and\n/// borrows. ForwardingUseDefWalker treats these as forward-extended lifetime introducers. But they inherit a lifetime\n/// dependency from their operand because non-escapable values can be copied, moved, and borrowed. Nonetheless, all of\n/// their uses must remain within original dependence scope.\n///\n/// # owned lifetime dependence\n/// %parent = apply // begin dependence scope -+\n/// ... |\n/// %1 = mark_dependence [nonescaping] %value on %parent |\n/// ... |\n/// %2 = copy_value %1 -+ |\n/// # forwarding instruction | |\n/// %3 = struct $S (%2) | forward-extended lifetime |\n/// | | OSSA Lifetime\n/// %4 = move_value %3 -+ |\n/// ... | forward-extended lifetime |\n/// %5 = begin_borrow %4 | -+ |\n/// # dependent use of %1 | | forward-extended lifetime|\n/// end_borrow %5 | -+ |\n/// destroy_value %4 -+ |\n/// ... |\n/// destroy_value %parent // end dependence scope -+\n///\n/// All of the dependent uses including `end_borrow %5` and `destroy_value %4` must be before the end of the dependence\n/// scope: `destroy_value %parent`. 
In this case, the dependence parent is an owned value, so the scope is simply the\n/// value's OSSA lifetime.\nstruct VariableIntroducerUseDefWalker : LifetimeDependenceUseDefValueWalker, LifetimeDependenceUseDefAddressWalker {\n let context: Context\n\n // If the scoped value is trivial, then only the variable's lexical scope is relevant, and access scopes can be\n // ignored.\n let isTrivialScope: Bool\n\n // This visited set is only really needed for instructions with\n // multiple results, including phis.\n private var visitedValues: ValueSet\n\n // Call \p visit rather than calling this directly.\n private let visitorClosure: (Value) -> WalkResult\n\n init(_ context: Context, scopedValue: Value, _ visitor: @escaping (Value) -> WalkResult) {\n self.context = context\n self.isTrivialScope = scopedValue.type.isAddress\n ? scopedValue.type.objectType.isTrivial(in: scopedValue.parentFunction)\n : scopedValue.isTrivial(context)\n self.visitedValues = ValueSet(context)\n self.visitorClosure = visitor\n }\n\n mutating func deinitialize() {\n visitedValues.deinitialize()\n }\n \n mutating func introducer(_ value: Value, _ owner: Value?) -> WalkResult {\n return visitorClosure(value)\n }\n\n mutating func addressIntroducer(_ address: Value, access: AccessBaseAndScopes) -> WalkResult {\n return visitorClosure(address)\n }\n\n mutating func needWalk(for value: Value, _ owner: Value?) -> Bool {\n visitedValues.insert(value)\n }\n\n mutating func walkUp(newLifetime: Value) -> WalkResult {\n if newLifetime.type.isAddress {\n return walkUp(address: newLifetime)\n }\n let newOwner = newLifetime.ownership == .owned ? newLifetime : nil\n return walkUp(value: newLifetime, newOwner)\n }\n\n /// Override to check for variable introducers: move_value, begin_value, before following\n /// OwnershipTransitionInstruction.\n mutating func walkUp(value: Value, _ owner: Value?) 
-> WalkResult {\n if let inst = value.definingInstruction, VariableScopeInstruction(inst) != nil {\n return visitorClosure(value)\n }\n return walkUpDefault(value: value, owner)\n }\n\n /// Override to check for on-stack variables before following an initializer.\n mutating func walkUp(address: Value, access: AccessBaseAndScopes) -> WalkResult {\n // Check for stack locations that correspond to an lvalue.\n if case let .stack(allocStack) = access.base {\n if allocStack.varDecl != nil {\n // Report this variable's innermmost access scope.\n return addressIntroducer(access.enclosingAccess.address ?? address, access: access)\n }\n }\n return walkUpDefault(address: address, access: access)\n }\n}\n\nlet variableIntroducerTest = FunctionTest("variable_introducer") {\n function, arguments, context in\n let value = arguments.takeValue()\n print("Variable introducers of: \(value)")\n print(gatherVariableIntroducers(for: value, context))\n}\n
dataset_sample\swift\swift\cpp_apple_swift_SwiftCompilerSources_Sources_Optimizer_FunctionPasses_LifetimeDependenceInsertion.swift
cpp_apple_swift_SwiftCompilerSources_Sources_Optimizer_FunctionPasses_LifetimeDependenceInsertion.swift
Swift
18,560
0.95
0.141612
0.292857
vue-tools
521
2023-12-04T05:07:51.329614
BSD-3-Clause
false
4ab90c42179122c1cb32b539117508fe
//===--- LifetimeDependenceScopeFixup.swift ----------------------------===//\n//\n// This source file is part of the Swift.org open source project\n//\n// Copyright (c) 2014 - 2025 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===---------------------------------------------------------------------===//\n///\n/// LifetimeDependenceScopeFixup pass dependencies:\n///\n/// - must run after OSSA lifetime completion (and before invalidation)\n///\n/// - must run after LifetimeDependenceInsertion\n///\n/// - must run before LifetimeDependenceDiagnostics\n///\n/// Step 1. LifetimeDependenceInsertion inserts 'mark_dependence [unresolved]' instructions for applies that return a\n/// lifetime dependent value.\n///\n/// Step 2. LifetimeDependenceScopeFixup visits each 'mark_dependence [unresolved]'. If the dependence base is an access\n/// scope, then it extends the access and any parent accesses to cover all uses of the dependent value.\n///\n/// Step 3. DiagnoseStaticExclusivity diagnoses an error for any overlapping access scopes. We prefer to diagnose a\n/// static exclusivity violation over a escaping violation. LifetimeDependenceScopeFixup is, therefore, allowed to\n/// create overlapping access scopes.\n///\n/// Step 4. 
LifetimeDependenceDiagnostics visits each 'mark_dependence [unresolved]' again and will report a violation\n/// for any dependent use that was not covered by the access scope.\n///\n/// This is conceptually a SILGen cleanup pass, because lifetime dependencies are invalid before it runs.\n///\n//===---------------------------------------------------------------------===//\n\nimport SIL\n\nprivate let verbose = false\n\nprivate func log(prefix: Bool = true, _ message: @autoclosure () -> String) {\n if verbose {\n debugLog(prefix: prefix, message())\n }\n}\n\n/// LifetimeDependenceScopeFixup visits each mark_dependence [unresolved]. It finds the access scope of the dependence\n/// base and extends it to cover the dependent uses.\n///\n/// If the base's access scope ends before a dependent use:\n///\n/// %dependentVal = mark_dependence [unresolved] %v on %innerAccess\n/// end_access %innerAccess\n/// apply %f(%dependentVal)\n///\n/// Then sink the end_access:\n///\n/// %dependentVal = mark_dependence [unresolved] %v on %innerAccess\n/// end_access %innerAccess\n/// apply %f(%dependentVal)\n///\n/// Recursively extend all enclosing access scopes up to an owned value or function argument. If the inner dependence is\n/// on a borrow scope, extend it first:\n///\n/// %outerAccess = begin_access %base\n/// %innerAccess = begin_access %outerAccess\n/// %innerBorrow = begin_borrow [var_decl] %innerAccess\n/// %dependentVal = mark_dependence [unresolved] %v on %innerBorrow\n/// end_borrow %innerBorrow\n/// end_access %innerAccess\n/// end_access %outerAccess\n/// apply %f(%dependentVal)\n///\n/// Is rewritten as:\n///\n/// apply %f(%dependentVal)\n/// end_borrow %innerBorrow\n/// end_access %innerAccess\n/// end_access %outerAccess\n///\n/// If the borrow scope is not marked [var_decl], then it has no meaningful scope for diagnostics. 
Rather than extending\n/// such scope, could redirect the dependence base to its operand:\n///\n/// %dependentVal = mark_dependence [unresolved] %v on %innerAccess\n///\n/// If a dependent use is on a function return:\n///\n/// sil @f $(@inout) -> () {\n/// bb0(%0: $*T)\n/// %outerAccess = begin_access [modify] %0\n/// %innerAccess = begin_access %outerAccess\n/// %dependentVal = mark_dependence [unresolved] %v on %innerAccess\n/// end_access %innerAccess\n/// end_access %outerAccess\n/// return %dependentVal\n///\n/// Then rewrite the mark_dependence base operand to a function argument:\n///\n/// %dependentVal = mark_dependence [unresolved] %v on %0\n///\nlet lifetimeDependenceScopeFixupPass = FunctionPass(\n name: "lifetime-dependence-scope-fixup")\n{ (function: Function, context: FunctionPassContext) in\n log(prefix: false, "\n--- Scope fixup for lifetime dependence in \(function.name)")\n\n let localReachabilityCache = LocalVariableReachabilityCache()\n\n for instruction in function.instructions {\n guard let markDep = instruction as? MarkDependenceInstruction else {\n continue\n }\n guard let innerLifetimeDep = LifetimeDependence(markDep, context) else {\n continue\n }\n // Redirect the dependence base to ignore irrelevant borrow scopes.\n let newLifetimeDep = markDep.rewriteSkippingBorrow(scope: innerLifetimeDep.scope, context)\n\n // Recursively sink enclosing end_access, end_borrow, end_apply, and destroy_value. If the scope can be extended\n // into the caller, return the function arguments that are the dependency sources.\n var scopeExtension = ScopeExtension(localReachabilityCache, context)\n let args = scopeExtension.extendScopes(dependence: newLifetimeDep)\n\n // Redirect the dependence base to the function arguments. 
This may create additional mark_dependence instructions.\n markDep.redirectFunctionReturn(to: args, context)\n }\n}\n\nprivate extension MarkDependenceInstruction {\n /// Rewrite the mark_dependence base operand to ignore inner borrow scopes (begin_borrow, load_borrow).\n ///\n /// Note: this could be done as a general simplification, e.g. after inlining. But currently this is only relevant for\n /// diagnostics.\n func rewriteSkippingBorrow(scope: LifetimeDependence.Scope, _ context: FunctionPassContext) -> LifetimeDependence {\n guard let newScope = scope.ignoreBorrowScope(context) else {\n return LifetimeDependence(scope: scope, markDep: self)!\n }\n let newBase = newScope.parentValue\n if newBase != self.baseOperand.value {\n self.baseOperand.set(to: newBase, context)\n }\n return LifetimeDependence(scope: newScope, markDep: self)!\n }\n\n func redirectFunctionReturn(to args: SingleInlineArray<FunctionArgument>, _ context: FunctionPassContext) {\n var updatedMarkDep: MarkDependenceInstruction?\n for arg in args {\n guard let currentMarkDep = updatedMarkDep else {\n self.baseOperand.set(to: arg, context)\n updatedMarkDep = self\n continue\n }\n switch currentMarkDep {\n case let mdi as MarkDependenceInst:\n updatedMarkDep = mdi.redirectFunctionReturnForward(to: arg, input: mdi, context)\n case let mdi as MarkDependenceAddrInst:\n updatedMarkDep = mdi.redirectFunctionReturnAddress(to: arg, context)\n default:\n fatalError("unexpected MarkDependenceInstruction")\n }\n }\n }\n}\n\nprivate extension MarkDependenceInst {\n /// Rewrite the mark_dependence base operand, setting it to a function argument.\n ///\n /// This is called when the dependent value is returned by the function and the dependence base is in the caller.\n func redirectFunctionReturnForward(to arg: FunctionArgument, input: MarkDependenceInst,\n _ context: FunctionPassContext) -> MarkDependenceInst {\n // To handle more than one function argument, new mark_dependence instructions will be chained.\n 
let newMarkDep = Builder(after: input, location: input.location, context)\n .createMarkDependence(value: input, base: arg, kind: .Unresolved)\n let uses = input.uses.lazy.filter {\n let inst = $0.instruction\n return inst != newMarkDep\n }\n uses.replaceAll(with: newMarkDep, context)\n return newMarkDep\n }\n}\n\nprivate extension MarkDependenceAddrInst {\n /// Rewrite the mark_dependence_addr base operand, setting it to a function argument.\n ///\n /// This is called when the dependent value is returned by the function and the dependence base is in the caller.\n func redirectFunctionReturnAddress(to arg: FunctionArgument, _ context: FunctionPassContext)\n -> MarkDependenceAddrInst {\n return Builder(after: self, location: self.location, context)\n .createMarkDependenceAddr(value: self.address, base: arg, kind: .Unresolved)\n }\n}\n\n/// A scope extension is a set of nested scopes and their owners. The owner is a value that represents ownerhip of\n/// the outermost scopes, which cannot be extended; it limits how far the nested scopes can be extended.\nprivate struct ScopeExtension {\n let context: FunctionPassContext\n let localReachabilityCache: LocalVariableReachabilityCache\n\n /// The ownership lifetime of the dependence base, which cannot be extended.\n var owners = SingleInlineArray<Value>()\n\n // Initialized after walking dependent uses. True if the scope can be extended into the caller.\n var dependsOnCaller: Bool?\n\n // Scopes listed in RPO over an upward walk. The outermost scope is first.\n var scopes = SingleInlineArray<ExtendableScope>()\n\n var innermostScope: ExtendableScope { get { scopes.last! 
} }\n\n var visitedValues: ValueSet?\n\n init(_ localReachabilityCache: LocalVariableReachabilityCache, _ context: FunctionPassContext) {\n self.localReachabilityCache = localReachabilityCache\n self.context = context\n }\n}\n\n/// Transitively extend nested scopes that enclose the dependence base.\n///\n/// If the parent function returns the dependent value, then this returns the function arguments that represent the\n/// caller's scope.\n///\n/// Note that we cannot simply rewrite the `mark_dependence` to depend on an outer access scope. Although that would be\n/// valid for a 'read' access, it would not accomplish anything useful. An inner 'read' can always be extended up to\n/// the end of its outer 'read'. A nested 'read' access can never interfere with another access in the same outer\n/// 'read', because it is impossible to nest a 'modify' access within a 'read'. For 'modify' accesses, however, the\n/// inner scope must be extended for correctness. A 'modify' access can interfere with other 'modify' access in the same\n/// scope. We rely on exclusivity diagnostics to report these interferences. For example:\n///\n/// sil @foo : $(@inout C) -> () {\n/// bb0(%0 : $*C):\n/// %a1 = begin_access [modify] %0\n/// %d = apply @getDependent(%a1)\n/// mark_dependence [unresolved] %d on %a1\n/// end_access %a1\n/// %a2 = begin_access [modify] %0\n/// ...\n/// end_access %a2\n/// apply @useDependent(%d) // exclusivity violation\n/// return\n/// }\n///\n// The above call to `@useDependent` is an exclusivity violation because it uses a value that depends on a 'modify'\n// access. This scope fixup pass must extend '%a1' to cover the `@useDependent` but must not extend the base of the\n// `mark_dependence` to the outer access `%0`. 
This ensures that exclusivity diagnostics correctly reports the\n// violation, and that subsequent optimizations do not shrink the inner access `%a1`.\nextension ScopeExtension {\n mutating func extendScopes(dependence: LifetimeDependence) -> SingleInlineArray<FunctionArgument> {\n log("Scope fixup for lifetime dependent instructions: \(dependence)")\n\n gatherExtensions(dependence: dependence)\n\n let noCallerScope = SingleInlineArray<FunctionArgument>()\n\n // computeDependentUseRange initializes scopeExtension.dependsOnCaller.\n guard var useRange = computeDependentUseRange(of: dependence) else {\n return noCallerScope\n }\n // tryExtendScopes deinitializes 'useRange'\n var scopesToExtend = SingleInlineArray<ExtendableScope>()\n guard canExtendScopes(over: &useRange, scopesToExtend: &scopesToExtend) else {\n useRange.deinitialize()\n return noCallerScope\n }\n // extend(over:) must receive the original unmodified `useRange`, without intermediate scope ending instructions.\n // This deinitializes `useRange` before erasing instructions.\n extend(scopesToExtend: scopesToExtend, over: &useRange, context)\n\n return dependsOnArgs\n }\n}\n\n// TODO: add parent and child indices to model a DAG of scopes. This will allow sibling scopes that do not follow a\n// stack discipline among them but still share the same parent and child scopes. This can occur with dependencies on\n// multiple call operands. 
Until then, scope extension may bail out unnecessarily while trying to extend over a sibling\n// scope.\nprivate struct ExtendableScope {\n enum Introducer {\n case scoped(ScopedInstruction)\n case owned(Value)\n }\n\n let scope: LifetimeDependence.Scope\n let introducer: Introducer\n\n var firstInstruction: Instruction {\n switch introducer {\n case let .scoped(scopedInst):\n return scopedInst.instruction\n case let .owned(value):\n if let definingInst = value.definingInstructionOrTerminator {\n return definingInst\n }\n return value.parentBlock.instructions.first!\n }\n }\n var endInstructions: LazyMapSequence<LazyFilterSequence<UseList>, Instruction> {\n switch introducer {\n case let .scoped(scopedInst):\n return scopedInst.endOperands.users\n case let .owned(value):\n return value.uses.endingLifetime.users\n }\n }\n\n // Allow scope extension as long as `beginInst` is scoped instruction and does not define a variable scope.\n init?(_ scope: LifetimeDependence.Scope, beginInst: Instruction?) {\n self.scope = scope\n guard let beginInst = beginInst, VariableScopeInstruction(beginInst) == nil else {\n return nil\n }\n guard let scopedInst = beginInst as? 
ScopedInstruction else {\n return nil\n }\n self.introducer = .scoped(scopedInst)\n }\n\n // Allow extension of owned temporaries that\n // (a) are Escapable\n // (b) do not define a variable scope\n // (c) are only consumed by destroy_value\n init?(_ scope: LifetimeDependence.Scope, owner: Value) {\n self.scope = scope\n // TODO: allow extension of lifetime dependent values by implementing a ScopeExtensionWalker that extends\n // LifetimeDependenceUseDefWalker.\n guard owner.type.isEscapable(in: owner.parentFunction),\n VariableScopeInstruction(owner.definingInstruction) == nil,\n owner.uses.endingLifetime.allSatisfy({ $0.instruction is DestroyValueInst }) else {\n return nil\n }\n self.introducer = .owned(owner)\n }\n}\n\n// Gather extendable scopes.\nextension ScopeExtension {\n mutating func gatherExtensions(dependence: LifetimeDependence) {\n visitedValues = ValueSet(context)\n defer {\n visitedValues!.deinitialize()\n visitedValues = nil\n }\n gatherExtensions(scope: dependence.scope)\n }\n\n mutating func gatherExtensions(valueOrAddress: Value) {\n if visitedValues!.insert(valueOrAddress) {\n gatherExtensions(scope: LifetimeDependence.Scope(base: valueOrAddress, context))\n }\n }\n\n mutating func nonExtendable(_ scope: LifetimeDependence.Scope) {\n owners.push(scope.parentValue)\n }\n\n // If `scope` is extendable, find its owner or outer scopes first, then push for extension.\n mutating func gatherExtensions(scope: LifetimeDependence.Scope) {\n switch scope {\n case let .access(beginAccess):\n gatherAccessExtensions(beginAccess: beginAccess)\n return\n\n case let .borrowed(beginBorrow):\n if let beginInst = beginBorrow.value.definingInstruction {\n if let extScope = ExtendableScope(scope, beginInst: beginInst) {\n gatherExtensions(valueOrAddress: beginBorrow.baseOperand!.value)\n scopes.push(extScope)\n return\n }\n }\n\n case let .yield(yieldedValue):\n let beginApply = yieldedValue.definingInstruction as! 
BeginApplyInst\n gatherYieldExtension(beginApply)\n scopes.push(ExtendableScope(scope, beginInst: beginApply)!)\n return\n\n case let .initialized(initializer):\n switch initializer {\n case let .store(initializingStore: store, initialAddress: _):\n if let sb = store as? StoreBorrowInst {\n // Follow the source for nested scopes.\n gatherExtensions(valueOrAddress: sb.source)\n scopes.push(ExtendableScope(scope, beginInst: sb)!)\n return\n }\n case .argument, .yield:\n // TODO: extend indirectly yielded scopes.\n break\n }\n case let .owned(value):\n if let extScope = ExtendableScope(scope, owner: value) {\n scopes.push(extScope)\n return\n }\n\n case let .local(varInst):\n switch varInst {\n case let .beginBorrow(beginBorrow):\n if let extScope = ExtendableScope(scope, beginInst: beginBorrow) {\n gatherExtensions(valueOrAddress: beginBorrow.operand.value)\n scopes.push(extScope)\n return\n }\n\n case let .moveValue(moveValue):\n if let extScope = ExtendableScope(scope, owner: moveValue) {\n scopes.push(extScope)\n return\n }\n }\n default:\n break\n }\n nonExtendable(scope)\n }\n\n /// Unlike LifetimeDependenceInsertion, this does not stop at an argument's "variable introducer" and does not stop at\n /// an addressable parameter. 
The purpose here is to extend any enclosing OSSA scopes as far as possible to achieve\n /// the longest possible owner lifetime, rather than to find the source-level lvalue for a call argument.\n mutating func gatherYieldExtension(_ beginApply: BeginApplyInst) {\n // Create a separate ScopeExtension for each operand that the yielded value depends on.\n for operand in beginApply.parameterOperands {\n guard let dep = beginApply.resultDependence(on: operand), dep.isScoped else {\n continue\n }\n gatherExtensions(valueOrAddress: operand.value)\n }\n }\n\n mutating func gatherAccessExtensions(beginAccess: BeginAccessInst) {\n let accessBaseAndScopes = beginAccess.accessBaseWithScopes\n if let baseAddress = accessBaseAndScopes.base.address {\n gatherExtensions(valueOrAddress: baseAddress)\n }\n for nestedScope in accessBaseAndScopes.scopes.reversed() {\n switch nestedScope {\n case let .access(nestedBeginAccess):\n scopes.push(ExtendableScope(.access(nestedBeginAccess), beginInst: nestedBeginAccess)!)\n case .dependence, .base:\n // ignore recursive mark_dependence base for the purpose of extending scopes. This pass will extend the base\n // of that mark_dependence (if it is unresolved) later as a separate LifetimeDependence.Scope.\n break\n }\n }\n }\n}\n\nextension ScopeExtension {\n /// Return all scope owners as long as they are all function arguments and all nested accesses are compatible with\n /// their argument convention. Then, if all nested accesses were extended to the return statement, it is valid to\n /// logically combine them into a single access for the purpose of diagnostic lifetime dependence.\n var dependsOnArgs: SingleInlineArray<FunctionArgument> {\n let noCallerScope = SingleInlineArray<FunctionArgument>()\n // Check that the dependent value is returned by this function.\n if !dependsOnCaller! 
{\n return noCallerScope\n }\n // Check that all nested scopes that it depends on can be covered by exclusive access in the caller.\n for extScope in scopes {\n switch extScope.scope {\n case .access:\n break\n default:\n return noCallerScope\n }\n }\n // All owners must be arguments with exclusive access to depend on the caller's scope (inout_aliasable arguments do\n // not have exclusivity).\n var compatibleArgs = SingleInlineArray<FunctionArgument>()\n for owner in owners {\n guard let arg = owner as? FunctionArgument else {\n return noCallerScope\n }\n guard arg.convention.isIndirectIn || arg.convention.isInout else {\n return noCallerScope\n }\n compatibleArgs.push(arg)\n }\n return compatibleArgs\n }\n}\n\n/// Compute the range of the a scope owner. Nested scopes must stay within this range.\n///\n/// Abstracts over lifetimes for both addresses and values.\nextension ScopeExtension {\n enum Range {\n case fullRange\n case addressRange(AddressOwnershipLiveRange)\n case valueRange(InstructionRange)\n\n func coversUse(_ inst: Instruction) -> Bool {\n switch self {\n case .fullRange:\n return true\n case let .addressRange(range):\n return range.coversUse(inst)\n case let .valueRange(range):\n return range.inclusiveRangeContains(inst)\n }\n }\n\n mutating func deinitialize() {\n switch self {\n case .fullRange:\n break\n case var .addressRange(range):\n return range.deinitialize()\n case var .valueRange(range):\n return range.deinitialize()\n }\n }\n\n var description: String {\n switch self {\n case .fullRange:\n return "full range"\n case let .addressRange(range):\n return range.description\n case let .valueRange(range):\n return range.description\n }\n }\n }\n\n /// Return nil if the scope's owner is valid across the function, such as a guaranteed function argument.\n func computeSingleOwnerRange(owner: Value) -> Range? 
{\n if owner.type.isAddress {\n // Get the range of the accessBase lifetime at the point where the outermost extendable scope begins.\n if let range = AddressOwnershipLiveRange.compute(for: owner, at: scopes.first!.firstInstruction,\n localReachabilityCache, context) {\n return .addressRange(range)\n }\n return nil\n }\n switch owner.ownership {\n case .owned:\n return .valueRange(computeLinearLiveness(for: owner, context))\n case .guaranteed:\n if let bbv = BeginBorrowValue(owner) {\n if case .functionArgument = bbv {\n return .fullRange\n }\n return .valueRange(computeLinearLiveness(for: bbv.value, context))\n }\n return nil\n case .none:\n return .fullRange\n case .unowned:\n return nil\n }\n }\n\n /// Return an InstructionRange covering all the dependent uses of 'dependence'.\n ///\n /// Initialize dependsOnCaller.\n mutating func computeDependentUseRange(of dependence: LifetimeDependence) -> InstructionRange? {\n if scopes.isEmpty {\n return nil\n }\n let function = dependence.function\n var inRangeUses = [Instruction]()\n do {\n // The innermost scope that must be extended must dominate all uses.\n var walker = LifetimeDependentUseWalker(function, localReachabilityCache, context) {\n inRangeUses.append($0.instruction)\n return .continueWalk\n }\n defer {walker.deinitialize()}\n _ = walker.walkDown(dependence: dependence)\n dependsOnCaller = walker.dependsOnCaller\n }\n for owner in owners {\n guard var ownershipRange = computeSingleOwnerRange(owner: owner) else {\n return nil\n }\n defer { ownershipRange.deinitialize() }\n\n inRangeUses = inRangeUses.filter { ownershipRange.coversUse($0) }\n }\n var useRange = InstructionRange(begin: innermostScope.firstInstruction, context)\n useRange.insert(contentsOf: inRangeUses)\n\n log("Scope fixup for dependent uses:\n\(useRange)")\n\n // Lifetime dependenent uses may not be dominated by `innermostScope`. The dependent value may be used by a phi or\n // stored into a memory location. 
The access may be conditional relative to such uses. If any use was not dominated,\n // then `useRange` will include the function entry. There is no way to directly check if `useRange` is\n // valid. `useRange.blockRange.isValid` is not a strong enough check because it will always succeed when\n // `useRange.begin == entryBlock` even if a use is above `useRange.begin`. Instead check if `useRange` contains the\n // first instruction, and the first instruction does not itself start `innermostScope`.\n let firstInst = function.entryBlock.instructions.first!\n if firstInst != useRange.begin, useRange.contains(firstInst) {\n useRange.deinitialize()\n return nil\n }\n return useRange\n }\n}\n\nextension ScopeExtension {\n /// Return true if all nested scopes were extended across `useRange`. `useRange` has already been pruned to be a\n /// subset of the ranges of the owners.\n ///\n /// Note: the scopes may not be strictly nested. Two adjacent scopes in the nested scopes array may have begin at the\n /// same nesting level. Their begin instructions may occur in any order relative to the nested scopes array, but we\n /// order the end instructions according to the arbitrary order that the scopes were inserted in the array. This is\n /// conservative and could extend some scopes longer than strictly necessary. To improve this, `scopes` must be\n /// represnted as a DAG by recording parent and child indices.\n func canExtendScopes(over useRange: inout InstructionRange,\n scopesToExtend: inout SingleInlineArray<ExtendableScope>) -> Bool {\n var extendedUseRange = InstructionRange(begin: useRange.begin!, ends: useRange.ends, context)\n\n // Insert the first instruction of the exit blocks to mimic `useRange`. There is no way to directly copy\n // `useRange`. 
Inserting the exit block instructions is innacurate, but for the purpose of canExtend() below, it has\n // the same effect as a copy of `useRange`.\n extendedUseRange.insert(contentsOf: useRange.exits)\n defer { extendedUseRange.deinitialize() }\n\n // Append each scope that needs extention to scopesToExtend from the inner to the outer scope.\n for extScope in scopes.reversed() {\n // An outer scope might not originally cover one of its inner scopes. Therefore, extend 'extendedUseRange' to to\n // cover this scope's end instructions. The extended scope must at least cover the original scopes because the\n // original scopes may protect other operations.\n var mustExtend = false\n for scopeEndInst in extScope.endInstructions {\n switch extendedUseRange.overlaps(pathBegin: extScope.firstInstruction, pathEnd: scopeEndInst, context) {\n case .containsPath, .containsEnd, .disjoint:\n // containsPath can occur when the extendable scope has the same begin as the use range.\n // disjoint is unexpected, but if it occurs then `extScope` must be before the useRange.\n mustExtend = true\n break\n case .containsBegin, .overlappedByPath:\n // containsBegin can occur when the extendable scope has the same begin as the use range.\n extendedUseRange.insert(scopeEndInst)\n break\n }\n }\n if !mustExtend {\n continue\n }\n scopesToExtend.push(extScope)\n if !extScope.canExtend(over: &extendedUseRange, context) {\n // Scope ending instructions cannot be inserted at the 'range' boundary. Ignore all nested scopes.\n //\n // Note: We could still extend previously prepared inner scopes up to this scope. To do that, we would\n // need to repeat the steps above: treat 'extScope' as the new owner, and recompute `useRange`. 
But this\n // scenario could only happen with nested coroutine, where the range boundary is reachable from the outer\n // coroutine's EndApply and AbortApply--it is vanishingly unlikely if not impossible.\n return false\n }\n }\n return true\n }\n \n // Extend the scopes that actually required extension.\n //\n // Consumes 'useRange'\n private func extend(scopesToExtend: SingleInlineArray<ExtendableScope>,\n over useRange: inout InstructionRange,\n _ context: some MutatingContext) {\n var deadInsts = [Instruction]()\n for extScope in scopesToExtend {\n // Extend 'useRange' to to cover this scope's end instructions. 'useRange' cannot be extended until the\n // inner scopes have been extended.\n useRange.insert(contentsOf: extScope.endInstructions)\n\n // Note, we could Skip extension here if we have a fully overlapping scope. But that requires computing the scope\n // of [beginInst : beginInst.endInstructions) because an outer scope may be disjoint from the inner scope but\n // still requires extension:\n // %access = begin_access [read] %owner // <=== outer scoope\n // %temp = load [copy] %access\n // end_access %access\n // (%dependent, %token) = begin_apply (%temp) // <=== inner scope\n // end_apply %token\n //\n deadInsts += extScope.extend(over: &useRange, context)\n\n // Continue checking enclosing scopes for extension even if 'mustExtend' is false. Multiple ScopeExtensions may\n // share the same inner scope, so this inner scope may already have been extended while handling a previous\n // ScopeExtension. Nonetheless, some enclosing scopes may still require extension. 
This only happens when a\n // yielded value depends on multiple begin_apply operands.\n }\n // 'useRange' is invalid as soon as instructions are deleted.\n useRange.deinitialize()\n\n // Delete original end instructions.\n for deadInst in deadInsts {\n context.erase(instruction: deadInst)\n }\n }\n}\n\n// Extend a dependence scope to cover the dependent uses.\nextension ExtendableScope {\n /// Return true if new scope-ending instruction can be inserted at the range boundary.\n func canExtend(over range: inout InstructionRange, _ context: some Context) -> Bool {\n switch self.scope {\n case let .yield(yieldedValue):\n return canExtend(beginApply: yieldedValue.definingInstruction as! BeginApplyInst, over: &range, context)\n case let .initialized(initializer):\n switch initializer {\n case .argument, .yield:\n // A yield is already considered nested within the coroutine.\n break\n case let .store(initializingStore, _):\n if let sb = initializingStore as? StoreBorrowInst {\n return canExtend(storeBorrow: sb, over: &range)\n }\n }\n return true\n default:\n // non-yield scopes can always be ended at any point.\n return true\n }\n }\n\n func canExtend(beginApply: BeginApplyInst, over range: inout InstructionRange, _ context: some Context) -> Bool {\n let canEndAtBoundary = { (boundaryInst: Instruction) in\n switch beginApply.endReaches(block: boundaryInst.parentBlock, context) {\n case .abortReaches, .endReaches:\n return true\n case .none:\n return false\n }\n }\n for end in range.ends {\n if (!canEndAtBoundary(end)) {\n return false\n }\n }\n for exit in range.exits {\n if (!canEndAtBoundary(exit)) {\n return false\n }\n }\n return true\n }\n\n /// A store borrow is considered to be nested within the scope of its stored values. 
It is, however, also\n /// restricted to the range of its allocation.\n ///\n /// TODO: consider rewriting the dealloc_stack instructions if we ever find that SILGen emits them sooner that\n /// we need for lifetime dependencies.\n func canExtend(storeBorrow: StoreBorrowInst, over range: inout InstructionRange) -> Bool {\n // store_borrow can be extended if all deallocations occur after the use range.\n return storeBorrow.allocStack.deallocations.allSatisfy({ !range.contains($0) })\n }\n\n /// Extend this scope over the 'range' boundary. Return the old scope ending instructions to be deleted.\n func extend(over range: inout InstructionRange, _ context: some MutatingContext) -> [Instruction] {\n // Collect the original end instructions and extend the range to to cover them. The resulting access scope\n // must cover the original scope because it may protect other memory operations.\n let endsToErase = self.endInstructions\n var unusedEnds = InstructionSet(context)\n for end in endsToErase {\n assert(range.inclusiveRangeContains(end))\n unusedEnds.insert(end)\n }\n defer { unusedEnds.deinitialize() }\n for end in range.ends {\n let location = end.location.autoGenerated\n switch end {\n case is BranchInst:\n assert(end.parentBlock.singleSuccessor!.terminator is ReturnInst,\n "a phi only ends a use range if it is a returned value")\n fallthrough\n case is ReturnInst:\n // End this inner scope just before the return. 
The mark_dependence base operand will be redirected to a\n // function argument.\n let builder = Builder(before: end, location: location, context)\n // Insert newEnd so that this scope will be nested in any outer scopes.\n range.insert(createEndInstruction(builder, context))\n continue\n default:\n break\n }\n if unusedEnds.contains(end) {\n unusedEnds.erase(end)\n assert(!unusedEnds.contains(end))\n continue\n }\n Builder.insert(after: end, location: location, context) {\n range.insert(createEndInstruction($0, context))\n }\n }\n for exitInst in range.exits {\n let location = exitInst.location.autoGenerated\n let builder = Builder(before: exitInst, location: location, context)\n range.insert(createEndInstruction(builder, context))\n }\n return endsToErase.filter { unusedEnds.contains($0) }\n }\n\n /// Create a scope-ending instruction at 'builder's insertion point.\n func createEndInstruction(_ builder: Builder, _ context: some Context) -> Instruction {\n switch self.scope {\n case let .access(beginAccess):\n return builder.createEndAccess(beginAccess: beginAccess)\n case let .borrowed(beginBorrow):\n return builder.createEndBorrow(of: beginBorrow.value)\n case let .yield(yieldedValue):\n let beginApply = yieldedValue.definingInstruction as! BeginApplyInst\n // createEnd() returns non-nil because beginApply.endReaches() was checked by canExtend()\n return beginApply.createEnd(builder, context)!\n case let .initialized(initializer):\n switch initializer {\n case let .store(initializingStore: store, initialAddress: _):\n if let sb = store as? 
StoreBorrowInst {\n // FIXME: we may need to rewrite the dealloc_stack.\n return builder.createEndBorrow(of: sb)\n }\n break\n case .argument, .yield:\n // TODO: extend indirectly yielded scopes.\n break\n }\n case let .owned(value):\n return builder.createDestroyValue(operand: value)\n case let .local(varInst):\n switch varInst {\n case let .beginBorrow(beginBorrow):\n // FIXME: we may need to rewrite the dealloc_stack.\n return builder.createEndBorrow(of: beginBorrow)\n case let .moveValue(moveValue):\n return builder.createDestroyValue(operand: moveValue)\n }\n default:\n break\n }\n fatalError("Unsupported scoped extension: \(self)")\n }\n}\n\nprivate extension BeginApplyInst {\n /// Create either an end_apply or abort_apply at the builder's insertion point.\n /// Return nil if it isn't possible.\n func createEnd(_ builder: Builder, _ context: some Context) -> Instruction? {\n guard let insertionBlock = builder.insertionBlock else {\n return nil\n }\n switch endReaches(block: insertionBlock, context) {\n case .none:\n return nil\n case .endReaches:\n return builder.createEndApply(beginApply: self)\n case .abortReaches:\n return builder.createAbortApply(beginApply: self)\n }\n }\n\n enum EndReaches {\n case endReaches\n case abortReaches\n }\n\n /// Return the single kind of coroutine termination that reaches 'reachableBlock' or nil.\n func endReaches(block reachableBlock: BasicBlock, _ context: some Context) -> EndReaches? 
{\n var endBlocks = BasicBlockSet(context)\n var abortBlocks = BasicBlockSet(context)\n defer {\n endBlocks.deinitialize()\n abortBlocks.deinitialize()\n }\n for endInst in endInstructions {\n switch endInst {\n case let endApply as EndApplyInst:\n // Cannot extend the scope of a coroutine when the resume produces a value.\n if !endApply.type.isEmpty(in: parentFunction) {\n return nil\n }\n endBlocks.insert(endInst.parentBlock)\n case is AbortApplyInst:\n abortBlocks.insert(endInst.parentBlock)\n default:\n fatalError("invalid begin_apply ending instruction")\n }\n }\n var endReaches: EndReaches?\n var backwardWalk = BasicBlockWorklist(context)\n defer { backwardWalk.deinitialize() }\n\n let backwardVisit = { (block: BasicBlock) -> WalkResult in\n if endBlocks.contains(block) {\n switch endReaches {\n case .none:\n endReaches = .endReaches\n break\n case .endReaches:\n break\n case .abortReaches:\n return .abortWalk\n }\n return .continueWalk\n }\n if abortBlocks.contains(block) {\n switch endReaches {\n case .none:\n endReaches = .abortReaches\n break\n case .abortReaches:\n break\n case .endReaches:\n return .abortWalk\n }\n return .continueWalk\n }\n if block == self.parentBlock {\n // the insertion point is not dominated by the coroutine\n return .abortWalk\n }\n backwardWalk.pushIfNotVisited(contentsOf: block.predecessors)\n return .continueWalk\n }\n\n if backwardVisit(reachableBlock) == .abortWalk {\n return nil\n }\n while let block = backwardWalk.pop() {\n if backwardVisit(block) == .abortWalk {\n return nil\n }\n }\n return endReaches\n }\n}\n\n/// Visit all dependent uses.\n///\n/// Set 'dependsOnCaller' if a use escapes the function.\nprivate struct LifetimeDependentUseWalker : LifetimeDependenceDefUseWalker {\n let function: Function\n let context: Context\n let visitor: (Operand) -> WalkResult\n let localReachabilityCache: LocalVariableReachabilityCache\n var visitedValues: ValueSet\n\n /// Set to true if the dependence is returned from the current 
function.\n var dependsOnCaller = false\n\n init(_ function: Function, _ localReachabilityCache: LocalVariableReachabilityCache, _ context: Context,\n visitor: @escaping (Operand) -> WalkResult) {\n self.function = function\n self.context = context\n self.visitor = visitor\n self.localReachabilityCache = localReachabilityCache\n self.visitedValues = ValueSet(context)\n }\n\n mutating func deinitialize() {\n visitedValues.deinitialize()\n }\n\n mutating func needWalk(for value: Value) -> Bool {\n visitedValues.insert(value)\n }\n\n mutating func deadValue(_ value: Value, using operand: Operand?)\n -> WalkResult {\n if let operand {\n return visitor(operand)\n }\n return .continueWalk\n }\n\n mutating func leafUse(of operand: Operand) -> WalkResult {\n return visitor(operand)\n }\n\n mutating func escapingDependence(on operand: Operand) -> WalkResult {\n log(">>> Escaping dependence: \(operand)")\n _ = visitor(operand)\n // Make a best-effort attempt to extend the access scope regardless of escapes. It is possible that some mandatory\n // pass between scope fixup and diagnostics will make it possible for the LifetimeDependenceDefUseWalker to analyze\n // this use.\n return .continueWalk\n }\n\n mutating func inoutDependence(argument: FunctionArgument, on operand: Operand) -> WalkResult {\n dependsOnCaller = true\n return visitor(operand)\n }\n\n mutating func returnedDependence(result operand: Operand) -> WalkResult {\n dependsOnCaller = true\n return visitor(operand)\n }\n\n mutating func returnedDependence(address: FunctionArgument,\n on operand: Operand) -> WalkResult {\n dependsOnCaller = true\n return visitor(operand)\n }\n\n mutating func yieldedDependence(result: Operand) -> WalkResult {\n return .continueWalk\n }\n\n mutating func storeToYieldDependence(address: Value, of operand: Operand) -> WalkResult {\n return .continueWalk\n }\n}\n\n
dataset_sample\swift\swift\cpp_apple_swift_SwiftCompilerSources_Sources_Optimizer_FunctionPasses_LifetimeDependenceScopeFixup.swift
cpp_apple_swift_SwiftCompilerSources_Sources_Optimizer_FunctionPasses_LifetimeDependenceScopeFixup.swift
Swift
39,479
0.95
0.152239
0.284632
react-lib
491
2024-09-10T20:40:58.853662
MIT
false
9ffcedf42d93523d20fa711b37a9880f
//===--- MergeCondFail.swift - Merge cond_fail instructions --------------===//\n//\n// This source file is part of the Swift.org open source project\n//\n// Copyright (c) 2014 - 2021 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n\nimport SIL\n\nlet mergeCondFailsPass = FunctionPass(name: "merge-cond_fails", runMergeCondFails)\n\n/// Return true if the operand of the cond_fail instruction looks like\n/// the overflow bit of an arithmetic instruction.\nprivate func hasOverflowConditionOperand(_ cfi: CondFailInst) -> Bool {\n if let tei = cfi.condition as? TupleExtractInst {\n return tei.operand.value is BuiltinInst\n }\n return false\n}\n\n/// Merge cond_fail instructions.\n///\n/// We can merge cond_fail instructions if there is no side-effect or memory\n/// write in between them.\n/// This pass merges cond_fail instructions by building the disjunction of\n/// their operands.\nprivate func runMergeCondFails(function: Function, context: FunctionPassContext) {\n\n // Merge cond_fail instructions if there is no side-effect or read in\n // between them.\n for block in function.blocks {\n // Per basic block list of cond_fails to merge.\n var condFailToMerge = Stack<CondFailInst>(context)\n\n for inst in block.instructions {\n if let cfi = inst as? CondFailInst {\n let messageIsSame = condFailToMerge.isEmpty || cfi.message == condFailToMerge.first!.message\n let forceAllowMerge = context.options.enableMergeableTraps\n\n // Do not process arithmetic overflow checks. 
We typically generate more\n // efficient code with separate jump-on-overflow.\n if !hasOverflowConditionOperand(cfi) && (messageIsSame || forceAllowMerge) {\n condFailToMerge.push(cfi)\n }\n } else if inst.mayHaveSideEffects || inst.mayReadFromMemory {\n // Stop merging at side-effects or reads from memory.\n mergeCondFails(&condFailToMerge, context: context)\n }\n }\n // Process any remaining cond_fail instructions in the current basic\n // block.\n mergeCondFails(&condFailToMerge, context: context)\n }\n}\n\n/// Try to merge the cond_fail instructions. Returns true if any could\n/// be merge.\nprivate func mergeCondFails(_ condFailToMerge: inout Stack<CondFailInst>,\n context: FunctionPassContext) {\n guard let lastCFI = condFailToMerge.last else {\n return\n }\n var mergedCond: Value? = nil\n var didMerge = false\n let builder = Builder(after: lastCFI, location: lastCFI.location, context)\n\n // Merge conditions and remove the merged cond_fail instructions.\n for cfi in condFailToMerge {\n if let prevCond = mergedCond {\n mergedCond = builder.createBuiltinBinaryFunction(name: "or",\n operandType: prevCond.type,\n resultType: prevCond.type,\n arguments: [prevCond, cfi.condition])\n didMerge = true\n } else {\n mergedCond = cfi.condition\n }\n }\n if !didMerge {\n condFailToMerge.removeAll()\n return\n }\n\n // Create a new cond_fail using the merged condition.\n _ = builder.createCondFail(condition: mergedCond!,\n message: lastCFI.message.string)\n\n while let cfi = condFailToMerge.pop() {\n context.erase(instruction: cfi)\n }\n}\n
dataset_sample\swift\swift\cpp_apple_swift_SwiftCompilerSources_Sources_Optimizer_FunctionPasses_MergeCondFails.swift
cpp_apple_swift_SwiftCompilerSources_Sources_Optimizer_FunctionPasses_MergeCondFails.swift
Swift
3,581
0.95
0.1875
0.364706
vue-tools
37
2025-06-21T21:03:20.349286
Apache-2.0
false
2d47e5c1c5cb21b23628e67028cf1266
//===--- NamedReturnValueOptimization.swift --------------------------------==//\n//\n// This source file is part of the Swift.org open source project\n//\n// Copyright (c) 2014 - 2023 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n\nimport SIL\n\n/// Removes a `copy_addr` to an indirect out argument by replacing the source of the copy\n/// (which must be an `alloc_stack`) with the out argument itself.\n///\n/// The following SIL pattern will be optimized:\n///\n/// sil @foo : $@convention(thin) <T> () -> @out T {\n/// bb0(%0 : $*T):\n/// %2 = alloc_stack $T\n/// ...\n/// copy_addr %some_value to [init] %2 // or any other writes to %2\n/// ...\n/// bbN:\n/// copy_addr [take] %2 to [init] %0 : $*T // the only use of %0\n/// ... // no writes\n/// return\n///\n/// to:\n///\n/// sil @foo : $@convention(thin) <T> (@out T) -> () {\n/// bb0(%0 : $*T):\n/// %2 = alloc_stack $T // is dead now\n/// ...\n/// copy_addr %some_value to [init] %0\n/// ...\n/// bbN:\n/// ...\n/// return\n///\n/// This optimization can be done because we know that:\n/// * The out argument dominates all uses of the copy_addr's source (because it's a function argument).\n/// * It's not aliased (by definition). We can't allow aliases to be accessed between the initialization and the return.\n///\n/// This pass shouldn't run before serialization. 
It might prevent predictable memory optimizations\n/// in a caller after inlining, because the memory location (the out argument = an alloc_stack in the caller)\n/// might be written multiple times after this optimization.\n///\nlet namedReturnValueOptimization = FunctionPass(name: "named-return-value-optimization") {\n (function: Function, context: FunctionPassContext) in\n\n for outArg in function.arguments[0..<function.numIndirectResultArguments] {\n if let copyToArg = findCopyForNRVO(for: outArg) {\n performNRVO(with: copyToArg, context)\n }\n }\n}\n\n/// Returns a copy_addr which copies from an alloc_stack to the `outArg` at the end of the function.\n///\nprivate func findCopyForNRVO(for outArg: FunctionArgument) -> CopyAddrInst? {\n guard let singleArgUse = outArg.uses.ignoreDebugUses.singleUse,\n let copyToArg = singleArgUse.instruction as? CopyAddrInst else {\n return nil\n }\n\n assert(singleArgUse == copyToArg.destinationOperand,\n "single use of out-argument cannot be the source of a copy")\n\n // Don't perform NRVO unless the copy is a [take]. This is the easiest way\n // to determine that the local variable has ownership of its value and ensures\n // that removing a copy is a reference count neutral operation. For example,\n // this copy can't be trivially eliminated without adding a retain.\n // sil @f : $@convention(thin) (@guaranteed T) -> @out T\n // bb0(%in : $*T, %out : $T):\n // %local = alloc_stack $T\n // store %in to %local : $*T\n // copy_addr %local to [init] %out : $*T\n if !copyToArg.isTakeOfSrc {\n return nil\n }\n\n guard let sourceStackAlloc = copyToArg.source as? 
AllocStackInst else {\n return nil\n }\n\n // NRVO for alloc_stack [dynamic_lifetime] will invalidate OSSA invariants.\n if sourceStackAlloc.hasDynamicLifetime && copyToArg.parentFunction.hasOwnership {\n return nil\n }\n\n if !(copyToArg.parentBlock.terminator is ReturnInst) {\n return nil\n }\n\n // This check is overly conservative, because we only need to check if the source\n // of the copy is not written to. But the copy to the out argument is usually the last\n // instruction of the function, so it doesn't matter.\n if isAnyInstructionWritingToMemory(after: copyToArg) {\n return nil\n }\n\n return copyToArg\n}\n\nprivate func performNRVO(with copy: CopyAddrInst, _ context: FunctionPassContext) {\n copy.source.replaceAllUsesExceptDealloc(with: copy.destination, context)\n assert(copy.source == copy.destination)\n context.erase(instruction: copy)\n}\n\nprivate func isAnyInstructionWritingToMemory(after: Instruction) -> Bool {\n var followingInst = after.next\n while let fi = followingInst {\n if fi.mayWriteToMemory && !(fi is DeallocStackInst) {\n return true\n }\n followingInst = fi.next\n }\n return false\n}\n\nprivate extension Value {\n func replaceAllUsesExceptDealloc(with replacement: Value, _ context: some MutatingContext) {\n uses.lazy.filter{!($0.instruction is Deallocation)}.replaceAll(with: replacement, context)\n }\n}\n
dataset_sample\swift\swift\cpp_apple_swift_SwiftCompilerSources_Sources_Optimizer_FunctionPasses_NamedReturnValueOptimization.swift
cpp_apple_swift_SwiftCompilerSources_Sources_Optimizer_FunctionPasses_NamedReturnValueOptimization.swift
Swift
4,688
0.95
0.155039
0.53913
vue-tools
762
2023-11-22T09:05:15.158610
MIT
false
5dd20100665be092e0c707814ebd00da
//===--- ObjCBridgingOptimization.swift - optimize ObjC bridging ----------===//\n//\n// This source file is part of the Swift.org open source project\n//\n// Copyright (c) 2014 - 2022 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n\nimport SIL\n\n/// Removes redundant ObjectiveC <-> Swift bridging calls.\n///\n/// Basically, if a value is bridged from ObjectiveC to Swift an then back to ObjectiveC\n/// again, then just re-use the original ObjectiveC value.\n///\n/// Things get a little bit more complicated in case of optionals (Nullable pointers).\n/// In this case both bridging calls are embedded in an `switch_enum` CFG diamond, like\n/// ```\n/// switch_enum %originalOptionalObjcValue\n/// some_bb(%1):\n/// %2 = enum #some(%1)\n/// %3 = apply %bridgeFromObjc(%2)\n/// %4 = enum #some(%3)\n/// br continue_bb(%4)\n/// none_bb:\n/// %5 = enum #none\n/// br continue_bb(%5)\n/// continue_bb(%bridgedOptionalSwiftValue):\n/// ```\nlet objCBridgingOptimization = FunctionPass(name: "objc-bridging-opt") {\n (function: Function, context: FunctionPassContext) in\n\n if !function.hasOwnership { return }\n\n // First try to optimize the optional -> optional case.\n // We need to do this before handling the non-optional case to prevent\n // sub-optimal optimization of bridging calls inside a switch_enum.\n for block in function.blocks {\n // Start at a block argument, which is the "result" of the switch_enum CFG diamond.\n if !optimizeOptionalBridging(forArgumentOf: block, context) {\n return\n }\n }\n\n // Now try to optimize non-optional and optional -> non-optional bridging.\n for inst in function.instructions {\n if let apply = inst as? 
ApplyInst {\n if !optimizeNonOptionalBridging(apply, context) {\n return\n }\n }\n }\n}\n\n//===----------------------------------------------------------------------===//\n// Top-level optimization functions\n//===----------------------------------------------------------------------===//\n\n/// Optimizes redundant bridging calls where both calls are within `switch_enum` diamonds.\n///\n/// For example:\n/// ```\n/// let s = returnOptionalNSString()\n/// useOptionalNSString(s)\n/// ```\n///\n/// The `block` is the continue-block of the second `switch_enum` diamond.\n/// Returns true if the pass should continue running.\nprivate func optimizeOptionalBridging(forArgumentOf block: BasicBlock,\n _ context: FunctionPassContext) -> Bool {\n if block.arguments.count != 1 {\n // For simplicity only handle the common case: there is only one phi-argument which\n // is the result of the bridging operation.\n return true\n }\n // Check for the second swift -> ObjC bridging operation.\n let finalObjCValue = block.arguments[0]\n guard let swiftValueSwitch = isOptionalBridging(of: finalObjCValue, isBridging: isBridgeToObjcCall) else {\n return true\n }\n\n // Check for the first ObjC -> swift bridging operation.\n let swiftValue = swiftValueSwitch.enumOp.lookThoughOwnershipInstructions\n guard let originalObjCValueSwitch = isOptionalBridging(of: swiftValue, isBridging: isBridgeToSwiftCall) else {\n return true\n }\n \n let originalObjCValue = originalObjCValueSwitch.enumOp\n if finalObjCValue.type != originalObjCValue.type {\n return true\n }\n\n if !context.continueWithNextSubpassRun(for: originalObjCValueSwitch) {\n return false\n }\n\n // The second bridging operation can be in a different control region than the first one,\n // e.g. it can be in a loop whereas the first is not in that loop. 
Therefore we have to\n // copy + makeAvailable.\n let replacement = originalObjCValue.copy(at: originalObjCValueSwitch,\n andMakeAvailableIn: block, context)\n\n finalObjCValue.uses.replaceAll(with: replacement, context)\n block.eraseArgument(at: 0, context)\n \n // The swift -> ObjC bridging call has no `readonly` attribute, therefore we have to\n // explicitly delete it. The ObjC -> swift call has such an attribute and will be removed\n // buy a later dead-code elimination pass.\n removeBridgingCodeInPredecessors(of: block, context)\n return true\n}\n\n/// Optimizes redundant bridging calls where the second call is a non-optional bridging operation,\n/// i.e. is _not_ within `switch_enum` diamond.\n///\n/// The `apply` is the second (swift -> ObjC) bridging call.\n/// Returns true if the pass should continue running.\nprivate func optimizeNonOptionalBridging(_ apply: ApplyInst,\n _ context: FunctionPassContext) -> Bool {\n \n guard let bridgeToObjcCall = isBridgeToObjcCall(apply) else {\n return true\n }\n\n let swiftValue = bridgeToObjcCall.arguments[0].lookThoughOwnershipInstructions\n\n // Handle the first case: the ObjC -> swift bridging operation is optional and the swift -> ObjC\n // bridging is within a test for Optional.some, e.g.\n // ```\n // if let s = returnOptionalNSString() {\n // useNonOptionalNSString(s)\n // }\n // ```\n if let (se, someCase) = isPayloadOfSwitchEnum(swiftValue),\n let originalObjCValueSwitch = isOptionalBridging(of: se.enumOp, isBridging: isBridgeToSwiftCall) {\n\n if !context.continueWithNextSubpassRun(for: originalObjCValueSwitch) {\n return false\n }\n\n let originalObjCValue = originalObjCValueSwitch.enumOp\n let optionalReplacement = originalObjCValue.copy(at: originalObjCValueSwitch,\n andMakeAvailableIn: bridgeToObjcCall.parentBlock,\n context)\n let builder = Builder(before: bridgeToObjcCall, context)\n \n // We know that it's the some-case.\n let replacement = builder.createUncheckedEnumData(enum: optionalReplacement,\n 
caseIndex: someCase,\n resultType: bridgeToObjcCall.type)\n bridgeToObjcCall.replace(with: replacement, context)\n return true\n }\n\n // Handle the second case: both bridging calls are non-optional, e.g.\n // ```\n // let s = returnNonOptionalNSString()\n // useNonOptionalNSString(s)\n // ```\n guard let bridgeToSwiftCall = isBridgeToSwiftCall(swiftValue) else {\n return true\n }\n\n if !context.continueWithNextSubpassRun(for: bridgeToSwiftCall) {\n return false\n }\n\n let originalObjCValue = bridgeToSwiftCall.arguments[0]\n let optionalObjCType = originalObjCValue.type\n \n // The bridging functions from ObjC -> Swift take an optional argument and return a\n // non-optional Swift value. In the nil-case they return an empty (e.g. empty String,\n // empty Array, etc.) swift value.\n // We have to replicate that behavior here.\n\n guard let someCase = optionalObjCType.getIndexOfEnumCase(withName: "some") else { return true }\n guard let noneCase = optionalObjCType.getIndexOfEnumCase(withName: "none") else { return true }\n\n // Creates a `switch_enum` on `originalObjCValue` and in the nil-case return a bridged\n // empty value.\n // Create the needed blocks of the `switch_enum` CFG diamond.\n let origBlock = bridgeToSwiftCall.parentBlock\n let someBlock = context.splitBlock(before: bridgeToSwiftCall)\n let noneBlock = context.splitBlock(before: bridgeToSwiftCall)\n let continueBlock = context.splitBlock(before: bridgeToSwiftCall)\n\n\n let builder = Builder(atEndOf: origBlock, location: bridgeToSwiftCall.location, context)\n let copiedValue = builder.createCopyValue(operand: originalObjCValue)\n builder.createSwitchEnum(enum: copiedValue, cases: [(someCase, someBlock),\n (noneCase, noneBlock)])\n\n // The nil case: call the ObjC -> Swift bridging function, which will return\n // an empty swift value.\n let noneBuilder = Builder(atEndOf: noneBlock, location: bridgeToSwiftCall.location, context)\n let subst = bridgeToObjcCall.substitutionMap\n let emptySwiftValue = 
noneBuilder.createApply(\n function: bridgeToSwiftCall.callee,\n bridgeToSwiftCall.substitutionMap, arguments: Array(bridgeToSwiftCall.arguments))\n // ... and bridge that to ObjectiveC.\n let emptyObjCValue = noneBuilder.createApply(\n function: noneBuilder.createFunctionRef(bridgeToObjcCall.referencedFunction!),\n subst, arguments: [emptySwiftValue])\n noneBuilder.createDestroyValue(operand: emptySwiftValue)\n noneBuilder.createBranch(to: continueBlock, arguments: [emptyObjCValue])\n\n // In the some-case just forward the original NSString.\n let objCType = emptyObjCValue.type\n let forwardedValue = someBlock.addArgument(type: objCType, ownership: .owned, context)\n let someBuilder = Builder(atEndOf: someBlock, location: bridgeToSwiftCall.location, context)\n someBuilder.createBranch(to: continueBlock, arguments: [forwardedValue])\n\n let s = continueBlock.addArgument(type: objCType, ownership: .owned, context)\n \n // Now replace the bridged value with the original value in the destination block.\n let replacement = s.makeAvailable(in: bridgeToObjcCall.parentBlock, context)\n bridgeToObjcCall.replace(with: replacement, context)\n return true\n}\n\n//===----------------------------------------------------------------------===//\n// Utility functions\n//===----------------------------------------------------------------------===//\n\n/// Removes `enum` instructions and bridging calls in all predecessors of `block`.\nprivate func removeBridgingCodeInPredecessors(of block: BasicBlock, _ context: FunctionPassContext) {\n for pred in block.predecessors {\n let branch = pred.terminator as! BranchInst\n let builder = Builder(atEndOf: branch.parentBlock, location: branch.location, context)\n builder.createBranch(to: block)\n \n let en = branch.operands[0].value as! EnumInst\n context.erase(instruction: branch)\n let payload = en.payload\n context.erase(instruction: en)\n if let bridgingCall = payload {\n context.erase(instruction: bridgingCall as! 
ApplyInst)\n }\n }\n}\n\n/// Checks for an optional bridging `switch_enum` diamond.\n///\n/// ```\n/// switch_enum %0 // returned instruction\n/// some_bb(%1):\n/// %2 = enum #some(%1) // only in case of ObjC -> Swift briding\n/// %3 = apply %bridging(%2) // returned by `isBridging`\n/// %4 = enum #some(%3)\n/// br continue_bb(%4)\n/// none_bb:\n/// %5 = enum #none\n/// br continue_bb(%5)\n/// continue_bb(%value): // passed value\n/// ```\nprivate func isOptionalBridging(of value: Value, isBridging: (Value) -> ApplyInst?) -> SwitchEnumInst? {\n guard let phi = Phi(value) else { return nil }\n \n var noneSwitch: SwitchEnumInst?\n var someSwitch: SwitchEnumInst?\n \n // Check if one incoming value is the none-case and the other is the some-case.\n for incomingVal in phi.incomingValues {\n // In both branches, the result must be an `enum` which is passed to the\n // continue_bb's phi-argument.\n guard let enumInst = incomingVal as? EnumInst,\n let singleEnumUse = enumInst.uses.singleUse,\n singleEnumUse.instruction is BranchInst else {\n return nil\n }\n if let payload = enumInst.payload {\n // The some-case\n if someSwitch != nil { return nil }\n guard let bridgingCall = isBridging(payload),\n bridgingCall.uses.isSingleUse else {\n return nil\n }\n let callArgument = bridgingCall.arguments[0]\n \n // If it's an ObjC -> Swift bridging call the argument is wrapped into an optional enum.\n if callArgument.type.isEnum {\n guard let sourceEnum = callArgument as? 
EnumInst,\n let sourcePayload = sourceEnum.payload,\n let (se, someCase) = isPayloadOfSwitchEnum(sourcePayload),\n enumInst.caseIndex == someCase,\n sourceEnum.caseIndex == someCase,\n sourceEnum.type == se.enumOp.type else {\n return nil\n }\n someSwitch = se\n } else {\n guard let (se, someCase) = isPayloadOfSwitchEnum(callArgument),\n enumInst.caseIndex == someCase else {\n return nil\n }\n someSwitch = se\n }\n } else {\n // The none-case\n if noneSwitch != nil { return nil }\n guard let singlePred = enumInst.parentBlock.singlePredecessor,\n let se = singlePred.terminator as? SwitchEnumInst,\n se.getUniqueSuccessor(forCaseIndex: enumInst.caseIndex) === enumInst.parentBlock else {\n return nil\n }\n noneSwitch = se\n }\n }\n guard let noneSwitch = noneSwitch,\n let someSwitch = someSwitch,\n noneSwitch == someSwitch else {\n return nil\n }\n return someSwitch\n}\n\n/// Returns the `switch_enum` together with the enum case index, if `value` is\n/// the payload block argument of the `switch_enum`.\nprivate func isPayloadOfSwitchEnum(_ value: Value) -> (SwitchEnumInst, case: Int)? {\n if let payloadArg = TerminatorResult(value),\n let se = payloadArg.terminator as? SwitchEnumInst,\n let caseIdx = se.getUniqueCase(forSuccessor: payloadArg.successor) {\n return (se, caseIdx)\n }\n return nil\n}\n\n/// Returns the apply instruction if `value` is an ObjC -> Swift bridging call.\nfunc isBridgeToSwiftCall(_ value: Value) -> ApplyInst? {\n guard let bridgingCall = value as? 
ApplyInst,\n let bridgingFunc = bridgingCall.referencedFunction else {\n return nil\n }\n let funcName = bridgingFunc.name\n guard bridgingFunc.hasSemanticsAttribute("bridgeFromObjectiveC") ||\n // Currently the semantics attribute is not used, so test for specific functions, too.\n // TODO: remove those checks once the briding functions are annotate with "bridgeFromObjectiveC"\n // in Foundation.\n //\n // String._unconditionallyBridgeFromObjectiveC(_:)\n funcName == "$sSS10FoundationE36_unconditionallyBridgeFromObjectiveCySSSo8NSStringCSgFZ" ||\n // Array._unconditionallyBridgeFromObjectiveC(_:)\n funcName == "$sSa10FoundationE36_unconditionallyBridgeFromObjectiveCySayxGSo7NSArrayCSgFZ" else {\n return nil\n }\n guard bridgingCall.arguments.count == 2,\n bridgingCall.calleeArgumentConventions[0] == .directGuaranteed else {\n return nil\n }\n return bridgingCall\n}\n\n/// Returns the apply instruction if `value` is a Swift -> ObjC bridging call.\nfunc isBridgeToObjcCall(_ value: Value) -> ApplyInst? {\n guard let bridgingCall = value as? ApplyInst,\n let bridgingFunc = bridgingCall.referencedFunction,\n bridgingFunc.hasSemanticsAttribute("convertToObjectiveC"),\n bridgingCall.arguments.count == 1,\n bridgingCall.calleeArgumentConventions[0] == .directGuaranteed else {\n return nil\n }\n return bridgingCall\n}\n
dataset_sample\swift\swift\cpp_apple_swift_SwiftCompilerSources_Sources_Optimizer_FunctionPasses_ObjCBridgingOptimization.swift
cpp_apple_swift_SwiftCompilerSources_Sources_Optimizer_FunctionPasses_ObjCBridgingOptimization.swift
Swift
14,980
0.95
0.128065
0.375385
python-kit
217
2024-10-13T01:14:24.341486
BSD-3-Clause
false
9753b124e4b0aec40cd9f34333b3c6af
//===--- ObjectOutliner.swift ----------------------------------------------==//\n//\n// This source file is part of the Swift.org open source project\n//\n// Copyright (c) 2014 - 2023 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n\nimport AST\nimport SIL\n\n/// Outlines class objects from functions into statically initialized global variables.\n/// This is currently done for Arrays and for global let variables.\n///\n/// If a function constructs an Array literal with constant elements (done by storing\n/// the element values into the array buffer), a new global variable is created which\n/// contains the constant elements in its static initializer.\n/// For example:\n/// ```\n/// public func arrayLookup(_ i: Int) -> Int {\n/// let lookupTable = [10, 11, 12]\n/// return lookupTable[i]\n/// }\n/// ```\n/// is turned into\n/// ```\n/// private let outlinedVariable = [10, 11, 12] // statically initialized and allocated in the data section\n///\n/// public func arrayLookup(_ i: Int) -> Int {\n/// return outlinedVariable[i]\n/// }\n/// ```\n///\n/// Similar with global let variables:\n/// ```\n/// let c = SomeClass()\n/// ```\n/// is turned into\n/// ```\n/// private let outlinedVariable = SomeClass() // statically initialized and allocated in the data section\n///\n/// let c = outlinedVariable\n/// ```\n///\n/// As a second optimization, if an array is a string literal which is a parameter to the\n/// `_findStringSwitchCase` library function and the array has many elements (> 16), the\n/// call is redirected to `_findStringSwitchCaseWithCache`. This function builds a cache\n/// (e.g. 
a Dictionary) and stores it into a global variable.\n/// Then subsequent calls to this function can do a fast lookup using the cache.\n///\nlet objectOutliner = FunctionPass(name: "object-outliner") {\n (function: Function, context: FunctionPassContext) in\n\n if function.hasOwnership && !function.isSwift51RuntimeAvailable {\n // Since Swift 5.1 global objects have immortal ref counts. And that's required for ownership.\n return\n }\n\n var allocRefs = Stack<AllocRefInstBase>(context)\n defer { allocRefs.deinitialize() }\n\n allocRefs.append(contentsOf: function.instructions.lazy.compactMap { $0 as? AllocRefInstBase })\n\n // Try multiple iterations to handle multi-dimensional arrays.\n var changed: Bool\n repeat {\n changed = false\n for ari in allocRefs where !ari.isDeleted {\n if !context.continueWithNextSubpassRun(for: ari) {\n return\n }\n if let globalValue = optimizeObjectAllocation(allocRef: ari, context) {\n optimizeFindStringCall(stringArray: globalValue, context)\n changed = true\n }\n }\n } while changed\n}\n\nprivate func optimizeObjectAllocation(allocRef: AllocRefInstBase, _ context: FunctionPassContext) -> GlobalValueInst? {\n if !allocRef.fieldsKnownStatically {\n return nil\n }\n\n guard let endOfInitInst = findEndOfInitialization(\n of: allocRef,\n // An object with tail allocated elements is in risk of being passed to malloc_size, which does\n // not work for non-heap allocated objects. 
Conservatively, disable objects with tail allocations.\n // Note, that this does not affect Array because Array always has an end_cow_mutation at the end of\n // initialization.\n canStoreToGlobal: allocRef.tailAllocatedCounts.count == 0)\n else {\n return nil\n }\n\n guard let (storesToClassFields, storesToTailElements) = getInitialization(of: allocRef,\n ignore: endOfInitInst,\n context) else\n {\n return nil\n }\n\n let outlinedGlobal = context.createGlobalVariable(\n name: context.mangleOutlinedVariable(from: allocRef.parentFunction),\n type: allocRef.type, linkage: .private,\n // Only if it's a COW object we can be sure that the object allocated in the global is not mutated.\n // If someone wants to mutate it, it has to be copied first.\n isLet: endOfInitInst is EndCOWMutationInst)\n\n constructObject(of: allocRef, inInitializerOf: outlinedGlobal, storesToClassFields, storesToTailElements, context)\n context.erase(instructions: storesToClassFields)\n context.erase(instructions: storesToTailElements)\n\n return replace(object: allocRef, with: outlinedGlobal, context)\n}\n\n// The end-of-initialization is either an end_cow_mutation, because it guarantees that the originally initialized\n// object is not mutated (it must be copied before mutation).\n// Or it is the store to a global let variable in the global's initializer function.\nprivate func findEndOfInitialization(of object: Value, canStoreToGlobal: Bool) -> Instruction? {\n for use in object.uses {\n let user = use.instruction\n switch user {\n case is UpcastInst,\n is UncheckedRefCastInst,\n is MoveValueInst,\n is EndInitLetRefInst:\n if let ecm = findEndOfInitialization(of: user as! SingleValueInstruction, canStoreToGlobal: canStoreToGlobal) {\n return ecm\n }\n case let ecm as EndCOWMutationInst:\n if ecm.doKeepUnique {\n return nil\n }\n return ecm\n case let store as StoreInst:\n if canStoreToGlobal,\n let ga = store.destination as? 
GlobalAddrInst,\n ga.global.isLet,\n ga.parentFunction.initializedGlobal == ga.global\n {\n return store\n }\n default:\n break\n }\n }\n return nil\n}\n\nprivate func getInitialization(of allocRef: AllocRefInstBase, ignore ignoreInst: Instruction,\n _ context: FunctionPassContext)\n -> (storesToClassFields: [StoreInst], storesToTailElements: [StoreInst])?\n{\n guard let numTailElements = allocRef.numTailElements else {\n return nil\n }\n var fieldStores = Array<StoreInst?>(repeating: nil, count: allocRef.numClassFields)\n\n // If the tail element is a tuple, then its tuple elements are initialized with separate stores.\n // E.g:\n // %2 = ref_tail_addr\n // %3 = tuple_element_addr %2, 0\n // store %0 to %3\n // %4 = tuple_element_addr %2, 1\n // store %1 to %4\n let tailCount = numTailElements != 0 ? numTailElements * allocRef.numStoresPerTailElement : 0\n var tailStores = Array<StoreInst?>(repeating: nil, count: tailCount)\n\n if !findInitStores(of: allocRef, &fieldStores, &tailStores, ignore: ignoreInst, context) {\n return nil\n }\n\n // Check that all fields and tail elements are initialized.\n if fieldStores.contains(nil) || tailStores.contains(nil) {\n return nil\n }\n return (fieldStores.map { $0! }, tailStores.map { $0! })\n}\n\nprivate func findInitStores(of object: Value,\n _ fieldStores: inout [StoreInst?],\n _ tailStores: inout [StoreInst?],\n ignore ignoreInst: Instruction,\n _ context: FunctionPassContext) -> Bool\n{\n for use in object.uses {\n let user = use.instruction\n switch user {\n case is UpcastInst,\n is UncheckedRefCastInst,\n is MoveValueInst,\n is EndInitLetRefInst,\n is BeginBorrowInst:\n if !findInitStores(of: user as! 
SingleValueInstruction, &fieldStores, &tailStores, ignore: ignoreInst, context) {\n return false\n }\n case let rea as RefElementAddrInst:\n if !findStores(inUsesOf: rea, index: rea.fieldIndex, stores: &fieldStores, context) {\n return false\n }\n case let rta as RefTailAddrInst:\n if !findStores(toTailAddress: rta, tailElementIndex: 0, stores: &tailStores, context) {\n return false\n }\n case ignoreInst,\n is EndBorrowInst:\n break\n default:\n if !isValidUseOfObject(use) {\n return false\n }\n }\n }\n return true\n}\n\nprivate func findStores(toTailAddress tailAddr: Value, tailElementIndex: Int, stores: inout [StoreInst?],\n _ context: FunctionPassContext) -> Bool {\n for use in tailAddr.uses {\n switch use.instruction {\n case let indexAddr as IndexAddrInst:\n guard let indexLiteral = indexAddr.index as? IntegerLiteralInst,\n let tailIdx = indexLiteral.value else\n {\n return false\n }\n if !findStores(toTailAddress: indexAddr, tailElementIndex: tailElementIndex + tailIdx, stores: &stores, context) {\n return false\n }\n case let tea as TupleElementAddrInst:\n // The tail elements are tuples. 
There is a separate store for each tuple element.\n let numTupleElements = tea.tuple.type.tupleElements.count\n let tupleIdx = tea.fieldIndex\n if !findStores(inUsesOf: tea, index: tailElementIndex * numTupleElements + tupleIdx, stores: &stores, context) {\n return false\n }\n case let atp as AddressToPointerInst:\n if !findStores(toTailAddress: atp, tailElementIndex: tailElementIndex, stores: &stores, context) {\n return false\n }\n case let mdi as MarkDependenceInst:\n if !findStores(toTailAddress: mdi, tailElementIndex: tailElementIndex, stores: &stores, context) {\n return false\n }\n case let pta as PointerToAddressInst:\n if !findStores(toTailAddress: pta, tailElementIndex: tailElementIndex, stores: &stores, context) {\n return false\n }\n case let store as StoreInst:\n if store.source.type.isTuple {\n // This kind of SIL is never generated because tuples are stored with separated stores to tuple_element_addr.\n // Just to be on the safe side..\n return false\n }\n if !handleStore(store, index: tailElementIndex, stores: &stores, context) {\n return false\n }\n default:\n if !isValidUseOfObject(use) {\n return false\n }\n }\n }\n return true\n}\n\nprivate func findStores(inUsesOf address: Value, index: Int, stores: inout [StoreInst?],\n _ context: FunctionPassContext) -> Bool\n{\n for use in address.uses {\n if let store = use.instruction as? 
StoreInst {\n if !handleStore(store, index: index, stores: &stores, context) {\n return false\n }\n } else if !isValidUseOfObject(use) {\n return false\n }\n }\n return true\n}\n\nprivate func handleStore(_ store: StoreInst, index: Int, stores: inout [StoreInst?],\n _ context: FunctionPassContext) -> Bool\n{\n if index >= 0 && index < stores.count,\n store.source.isValidGlobalInitValue(context),\n stores[index] == nil {\n stores[index] = store\n return true\n }\n return false\n}\n\nprivate func isValidUseOfObject(_ use: Operand) -> Bool {\n let inst = use.instruction\n switch inst {\n case is DebugValueInst,\n is LoadInst,\n is DeallocRefInst,\n is DeallocStackRefInst,\n is StrongRetainInst,\n is StrongReleaseInst,\n is FixLifetimeInst,\n is MarkDependenceAddrInst:\n return true\n\n case let mdi as MarkDependenceInst:\n if (use == mdi.baseOperand) {\n return true;\n }\n for mdiUse in mdi.uses {\n if !isValidUseOfObject(mdiUse) {\n return false\n }\n }\n return true\n\n case is StructElementAddrInst,\n is AddressToPointerInst,\n is StructInst,\n is TupleInst,\n is TupleExtractInst,\n is EnumInst,\n is StructExtractInst,\n is UncheckedRefCastInst,\n is UpcastInst,\n is BeginDeallocRefInst,\n is RefTailAddrInst,\n is RefElementAddrInst:\n for instUse in (inst as! SingleValueInstruction).uses {\n if !isValidUseOfObject(instUse) {\n return false\n }\n }\n return true\n\n case let bi as BuiltinInst:\n switch bi.id {\n case .ICMP_EQ, .ICMP_NE:\n // Handle the case for comparing addresses. This occurs when the Array\n // comparison function is inlined.\n return true\n case .DestroyArray:\n // We must not try to delete the tail allocated values. 
Although this would be a no-op\n // (because we only handle trivial types), it would be semantically wrong to apply this\n // builtin on the outlined object.\n return true\n default:\n return false\n }\n\n default:\n return false\n }\n}\n\nprivate func constructObject(of allocRef: AllocRefInstBase,\n inInitializerOf global: GlobalVariable,\n _ storesToClassFields: [StoreInst], _ storesToTailElements: [StoreInst],\n _ context: FunctionPassContext) {\n var cloner = StaticInitCloner(cloneTo: global, context)\n defer { cloner.deinitialize() }\n\n // Create the initializers for the fields\n var objectArgs = [Value]()\n for store in storesToClassFields {\n objectArgs.append(cloner.clone(store.source as! SingleValueInstruction))\n }\n let globalBuilder = Builder(staticInitializerOf: global, context)\n\n if !storesToTailElements.isEmpty {\n // Create the initializers for the tail elements.\n let numTailTupleElems = allocRef.numStoresPerTailElement\n if numTailTupleElems > 1 {\n // The elements are tuples: combine numTailTupleElems elements to a single tuple instruction.\n for elementIdx in 0..<allocRef.numTailElements! {\n let tupleElems = (0..<numTailTupleElems).map { tupleIdx in\n let store = storesToTailElements[elementIdx * numTailTupleElems + tupleIdx]\n return cloner.clone(store.source as! SingleValueInstruction)\n }\n let tuple = globalBuilder.createTuple(type: allocRef.tailAllocatedTypes[0], elements: tupleElems)\n objectArgs.append(tuple)\n }\n } else {\n // The non-tuple element case.\n for store in storesToTailElements {\n objectArgs.append(cloner.clone(store.source as! 
SingleValueInstruction))\n }\n }\n }\n globalBuilder.createObject(type: allocRef.type, arguments: objectArgs, numBaseElements: storesToClassFields.count)\n\n // The initial value can contain a `begin_access` if it references another global variable by address, e.g.\n // var p = Point(x: 10, y: 20)\n // let a = [UnsafePointer(&p)]\n //\n global.stripAccessInstructionFromInitializer(context)\n}\n\nprivate func replace(object allocRef: AllocRefInstBase,\n with global: GlobalVariable,\n _ context: FunctionPassContext) -> GlobalValueInst {\n\n // Replace the alloc_ref by global_value + strong_retain instructions.\n let builder = Builder(before: allocRef, context)\n let globalValue = builder.createGlobalValue(global: global, isBare: false)\n if !allocRef.parentFunction.hasOwnership {\n builder.createStrongRetain(operand: globalValue)\n }\n\n rewriteUses(of: allocRef, context)\n allocRef.replace(with: globalValue, context)\n return globalValue\n}\n\nprivate func rewriteUses(of startValue: Value, _ context: FunctionPassContext) {\n var worklist = InstructionWorklist(context)\n defer { worklist.deinitialize() }\n worklist.pushIfNotVisited(usersOf: startValue)\n\n while let inst = worklist.pop() {\n switch inst {\n case let beginDealloc as BeginDeallocRefInst:\n worklist.pushIfNotVisited(usersOf: beginDealloc)\n let builder = Builder(before: beginDealloc, context)\n if !beginDealloc.parentFunction.hasOwnership {\n builder.createStrongRelease(operand: beginDealloc.reference)\n }\n beginDealloc.replace(with: beginDealloc.reference, context)\n case is EndCOWMutationInst, is EndInitLetRefInst, is MoveValueInst:\n let svi = inst as! 
SingleValueInstruction\n worklist.pushIfNotVisited(usersOf: svi)\n svi.replace(with: svi.operands[0].value, context)\n case let upCast as UpcastInst:\n worklist.pushIfNotVisited(usersOf: upCast)\n case let refCast as UncheckedRefCastInst:\n worklist.pushIfNotVisited(usersOf: refCast)\n case let moveValue as MoveValueInst:\n worklist.pushIfNotVisited(usersOf: moveValue)\n case is DeallocRefInst, is DeallocStackRefInst:\n context.erase(instruction: inst)\n default:\n break\n }\n }\n}\n\nprivate extension InstructionWorklist {\n mutating func pushIfNotVisited(usersOf value: Value) {\n pushIfNotVisited(contentsOf: value.users)\n }\n}\n\nprivate extension AllocRefInstBase {\n var fieldsKnownStatically: Bool {\n if let allocDynamic = self as? AllocRefDynamicInst,\n !allocDynamic.isDynamicTypeDeinitAndSizeKnownEquivalentToBaseType {\n return false\n }\n if isObjC {\n return false\n }\n return true\n }\n\n var numTailElements: Int? {\n\n if tailAllocatedCounts.count == 0 {\n return 0\n }\n\n // We only support a single tail allocated array.\n // Stdlib's tail allocated arrays don't have any side-effects in the constructor if the element type is trivial.\n // TODO: also exclude custom tail allocated arrays which might have side-effects in the destructor.\n if tailAllocatedCounts.count != 1 {\n return nil\n }\n\n // The number of tail allocated elements must be constant.\n if let tailCountLiteral = tailAllocatedCounts[0].value as? 
IntegerLiteralInst,\n let count = tailCountLiteral.value\n {\n return count\n }\n return nil\n }\n\n var numClassFields: Int {\n assert(type.isClass)\n return type.getNominalFields(in: parentFunction)!.count\n }\n\n var numStoresPerTailElement: Int {\n let tailType = tailAllocatedTypes[0]\n if tailType.isTuple {\n return tailType.tupleElements.count\n }\n return 1\n }\n}\n\nprivate func optimizeFindStringCall(stringArray: GlobalValueInst, _ context: FunctionPassContext) {\n if stringArray.numArrayElements > 16,\n let findStringCall = findFindStringCall(stringArray: stringArray),\n let cachedFindStringFunc = getFindStringSwitchCaseWithCacheFunction(context) {\n replace(findStringCall: findStringCall, with: cachedFindStringFunc, context)\n }\n}\n\n/// Finds a call to findStringSwitchCase which takes `stringArray` as parameter.\nprivate func findFindStringCall(stringArray: Value) -> ApplyInst? {\n for use in stringArray.uses {\n switch use.instruction {\n case let apply as ApplyInst:\n // There should only be a single call to findStringSwitchCase. But even\n // if there are multiple calls, it's not problem - we'll just optimize the\n // last one we find.\n if apply.hasSemanticsAttribute("findStringSwitchCase") {\n return apply\n }\n case is StructInst,\n is TupleInst,\n is UncheckedRefCastInst,\n is UpcastInst:\n if let foundCall = findFindStringCall(stringArray: use.instruction as! SingleValueInstruction) {\n return foundCall\n }\n default:\n break\n }\n }\n return nil\n}\n\nprivate func getFindStringSwitchCaseWithCacheFunction(_ context: FunctionPassContext) -> Function? 
{\n if let f = context.lookupStdlibFunction(name: "_findStringSwitchCaseWithCache"),\n f.argumentTypes.count == 3 {\n return f\n }\n return nil\n}\n\nprivate func replace(findStringCall: ApplyInst,\n with cachedFindStringFunc: Function,\n _ context: FunctionPassContext) {\n let cacheType = cachedFindStringFunc.argumentTypes[2].objectType\n let wordTy = cacheType.getNominalFields(in: findStringCall.parentFunction)![0]\n\n let name = context.mangleOutlinedVariable(from: findStringCall.parentFunction)\n\n // Create an "opaque" global variable which is passed as inout to\n // _findStringSwitchCaseWithCache and into which the function stores the "cache".\n let cacheVar = context.createGlobalVariable(name: name, type: cacheType, linkage: .private, isLet: false)\n\n let varBuilder = Builder(staticInitializerOf: cacheVar, context)\n let zero = varBuilder.createIntegerLiteral(0, type: wordTy)\n _ = varBuilder.createStruct(type: cacheType, elements: [zero, zero])\n\n let builder = Builder(before: findStringCall, context)\n let cacheAddr = builder.createGlobalAddr(global: cacheVar, dependencyToken: nil)\n let findStringRef = builder.createFunctionRef(cachedFindStringFunc)\n let newCall = builder.createApply(function: findStringRef, SubstitutionMap(),\n arguments: [findStringCall.arguments[0],\n findStringCall.arguments[1],\n cacheAddr])\n\n findStringCall.replace(with: newCall, context)\n}\n\nprivate extension GlobalValueInst {\n /// Assuming the global is an Array, returns the number of elements = tail elements.\n var numArrayElements: Int {\n (global.staticInitValue! as! ObjectInst).tailOperands.count\n }\n}\n
dataset_sample\swift\swift\cpp_apple_swift_SwiftCompilerSources_Sources_Optimizer_FunctionPasses_ObjectOutliner.swift
cpp_apple_swift_SwiftCompilerSources_Sources_Optimizer_FunctionPasses_ObjectOutliner.swift
Swift
20,585
0.95
0.158621
0.184061
react-lib
235
2023-08-23T14:27:36.975033
Apache-2.0
false
b1e0dbcbe6f3b53b45bfe304ccd828ee
//===--- RedundantLoadElimination.swift ------------------------------------==//\n//\n// This source file is part of the Swift.org open source project\n//\n// Copyright (c) 2014 - 2023 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n\nimport SIL\n\n/// Replaces redundant `load` or `copy_addr` instructions with already available values.\n///\n/// A load is redundant if the loaded value is already available at that point.\n/// This can be via a preceding store to the same address:\n///\n/// store %1 to %addr\n/// ... // no writes to %addr\n/// %2 = load %addr\n/// ->\n/// store %1 to %addr\n/// ... // no writes to %addr\n/// // replace uses of %2 with the available value %1\n///\n/// or a preceding load from the same address:\n///\n/// %1 = load %addr\n/// ... // no writes to %addr\n/// %2 = load %addr\n/// ->\n/// %1 = load %addr\n/// ... // no writes to %addr\n/// // replace uses of %2 with the available value %1\n///\n/// In case of a partial redundant load, the load is split so that some of the new\n/// individual loads can be eliminated in the next round of the optimization:\n///\n/// %fa1 = struct_element_addr %addr, #field1\n/// store %1 to %fa1\n/// ... // no writes to %fa1\n/// %2 = load %addr // partially redundant\n/// ->\n/// %fa1 = struct_extract %addr, #field1\n/// store %1 to %fa1\n/// ... // no writes to %fa1\n/// %fa1 = struct_element_addr %addr, #field1\n/// %f1 = load %fa1 // this load is redundant now\n/// %fa2 = struct_element_addr %addr, #field2\n/// %f2 = load %fa2\n/// %2 = struct (%f1, %f2)\n///\n/// This works in a similar fashion for `copy_addr`. 
If the source value of the `copy_addr` is\n/// already available, the `copy_addr` is replaced by a `store` of the available value.\n///\n/// The algorithm is a data flow analysis which starts at the original load and searches\n/// for preceding stores or loads by following the control flow in backward direction.\n/// The preceding stores and loads provide the "available values" with which the original\n/// load can be replaced.\n///\n/// If the function is in OSSA, redundant loads are replaced in a way that no additional\n/// copies of the loaded value are introduced. If this is not possible, the redundant load\n/// is not replaced.\n///\nlet redundantLoadElimination = FunctionPass(name: "redundant-load-elimination") {\n (function: Function, context: FunctionPassContext) in\n _ = eliminateRedundantLoads(in: function, variant: .regular, context)\n}\n\n// Early RLE does not touch loads from Arrays. This is important because later array optimizations,\n// like ABCOpt, get confused if an array load in a loop is converted to a pattern with a phi argument.\nlet earlyRedundantLoadElimination = FunctionPass(name: "early-redundant-load-elimination") {\n (function: Function, context: FunctionPassContext) in\n _ = eliminateRedundantLoads(in: function, variant: .early, context)\n}\n\nlet mandatoryRedundantLoadElimination = FunctionPass(name: "mandatory-redundant-load-elimination") {\n (function: Function, context: FunctionPassContext) in\n _ = eliminateRedundantLoads(in: function, variant: .mandatory, context)\n}\n\nenum RedundantLoadEliminationVariant {\n case mandatory, mandatoryInGlobalInit, early, regular\n}\n\nfunc eliminateRedundantLoads(in function: Function,\n variant: RedundantLoadEliminationVariant,\n _ context: FunctionPassContext) -> Bool\n{\n // Avoid quadratic complexity by limiting the number of visited instructions.\n // This limit is sufficient for most "real-world" functions, by far.\n var complexityBudget = 50_000\n var changed = false\n\n for block in 
function.blocks.reversed() {\n\n // We cannot use for-in iteration here because if the load is split, the new\n // individual loads are inserted right before and they would be ignored by a for-in iteration.\n var inst = block.instructions.reversed().first\n while let i = inst {\n defer { inst = i.previous }\n\n if let load = inst as? LoadingInstruction {\n if !context.continueWithNextSubpassRun(for: load) {\n return changed\n }\n if complexityBudget < 20 {\n complexityBudget = 20\n }\n if !load.isEligibleForElimination(in: variant, context) {\n continue;\n }\n changed = tryEliminate(load: load, complexityBudget: &complexityBudget, context) || changed\n }\n }\n }\n return changed\n}\n\n/// Either a `load` or a `copy_addr` (which is equivalent to a load+store).\nprivate protocol LoadingInstruction: Instruction {\n var address: Value { get }\n var type: Type { get }\n var ownership: Ownership { get }\n var loadOwnership: LoadInst.LoadOwnership { get }\n var canLoadValue: Bool { get }\n func trySplit(_ context: FunctionPassContext) -> Bool\n func materializeLoadForReplacement(_ context: FunctionPassContext) -> LoadInst\n}\n\nextension LoadInst : LoadingInstruction {\n // We know that the type is loadable because - well - this is a load.\n var canLoadValue: Bool { true }\n\n // Nothing to materialize, because this is already a `load`.\n func materializeLoadForReplacement(_ context: FunctionPassContext) -> LoadInst { return self }\n}\n\nextension CopyAddrInst : LoadingInstruction {\n var address: Value { source }\n var type: Type { address.type.objectType }\n var typeIsLoadable: Bool { type.isLoadable(in: parentFunction) }\n\n var ownership: Ownership {\n if !parentFunction.hasOwnership || type.isTrivial(in: parentFunction) {\n return .none\n }\n // Regardless of if the copy is taking or copying, the loaded value is an owned value.\n return .owned\n }\n\n var canLoadValue: Bool {\n if !source.type.isLoadable(in: parentFunction) {\n // Although the original load's type is 
loadable (obviously), it can be projected-out\n // from the copy_addr's type which might be not loadable.\n return false\n }\n if !parentFunction.hasOwnership {\n if !isTakeOfSrc || !isInitializationOfDest {\n // For simplicity, bail if we would have to insert compensating retains and releases.\n return false\n }\n }\n return true\n }\n\n func materializeLoadForReplacement(_ context: FunctionPassContext) -> LoadInst {\n return replaceWithLoadAndStore(context).load\n }\n}\n\nprivate func tryEliminate(load: LoadingInstruction, complexityBudget: inout Int, _ context: FunctionPassContext) -> Bool {\n switch load.isRedundant(complexityBudget: &complexityBudget, context) {\n case .notRedundant:\n return false\n case .redundant(let availableValues):\n replace(load: load, with: availableValues, context)\n return true\n case .maybePartiallyRedundant(let subPath):\n // Check if the a partial load would really be redundant to avoid unnecessary splitting.\n switch load.isRedundant(at: subPath, complexityBudget: &complexityBudget, context) {\n case .notRedundant, .maybePartiallyRedundant:\n return false\n case .redundant:\n // The new individual loads are inserted right before the current load and\n // will be optimized in the following loop iterations.\n return load.trySplit(context)\n }\n }\n}\n\nprivate extension LoadingInstruction {\n\n func isEligibleForElimination(in variant: RedundantLoadEliminationVariant, _ context: FunctionPassContext) -> Bool {\n if !canLoadValue {\n return false\n }\n switch variant {\n case .mandatory, .mandatoryInGlobalInit:\n if loadOwnership == .take {\n // load [take] would require to shrinkMemoryLifetime. 
But we don't want to do this in the mandatory\n // pipeline to not shrink or remove an alloc_stack which is relevant for debug info.\n return false\n }\n switch address.accessBase {\n case .box, .stack:\n break\n default:\n return false\n }\n case .early:\n // See the comment of `earlyRedundantLoadElimination`.\n if let nominal = self.type.nominal, nominal == context.swiftArrayDecl {\n return false\n }\n case .regular:\n break\n }\n // Check if the type can be expanded without a significant increase to code size.\n // We block redundant load elimination because it might increase register pressure for large values.\n // Furthermore, this pass also splits values into its projections (e.g shrinkMemoryLifetimeAndSplit).\n // But: it is required to remove loads, even of large structs, in global init functions to ensure\n // that globals (containing large structs) can be statically initialized.\n if variant != .mandatoryInGlobalInit, !self.type.shouldExpand(context) {\n return false\n }\n return true\n }\n\n func isRedundant(complexityBudget: inout Int, _ context: FunctionPassContext) -> DataflowResult {\n return isRedundant(at: address.constantAccessPath, complexityBudget: &complexityBudget, context)\n }\n\n func isRedundant(at accessPath: AccessPath, complexityBudget: inout Int, _ context: FunctionPassContext) -> DataflowResult {\n var scanner = InstructionScanner(load: self, accessPath: accessPath, context.aliasAnalysis)\n\n switch scanner.scan(instructions: ReverseInstructionList(first: self.previous),\n in: parentBlock,\n complexityBudget: &complexityBudget)\n {\n case .overwritten:\n return DataflowResult(notRedundantWith: scanner.potentiallyRedundantSubpath)\n case .available:\n return .redundant(scanner.availableValues)\n case .transparent:\n return self.isRedundantInPredecessorBlocks(scanner: &scanner, complexityBudget: &complexityBudget, context)\n }\n }\n\n private func isRedundantInPredecessorBlocks(\n scanner: inout InstructionScanner,\n complexityBudget: 
inout Int,\n _ context: FunctionPassContext\n ) -> DataflowResult {\n\n var liverange = Liverange(endBlock: self.parentBlock, context)\n defer { liverange.deinitialize() }\n liverange.pushPredecessors(of: self.parentBlock)\n\n while let block = liverange.pop() {\n switch scanner.scan(instructions: block.instructions.reversed(),\n in: block,\n complexityBudget: &complexityBudget)\n {\n case .overwritten:\n return DataflowResult(notRedundantWith: scanner.potentiallyRedundantSubpath)\n case .available:\n liverange.add(beginBlock: block)\n case .transparent:\n liverange.pushPredecessors(of: block)\n }\n }\n if !self.canReplaceWithoutInsertingCopies(liverange: liverange, context) {\n return DataflowResult(notRedundantWith: scanner.potentiallyRedundantSubpath)\n }\n return .redundant(scanner.availableValues)\n }\n\n func canReplaceWithoutInsertingCopies(liverange: Liverange,_ context: FunctionPassContext) -> Bool {\n switch self.loadOwnership {\n case .trivial, .unqualified:\n return true\n\n case .copy, .take:\n let deadEndBlocks = context.deadEndBlocks\n\n // The liverange of the value has an "exit", i.e. a path which doesn't lead to the load,\n // it means that we would have to insert a destroy on that exit to satisfy ownership rules.\n // But an inserted destroy also means that we would need to insert copies of the value which\n // were not there originally. For example:\n //\n // store %1 to [init] %addr\n // cond_br bb1, bb2\n // bb1:\n // %2 = load [take] %addr\n // bb2: // liverange exit\n //\n // TODO: we could extend OSSA to transfer ownership to support liverange exits without copying. 
E.g.:\n //\n // %b = store_and_borrow %1 to [init] %addr // %b is borrowed from %addr\n // cond_br bb1, bb2\n // bb1:\n // %o = borrowed_to_owned %b take_ownership_from %addr\n // // replace %2 with %o\n // bb2:\n // end_borrow %b\n //\n if liverange.hasExits(deadEndBlocks) {\n return false\n }\n\n // Handle a corner case: if the load is in an infinite loop, the liverange doesn't have an exit,\n // but we still would need to insert a copy. For example:\n //\n // store %1 to [init] %addr\n // br bb1\n // bb1:\n // %2 = load [copy] %addr // would need to insert a copy here\n // br bb1 // no exit from the liverange\n //\n // For simplicity, we don't handle this in OSSA.\n if deadEndBlocks.isDeadEnd(parentBlock) {\n return false\n }\n return true\n }\n }\n}\n\nprivate func replace(load: LoadingInstruction, with availableValues: [AvailableValue], _ context: FunctionPassContext) {\n var ssaUpdater = SSAUpdater(function: load.parentFunction,\n type: load.type, ownership: load.ownership, context)\n\n for availableValue in availableValues.replaceCopyAddrsWithLoadsAndStores(context) {\n let block = availableValue.instruction.parentBlock\n let availableValue = provideValue(for: load, from: availableValue, context)\n ssaUpdater.addAvailableValue(availableValue, in: block)\n }\n\n let newValue: Value\n if availableValues.count == 1 {\n // A single available value means that this available value is located _before_ the load. E.g.:\n //\n // store %1 to %addr // a single available value\n // ...\n // %2 = load %addr // The load\n //\n newValue = ssaUpdater.getValue(atEndOf: load.parentBlock)\n } else {\n // In case of multiple available values, if an available value is defined in the same basic block\n // as the load, this available is located _after_ the load. 
E.g.:\n //\n // store %1 to %addr // an available value\n // br bb1\n // bb1:\n // %2 = load %addr // The load\n // store %3 to %addr // another available value\n // cond_br bb1, bb2\n //\n newValue = ssaUpdater.getValue(inMiddleOf: load.parentBlock)\n }\n\n let originalLoad = load.materializeLoadForReplacement(context)\n\n // Make sure to keep dependencies valid after replacing the load\n insertMarkDependencies(for: originalLoad, context)\n\n originalLoad.replace(with: newValue, context)\n}\n\nprivate func provideValue(\n for load: LoadingInstruction,\n from availableValue: AvailableValue,\n _ context: FunctionPassContext\n) -> Value {\n let projectionPath = availableValue.address.constantAccessPath.getMaterializableProjection(to: load.address.constantAccessPath)!\n\n switch load.loadOwnership {\n case .unqualified:\n return availableValue.value.createProjection(path: projectionPath,\n builder: availableValue.getBuilderForProjections(context))\n case .copy, .trivial:\n // Note: even if the load is trivial, the available value may be projected out of a non-trivial value.\n return availableValue.value.createProjectionAndCopy(path: projectionPath,\n builder: availableValue.getBuilderForProjections(context))\n case .take:\n if projectionPath.isEmpty {\n return shrinkMemoryLifetime(to: availableValue, context)\n } else {\n return shrinkMemoryLifetimeAndSplit(to: availableValue, projectionPath: projectionPath, context)\n }\n }\n}\n\n/// If the memory location depends on something, insert a dependency for the loaded value:\n///\n/// %2 = mark_dependence %1 on %0\n/// %3 = load %2\n/// ->\n/// %2 = mark_dependence %1 on %0 // not needed anymore, can be removed eventually\n/// %3 = load %2\n/// %4 = mark_dependence %3 on %0\n/// // replace %3 with %4\n///\nprivate func insertMarkDependencies(for load: LoadInst, _ context: FunctionPassContext) {\n var inserter = MarkDependenceInserter(load: load, context: context)\n _ = inserter.walkUp(address: load.address, path: 
UnusedWalkingPath())\n}\n\nprivate struct MarkDependenceInserter : AddressUseDefWalker {\n let load: LoadInst\n let context: FunctionPassContext\n\n mutating func walkUp(address: Value, path: UnusedWalkingPath) -> WalkResult {\n if let mdi = address as? MarkDependenceInst {\n let builder = Builder(after: load, context)\n let newMdi = builder.createMarkDependence(value: load, base: mdi.base, kind: mdi.dependenceKind)\n load.uses.ignore(user: newMdi).replaceAll(with: newMdi, context)\n }\n return walkUpDefault(address: address, path: path)\n }\n\n mutating func rootDef(address: Value, path: UnusedWalkingPath) -> WalkResult {\n return .continueWalk\n }\n}\n\n/// In case of a `load [take]` shrink lifetime of the value in memory back to the `availableValue`\n/// and return the (possibly projected) available value. For example:\n///\n/// store %1 to [assign] %addr\n/// ...\n/// %2 = load [take] %addr\n/// ->\n/// destroy_addr %addr\n/// ...\n/// // replace %2 with %1\n///\nprivate func shrinkMemoryLifetime(to availableValue: AvailableValue, _ context: FunctionPassContext) -> Value {\n switch availableValue {\n case .viaLoad(let availableLoad):\n assert(availableLoad.loadOwnership == .copy)\n let builder = Builder(after: availableLoad, context)\n availableLoad.set(ownership: .take, context)\n return builder.createCopyValue(operand: availableLoad)\n case .viaStore(let availableStore):\n let builder = Builder(after: availableStore, context)\n let valueToAdd = availableStore.source\n switch availableStore.storeOwnership {\n case .assign:\n builder.createDestroyAddr(address: availableStore.destination)\n context.erase(instruction: availableStore)\n case .initialize,\n // It can be the case that e non-payload case is stored as trivial enum and the enum is loaded as [take], e.g.\n // %1 = enum $Optional<Class>, #Optional.none\n // store %1 to [trivial] %addr : $*Optional<Class>\n // %2 = load [take] %addr : $*Optional<Class>\n .trivial:\n context.erase(instruction: 
availableStore)\n case .unqualified:\n fatalError("unqualified store in ossa function?")\n }\n return valueToAdd\n case .viaCopyAddr:\n fatalError("copy_addr must be lowered before shrinking lifetime")\n }\n}\n\n/// Like `shrinkMemoryLifetime`, but the available value must be projected.\n/// In this case we cannot just shrink the lifetime and reuse the available value.\n/// Therefore, we split the available load or store and load the projected available value.\n/// The inserted load can be optimized with the split value in the next iteration.\n///\n/// store %1 to [assign] %addr\n/// ...\n/// %2 = struct_element_addr %addr, #field1\n/// %3 = load [take] %2\n/// ->\n/// %f1 = struct_extract %1, #field1\n/// %fa1 = struct_element_addr %addr, #field1\n/// store %f1 to [assign] %fa1\n/// %f2 = struct_extract %1, #field2\n/// %fa2 = struct_element_addr %addr, #field2\n/// store %f2 to [assign] %fa2\n/// %1 = load [take] %fa1 // will be combined with `store %f1 to [assign] %fa1` in the next iteration\n/// ...\n/// // replace %3 with %1\n///\nprivate func shrinkMemoryLifetimeAndSplit(to availableValue: AvailableValue, projectionPath: SmallProjectionPath, _ context: FunctionPassContext) -> Value {\n switch availableValue {\n case .viaLoad(let availableLoad):\n assert(availableLoad.loadOwnership == .copy)\n let builder = Builder(after: availableLoad, context)\n let addr = availableLoad.address.createAddressProjection(path: projectionPath, builder: builder)\n let valueToAdd = builder.createLoad(fromAddress: addr, ownership: .take)\n availableLoad.trySplit(context)\n return valueToAdd\n case .viaStore(let availableStore):\n let builder = Builder(after: availableStore, context)\n let addr = availableStore.destination.createAddressProjection(path: projectionPath, builder: builder)\n let valueToAdd = builder.createLoad(fromAddress: addr, ownership: .take)\n availableStore.trySplit(context)\n return valueToAdd\n case .viaCopyAddr:\n fatalError("copy_addr must be lowered before 
shrinking lifetime")\n }\n}\n\nprivate enum DataflowResult {\n case notRedundant\n case redundant([AvailableValue])\n case maybePartiallyRedundant(AccessPath)\n\n init(notRedundantWith subPath: AccessPath?) {\n if let subPath = subPath {\n self = .maybePartiallyRedundant(subPath)\n } else {\n self = .notRedundant\n }\n }\n}\n\n/// Either a `load` or `store` which is preceding the original load and provides the loaded value.\nprivate enum AvailableValue {\n case viaLoad(LoadInst)\n case viaStore(StoreInst)\n case viaCopyAddr(CopyAddrInst)\n\n var value: Value {\n switch self {\n case .viaLoad(let load): return load\n case .viaStore(let store): return store.source\n case .viaCopyAddr: fatalError("copy_addr must be lowered")\n }\n }\n\n var address: Value {\n switch self {\n case .viaLoad(let load): return load.address\n case .viaStore(let store): return store.destination\n case .viaCopyAddr(let copyAddr): return copyAddr.destination\n }\n }\n\n var instruction: Instruction {\n switch self {\n case .viaLoad(let load): return load\n case .viaStore(let store): return store\n case .viaCopyAddr(let copyAddr): return copyAddr\n }\n }\n\n func getBuilderForProjections(_ context: FunctionPassContext) -> Builder {\n switch self {\n case .viaLoad(let load): return Builder(after: load, context)\n case .viaStore(let store): return Builder(before: store, context)\n case .viaCopyAddr: fatalError("copy_addr must be lowered")\n }\n }\n}\n\nprivate extension Array where Element == AvailableValue {\n func replaceCopyAddrsWithLoadsAndStores(_ context: FunctionPassContext) -> [AvailableValue] {\n return map {\n if case .viaCopyAddr(let copyAddr) = $0 {\n return .viaStore(copyAddr.replaceWithLoadAndStore(context).store)\n } else {\n return $0\n }\n }\n }\n}\n\nprivate struct InstructionScanner {\n private let load: LoadingInstruction\n private let accessPath: AccessPath\n private let storageDefBlock: BasicBlock?\n private let aliasAnalysis: AliasAnalysis\n\n private(set) var 
potentiallyRedundantSubpath: AccessPath? = nil\n private(set) var availableValues = Array<AvailableValue>()\n\n init(load: LoadingInstruction, accessPath: AccessPath, _ aliasAnalysis: AliasAnalysis) {\n self.load = load\n self.accessPath = accessPath\n self.storageDefBlock = accessPath.base.reference?.referenceRoot.parentBlock\n self.aliasAnalysis = aliasAnalysis\n }\n\n enum ScanResult {\n case overwritten\n case available\n case transparent\n }\n\n mutating func scan(instructions: ReverseInstructionList,\n in block: BasicBlock,\n complexityBudget: inout Int) -> ScanResult\n {\n for inst in instructions {\n complexityBudget -= 1\n if complexityBudget <= 0 {\n return .overwritten\n }\n\n switch visit(instruction: inst) {\n case .available: return .available\n case .overwritten: return .overwritten\n case .transparent: break\n }\n }\n\n // Abort if we find the storage definition of the access in case of a loop, e.g.\n //\n // bb1:\n // %storage_root = apply\n // %2 = ref_element_addr %storage_root\n // %3 = load %2\n // cond_br %c, bb1, bb2\n //\n // The storage root is different in each loop iteration. 
Therefore the load in a\n // successive loop iteration does not load from the same address as in the previous iteration.\n if let storageDefBlock = storageDefBlock,\n block == storageDefBlock {\n return .overwritten\n }\n if block.predecessors.isEmpty {\n // We reached the function entry without finding an available value.\n return .overwritten\n }\n return .transparent\n }\n\n private mutating func visit(instruction: Instruction) -> ScanResult {\n switch instruction {\n case is FixLifetimeInst, is BeginAccessInst, is EndAccessInst, is EndBorrowInst:\n // Those scope-ending instructions are only irrelevant if the preceding load is not changed.\n // If it is changed from `load [copy]` -> `load [take]` the memory effects of those scope-ending\n // instructions prevent that the `load [take]` will illegally mutate memory which is protected\n // from mutation by the scope.\n if load.loadOwnership != .take {\n return .transparent\n }\n case let precedingLoad as LoadInst:\n if precedingLoad == load {\n // We need to stop the data flow analysis when we visit the original load again.\n // This happens if the load is in a loop.\n return .available\n }\n let precedingLoadPath = precedingLoad.address.constantAccessPath\n if precedingLoadPath.getMaterializableProjection(to: accessPath) != nil {\n availableValues.append(.viaLoad(precedingLoad))\n return .available\n }\n if accessPath.getMaterializableProjection(to: precedingLoadPath) != nil,\n potentiallyRedundantSubpath == nil {\n potentiallyRedundantSubpath = precedingLoadPath\n }\n if load.loadOwnership != .take {\n return .transparent\n }\n\n case let precedingStore as StoreInst:\n if precedingStore.source is Undef {\n return .overwritten\n }\n let precedingStorePath = precedingStore.destination.constantAccessPath\n if precedingStorePath.getMaterializableProjection(to: accessPath) != nil {\n availableValues.append(.viaStore(precedingStore))\n return .available\n }\n if accessPath.getMaterializableProjection(to: 
precedingStorePath) != nil,\n potentiallyRedundantSubpath == nil {\n potentiallyRedundantSubpath = precedingStorePath\n }\n\n case let preceedingCopy as CopyAddrInst where preceedingCopy.canLoadValue:\n let copyPath = preceedingCopy.destination.constantAccessPath\n if copyPath.getMaterializableProjection(to: accessPath) != nil {\n availableValues.append(.viaCopyAddr(preceedingCopy))\n return .available\n }\n if accessPath.getMaterializableProjection(to: copyPath) != nil, potentiallyRedundantSubpath == nil {\n potentiallyRedundantSubpath = copyPath\n }\n\n default:\n break\n }\n if load.loadOwnership == .take {\n // In case of `take`, don't allow reading instructions in the liverange.\n // Otherwise we cannot shrink the memory liverange afterwards.\n if instruction.mayReadOrWrite(address: load.address, aliasAnalysis) {\n return .overwritten\n }\n } else {\n if instruction.mayWrite(toAddress: load.address, aliasAnalysis) {\n return .overwritten\n }\n }\n return .transparent\n }\n}\n\n/// Represents the liverange (in terms of basic blocks) of the loaded value.\n///\n/// In contrast to a BlockRange, this liverange has multiple begin blocks (containing the\n/// available values) and a single end block (containing the original load). 
For example:\n///\n/// bb1:\n/// store %1 to %addr // begin block\n/// br bb3\n/// bb2:\n/// store %2 to %addr // begin block\n/// br bb3\n/// bb3:\n/// %3 = load %addr // end block\n///\nprivate struct Liverange {\n private var worklist: BasicBlockWorklist\n private var containingBlocks: Stack<BasicBlock> // doesn't include the end-block\n private var beginBlocks: BasicBlockSet\n private let endBlock: BasicBlock\n\n init(endBlock: BasicBlock, _ context: FunctionPassContext) {\n self.worklist = BasicBlockWorklist(context)\n self.containingBlocks = Stack(context)\n self.beginBlocks = BasicBlockSet(context)\n self.endBlock = endBlock\n pushPredecessors(of: endBlock)\n }\n\n mutating func deinitialize() {\n worklist.deinitialize()\n containingBlocks.deinitialize()\n beginBlocks.deinitialize()\n }\n\n mutating func pushPredecessors(of block: BasicBlock) {\n worklist.pushIfNotVisited(contentsOf: block.predecessors)\n containingBlocks.append(contentsOf: block.predecessors)\n }\n\n mutating func pop() -> BasicBlock? { worklist.pop() }\n\n mutating func add(beginBlock: BasicBlock) {\n beginBlocks.insert(beginBlock)\n }\n\n /// Returns true if there is some path from a begin block to a function exit which doesn't\n /// go through the end-block. For example:\n ///\n /// store %1 to %addr // begin\n /// cond_br bb1, bb2\n /// bb1:\n /// %2 = load %addr // end\n /// bb2:\n /// ... // exit\n ///\n func hasExits(_ deadEndBlocks: DeadEndBlocksAnalysis) -> Bool {\n for block in containingBlocks {\n for succ in block.successors {\n if succ != endBlock,\n (!worklist.hasBeenPushed(succ) || beginBlocks.contains(succ)),\n !deadEndBlocks.isDeadEnd(succ) {\n return true\n }\n }\n }\n return false\n }\n}\n
dataset_sample\swift\swift\cpp_apple_swift_SwiftCompilerSources_Sources_Optimizer_FunctionPasses_RedundantLoadElimination.swift
cpp_apple_swift_SwiftCompilerSources_Sources_Optimizer_FunctionPasses_RedundantLoadElimination.swift
Swift
28,956
0.95
0.133075
0.319149
python-kit
214
2023-12-26T15:47:18.576551
Apache-2.0
false
ce9eb3c052cde779c588f18b6308550f
//===--- ReleaseDevirtualizer.swift - Devirtualizes release-instructions --===//\n//\n// This source file is part of the Swift.org open source project\n//\n// Copyright (c) 2014 - 2022 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n\nimport AST\nimport SIL\n\n/// Devirtualizes release instructions which are known to destruct the object.\n///\n/// This means, it replaces a sequence of\n/// %x = alloc_ref [stack] $X\n/// ...\n/// strong_release %x\n/// dealloc_stack_ref %x\n/// with\n/// %x = alloc_ref [stack] $X\n/// ...\n/// set_deallocating %x\n/// %d = function_ref @dealloc_of_X\n/// %a = apply %d(%x)\n/// dealloc_stack_ref %x\n///\n/// The optimization is only done for stack promoted objects because they are\n/// known to have no associated objects (which are not explicitly released\n/// in the deinit method).\nlet releaseDevirtualizerPass = FunctionPass(name: "release-devirtualizer") {\n (function: Function, context: FunctionPassContext) in\n\n for inst in function.instructions {\n if let dealloc = inst as? 
DeallocStackRefInst {\n if !context.continueWithNextSubpassRun(for: dealloc) {\n return\n }\n tryDevirtualizeRelease(of: dealloc, context)\n }\n }\n}\n\nprivate func tryDevirtualizeRelease(of dealloc: DeallocStackRefInst, _ context: FunctionPassContext) {\n guard let (lastRelease, pathToRelease) = findLastRelease(of: dealloc, context) else {\n return\n }\n\n if !pathToRelease.isMaterializable {\n return\n }\n\n let allocRef = dealloc.allocRef\n var upWalker = FindAllocationWalker(allocation: allocRef)\n if upWalker.walkUp(value: lastRelease.operand.value, path: pathToRelease) == .abortWalk {\n return\n }\n\n let type = allocRef.type\n\n guard let dealloc = context.calleeAnalysis.getDestructor(ofExactType: type) else {\n return\n }\n\n let builder = Builder(before: lastRelease, location: lastRelease.location, context)\n\n var object = lastRelease.operand.value.createProjection(path: pathToRelease, builder: builder)\n if object.type != type {\n object = builder.createUncheckedRefCast(from: object, to: type)\n }\n\n // Do what a release would do before calling the deallocator: set the object\n // in deallocating state, which means set the RC_DEALLOCATING_FLAG flag.\n let beginDealloc = builder.createBeginDeallocRef(reference: object, allocation: allocRef)\n\n // Create the call to the destructor with the allocated object as self\n // argument.\n let functionRef = builder.createFunctionRef(dealloc)\n\n let substitutionMap: SubstitutionMap\n if dealloc.isGeneric {\n substitutionMap = context.getContextSubstitutionMap(for: type)\n } else {\n // In embedded Swift, dealloc might be a specialized deinit, so the substitution map on the old apply isn't valid for the new apply\n substitutionMap = SubstitutionMap()\n }\n\n builder.createApply(function: functionRef, substitutionMap, arguments: [beginDealloc])\n context.erase(instruction: lastRelease)\n}\n\nprivate func findLastRelease(\n of dealloc: DeallocStackRefInst,\n _ context: FunctionPassContext\n) -> (lastRelease: 
RefCountingInst, pathToRelease: SmallProjectionPath)? {\n let allocRef = dealloc.allocRef\n\n // Search for the final release in the same basic block of the dealloc.\n for instruction in ReverseInstructionList(first: dealloc.previous) {\n switch instruction {\n case let strongRelease as StrongReleaseInst:\n if let pathToRelease = getPathToRelease(from: allocRef, to: strongRelease) {\n return (strongRelease, pathToRelease)\n }\n case let releaseValue as ReleaseValueInst:\n if releaseValue.value.type.containsSingleReference(in: dealloc.parentFunction) {\n if let pathToRelease = getPathToRelease(from: allocRef, to: releaseValue) {\n return (releaseValue, pathToRelease)\n }\n }\n case is BeginDeallocRefInst, is DeallocRefInst:\n // Check if the last release was already de-virtualized.\n if allocRef.escapes(to: instruction, context) {\n return nil\n }\n default:\n break\n }\n if instruction.mayRelease && allocRef.escapes(to: instruction, context) {\n // This instruction may release the allocRef, which means that any release we find\n // earlier in the block is not guaranteed to be the final release.\n return nil\n }\n }\n return nil\n}\n\n// If the release is a release_value it might release a struct which _contains_ the allocated object.\n// Return a projection path to the contained object in this case.\nprivate func getPathToRelease(from allocRef: AllocRefInstBase, to release: RefCountingInst) -> SmallProjectionPath? {\n var downWalker = FindReleaseWalker(release: release)\n if downWalker.walkDownUses(ofValue: allocRef, path: SmallProjectionPath()) == .continueWalk {\n return downWalker.result\n }\n return nil\n}\n\nprivate struct FindReleaseWalker : ValueDefUseWalker {\n private let release: RefCountingInst\n private(set) var result: SmallProjectionPath? 
= nil\n\n var walkDownCache = WalkerCache<SmallProjectionPath>()\n\n init(release: RefCountingInst) {\n self.release = release\n }\n\n mutating func leafUse(value: Operand, path: SmallProjectionPath) -> WalkResult {\n if value.instruction == release {\n if let existingResult = result {\n result = existingResult.merge(with: path)\n } else {\n result = path\n }\n }\n return .continueWalk\n }\n}\n\nprivate extension AllocRefInstBase {\n func escapes(to instruction: Instruction, _ context: FunctionPassContext) -> Bool {\n return self.isEscaping(using: EscapesToInstructionVisitor(target: instruction), context)\n }\n}\n\nprivate struct EscapesToInstructionVisitor : EscapeVisitor {\n let target: Instruction\n\n mutating func visitUse(operand: Operand, path: EscapePath) -> UseResult {\n if operand.instruction == target {\n return .abort\n }\n return .continueWalk\n }\n}\n\n// Up-walker to find the root of a release instruction.\nprivate struct FindAllocationWalker : ValueUseDefWalker {\n private let allocInst: AllocRefInstBase\n\n var walkUpCache = WalkerCache<SmallProjectionPath>()\n\n init(allocation: AllocRefInstBase) { allocInst = allocation }\n\n mutating func rootDef(value: Value, path: SmallProjectionPath) -> WalkResult {\n return value == allocInst && path.isEmpty ? .continueWalk : .abortWalk\n }\n}\n\nprivate extension Type {\n func containsSingleReference(in function: Function) -> Bool {\n if isClass {\n return true\n }\n if isStruct {\n return getNominalFields(in: function)?.containsSingleReference(in: function) ?? 
false\n } else if isTuple {\n return tupleElements.containsSingleReference(in: function)\n } else {\n return false\n }\n }\n}\n\nprivate extension Collection where Element == Type {\n func containsSingleReference(in function: Function) -> Bool {\n var nonTrivialFieldFound = false\n for elementTy in self {\n if !elementTy.isTrivial(in: function) {\n if nonTrivialFieldFound {\n return false\n }\n if !elementTy.containsSingleReference(in: function) {\n return false\n }\n nonTrivialFieldFound = true\n }\n }\n return nonTrivialFieldFound\n }\n}\n
dataset_sample\swift\swift\cpp_apple_swift_SwiftCompilerSources_Sources_Optimizer_FunctionPasses_ReleaseDevirtualizer.swift
cpp_apple_swift_SwiftCompilerSources_Sources_Optimizer_FunctionPasses_ReleaseDevirtualizer.swift
Swift
7,409
0.95
0.191964
0.21134
react-lib
768
2025-05-20T07:27:56.467858
BSD-3-Clause
false
ade470c8407b156f1c72bc841ed2f2c8
//===--- SimplificationPasses.swift ----------------------------------------==//\n//\n// This source file is part of the Swift.org open source project\n//\n// Copyright (c) 2014 - 2023 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n\nimport SIL\n\n//===--------------------------------------------------------------------===//\n// Instruction protocols\n//===--------------------------------------------------------------------===//\n\n/// Instructions which can be simplified at all optimization levels\nprotocol Simplifiable : Instruction {\n func simplify(_ context: SimplifyContext)\n}\n\n/// Instructions which can be simplified at -Onone\nprotocol OnoneSimplifiable : Simplifiable {\n}\n\n/// Instructions which can only be simplified at the end of the -Onone pipeline\nprotocol LateOnoneSimplifiable : Instruction {\n func simplifyLate(_ context: SimplifyContext)\n}\n\n//===--------------------------------------------------------------------===//\n// Simplification passes\n//===--------------------------------------------------------------------===//\n\nlet ononeSimplificationPass = FunctionPass(name: "onone-simplification") {\n (function: Function, context: FunctionPassContext) in\n\n runSimplification(on: function, context, preserveDebugInfo: true) {\n if let i = $0 as? OnoneSimplifiable {\n i.simplify($1)\n }\n }\n}\n\nlet simplificationPass = FunctionPass(name: "simplification") {\n (function: Function, context: FunctionPassContext) in\n\n runSimplification(on: function, context, preserveDebugInfo: false) {\n if let i = $0 as? 
Simplifiable {\n i.simplify($1)\n }\n }\n}\n\nlet lateOnoneSimplificationPass = FunctionPass(name: "late-onone-simplification") {\n (function: Function, context: FunctionPassContext) in\n\n runSimplification(on: function, context, preserveDebugInfo: true) {\n if let i = $0 as? LateOnoneSimplifiable {\n i.simplifyLate($1)\n } else if let i = $0 as? OnoneSimplifiable {\n i.simplify($1)\n }\n }\n}\n\n//===--------------------------------------------------------------------===//\n// Pass implementation\n//===--------------------------------------------------------------------===//\n\n@discardableResult\nfunc runSimplification(on function: Function, _ context: FunctionPassContext,\n preserveDebugInfo: Bool,\n _ simplify: (Instruction, SimplifyContext) -> ()) -> Bool {\n var worklist = InstructionWorklist(context)\n defer { worklist.deinitialize() }\n\n var changed = false\n let simplifyCtxt = context.createSimplifyContext(preserveDebugInfo: preserveDebugInfo,\n notifyInstructionChanged: {\n worklist.pushIfNotVisited($0)\n changed = true\n })\n\n // Push in reverse order so that popping from the tail of the worklist visits instruction in forward order again.\n worklist.pushIfNotVisited(contentsOf: function.reversedInstructions)\n\n // Run multiple iterations because cleanupDeadCode can add new candidates to the worklist.\n repeat {\n\n // The core worklist-loop.\n while let instruction = worklist.popAndForget() {\n if instruction.isDeleted {\n continue\n }\n if !context.options.enableSimplification(for: instruction) {\n continue\n }\n if !context.continueWithNextSubpassRun(for: instruction) {\n return changed\n }\n simplify(instruction, simplifyCtxt)\n }\n\n cleanupDeadInstructions(in: function, preserveDebugInfo, context)\n cleanupDeadBlocks(in: function, pushNewCandidatesTo: &worklist, context)\n\n } while !worklist.isEmpty\n\n if context.needFixStackNesting {\n function.fixStackNesting(context)\n }\n \n return changed\n}\n\nprivate func cleanupDeadInstructions(in 
function: Function,\n _ preserveDebugInfo: Bool,\n _ context: FunctionPassContext) {\n if preserveDebugInfo {\n context.removeTriviallyDeadInstructionsPreservingDebugInfo(in: function)\n } else {\n context.removeTriviallyDeadInstructionsIgnoringDebugUses(in: function)\n }\n}\n\nprivate func cleanupDeadBlocks(in function: Function,\n pushNewCandidatesTo worklist: inout InstructionWorklist,\n _ context: FunctionPassContext) {\n if context.removeDeadBlocks(in: function) {\n // After deleting dead blocks their (still alive) successor blocks may become eligible for block merging.\n // Therefore we re-run simplification for all branch instructions.\n for block in function.blocks.reversed() {\n if let bi = block.terminator as? BranchInst {\n worklist.pushIfNotVisited(bi)\n }\n }\n }\n}\n
dataset_sample\swift\swift\cpp_apple_swift_SwiftCompilerSources_Sources_Optimizer_FunctionPasses_SimplificationPasses.swift
cpp_apple_swift_SwiftCompilerSources_Sources_Optimizer_FunctionPasses_SimplificationPasses.swift
Swift
4,977
0.95
0.262411
0.239316
vue-tools
427
2023-12-12T08:48:15.986761
MIT
false
990b0b0ab4f6c32cdda828a57414fed2
//===--- StackPromotion.swift - Stack promotion optimization --------------===//\n//\n// This source file is part of the Swift.org open source project\n//\n// Copyright (c) 2014 - 2022 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n\nimport AST\nimport SIL\n\n/// Promotes heap allocated objects to the stack.\n///\n/// It handles `alloc_ref` and `alloc_ref_dynamic` instructions of native swift\n/// classes: if promoted, the `[stack]` attribute is set in the allocation\n/// instruction and a `dealloc_stack_ref` is inserted at the end of the object's\n/// lifetime.\n\n/// The main criteria for stack promotion is that the allocated object must not\n/// escape its function.\n///\n/// Example:\n/// %k = alloc_ref $Klass\n/// // .. some uses of %k\n/// destroy_value %k // The end of %k's lifetime\n///\n/// is transformed to:\n///\n/// %k = alloc_ref [stack] $Klass\n/// // .. some uses of %k\n/// destroy_value %k\n/// dealloc_stack_ref %k\n///\n/// The destroy/release of the promoted object remains in the SIL, but is effectively\n/// a no-op, because a stack promoted object is initialized with an "immortal"\n/// reference count.\n/// Later optimizations can clean that up.\nlet stackPromotion = FunctionPass(name: "stack-promotion") {\n (function: Function, context: FunctionPassContext) in\n \n let deadEndBlocks = context.deadEndBlocks\n\n var needFixStackNesting = false\n for inst in function.instructions {\n if let allocRef = inst as? 
AllocRefInstBase {\n if !context.continueWithNextSubpassRun(for: allocRef) {\n break\n }\n if tryPromoteAlloc(allocRef, deadEndBlocks, context) {\n needFixStackNesting = true\n }\n }\n }\n if needFixStackNesting {\n // Make sure that all stack allocating instructions are nested correctly.\n function.fixStackNesting(context)\n }\n}\n\n// Returns true if the allocation is promoted.\nprivate func tryPromoteAlloc(_ allocRef: AllocRefInstBase,\n _ deadEndBlocks: DeadEndBlocksAnalysis,\n _ context: FunctionPassContext) -> Bool {\n if allocRef.isObjC || allocRef.canAllocOnStack {\n return false\n }\n\n // Usually resilient classes cannot be promoted anyway, because their initializers are\n // not visible and let the object appear to escape.\n if allocRef.type.nominal!.isResilient(in: allocRef.parentFunction) {\n return false\n }\n\n if let dtor = (allocRef.type.nominal as? ClassDecl)?.destructor {\n if dtor.isIsolated {\n // Classes (including actors) with isolated deinit can escape implicitly.\n //\n // We could optimize this further and allow promotion if we can prove that\n // deinit will take fast path (i.e. it will not schedule a job).\n // But for now, let's keep things simple and disable promotion conservatively.\n return false\n }\n }\n\n // The most important check: does the object escape the current function?\n if allocRef.isEscaping(context) {\n return false\n }\n\n if deadEndBlocks.isDeadEnd(allocRef.parentBlock) {\n\n // Allocations inside a code region which ends up in a no-return block may missing their\n // final release. Therefore we extend their lifetime indefinitely, e.g.\n //\n // %k = alloc_ref $Klass\n // ...\n // unreachable // The end of %k's lifetime\n //\n // There is one exception: if it's in a loop (within the dead-end region) we must not\n // extend its lifetime. In this case we can be sure that its final release is not\n // missing, because otherwise the object would be leaking. For example:\n //\n // bb1:\n // %k = alloc_ref $Klass\n // ... 
// %k's lifetime must end somewhere here\n // cond_br %c, bb1, bb2\n // bb2:\n // unreachable\n //\n // Therefore, if the allocation is inside a loop, we can treat it like allocations in\n // non dead-end regions.\n if !isInLoop(block: allocRef.parentBlock, context) {\n allocRef.setIsStackAllocatable(context)\n return true\n }\n }\n\n // Try to find the top most dominator block which dominates all use points.\n // * This block can be located "earlier" than the actual allocation block, in case the\n // promoted object is stored into an "outer" object, e.g.\n //\n // bb0: // outerDominatingBlock _\n // %o = alloc_ref $Outer |\n // ... |\n // bb1: // allocation block _ |\n // %k = alloc_ref $Klass | | "outer"\n // %f = ref_element_addr %o, #Outer.f | "inner" | liverange\n // store %k to %f | liverange |\n // ... | |\n // destroy_value %o _| _|\n //\n // * Finding the `outerDominatingBlock` is not guaranteed to work.\n // In this example, the top most dominator block is `bb0`, but `bb0` has no\n // use points in the outer liverange. We'll get `bb3` as outerDominatingBlock.\n // This is no problem because 1. it's an unusual case and 2. 
the `outerBlockRange`\n // is invalid in this case and we'll bail later.\n //\n // bb0: // real top most dominating block\n // cond_br %c, bb1, bb2\n // bb1:\n // %o1 = alloc_ref $Outer\n // br bb3(%o1)\n // bb2:\n // %o2 = alloc_ref $Outer\n // br bb3(%o1)\n // bb3(%o): // resulting outerDominatingBlock: wrong!\n // %k = alloc_ref $Klass\n // %f = ref_element_addr %o, #Outer.f\n // store %k to %f\n // destroy_value %o\n //\n let domTree = context.dominatorTree\n let outerDominatingBlock = getDominatingBlockOfAllUsePoints(context: context, allocRef, domTree: domTree)\n\n // The "inner" liverange contains all use points which are dominated by the allocation block.\n // Note that this `visit` cannot fail because otherwise our initial `isEscaping` check would have failed already.\n var innerRange = allocRef.visit(using: ComputeInnerLiverange(of: allocRef, domTree, context), context)!\n defer { innerRange.deinitialize() }\n\n // The "outer" liverange contains all use points.\n // Same here: this `visit` cannot fail.\n var outerBlockRange = allocRef.visit(using: ComputeOuterBlockrange(dominatedBy: outerDominatingBlock, context), context)!\n defer { outerBlockRange.deinitialize() }\n\n assert(innerRange.blockRange.isValid, "inner range should be valid because we did a dominance check")\n\n if !outerBlockRange.isValid {\n // This happens if we fail to find a correct outerDominatingBlock.\n return false\n }\n\n // Check if there is a control flow edge from the inner to the outer liverange, which\n // would mean that the promoted object can escape to the outer liverange.\n // This can e.g. 
be the case if the inner liverange does not post dominate the outer range:\n // _\n // %o = alloc_ref $Outer |\n // cond_br %c, bb1, bb2 |\n // bb1: _ |\n // %k = alloc_ref $Klass | | outer\n // %f = ref_element_addr %o, #Outer.f | inner | range\n // store %k to %f | range |\n // br bb2 // branch from inner to outer _| |\n // bb2: |\n // destroy_value %o _|\n //\n // Or if there is a loop with a back-edge from the inner to the outer range:\n // _\n // %o = alloc_ref $Outer |\n // br bb1 |\n // bb1: _ |\n // %k = alloc_ref $Klass | | outer\n // %f = ref_element_addr %o, #Outer.f | inner | range\n // store %k to %f | range |\n // cond_br %c, bb1, bb2 // inner -> outer _| |\n // bb2: |\n // destroy_value %o _|\n //\n if innerRange.blockRange.hasControlFlowEdge(to: outerBlockRange) {\n return false\n }\n\n // There shouldn't be any critical exit edges from the liverange, because that would mean\n // that the promoted allocation is leaking.\n // Just to be on the safe side, do a check and bail if we find critical exit edges: we\n // cannot insert instructions on critical edges.\n if innerRange.blockRange.containsCriticalExitEdges(deadEndBlocks: deadEndBlocks) {\n return false\n }\n\n // Do the transformation!\n // Insert `dealloc_stack_ref` instructions at the exit- and end-points of the inner liverange.\n for exitInst in innerRange.exits {\n if !deadEndBlocks.isDeadEnd(exitInst.parentBlock) {\n let builder = Builder(before: exitInst, context)\n builder.createDeallocStackRef(allocRef)\n }\n }\n\n for endInst in innerRange.ends {\n Builder.insert(after: endInst, location: allocRef.location, context) {\n (builder) in builder.createDeallocStackRef(allocRef)\n }\n }\n\n allocRef.setIsStackAllocatable(context)\n return true\n}\n\nprivate func getDominatingBlockOfAllUsePoints(context: FunctionPassContext,\n _ value: SingleValueInstruction,\n domTree: DominatorTree) -> BasicBlock {\n struct FindDominatingBlock : EscapeVisitorWithResult {\n var result: BasicBlock\n let domTree: 
DominatorTree\n mutating func visitUse(operand: Operand, path: EscapePath) -> UseResult {\n let defBlock = operand.value.parentBlock\n if defBlock.dominates(result, domTree) {\n result = defBlock\n }\n return .continueWalk\n }\n }\n \n return value.visit(using: FindDominatingBlock(result: value.parentBlock, domTree: domTree), context)!\n}\n\nprivate struct ComputeInnerLiverange : EscapeVisitorWithResult {\n var result: InstructionRange\n let domTree: DominatorTree\n\n init(of instruction: Instruction, _ domTree: DominatorTree, _ context: FunctionPassContext) {\n result = InstructionRange(begin: instruction, context)\n self.domTree = domTree\n }\n\n mutating func visitUse(operand: Operand, path: EscapePath) -> UseResult {\n let user = operand.instruction\n let beginBlockOfRange = result.blockRange.begin\n if beginBlockOfRange.dominates(user.parentBlock, domTree) {\n result.insert(user)\n }\n return .continueWalk\n }\n}\n\nprivate struct ComputeOuterBlockrange : EscapeVisitorWithResult {\n var result: BasicBlockRange\n\n init(dominatedBy: BasicBlock, _ context: FunctionPassContext) {\n result = BasicBlockRange(begin: dominatedBy, context)\n }\n\n mutating func visitUse(operand: Operand, path: EscapePath) -> UseResult {\n let user = operand.instruction\n result.insert(user.parentBlock)\n\n let value = operand.value\n let operandsDefinitionBlock = value.parentBlock\n\n // Also insert the operand's definition. 
Otherwise we would miss allocation\n // instructions (for which the `visitUse` closure is not called).\n result.insert(operandsDefinitionBlock)\n\n // We need to explicitly add predecessor blocks of phis because they\n // are not necesesarily visited during the down-walk in `isEscaping()`.\n // This is important for the special case where there is a back-edge from the\n // inner range to the inner rage's begin-block:\n //\n // bb0: // <- need to be in the outer range\n // br bb1(%some_init_val)\n // bb1(%arg):\n // %k = alloc_ref $Klass // innerInstRange.begin\n // cond_br bb2, bb1(%k) // back-edge to bb1 == innerInstRange.blockRange.begin\n //\n if let phi = Phi(value) {\n result.insert(contentsOf: phi.predecessors)\n }\n return .continueWalk\n }\n}\n\nprivate extension BasicBlockRange {\n /// Returns true if there is a direct edge connecting this range with the `otherRange`.\n func hasControlFlowEdge(to otherRange: BasicBlockRange) -> Bool {\n func isOnlyInOtherRange(_ block: BasicBlock) -> Bool {\n return !inclusiveRangeContains(block) && otherRange.inclusiveRangeContains(block)\n }\n\n for lifeBlock in inclusiveRange {\n assert(otherRange.inclusiveRangeContains(lifeBlock), "range must be a subset of other range")\n for succ in lifeBlock.successors {\n if isOnlyInOtherRange(succ) && succ != otherRange.begin {\n return true\n }\n // The entry of the begin-block is conceptually not part of the range. 
We can check if\n // it's part of the `otherRange` by checking the begin-block's predecessors.\n if succ == begin && begin.predecessors.contains(where: { isOnlyInOtherRange($0) }) {\n return true\n }\n }\n }\n return false\n }\n\n func containsCriticalExitEdges(deadEndBlocks: DeadEndBlocksAnalysis) -> Bool {\n exits.contains { !deadEndBlocks.isDeadEnd($0) && !$0.hasSinglePredecessor }\n }\n}\n\nprivate func isInLoop(block startBlock: BasicBlock, _ context: FunctionPassContext) -> Bool {\n var worklist = BasicBlockWorklist(context)\n defer { worklist.deinitialize() }\n\n worklist.pushIfNotVisited(contentsOf: startBlock.successors)\n while let block = worklist.pop() {\n if block == startBlock {\n return true\n }\n worklist.pushIfNotVisited(contentsOf: block.successors)\n }\n return false\n}\n
dataset_sample\swift\swift\cpp_apple_swift_SwiftCompilerSources_Sources_Optimizer_FunctionPasses_StackPromotion.swift
cpp_apple_swift_SwiftCompilerSources_Sources_Optimizer_FunctionPasses_StackPromotion.swift
Swift
13,892
0.95
0.147399
0.495146
python-kit
206
2024-06-10T15:29:58.393901
Apache-2.0
false
9cf2e79688b14fc4397470716d90b2fd
//===--- StripObjectHeaders.swift ------------------------------------------==//\n//\n// This source file is part of the Swift.org open source project\n//\n// Copyright (c) 2014 - 2023 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n\nimport SIL\n\n/// Sets the `[bare]` attribute for `alloc_ref` and `global_value` instructions\n/// if their header (reference count and metatype) is not used throughout the\n/// lifetime of the object.\n///\nlet stripObjectHeadersPass = FunctionPass(name: "strip-object-headers") {\n (function: Function, context: FunctionPassContext) in\n\n for inst in function.instructions {\n switch inst {\n case let gv as GlobalValueInst:\n if !gv.isBare && !gv.needObjectHeader() {\n gv.setIsBare(context)\n }\n case let ar as AllocRefInst:\n if !ar.isBare && !ar.needObjectHeader() {\n ar.setIsBare(context)\n }\n default:\n break\n }\n }\n}\n\nprivate extension Value {\n func needObjectHeader() -> Bool {\n var walker = IsBareObjectWalker(rootDef: self)\n return walker.walkDownUses(ofValue: self, path: SmallProjectionPath()) == .abortWalk\n }\n}\n\nprivate struct IsBareObjectWalker : ValueDefUseWalker, ValueUseDefWalker {\n var walkUpCache = WalkerCache<SmallProjectionPath>()\n var walkDownCache = WalkerCache<SmallProjectionPath>()\n let rootDef: Value\n\n mutating func walkDown(value operand: Operand, path: Path) -> WalkResult {\n switch operand.instruction {\n // White-list all instructions which don't use the object header.\n case is StructInst, is TupleInst, is EnumInst,\n is StructExtractInst, is TupleExtractInst, is UncheckedEnumDataInst,\n is DestructureStructInst, is DestructureTupleInst,\n is BeginBorrowInst, is MarkDependenceInst,\n is BranchInst, is 
CondBranchInst, is SwitchEnumInst,\n is UpcastInst, is UncheckedRefCastInst,\n is BeginDeallocRefInst,\n is EndInitLetRefInst,\n is EndCOWMutationInst:\n return walkDownDefault(value: operand, path: path)\n default:\n return leafUse(value: operand, path: path)\n }\n }\n\n mutating func leafUse(value operand: Operand, path: SmallProjectionPath) -> WalkResult {\n switch operand.instruction {\n // White-list all instructions which don't use the object header.\n case is RefElementAddrInst, is RefTailAddrInst,\n is DeallocStackRefInst,\n is DebugValueInst, is FixLifetimeInst:\n return .continueWalk\n case let deallocRef as DeallocRefInst:\n // Check if the final dealloc_ref comes from the single `rootDef`.\n // In case of phi-arguments it might come from multiple root definitions.\n return walkUp(value: deallocRef.operand.value, path: path)\n default:\n return .abortWalk\n }\n }\n\n mutating func rootDef(value: Value, path: SmallProjectionPath) -> WalkResult {\n return value == rootDef ? .continueWalk : .abortWalk\n }\n\n}\n
dataset_sample\swift\swift\cpp_apple_swift_SwiftCompilerSources_Sources_Optimizer_FunctionPasses_StripObjectHeaders.swift
cpp_apple_swift_SwiftCompilerSources_Sources_Optimizer_FunctionPasses_StripObjectHeaders.swift
Swift
3,172
0.95
0.147727
0.240506
python-kit
172
2024-10-30T01:26:46.599040
Apache-2.0
false
741f484a28665e5d0909fd59a3c781ac
//===--- SimplifyAllocRefDynamic.swift ------------------------------------===//\n//\n// This source file is part of the Swift.org open source project\n//\n// Copyright (c) 2014 - 2023 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n\nimport SIL\n\nextension AllocRefDynamicInst : OnoneSimplifiable {\n func simplify(_ context: SimplifyContext) {\n /// Optimize alloc_ref_dynamic of a known type to alloc_ref:\n ///\n /// %3 = metatype SubClass.Type\n /// %4 = upcast %3 : SubClass.Type to BaseClass.Type\n /// %6 = alloc_ref_dynamic [...] %4 : BaseClass.Type, $BaseClass\n /// %8 = (... some use of ...) %6 : $BaseClass\n /// ->\n /// %6 = alloc_ref [...] $SubClass\n /// %7 = upcast %6 : $SubClass to $BaseClass\n /// %8 = (... some use of ...) %7 : $BaseClass\n\n let type: Type\n let emitUpcast: Bool\n if let metatypeInst = metatypeOperand.value as? MetatypeInst {\n type = metatypeInst.type.loweredInstanceTypeOfMetatype(in: parentFunction)\n emitUpcast = false\n } else if let upcastInst = metatypeOperand.value as? UpcastInst,\n let metatypeInst = upcastInst.operands[0].value as? 
MetatypeInst {\n type = metatypeInst.type.loweredInstanceTypeOfMetatype(in: parentFunction)\n emitUpcast = true\n } else {\n return\n }\n\n let builder = Builder(before: self, context)\n let newAlloc = builder.createAllocRef(type, isObjC: self.isObjC, canAllocOnStack: self.canAllocOnStack, isBare: false,\n tailAllocatedTypes: self.tailAllocatedTypes, tailAllocatedCounts: Array(self.tailAllocatedCounts.values))\n \n let result: Value\n if emitUpcast {\n result = builder.createUpcast(from: newAlloc, to: self.type)\n } else {\n result = newAlloc\n }\n uses.replaceAll(with: result, context)\n context.erase(instruction: self)\n }\n}\n
dataset_sample\swift\swift\cpp_apple_swift_SwiftCompilerSources_Sources_Optimizer_InstructionSimplification_SimplifyAllocRefDynamic.swift
cpp_apple_swift_SwiftCompilerSources_Sources_Optimizer_InstructionSimplification_SimplifyAllocRefDynamic.swift
Swift
2,107
0.95
0.092593
0.428571
python-kit
92
2024-05-20T00:01:40.703445
GPL-3.0
false
926843e15147080b7397aed79fd5d038
//===--- SimplifyAllocStack.swift -----------------------------------------===//\n//\n// This source file is part of the Swift.org open source project\n//\n// Copyright (c) 2014 - 2025 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n\nimport SIL\nimport AST\n\nextension AllocStackInst : Simplifiable, SILCombineSimplifiable {\n func simplify(_ context: SimplifyContext) {\n if optimizeEnum(context) {\n return\n }\n _ = optimizeExistential(context)\n }\n}\n\nprivate extension AllocStackInst {\n /// Replaces an alloc_stack of an enum by an alloc_stack of the payload if only one enum case (with payload)\n /// is stored to that location.\n ///\n /// For example:\n /// ```\n /// %0 = alloc_stack $Optional<T>\n /// %1 = init_enum_data_addr %loc\n /// store %2 to %1\n /// ...\n /// %3 = unchecked_take_enum_data_addr %0\n /// %4 = load %3\n /// ```\n /// is transformed to\n /// ```\n /// %0 = alloc_stack $T\n /// store %2 to %0\n /// ...\n /// %4 = load %0\n /// ```\n func optimizeEnum(_ context: SimplifyContext) -> Bool {\n guard let (payloadType, isSingleInitTakePair) = getEnumInfo() else {\n return false\n }\n\n let builder = Builder(before: self, context)\n let newAlloc = builder.createAllocStack(payloadType,\n hasDynamicLifetime: hasDynamicLifetime,\n isLexical: isLexical,\n isFromVarDecl: isFromVarDecl,\n usesMoveableValueDebugInfo: usesMoveableValueDebugInfo)\n let oldAllocType = type\n if let varInfo = debugVariable {\n builder.createDebugValue(value: Undef.get(type: oldAllocType, context), debugVariable: varInfo)\n }\n self.replace(with: newAlloc, context)\n\n for use in newAlloc.uses {\n switch use.instruction {\n case let iea as InjectEnumAddrInst:\n 
context.erase(instruction: iea)\n case let da as DestroyAddrInst:\n if isSingleInitTakePair {\n // It's not possible that the enum has a payload at the destroy_addr, because it must have already\n // been taken by the take of the single init-take pair.\n // We _have_ to remove the destroy_addr, because we also remove all inject_enum_addrs which might\n // inject a payload-less case before the destroy_addr.\n // Otherwise the enum payload can still be valid at the destroy_addr, so we have to keep the destroy_addr.\n // Just replace the enum with the payload (and because it's not a singleInitTakePair, we can be sure\n // that the enum cannot have any other case than the payload case).\n context.erase(instruction: da)\n }\n case let ieda as InitEnumDataAddrInst:\n ieda.replace(with: newAlloc, context)\n case let uteda as UncheckedTakeEnumDataAddrInst:\n uteda.replace(with: newAlloc, context)\n case is DeallocStackInst:\n break\n case let dv as DebugValueInst:\n // TODO: Add support for op_enum_fragment\n dv.operand.set(to: Undef.get(type: oldAllocType, context), context)\n default:\n fatalError("unexpected alloc_stack user");\n }\n }\n return true\n }\n\n func getEnumInfo() -> (payloadType: Type, isSingleInitTakePair: Bool)? {\n if !type.isEnum {\n return nil\n }\n var numInits = 0\n var numTakes = 0\n var initBlock: BasicBlock? = nil\n var takeBlock: BasicBlock? = nil\n var caseIndex: Int? = nil\n var payloadType: Type? = nil\n for use in uses {\n switch use.instruction {\n case is DestroyAddrInst,\n is DeallocStackInst,\n is DebugValueInst,\n // We'll check init_enum_addr below.\n is InjectEnumAddrInst:\n break\n case let ieda as InitEnumDataAddrInst:\n if let previouslyFoundCase = caseIndex, previouslyFoundCase != ieda.caseIndex {\n return nil\n }\n caseIndex = ieda.caseIndex\n assert(payloadType == nil || payloadType! 
== ieda.type)\n payloadType = ieda.type\n numInits += 1\n initBlock = ieda.parentBlock\n case let uted as UncheckedTakeEnumDataAddrInst:\n if let previouslyFoundCase = caseIndex, previouslyFoundCase != uted.caseIndex {\n return nil\n }\n caseIndex = uted.caseIndex\n numTakes += 1\n takeBlock = uted.parentBlock\n default:\n return nil\n }\n }\n\n guard let caseIndex, let payloadType else {\n return nil\n }\n\n // If the enum has a single init-take pair in a single block, we know that the enum cannot contain any\n // valid payload outside that init-take pair.\n //\n // This also means that we can ignore any inject_enum_addr of another enum case, because this can only\n // inject a case without a payload.\n if numInits == 1 && numTakes == 1 && initBlock == takeBlock {\n return (payloadType, isSingleInitTakePair: true)\n }\n // No single init-take pair: We cannot ignore inject_enum_addrs with a mismatching case.\n if uses.users(ofType: InjectEnumAddrInst.self).contains(where: { $0.caseIndex != caseIndex}) {\n return nil\n }\n return (payloadType, isSingleInitTakePair: false)\n }\n\n /// Replaces an alloc_stack of an existential by an alloc_stack of the concrete type.\n ///\n /// For example:\n /// ```\n /// %0 = alloc_stack $any P\n /// %1 = init_existential_addr %0, $T\n /// use %1\n /// ```\n /// is transformed to\n /// ```\n /// %0 = alloc_stack $T\n /// use %0\n /// ```\n ///\n /// Also, if the alloc_stack is already an opened existential and the concrete type is known,\n /// replace it as well:\n /// ```\n /// %0 = metatype $@thick T.Type\n /// %1 = init_existential_metatype %0, $@thick any P.Type\n /// %2 = open_existential_metatype %1 : $@thick any P.Type to $@thick (@opened("X", P) Self).Type\n /// ...\n /// %3 = alloc_stack $@opened("X", any P) Self\n /// use %3\n /// ```\n /// is transformed to\n /// ```\n /// ...\n /// %3 = alloc_stack $T\n /// use %3\n /// ```\n func optimizeExistential(_ context: SimplifyContext) -> Bool {\n guard type.isExistential || 
type.isExistentialArchetype,\n let concreteFormalType = getConcreteTypeOfExistential()\n else {\n return false\n }\n\n let builder = Builder(before: self, context)\n let newAlloc = builder.createAllocStack(concreteFormalType.loweredType(in: parentFunction),\n hasDynamicLifetime: hasDynamicLifetime,\n isLexical: isLexical,\n isFromVarDecl: isFromVarDecl,\n usesMoveableValueDebugInfo: usesMoveableValueDebugInfo) \n for use in uses {\n switch use.instruction {\n case let dea as DeinitExistentialAddrInst:\n context.erase(instruction: dea)\n case let iea as InitExistentialAddrInst:\n if iea.type != newAlloc.type {\n // We need a cast if the concrete type of the init_existential_addr is itself an opened existential\n // for which we know the concrete type (which is differnt).\n let builder = Builder(before: iea, context)\n let addrCast = builder.createUncheckedAddrCast(from: newAlloc, to: iea.type)\n iea.replace(with: addrCast, context)\n } else {\n iea.replace(with: newAlloc, context)\n }\n case let oea as OpenExistentialAddrInst:\n assert(oea.uses.ignoreUsers(ofType: DestroyAddrInst.self).isEmpty)\n oea.replace(with: newAlloc, context)\n case let cab as CheckedCastAddrBranchInst:\n let builder = Builder(before: cab, context)\n builder.createCheckedCastAddrBranch(\n source: newAlloc, sourceFormalType: concreteFormalType,\n destination: cab.destination, targetFormalType: cab.targetFormalType,\n isolatedConformances: cab.isolatedConformances,\n consumptionKind: cab.consumptionKind,\n successBlock: cab.successBlock, failureBlock: cab.failureBlock)\n context.erase(instruction: cab)\n case let ucca as UnconditionalCheckedCastAddrInst:\n let builder = Builder(before: ucca, context)\n builder.createUnconditionalCheckedCastAddr(\n isolatedConformances: ucca.isolatedConformances,\n source: newAlloc, sourceFormalType: concreteFormalType,\n destination: ucca.destination, targetFormalType: ucca.targetFormalType)\n context.erase(instruction: ucca)\n default:\n use.set(to: newAlloc, 
context)\n }\n }\n context.erase(instruction: self)\n return true\n }\n\n // Returns the concrete type of this alloc_stack if known.\n // Assuming that its type is either an existential or an opened existential.\n private func getConcreteTypeOfExistential() -> CanonicalType? {\n var initExistential: InitExistentialAddrInst? = nil\n var requiresLegalFormalType = false\n\n for use in uses {\n switch use.instruction {\n case is DestroyAddrInst,\n is DeinitExistentialAddrInst,\n is DeallocStackInst,\n is DebugValueInst:\n break\n case let oea as OpenExistentialAddrInst:\n if !oea.uses.ignoreUsers(ofType: DestroyAddrInst.self).isEmpty {\n return nil\n }\n case let iea as InitExistentialAddrInst:\n if initExistential != nil {\n return nil\n }\n initExistential = iea\n case is CheckedCastAddrBranchInst, is UnconditionalCheckedCastAddrInst:\n // To construct a new cast instruction we need a formal type.\n requiresLegalFormalType = true\n if use != use.instruction.operands[0] {\n return nil\n }\n case is UncheckedAddrCastInst:\n if self.type.isExistential {\n // Bail if the address of the original existential escapes.\n // This is not a problem if the alloc_stack already contains the opened existential.\n return nil\n }\n default:\n return nil\n }\n }\n let concreteType: CanonicalType\n if let initExistential {\n assert(self.type.isExistential)\n if let cft = initExistential.concreteTypeOfDependentExistentialArchetype {\n // Case 1: We will replace the alloc_stack of an existential with the concrete type.\n // `alloc_stack $any P` -> `alloc_stack $ConcreteType`\n concreteType = cft\n } else {\n // The instruction or argument which defines the archetype must dominate the alloc_stack\n // because after the transformation, the alloc_stack will use the archetype.\n for typeDependentOp in initExistential.typeDependentOperands {\n if !typeDependentOp.value.triviallyDominates(self) {\n return nil\n }\n }\n // Case 2: We will replace the alloc_stack of an existential with the 
existential archetype.\n // `alloc_stack $any P` -> `alloc_stack $@opened("...")`\n concreteType = initExistential.type.canonicalType\n }\n } else if self.type.isExistentialArchetype, let cft = self.concreteTypeOfDependentExistentialArchetype {\n // Case 3: We will replace the alloc_stack of an existential archetype with the concrete type:\n // `alloc_stack $@opened("...")` -> `alloc_stack $ConcreteType`\n concreteType = cft\n } else {\n return nil\n }\n if requiresLegalFormalType && !concreteType.isLegalFormalType {\n return nil\n }\n return concreteType\n }\n}\n
dataset_sample\swift\swift\cpp_apple_swift_SwiftCompilerSources_Sources_Optimizer_InstructionSimplification_SimplifyAllocStack.swift
cpp_apple_swift_SwiftCompilerSources_Sources_Optimizer_InstructionSimplification_SimplifyAllocStack.swift
Swift
11,768
0.95
0.121711
0.308219
awesome-app
595
2024-03-28T17:57:39.999844
BSD-3-Clause
false
47b45e8e17d4ce70badfc03167ef5079
//===--- SimplifyApply.swift ----------------------------------------------===//\n//\n// This source file is part of the Swift.org open source project\n//\n// Copyright (c) 2014 - 2023 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n\nimport SIL\nimport AST\n\nextension ApplyInst : OnoneSimplifiable, SILCombineSimplifiable {\n func simplify(_ context: SimplifyContext) {\n if tryTransformThickToThinCallee(of: self, context) {\n return\n }\n if context.tryOptimizeKeypath(apply: self) {\n context.erase(instruction: self)\n return\n }\n if context.tryDevirtualize(apply: self, isMandatory: false) != nil {\n return\n }\n if tryRemoveArrayCast(apply: self, context) {\n return\n }\n if !context.preserveDebugInfo {\n _ = tryReplaceExistentialArchetype(of: self, context)\n }\n }\n}\n\nextension TryApplyInst : OnoneSimplifiable, SILCombineSimplifiable {\n func simplify(_ context: SimplifyContext) {\n if context.tryDevirtualize(apply: self, isMandatory: false) != nil {\n return\n }\n if !context.preserveDebugInfo {\n _ = tryReplaceExistentialArchetype(of: self, context)\n }\n }\n}\n\nextension BeginApplyInst : OnoneSimplifiable {\n func simplify(_ context: SimplifyContext) {\n _ = context.tryDevirtualize(apply: self, isMandatory: false)\n }\n}\n\n/// Optimizes a thick function call if the callee is a `thin_to_thick_function` instruction:\n///\n/// %2 = thin_to_thick_function %1\n/// %3 = apply %2(...) : @callee_guaranteed\n/// ->\n/// %2 = thin_to_thick_function %1\n/// %3 = apply %1(...): @convention(thin)\n///\nprivate func tryTransformThickToThinCallee(of apply: ApplyInst, _ context: SimplifyContext) -> Bool {\n if let tttf = apply.callee as? 
ThinToThickFunctionInst,\n !apply.callee.type.isCalleeConsumedFunction\n {\n let builder = Builder(before: apply, context)\n let newApply = builder.createApply(function: tttf.operand.value,\n apply.substitutionMap,\n arguments: Array(apply.arguments),\n isNonThrowing: apply.isNonThrowing,\n isNonAsync: apply.isNonAsync,\n specializationInfo: apply.specializationInfo)\n apply.replace(with: newApply, context)\n return true\n }\n return false\n}\n\n/// Removes casts between arrays of the same type.\n///\n/// %1 = function_ref @_arrayConditionalCast : (@guaranteed Array<Int>) -> @owned Optional<Array<Int>>\n/// %2 = apply %1(%0) : (@guaranteed Array<Int>) -> @owned Optional<Array<Int>>\n/// ->\n/// %1 = copy_value %0\n/// %2 = enum $Optional<Array<Int>>, #Optional.some!enumelt, %1\n///\nprivate func tryRemoveArrayCast(apply: ApplyInst, _ context: SimplifyContext) -> Bool {\n guard let callee = apply.referencedFunction,\n callee.hasSemanticsAttribute("array.conditional_cast"),\n apply.parentFunction.hasOwnership,\n\n // Check if the cast function has the expected calling convention\n apply.arguments.count == 1,\n apply.convention(of: apply.argumentOperands[0]) == .directGuaranteed,\n apply.functionConvention.results[0].convention == .owned,\n apply.type.isOptional,\n\n // Check if the source and target type of the cast is identical.\n // Note that we are checking the _formal_ element types and not the lowered types, because\n // the element types are replacement type in the Array's substitution map and this is a formal type.\n apply.arguments[0].type == apply.type.optionalPayloadType(in: apply.parentFunction)\n else {\n return false\n }\n\n let builder = Builder(after: apply, context)\n let copiedArray = builder.createCopyValue(operand: apply.arguments[0])\n let optional = builder.createEnum(caseIndex: 1, payload: copiedArray, enumType: apply.type)\n apply.replace(with: optional, context)\n return true\n}\n\n/// If the apply uses an existential archetype 
(`@opened("...")`) and the concrete type is known,\n/// replace the existential archetype with the concrete type\n/// 1. in the apply's substitution map\n/// 2. in the arguments, e.g. by inserting address casts\n/// For example:\n/// ```\n/// %5 = apply %1<@opend("...")>(%2) : <τ_0_0> (τ_0_0) -> ()\n/// ```\n/// ->\n/// ```\n/// %4 = unchecked_addr_cast %2 to $*ConcreteType\n/// %5 = apply %1<ConcreteType>(%4) : <τ_0_0> (τ_0_0) -> ()\n/// ```\nprivate func tryReplaceExistentialArchetype(of apply: ApplyInst, _ context: SimplifyContext) -> Bool {\n if let concreteType = apply.concreteTypeOfDependentExistentialArchetype,\n apply.canReplaceExistentialArchetype()\n {\n let builder = Builder(after: apply, context)\n\n let newApply = builder.createApply(\n function: apply.callee,\n apply.replaceOpenedArchetypeInSubstituations(withConcreteType: concreteType, context),\n arguments: apply.replaceExistentialArchetypeInArguments(withConcreteType: concreteType, context),\n isNonThrowing: apply.isNonThrowing, isNonAsync: apply.isNonAsync,\n specializationInfo: apply.specializationInfo)\n apply.replace(with: newApply, context)\n\n return true\n }\n return false\n}\n\n// The same as the previous function, just for try_apply instructions.\nprivate func tryReplaceExistentialArchetype(of tryApply: TryApplyInst, _ context: SimplifyContext) -> Bool {\n if let concreteType = tryApply.concreteTypeOfDependentExistentialArchetype,\n tryApply.canReplaceExistentialArchetype()\n {\n let builder = Builder(before: tryApply, context)\n\n builder.createTryApply(\n function: tryApply.callee,\n tryApply.replaceOpenedArchetypeInSubstituations(withConcreteType: concreteType, context),\n arguments: tryApply.replaceExistentialArchetypeInArguments(withConcreteType: concreteType, context),\n normalBlock: tryApply.normalBlock, errorBlock: tryApply.errorBlock,\n isNonAsync: tryApply.isNonAsync,\n specializationInfo: tryApply.specializationInfo)\n context.erase(instruction: tryApply)\n\n return true\n }\n 
return false\n}\n\nprivate extension FullApplySite {\n // Precondition: the apply uses only a single existential archetype.\n // This is checked in `concreteTypeOfDependentExistentialArchetype`\n func canReplaceExistentialArchetype() -> Bool {\n // Make sure that existential archetype _is_ a replacement type and not e.g. _contained_ in a\n // replacement type, like\n // apply %1<Array<@opened("...")>()\n guard substitutionMap.replacementTypes.contains(where: { $0.isExistentialArchetype }),\n substitutionMap.replacementTypes.allSatisfy({ $0.isExistentialArchetype || !$0.hasLocalArchetype })\n else {\n return false\n }\n\n // Don't allow existential archetypes in direct results and error results.\n // Note that an opened existential value is address only, so it cannot be a direct result anyway\n // (but it can be once we have opaque values).\n // Also don't support things like direct `Array<@opened("...")>` return values.\n if let singleDirectResult, singleDirectResult.type.hasLocalArchetype {\n return false\n }\n if let singleDirectErrorResult, singleDirectErrorResult.type.hasLocalArchetype {\n return false\n }\n\n return arguments.allSatisfy { value in\n let type = value.type\n // Allow three cases:\n // case 1. the argument _is_ the existential archetype\n return type.isExistentialArchetype ||\n // case 2. the argument _is_ a metatype of the existential archetype\n (type.isMetatype && type.canonicalType.instanceTypeOfMetatype.isExistentialArchetype) ||\n // case 3. the argument has nothing to do with the existential archetype (or any other local archetype)\n !type.hasLocalArchetype\n }\n }\n\n func replaceExistentialArchetypeInArguments(\n withConcreteType concreteType: CanonicalType,\n _ context: SimplifyContext\n ) -> [Value] {\n let newArgs = arguments.map { (arg) -> Value in\n if arg.type.isExistentialArchetype {\n // case 1. 
the argument _is_ the existential archetype:\n // just insert an address cast to satisfy type equivalence.\n let builder = Builder(before: self, context)\n let concreteSILType = concreteType.loweredType(in: self.parentFunction)\n return builder.createUncheckedAddrCast(from: arg, to: concreteSILType.addressType)\n }\n if arg.type.isMetatype, arg.type.canonicalType.instanceTypeOfMetatype.isExistentialArchetype {\n // case 2. the argument _is_ a metatype of the existential archetype:\n // re-create the metatype with the concrete type.\n let builder = Builder(before: self, context)\n return builder.createMetatype(ofInstanceType: concreteType, representation: arg.type.representationOfMetatype)\n }\n // case 3. the argument has nothing to do with the existential archetype (or any other local archetype)\n return arg\n }\n return Array(newArgs)\n }\n\n func replaceOpenedArchetypeInSubstituations(\n withConcreteType concreteType: CanonicalType,\n _ context: SimplifyContext\n ) -> SubstitutionMap {\n let openedArcheType = substitutionMap.replacementTypes.first(where: { $0.isExistentialArchetype })!\n\n let newReplacementTypes = substitutionMap.replacementTypes.map {\n return $0 == openedArcheType ? concreteType.rawType : $0\n }\n let genSig = callee.type.invocationGenericSignatureOfFunction\n return SubstitutionMap(genericSignature: genSig, replacementTypes: newReplacementTypes)\n }\n}\n\nprivate extension Type {\n func optionalPayloadType(in function: Function) -> Type {\n let subs = contextSubstitutionMap\n return subs.replacementTypes[0].loweredType(in: function)\n }\n}\n
dataset_sample\swift\swift\cpp_apple_swift_SwiftCompilerSources_Sources_Optimizer_InstructionSimplification_SimplifyApply.swift
cpp_apple_swift_SwiftCompilerSources_Sources_Optimizer_InstructionSimplification_SimplifyApply.swift
Swift
9,984
0.95
0.114286
0.282511
vue-tools
633
2025-06-06T08:43:45.800294
MIT
false
6ad6428720910ffe76c078ac7bd52647
//===--- SimplifyBeginAndLoadBorrow.swift ---------------------------------===//\n//\n// This source file is part of the Swift.org open source project\n//\n// Copyright (c) 2014 - 2023 Apple Inc. and the Swift project authors\n// Licensed under Apache License v2.0 with Runtime Library Exception\n//\n// See https://swift.org/LICENSE.txt for license information\n// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors\n//\n//===----------------------------------------------------------------------===//\n\nimport SIL\n\nextension BeginBorrowInst : OnoneSimplifiable, SILCombineSimplifiable {\n func simplify(_ context: SimplifyContext) {\n if borrowedValue.ownership == .owned,\n // We need to keep lexical lifetimes in place.\n !isLexical,\n // The same for borrow-scopes which encapsulated pointer escapes.\n !findPointerEscapingUse(of: borrowedValue)\n {\n tryReplaceBorrowWithOwnedOperand(beginBorrow: self, context)\n } else {\n removeBorrowOfThinFunction(beginBorrow: self, context)\n }\n }\n}\n\nextension LoadBorrowInst : Simplifiable, SILCombineSimplifiable {\n func simplify(_ context: SimplifyContext) {\n if uses.ignoreDebugUses.ignoreUsers(ofType: EndBorrowInst.self).isEmpty {\n context.erase(instructionIncludingAllUsers: self)\n return\n }\n\n // If the load_borrow is followed by a copy_value, combine both into a `load [copy]`:\n // ```\n // %1 = load_borrow %0\n // %2 = some_forwarding_instruction %1 // zero or more forwarding instructions\n // %3 = copy_value %2\n // end_borrow %1\n // ```\n // ->\n // ```\n // %1 = load [copy] %0\n // %3 = some_forwarding_instruction %1 // zero or more forwarding instructions\n // ```\n //\n tryCombineWithCopy(context)\n }\n\n private func tryCombineWithCopy(_ context: SimplifyContext) {\n let forwardedValue = lookThroughSingleForwardingUses()\n guard let singleUser = forwardedValue.uses.ignoreUsers(ofType: EndBorrowInst.self).singleUse?.instruction,\n let copy = singleUser as? 
CopyValueInst,\n copy.parentBlock == self.parentBlock else {\n return\n }\n let builder = Builder(before: self, context)\n let loadCopy = builder.createLoad(fromAddress: address, ownership: .copy)\n let forwardedOwnedValue = replaceGuaranteed(value: self, withOwnedValue: loadCopy, context)\n copy.replace(with: forwardedOwnedValue, context)\n context.erase(instructionIncludingAllUsers: self)\n }\n}\n\nprivate func tryReplaceBorrowWithOwnedOperand(beginBorrow: BeginBorrowInst, _ context: SimplifyContext) {\n // The last value of a (potentially empty) forwarding chain, beginning at the `begin_borrow`.\n let forwardedValue = beginBorrow.lookThroughSingleForwardingUses()\n if forwardedValue.allUsesCanBeConvertedToOwned {\n if tryReplaceCopy(of: forwardedValue, withCopiedOperandOf: beginBorrow, context) {\n return\n }\n if beginBorrow.borrowedValue.isDestroyed(after: beginBorrow) {\n convertAllUsesToOwned(of: beginBorrow, context)\n }\n }\n}\n\nprivate func removeBorrowOfThinFunction(beginBorrow: BeginBorrowInst, _ context: SimplifyContext) {\n guard let thin2thickFn = beginBorrow.borrowedValue as? ThinToThickFunctionInst,\n // For simplicity don't go into the trouble of removing reborrow phi arguments.\n beginBorrow.uses.filterUsers(ofType: BranchInst.self).isEmpty else\n {\n return\n }\n // `thin_to_thick_function` has "none" ownership and is compatible with guaranteed values.\n // Therefore the `begin_borrow` is not needed.\n beginBorrow.uses.ignoreUsers(ofType: EndBorrowInst.self).replaceAll(with: thin2thickFn, context)\n context.erase(instructionIncludingAllUsers: beginBorrow)\n}\n\n/// Replace\n/// ```\n/// %1 = begin_borrow %0\n/// %2 = struct_extract %1 // a chain of forwarding instructions\n/// %3 = copy_value %1\n/// // ... uses of %3\n/// end_borrow %1\n/// ```\n/// with\n/// ```\n/// %1 = copy_value %0\n/// %3 = destructure_struct %0 // owned version of the forwarding instructions\n/// // ... 
uses of %3\n/// ```\nprivate func tryReplaceCopy(\n of forwardedValue: Value,\n withCopiedOperandOf beginBorrow: BeginBorrowInst,\n _ context: SimplifyContext\n) -> Bool {\n guard let singleUser = forwardedValue.uses.ignoreUsers(ofType: EndBorrowInst.self).singleUse?.instruction,\n let copy = singleUser as? CopyValueInst,\n copy.parentBlock == beginBorrow.parentBlock else {\n return false\n }\n let builder = Builder(before: beginBorrow, context)\n let copiedOperand = builder.createCopyValue(operand: beginBorrow.borrowedValue)\n let forwardedOwnedValue = replaceGuaranteed(value: beginBorrow, withOwnedValue: copiedOperand, context)\n copy.replace(with: forwardedOwnedValue, context)\n context.erase(instructionIncludingAllUsers: beginBorrow)\n return true\n}\n\n/// Replace\n/// ```\n/// %1 = begin_borrow %0\n/// %2 = struct_extract %1 // a chain of forwarding instructions\n/// // ... uses of %2\n/// end_borrow %1\n/// destroy_value %1 // the only other use of %0 beside begin_borrow\n/// ```\n/// with\n/// ```\n/// %2 = destructure_struct %0 // owned version of the forwarding instructions\n/// // ... 
uses of %2\n/// destroy_value %2\n/// ```\nprivate func convertAllUsesToOwned(of beginBorrow: BeginBorrowInst, _ context: SimplifyContext) {\n let forwardedOwnedValue = replaceGuaranteed(value: beginBorrow, withOwnedValue: beginBorrow.borrowedValue, context)\n beginBorrow.borrowedValue.replaceAllDestroys(with: forwardedOwnedValue, context)\n context.erase(instructionIncludingAllUsers: beginBorrow)\n}\n\nprivate extension Value {\n /// Returns the last value of a (potentially empty) forwarding chain.\n /// For example, returns %3 for the following def-use chain:\n /// ```\n /// %1 = struct_extract %self, #someField\n /// %2 = tuple_extract %1, 0\n /// %3 = struct $S(%2) // %3 has no forwarding users\n /// ```\n /// Returns self if this value has no uses which are ForwardingInstructions.\n func lookThroughSingleForwardingUses() -> Value {\n if let singleUse = uses.ignoreUsers(ofType: EndBorrowInst.self).singleUse,\n let fwdInst = singleUse.instruction as? (SingleValueInstruction & ForwardingInstruction),\n fwdInst.canConvertToOwned,\n fwdInst.isSingleForwardedOperand(singleUse),\n fwdInst.parentBlock == parentBlock\n {\n return fwdInst.lookThroughSingleForwardingUses()\n }\n return self\n }\n\n var allUsesCanBeConvertedToOwned: Bool {\n let relevantUses = uses.ignoreUsers(ofType: EndBorrowInst.self)\n return relevantUses.allSatisfy { $0.canAccept(ownership: .owned) }\n }\n\n func isDestroyed(after nonDestroyUser: Instruction) -> Bool {\n return uses.getSingleUser(notOfType: DestroyValueInst.self) == nonDestroyUser &&\n nonDestroyUser.dominates(destroysOf: self)\n }\n\n func replaceAllDestroys(with replacement: Value, _ context: SimplifyContext) {\n uses.filterUsers(ofType: DestroyValueInst.self).replaceAll(with: replacement, context)\n }\n}\n\nprivate extension Instruction {\n func dominates(destroysOf value: Value) -> Bool {\n // In instruction simplification we don't have a domtree. 
Therefore do a simple dominance\n // check based on same-block relations.\n if parentBlock == value.parentBlock {\n // The value and instruction are in the same block. All uses are dominated by both.\n return true\n }\n let destroys = value.uses.filterUsers(ofType: DestroyValueInst.self)\n return destroys.allSatisfy({ $0.instruction.parentBlock == parentBlock})\n }\n}\n\nprivate extension ForwardingInstruction {\n func isSingleForwardedOperand(_ operand: Operand) -> Bool {\n switch self {\n case is StructInst, is TupleInst:\n // TODO: we could move that logic to StructInst/TupleInst.singleForwardedOperand.\n return operands.lazy.map({ $0.value.type }).hasSingleNonTrivialElement(at: operand.index, in: parentFunction)\n default:\n if let sfo = singleForwardedOperand {\n return sfo == operand\n }\n return false\n }\n }\n}\n\n/// Replaces a guaranteed value with an owned value.\n///\n/// If the `guaranteedValue`'s use is a ForwardingInstruction (or forwarding instruction chain),\n/// it is converted to an owned version of the forwarding instruction (or instruction chain).\n///\n/// Returns the last owned value in a forwarding-chain or `ownedValue` if `guaranteedValue` has\n/// no forwarding uses.\nprivate func replaceGuaranteed(value: Value, withOwnedValue ownedValue: Value, _ context: SimplifyContext) -> Value {\n var result = ownedValue\n var numForwardingUses = 0\n for use in value.uses {\n\n switch use.instruction {\n case let tei as TupleExtractInst:\n numForwardingUses += 1\n let dti = Builder(before: tei, context).createDestructureTuple(tuple: ownedValue)\n result = replaceGuaranteed(value: tei, withOwnedValue: dti.results[tei.fieldIndex], context)\n context.erase(instruction: tei)\n case let sei as StructExtractInst:\n numForwardingUses += 1\n let dsi = Builder(before: sei, context).createDestructureStruct(struct: ownedValue)\n result = replaceGuaranteed(value: sei, withOwnedValue: dsi.results[sei.fieldIndex], context)\n context.erase(instruction: sei)\n case 
let fwdInst as (SingleValueInstruction & ForwardingInstruction) where\n fwdInst.isSingleForwardedOperand(use):\n // Other forwarding instructions beside tuple_extract and struct_extract\n numForwardingUses += 1\n use.set(to: ownedValue, context)\n fwdInst.setForwardingOwnership(to: .owned, context)\n result = replaceGuaranteed(value: fwdInst, withOwnedValue: fwdInst, context)\n case is EndBorrowInst:\n break\n default:\n precondition(use.canAccept(ownership: .owned))\n use.set(to: ownedValue, context)\n }\n }\n precondition(numForwardingUses <= 1, "guaranteed value must not have multiple forwarding uses")\n return result\n}\n\nprivate extension ForwardingInstruction {\n var canConvertToOwned: Bool {\n switch self {\n case let si as StructExtractInst:\n if si.struct.type.isMoveOnly {\n // We cannot easily convert a struct_extract to a destructure_struct of a move-only type, because\n // the deinit would get lost.\n return false\n }\n let structFields = si.struct.type.getNominalFields(in: parentFunction)\n return structFields?.hasSingleNonTrivialElement(at: si.fieldIndex, in: parentFunction) ?? false\n case let ti as TupleExtractInst:\n return ti.tuple.type.tupleElements.hasSingleNonTrivialElement(at: ti.fieldIndex, in: parentFunction)\n default:\n return canForwardOwnedValues\n }\n }\n}\n\nprivate extension Collection where Element == Type {\n func hasSingleNonTrivialElement(at nonTrivialElementIndex: Int, in function: Function) -> Bool {\n for (elementIdx, elementTy) in self.enumerated() {\n if elementTy.isTrivial(in: function) != (elementIdx != nonTrivialElementIndex) {\n return false\n }\n }\n return true\n }\n}\n
dataset_sample\swift\swift\cpp_apple_swift_SwiftCompilerSources_Sources_Optimizer_InstructionSimplification_SimplifyBeginAndLoadBorrow.swift
cpp_apple_swift_SwiftCompilerSources_Sources_Optimizer_InstructionSimplification_SimplifyBeginAndLoadBorrow.swift
Swift
10,967
0.95
0.086022
0.307692
react-lib
396
2025-06-14T05:13:23.112422
Apache-2.0
false
2a57f6fbf4d7ec14feb83092ecf7fadb