diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs new file mode 100644 index 00000000..2922bb63 --- /dev/null +++ b/.git-blame-ignore-revs @@ -0,0 +1,2 @@ +# Formatting repository +b48ffb6d541ae3665c4ee4e705fd2969be748f6b diff --git a/.github/workflows/pull_request.yml b/.github/workflows/pull_request.yml index 599dfc19..bc831e94 100644 --- a/.github/workflows/pull_request.yml +++ b/.github/workflows/pull_request.yml @@ -15,6 +15,7 @@ jobs: tests: name: Test uses: swiftlang/github-workflows/.github/workflows/swift_package_test.yml@main + needs: [soundness, space-format-check] with: linux_os_versions: '["amazonlinux2", "bookworm", "noble", "jammy", "rhel-ubi9"]' linux_pre_build_command: ./.github/scripts/prebuild.sh @@ -38,6 +39,7 @@ jobs: cmake-smoke-test: name: cmake-smoke-test uses: swiftlang/github-workflows/.github/workflows/swift_package_test.yml@main + needs: [soundness, space-format-check] with: linux_os_versions: '["noble"]' linux_pre_build_command: SKIP_ANDROID=1 INSTALL_CMAKE=1 ./.github/scripts/prebuild.sh @@ -57,7 +59,7 @@ jobs: with: license_header_check_project_name: "Swift" api_breakage_check_enabled: false - format_check_enabled: false + format_check_enabled: true space-format-check: name: Space format check diff --git a/.licenseignore b/.licenseignore index e7babf0c..132672c8 100644 --- a/.licenseignore +++ b/.licenseignore @@ -8,3 +8,4 @@ .swift-version CODEOWNERS Package.swift +.git-blame-ignore-revs diff --git a/.swift-format b/.swift-format index c2fb7509..3d02fb82 100644 --- a/.swift-format +++ b/.swift-format @@ -1,10 +1,13 @@ { - "version": 1, - "lineLength": 10000, - "indentation": { - "spaces": 4 - }, - "rules": { - }, - "tabWidth": 4 + "version": 1, + "lineLength": 10000, + "indentation": { + "spaces": 4 + }, + "lineBreakBeforeEachArgument": true, + "maximumBlankLines" : 1, + "multiElementCollectionTrailingCommas" : true, + "rules": { + }, + "tabWidth": 4 } diff --git a/Package.swift b/Package.swift index 63989112..49d96ac1 100644 --- a/Package.swift +++ b/Package.swift @@ -15,9 +15,9 @@ import PackageDescription #if canImport(Darwin) -let appleOS = true + let appleOS = true #else -let appleOS = false + let appleOS = false #endif let isStaticBuild = Context.environment["SWIFTBUILD_STATIC_LINK"] != nil @@ -91,24 +91,27 @@ let package = Package( name: "swbuild", dependencies: [ "SwiftBuild", - "SWBBuildServiceBundle", // the CLI needs to launch the service bundle + "SWBBuildServiceBundle", // the CLI needs to launch the service bundle ], exclude: ["CMakeLists.txt"], - swiftSettings: swiftSettings(languageMode: .v6)), + swiftSettings: swiftSettings(languageMode: .v6) + ), .executableTarget( name: "SWBBuildServiceBundle", dependencies: [ "SWBBuildService", "SWBBuildSystem", "SWBServiceCore", "SWBUtil", "SWBCore", ], exclude: ["CMakeLists.txt"], - swiftSettings: swiftSettings(languageMode: .v6)), + swiftSettings: swiftSettings(languageMode: .v6) + ), // Libraries .target( name: "SwiftBuild", dependencies: ["SWBCSupport", "SWBCore", "SWBProtocol", "SWBUtil", "SWBProjectModel"], exclude: ["CMakeLists.txt"], - swiftSettings: swiftSettings(languageMode: .v5)), + swiftSettings: swiftSettings(languageMode: .v5) + ), .target( name: "SWBBuildService", dependencies: [ @@ -118,12 +121,14 @@ let package = Package( .product(name: "SystemPackage", package: "swift-system", condition: .when(platforms: [.linux, .openbsd, .android, .windows, .custom("freebsd")])), ], exclude: ["CMakeLists.txt"], - swiftSettings: swiftSettings(languageMode: .v5)), + swiftSettings: 
swiftSettings(languageMode: .v5) + ), .target( name: "SWBBuildSystem", dependencies: ["SWBCore", "SWBTaskConstruction", "SWBTaskExecution"], exclude: ["CMakeLists.txt"], - swiftSettings: swiftSettings(languageMode: .v5)), + swiftSettings: swiftSettings(languageMode: .v5) + ), .target( name: "SWBCore", dependencies: [ @@ -137,7 +142,8 @@ let package = Package( ], exclude: ["CMakeLists.txt"], resources: [.process("Specs")], - swiftSettings: swiftSettings(languageMode: .v5)), + swiftSettings: swiftSettings(languageMode: .v5) + ), .target( name: "SWBCSupport", exclude: ["empty.swift"], @@ -146,27 +152,34 @@ let package = Package( .define("_CRT_SECURE_NO_WARNINGS", .when(platforms: [.windows])), .define("_CRT_NONSTDC_NO_WARNINGS", .when(platforms: [.windows])), ], - swiftSettings: swiftSettings(languageMode: .v6)), + swiftSettings: swiftSettings(languageMode: .v6) + ), .target( name: "SWBCLibc", exclude: ["CMakeLists.txt", "README.md"], publicHeadersPath: ".", - swiftSettings: swiftSettings(languageMode: .v6)), + swiftSettings: swiftSettings(languageMode: .v6) + ), .target( name: "SWBLibc", dependencies: ["SWBCLibc"], exclude: ["CMakeLists.txt"], - swiftSettings: swiftSettings(languageMode: .v6)), + swiftSettings: swiftSettings(languageMode: .v6) + ), .target( name: "SWBLLBuild", dependencies: [ "SWBUtil" - ] + (useLLBuildFramework ? [] : [ - .product(name: "libllbuild", package: useLocalDependencies ? "llbuild" : "swift-llbuild"), - .product(name: "llbuildSwift", package: useLocalDependencies ? "llbuild" : "swift-llbuild"), - ]), + ] + + (useLLBuildFramework + ? [] + : [ + .product(name: "libllbuild", package: useLocalDependencies ? "llbuild" : "swift-llbuild"), + .product(name: "llbuildSwift", package: useLocalDependencies ? "llbuild" : "swift-llbuild"), + ]), exclude: ["CMakeLists.txt"], - swiftSettings: swiftSettings(languageMode: .v6)), + swiftSettings: swiftSettings(languageMode: .v6) + ), .target( name: "SWBMacro", dependencies: [ @@ -174,36 +187,42 @@ let package = Package( .product(name: "SwiftDriver", package: "swift-driver"), ], exclude: ["CMakeLists.txt"], - swiftSettings: swiftSettings(languageMode: .v6)), + swiftSettings: swiftSettings(languageMode: .v6) + ), .target( name: "SWBProjectModel", dependencies: ["SWBProtocol"], exclude: ["CMakeLists.txt"], - swiftSettings: swiftSettings(languageMode: .v6)), + swiftSettings: swiftSettings(languageMode: .v6) + ), .target( name: "SWBProtocol", dependencies: ["SWBUtil"], exclude: ["CMakeLists.txt"], - swiftSettings: swiftSettings(languageMode: .v6)), + swiftSettings: swiftSettings(languageMode: .v6) + ), .target( name: "SWBServiceCore", dependencies: ["SWBProtocol"], exclude: ["CMakeLists.txt"], - swiftSettings: swiftSettings(languageMode: .v6)), + swiftSettings: swiftSettings(languageMode: .v6) + ), .target( name: "SWBTaskConstruction", dependencies: [ "SWBCore", "SWBUtil", - .product(name: "SwiftDriver", package: "swift-driver") + .product(name: "SwiftDriver", package: "swift-driver"), ], exclude: ["CMakeLists.txt"], - swiftSettings: swiftSettings(languageMode: .v5)), + swiftSettings: swiftSettings(languageMode: .v5) + ), .target( name: "SWBTaskExecution", dependencies: ["SWBCore", "SWBUtil", "SWBCAS", "SWBLLBuild", "SWBTaskConstruction"], exclude: ["CMakeLists.txt"], - swiftSettings: swiftSettings(languageMode: .v5)), + swiftSettings: swiftSettings(languageMode: .v5) + ), .target( name: "SWBUtil", dependencies: [ @@ -214,37 +233,43 @@ let package = Package( .product(name: "SystemPackage", package: "swift-system", condition: 
.when(platforms: [.linux, .openbsd, .android, .windows, .custom("freebsd")])), ], exclude: ["CMakeLists.txt"], - swiftSettings: swiftSettings(languageMode: .v6)), + swiftSettings: swiftSettings(languageMode: .v6) + ), .target( name: "SWBCAS", dependencies: ["SWBUtil", "SWBCSupport"], exclude: ["CMakeLists.txt"], - swiftSettings: swiftSettings(languageMode: .v6)), + swiftSettings: swiftSettings(languageMode: .v6) + ), .target( name: "SWBAndroidPlatform", dependencies: ["SWBCore", "SWBMacro", "SWBUtil"], exclude: ["CMakeLists.txt"], resources: [.process("Specs")], - swiftSettings: swiftSettings(languageMode: .v6)), + swiftSettings: swiftSettings(languageMode: .v6) + ), .target( name: "SWBApplePlatform", dependencies: ["SWBCore", "SWBMacro", "SWBUtil", "SWBTaskConstruction"], exclude: ["CMakeLists.txt"], resources: [.process("Specs")], - swiftSettings: swiftSettings(languageMode: .v6)), + swiftSettings: swiftSettings(languageMode: .v6) + ), .target( name: "SWBGenericUnixPlatform", dependencies: ["SWBCore", "SWBUtil"], exclude: ["CMakeLists.txt"], resources: [.process("Specs")], - swiftSettings: swiftSettings(languageMode: .v6)), + swiftSettings: swiftSettings(languageMode: .v6) + ), .target( name: "SWBQNXPlatform", dependencies: ["SWBCore", "SWBMacro", "SWBUtil"], exclude: ["CMakeLists.txt"], resources: [.process("Specs")], - swiftSettings: swiftSettings(languageMode: .v6)), + swiftSettings: swiftSettings(languageMode: .v6) + ), .target( name: "SWBUniversalPlatform", dependencies: [ @@ -257,180 +282,220 @@ let package = Package( ], exclude: ["CMakeLists.txt"], resources: [.process("Specs")], - swiftSettings: swiftSettings(languageMode: .v6)), + swiftSettings: swiftSettings(languageMode: .v6) + ), .target( name: "SWBWebAssemblyPlatform", dependencies: ["SWBCore", "SWBMacro", "SWBUtil"], exclude: ["CMakeLists.txt"], resources: [.process("Specs")], - swiftSettings: swiftSettings(languageMode: .v6)), + swiftSettings: swiftSettings(languageMode: .v6) + ), .target( name: "SWBWindowsPlatform", dependencies: ["SWBCore", "SWBMacro", "SWBUtil"], exclude: ["CMakeLists.txt", "README.md"], resources: [.process("Specs")], - swiftSettings: swiftSettings(languageMode: .v6)), + swiftSettings: swiftSettings(languageMode: .v6) + ), // Test support .target( name: "SwiftBuildTestSupport", dependencies: ["SwiftBuild", "SWBTestSupport", "SWBUtil"], - swiftSettings: swiftSettings(languageMode: .v6)), + swiftSettings: swiftSettings(languageMode: .v6) + ), .target( name: "SWBTestSupport", dependencies: ["SwiftBuild", "SWBBuildSystem", "SWBCore", "SWBTaskConstruction", "SWBTaskExecution", "SWBUtil", "SWBLLBuild", "SWBMacro"], - swiftSettings: swiftSettings(languageMode: .v5)), + swiftSettings: swiftSettings(languageMode: .v5) + ), // Tests .testTarget( name: "SWBAndroidPlatformTests", dependencies: ["SWBAndroidPlatform", "SWBTestSupport"], - swiftSettings: swiftSettings(languageMode: .v6)), + swiftSettings: swiftSettings(languageMode: .v6) + ), .testTarget( name: "SWBApplePlatformTests", dependencies: ["SWBApplePlatform", "SWBTestSupport"], - swiftSettings: swiftSettings(languageMode: .v6)), + swiftSettings: swiftSettings(languageMode: .v6) + ), .testTarget( name: "SWBGenericUnixPlatformTests", dependencies: ["SWBGenericUnixPlatform", "SWBTestSupport"], - swiftSettings: swiftSettings(languageMode: .v6)), + swiftSettings: swiftSettings(languageMode: .v6) + ), .testTarget( name: "SWBQNXPlatformTests", dependencies: ["SWBQNXPlatform", "SWBTestSupport"], - swiftSettings: swiftSettings(languageMode: .v6)), + swiftSettings: 
swiftSettings(languageMode: .v6) + ), .testTarget( name: "SWBUniversalPlatformTests", dependencies: ["SWBUniversalPlatform", "SWBTestSupport"], - swiftSettings: swiftSettings(languageMode: .v6)), + swiftSettings: swiftSettings(languageMode: .v6) + ), .testTarget( name: "SWBWebAssemblyPlatformTests", dependencies: ["SWBWebAssemblyPlatform", "SWBTestSupport"], - swiftSettings: swiftSettings(languageMode: .v6)), + swiftSettings: swiftSettings(languageMode: .v6) + ), .testTarget( name: "SWBWindowsPlatformTests", dependencies: ["SWBWindowsPlatform", "SWBTestSupport"], - swiftSettings: swiftSettings(languageMode: .v6)), + swiftSettings: swiftSettings(languageMode: .v6) + ), .testTarget( name: "SwiftBuildTests", dependencies: ["SwiftBuild", "SWBBuildService", "SwiftBuildTestSupport"], resources: [ .copy("TestData") ], - swiftSettings: swiftSettings(languageMode: .v6)), + swiftSettings: swiftSettings(languageMode: .v6) + ), .testTarget( name: "SWBProjectModelTests", dependencies: ["SWBProjectModel"], - swiftSettings: swiftSettings(languageMode: .v6)), + swiftSettings: swiftSettings(languageMode: .v6) + ), .testTarget( name: "SWBProtocolTests", dependencies: ["SWBProtocol", "SWBUtil"], - swiftSettings: swiftSettings(languageMode: .v6)), + swiftSettings: swiftSettings(languageMode: .v6) + ), .testTarget( name: "SWBUtilTests", dependencies: ["SWBTestSupport", "SWBUtil"], - swiftSettings: swiftSettings(languageMode: .v6)), + swiftSettings: swiftSettings(languageMode: .v6) + ), .testTarget( name: "SWBCASTests", dependencies: ["SWBTestSupport", "SWBCAS", "SWBUtil"], - swiftSettings: swiftSettings(languageMode: .v6)), + swiftSettings: swiftSettings(languageMode: .v6) + ), .testTarget( name: "SWBMacroTests", dependencies: ["SWBTestSupport", "SWBMacro"], - swiftSettings: swiftSettings(languageMode: .v6)), + swiftSettings: swiftSettings(languageMode: .v6) + ), .testTarget( name: "SWBServiceCoreTests", dependencies: ["SWBServiceCore"], - swiftSettings: swiftSettings(languageMode: .v6)), + swiftSettings: swiftSettings(languageMode: .v6) + ), .testTarget( name: "SWBCoreTests", dependencies: ["SWBCore", "SWBTestSupport", "SWBUtil", "SWBLLBuild"], - swiftSettings: swiftSettings(languageMode: .v6)), + swiftSettings: swiftSettings(languageMode: .v6) + ), .testTarget( name: "SWBTaskConstructionTests", dependencies: ["SWBTaskConstruction", "SWBCore", "SWBTestSupport", "SWBProtocol", "SWBUtil"], - swiftSettings: swiftSettings(languageMode: .v6)), + swiftSettings: swiftSettings(languageMode: .v6) + ), .testTarget( name: "SWBTaskExecutionTests", dependencies: ["SWBTaskExecution", "SWBTestSupport"], resources: [ .copy("TestData") ], - swiftSettings: swiftSettings(languageMode: .v6)), + swiftSettings: swiftSettings(languageMode: .v6) + ), .testTarget( name: "SWBLLBuildTests", dependencies: ["SWBLLBuild", "SWBTestSupport"], - swiftSettings: swiftSettings(languageMode: .v6)), + swiftSettings: swiftSettings(languageMode: .v6) + ), .testTarget( name: "SWBBuildSystemTests", dependencies: ["SWBBuildService", "SWBBuildSystem", "SwiftBuildTestSupport", "SWBTestSupport"], resources: [ .copy("TestData") ], - swiftSettings: swiftSettings(languageMode: .v6)), + swiftSettings: swiftSettings(languageMode: .v6) + ), .testTarget( name: "SWBBuildServiceTests", dependencies: ["SwiftBuild", "SWBBuildService", "SWBTestSupport"], - swiftSettings: swiftSettings(languageMode: .v6)), + swiftSettings: swiftSettings(languageMode: .v6) + ), .testTarget( name: "SWBTestSupportTests", dependencies: ["SWBTestSupport"], - swiftSettings: 
swiftSettings(languageMode: .v6)), + swiftSettings: swiftSettings(languageMode: .v6) + ), // Perf tests .testTarget( name: "SWBBuildSystemPerfTests", dependencies: ["SWBBuildSystem", "SWBTestSupport", "SwiftBuildTestSupport"], - swiftSettings: swiftSettings(languageMode: .v6)), + swiftSettings: swiftSettings(languageMode: .v6) + ), .testTarget( name: "SWBCASPerfTests", dependencies: ["SWBCAS", "SWBTestSupport"], - swiftSettings: swiftSettings(languageMode: .v6)), + swiftSettings: swiftSettings(languageMode: .v6) + ), .testTarget( name: "SWBCorePerfTests", dependencies: ["SWBCore", "SWBTestSupport"], - swiftSettings: swiftSettings(languageMode: .v6)), + swiftSettings: swiftSettings(languageMode: .v6) + ), .testTarget( name: "SWBTaskConstructionPerfTests", dependencies: ["SWBTaskConstruction", "SWBTestSupport"], - swiftSettings: swiftSettings(languageMode: .v6)), + swiftSettings: swiftSettings(languageMode: .v6) + ), .testTarget( name: "SWBUtilPerfTests", dependencies: ["SWBUtil", "SWBTestSupport"], - swiftSettings: swiftSettings(languageMode: .v6)), + swiftSettings: swiftSettings(languageMode: .v6) + ), .testTarget( name: "SwiftBuildPerfTests", dependencies: ["SwiftBuild", "SWBTestSupport", "SwiftBuildTestSupport"], - swiftSettings: swiftSettings(languageMode: .v6)), + swiftSettings: swiftSettings(languageMode: .v6) + ), // Commands .plugin( name: "launch-xcode", - capability: .command(intent: .custom( - verb: "launch-xcode", - description: "Launch the currently selected Xcode configured to use the just-built build service" - )) + capability: .command( + intent: .custom( + verb: "launch-xcode", + description: "Launch the currently selected Xcode configured to use the just-built build service" + ) + ) ), .plugin( name: "run-xcodebuild", - capability: .command(intent: .custom( - verb: "run-xcodebuild", - description: "Run xcodebuild from the currently selected Xcode configured to use the just-built build service" - )) + capability: .command( + intent: .custom( + verb: "run-xcodebuild", + description: "Run xcodebuild from the currently selected Xcode configured to use the just-built build service" + ) + ) ), .plugin( name: "cmake-smoke-test", - capability: .command(intent: .custom( - verb: "cmake-smoke-test", - description: "Build Swift Build using CMake for validation purposes" - )) + capability: .command( + intent: .custom( + verb: "cmake-smoke-test", + description: "Build Swift Build using CMake for validation purposes" + ) + ) ), .plugin( name: "generate-windows-installer-component-groups", - capability: .command(intent: .custom( - verb: "generate-windows-installer-component-groups", - description: "Generate XML fragments for cli.wxs in swift-installer-scripts" - )) - ) + capability: .command( + intent: .custom( + verb: "generate-windows-installer-component-groups", + description: "Generate XML fragments for cli.wxs in swift-installer-scripts" + ) + ) + ), ], swiftLanguageModes: [.v5, .v6], cxxLanguageStandard: .cxx20 @@ -467,7 +532,7 @@ if useLocalDependencies { .package(path: "../swift-argument-parser"), ] if !useLLBuildFramework { - package.dependencies += [.package(path: "../llbuild"),] + package.dependencies += [.package(path: "../llbuild")] } } else { package.dependencies += [ @@ -476,6 +541,6 @@ if useLocalDependencies { .package(url: "https://github.com/apple/swift-argument-parser.git", from: "1.0.3"), ] if !useLLBuildFramework { - package.dependencies += [.package(url: "https://github.com/swiftlang/swift-llbuild.git", branch: "main"),] + package.dependencies += [.package(url: 
"https://github.com/swiftlang/swift-llbuild.git", branch: "main")] } } diff --git a/Plugins/cmake-smoke-test/cmake-smoke-test.swift b/Plugins/cmake-smoke-test/cmake-smoke-test.swift index fcba09f3..a6fda901 100644 --- a/Plugins/cmake-smoke-test/cmake-smoke-test.swift +++ b/Plugins/cmake-smoke-test/cmake-smoke-test.swift @@ -63,7 +63,7 @@ struct CMakeSmokeTest: CommandPlugin { } var sharedSwiftFlags = [ - "-module-cache-path", moduleCachePath + "-module-cache-path", moduleCachePath, ] if let sysrootPath { @@ -75,15 +75,16 @@ struct CMakeSmokeTest: CommandPlugin { "-DLLBuild_DIR=\(llbuildBuildURL.appending(components: "cmake", "modules").filePath)", "-DTSC_DIR=\(swiftToolsSupportCoreBuildURL.appending(components: "cmake", "modules").filePath)", "-DSwiftDriver_DIR=\(swiftDriverBuildURL.appending(components: "cmake", "modules").filePath)", - "-DSwiftSystem_DIR=\(swiftSystemBuildURL.appending(components: "cmake", "modules").filePath)" + "-DSwiftSystem_DIR=\(swiftSystemBuildURL.appending(components: "cmake", "modules").filePath)", ] - let sharedCMakeArgs = [ - "-G", "Ninja", - "-DCMAKE_MAKE_PROGRAM=\(ninjaPath)", - "-DCMAKE_BUILD_TYPE:=Debug", - "-DCMAKE_Swift_FLAGS='\(sharedSwiftFlags.joined(separator: " "))'" - ] + cMakeProjectArgs + extraCMakeArgs + let sharedCMakeArgs = + [ + "-G", "Ninja", + "-DCMAKE_MAKE_PROGRAM=\(ninjaPath)", + "-DCMAKE_BUILD_TYPE:=Debug", + "-DCMAKE_Swift_FLAGS='\(sharedSwiftFlags.joined(separator: " "))'", + ] + cMakeProjectArgs + extraCMakeArgs Diagnostics.progress("Building swift-tools-support-core") try await Process.checkNonZeroExit(url: cmakeURL, arguments: sharedCMakeArgs + [swiftToolsSupportCoreURL.filePath], workingDirectory: swiftToolsSupportCoreBuildURL) @@ -159,13 +160,13 @@ enum OS { static func host() throws -> Self { #if os(macOS) - return .macOS + return .macOS #elseif os(Linux) - return .linux + return .linux #elseif os(Windows) - return .windows + return .windows #else - throw Errors.unimplementedForHostOS + throw Errors.unimplementedForHostOS #endif } } @@ -202,100 +203,104 @@ extension Process { static func checkNonZeroExit(url: URL, arguments: [String], workingDirectory: URL, environment: [String: String]? = nil) async throws { try Diagnostics.progress("\(url.filePath) \(arguments.joined(separator: " "))") #if USE_PROCESS_SPAWNING_WORKAROUND && !os(Windows) - Diagnostics.progress("Using process spawning workaround") - // Linux workaround for https://github.com/swiftlang/swift-corelibs-foundation/issues/4772 - // Foundation.Process on Linux seems to inherit the Process.run()-calling thread's signal mask, creating processes that even have SIGTERM blocked - // This manifests as CMake getting stuck when invoking 'uname' with incorrectly configured signal handlers. 
- var fileActions = posix_spawn_file_actions_t() - defer { posix_spawn_file_actions_destroy(&fileActions) } - var attrs: posix_spawnattr_t = posix_spawnattr_t() - defer { posix_spawnattr_destroy(&attrs) } - posix_spawn_file_actions_init(&fileActions) - try posix_spawn_file_actions_addchdir_np(&fileActions, workingDirectory.filePath) - - posix_spawnattr_init(&attrs) - posix_spawnattr_setpgroup(&attrs, 0) - var noSignals = sigset_t() - sigemptyset(&noSignals) - posix_spawnattr_setsigmask(&attrs, &noSignals) - - var mostSignals = sigset_t() - sigemptyset(&mostSignals) - for i in 1 ..< SIGSYS { - if i == SIGKILL || i == SIGSTOP { - continue - } - sigaddset(&mostSignals, i) - } - posix_spawnattr_setsigdefault(&attrs, &mostSignals) - posix_spawnattr_setflags(&attrs, numericCast(POSIX_SPAWN_SETPGROUP | POSIX_SPAWN_SETSIGDEF | POSIX_SPAWN_SETSIGMASK)) - var pid: pid_t = -1 - try withArrayOfCStrings([url.filePath] + arguments) { arguments in - try withArrayOfCStrings((environment ?? [:]).map { key, value in "\(key)=\(value)" }) { environment in - let spawnResult = try posix_spawn(&pid, url.filePath, /*file_actions=*/&fileActions, /*attrp=*/&attrs, arguments, nil); - var exitCode: Int32 = -1 - var result = wait4(pid, &exitCode, 0, nil); - while (result == -1 && errno == EINTR) { - result = wait4(pid, &exitCode, 0, nil) - } - guard result != -1 else { - throw Errors.miscError("wait failed") + Diagnostics.progress("Using process spawning workaround") + // Linux workaround for https://github.com/swiftlang/swift-corelibs-foundation/issues/4772 + // Foundation.Process on Linux seems to inherit the Process.run()-calling thread's signal mask, creating processes that even have SIGTERM blocked + // This manifests as CMake getting stuck when invoking 'uname' with incorrectly configured signal handlers. + var fileActions = posix_spawn_file_actions_t() + defer { posix_spawn_file_actions_destroy(&fileActions) } + var attrs: posix_spawnattr_t = posix_spawnattr_t() + defer { posix_spawnattr_destroy(&attrs) } + posix_spawn_file_actions_init(&fileActions) + try posix_spawn_file_actions_addchdir_np(&fileActions, workingDirectory.filePath) + + posix_spawnattr_init(&attrs) + posix_spawnattr_setpgroup(&attrs, 0) + var noSignals = sigset_t() + sigemptyset(&noSignals) + posix_spawnattr_setsigmask(&attrs, &noSignals) + + var mostSignals = sigset_t() + sigemptyset(&mostSignals) + for i in 1..(_ seq: S, _ initial: U, _ combine: (U, S.Element) -> U) -> [U] { - var result: [U] = [] - result.reserveCapacity(seq.underestimatedCount) - var runningResult = initial - for element in seq { - runningResult = combine(runningResult, element) - result.append(runningResult) - } - return result -} + func scan(_ seq: S, _ initial: U, _ combine: (U, S.Element) -> U) -> [U] { + var result: [U] = [] + result.reserveCapacity(seq.underestimatedCount) + var runningResult = initial + for element in seq { + runningResult = combine(runningResult, element) + result.append(runningResult) + } + return result + } -func withArrayOfCStrings( - _ args: [String], - _ body: (UnsafePointer?>) throws -> T -) throws -> T { - let argsCounts = Array(args.map { $0.utf8.count + 1 }) - let argsOffsets = [0] + scan(argsCounts, 0, +) - let argsBufferSize = argsOffsets.last! 
- var argsBuffer: [UInt8] = [] - argsBuffer.reserveCapacity(argsBufferSize) - for arg in args { - argsBuffer.append(contentsOf: arg.utf8) - argsBuffer.append(0) - } - return try argsBuffer.withUnsafeMutableBufferPointer { - (argsBuffer) in - let ptr = UnsafeRawPointer(argsBuffer.baseAddress!).bindMemory( - to: Int8.self, capacity: argsBuffer.count) - var cStrings: [UnsafePointer?] = argsOffsets.map { ptr + $0 } - cStrings[cStrings.count - 1] = nil - return try cStrings.withUnsafeMutableBufferPointer { - let unsafeString = UnsafeMutableRawPointer($0.baseAddress!).bindMemory( - to: UnsafeMutablePointer?.self, capacity: $0.count) - return try body(unsafeString) + func withArrayOfCStrings( + _ args: [String], + _ body: (UnsafePointer?>) throws -> T + ) throws -> T { + let argsCounts = Array(args.map { $0.utf8.count + 1 }) + let argsOffsets = [0] + scan(argsCounts, 0, +) + let argsBufferSize = argsOffsets.last! + var argsBuffer: [UInt8] = [] + argsBuffer.reserveCapacity(argsBufferSize) + for arg in args { + argsBuffer.append(contentsOf: arg.utf8) + argsBuffer.append(0) + } + return try argsBuffer.withUnsafeMutableBufferPointer { + (argsBuffer) in + let ptr = UnsafeRawPointer(argsBuffer.baseAddress!).bindMemory( + to: Int8.self, + capacity: argsBuffer.count + ) + var cStrings: [UnsafePointer?] = argsOffsets.map { ptr + $0 } + cStrings[cStrings.count - 1] = nil + return try cStrings.withUnsafeMutableBufferPointer { + let unsafeString = UnsafeMutableRawPointer($0.baseAddress!).bindMemory( + to: UnsafeMutablePointer?.self, + capacity: $0.count + ) + return try body(unsafeString) + } + } } - } -} #endif diff --git a/Plugins/generate-windows-installer-component-groups/generate-windows-installer-component-groups.swift b/Plugins/generate-windows-installer-component-groups/generate-windows-installer-component-groups.swift index a2b4150f..317e1472 100644 --- a/Plugins/generate-windows-installer-component-groups/generate-windows-installer-component-groups.swift +++ b/Plugins/generate-windows-installer-component-groups/generate-windows-installer-component-groups.swift @@ -25,24 +25,24 @@ struct GenerateWindowsInstallerComponentGroups: CommandPlugin { continue } librariesComponent += #""" - - - + + + - """# + """# let resources = sourceModule.sourceFiles.filter { resource in resource.type == .resource && ["xcspec", "xcbuildrules"].contains(resource.url.pathExtension) } if !resources.isEmpty { groupRefs += #" \#n"# directories += #" \#n"# resourcesComponents += #" \#n"# - for resource in resources { + for resource in resources { resourcesComponents += #""" - - - + + + - """# + """# } resourcesComponents += " \n" } diff --git a/Plugins/launch-xcode/launch-xcode.swift b/Plugins/launch-xcode/launch-xcode.swift index 674b8be7..ac67bcd3 100644 --- a/Plugins/launch-xcode/launch-xcode.swift +++ b/Plugins/launch-xcode/launch-xcode.swift @@ -17,37 +17,37 @@ import Foundation struct LaunchXcode: CommandPlugin { func performCommand(context: PluginContext, arguments: [String]) async throws { #if !os(macOS) - throw LaunchXcodeError.unsupportedPlatform + throw LaunchXcodeError.unsupportedPlatform #else - var args = ArgumentExtractor(arguments) - var configuration: PackageManager.BuildConfiguration = .debug - // --release - if args.extractFlag(named: "release") > 0 { - configuration = .release - } else { - // --configuration release - let configurationOptions = args.extractOption(named: "configuration") - if configurationOptions.contains("release") { + var args = ArgumentExtractor(arguments) + var configuration: 
PackageManager.BuildConfiguration = .debug + // --release + if args.extractFlag(named: "release") > 0 { configuration = .release + } else { + // --configuration release + let configurationOptions = args.extractOption(named: "configuration") + if configurationOptions.contains("release") { + configuration = .release + } } - } - let buildResult = try packageManager.build(.all(includingTests: false), parameters: .init(configuration: configuration, echoLogs: true)) - guard buildResult.succeeded else { return } - guard let buildServiceURL = buildResult.builtArtifacts.map({ $0.url }).filter({ $0.lastPathComponent == "SWBBuildServiceBundle" }).first else { - throw LaunchXcodeError.buildServiceURLNotFound - } + let buildResult = try packageManager.build(.all(includingTests: false), parameters: .init(configuration: configuration, echoLogs: true)) + guard buildResult.succeeded else { return } + guard let buildServiceURL = buildResult.builtArtifacts.map({ $0.url }).filter({ $0.lastPathComponent == "SWBBuildServiceBundle" }).first else { + throw LaunchXcodeError.buildServiceURLNotFound + } - print("Launching Xcode...") - let process = Process() - process.executableURL = URL(fileURLWithPath: "/usr/bin/open") - process.arguments = ["-n", "-F", "-W", "--env", "XCBBUILDSERVICE_PATH=\(buildServiceURL.path())", "-b", "com.apple.dt.Xcode"] - process.standardOutput = nil - process.standardError = nil - try await process.run() - if process.terminationStatus != 0 { - throw LaunchXcodeError.launchFailed - } + print("Launching Xcode...") + let process = Process() + process.executableURL = URL(fileURLWithPath: "/usr/bin/open") + process.arguments = ["-n", "-F", "-W", "--env", "XCBBUILDSERVICE_PATH=\(buildServiceURL.path())", "-b", "com.apple.dt.Xcode"] + process.standardOutput = nil + process.standardError = nil + try await process.run() + if process.terminationStatus != 0 { + throw LaunchXcodeError.launchFailed + } #endif } } diff --git a/Plugins/run-xcodebuild/run-xcodebuild.swift b/Plugins/run-xcodebuild/run-xcodebuild.swift index 710a84a2..814345a5 100644 --- a/Plugins/run-xcodebuild/run-xcodebuild.swift +++ b/Plugins/run-xcodebuild/run-xcodebuild.swift @@ -17,35 +17,35 @@ import Foundation struct RunXcodebuild: CommandPlugin { func performCommand(context: PluginContext, arguments: [String]) async throws { #if !os(macOS) - throw RunXcodebuildError.unsupportedPlatform + throw RunXcodebuildError.unsupportedPlatform #else - var args = ArgumentExtractor(arguments) - var configuration: PackageManager.BuildConfiguration = .debug - // --release - if args.extractFlag(named: "release") > 0 { - configuration = .release - } else { - // --configuration release - let configurationOptions = args.extractOption(named: "configuration") - if configurationOptions.contains("release") { + var args = ArgumentExtractor(arguments) + var configuration: PackageManager.BuildConfiguration = .debug + // --release + if args.extractFlag(named: "release") > 0 { configuration = .release + } else { + // --configuration release + let configurationOptions = args.extractOption(named: "configuration") + if configurationOptions.contains("release") { + configuration = .release + } } - } - let buildResult = try packageManager.build(.all(includingTests: false), parameters: .init(configuration: configuration, echoLogs: true)) - guard buildResult.succeeded else { return } - guard let buildServiceURL = buildResult.builtArtifacts.map({ $0.url }).filter({ $0.lastPathComponent == "SWBBuildServiceBundle" }).first else { - throw 
RunXcodebuildError.buildServiceURLNotFound - }              + let buildResult = try packageManager.build(.all(includingTests: false), parameters: .init(configuration: configuration, echoLogs: true)) + guard buildResult.succeeded else { return } + guard let buildServiceURL = buildResult.builtArtifacts.map({ $0.url }).filter({ $0.lastPathComponent == "SWBBuildServiceBundle" }).first else { + throw RunXcodebuildError.buildServiceURLNotFound + } - let process = Process() - process.executableURL = URL(fileURLWithPath: "/usr/bin/xcrun") - process.arguments = ["xcodebuild"] + args.remainingArguments - process.environment = ProcessInfo.processInfo.environment.merging(["XCBBUILDSERVICE_PATH": buildServiceURL.path()]) { _, new in new } - try await process.run() - if process.terminationStatus != 0 { - throw RunXcodebuildError.xcodebuildError(terminationReason: process.terminationReason, terminationStatus: process.terminationStatus) - } + let process = Process() + process.executableURL = URL(fileURLWithPath: "/usr/bin/xcrun") + process.arguments = ["xcodebuild"] + args.remainingArguments + process.environment = ProcessInfo.processInfo.environment.merging(["XCBBUILDSERVICE_PATH": buildServiceURL.path()]) { _, new in new } + try await process.run() + if process.terminationStatus != 0 { + throw RunXcodebuildError.xcodebuildError(terminationReason: process.terminationReason, terminationStatus: process.terminationStatus) + } #endif } } @@ -62,14 +62,15 @@ enum RunXcodebuildError: Error, CustomStringConvertible { case .buildServiceURLNotFound: return "Failed to determine path to built SWBBuildServiceBundle" case let .xcodebuildError(terminationReason, terminationStatus): - let reason = switch terminationReason { - case .exit: - "status code" - case .uncaughtSignal: - "uncaught signal" - @unknown default: - preconditionFailure() - } + let reason = + switch terminationReason { + case .exit: + "status code" + case .uncaughtSignal: + "uncaught signal" + @unknown default: + preconditionFailure() + } return "xcodebuild exited with \(reason) \(terminationStatus), did you remember to pass `--disable-sandbox`?" } } diff --git a/README.md b/README.md index 504b53d8..3ccd862d 100644 --- a/README.md +++ b/README.md @@ -64,6 +64,13 @@ code of conduct defined by the Contributor Covenant. This document is used across many open source communities, and we think it articulates our values well. For more, see the [Code of Conduct](https://swift.org/code-of-conduct/). +Once the repository is cloned, configure your local repository so that `git blame` ignores the formatting commits: + +``` +git config blame.ignoreRevsFile .git-blame-ignore-revs +``` + + License ------- See https://swift.org/LICENSE.txt for license information. diff --git a/Sources/SWBAndroidPlatform/AndroidSDK.swift b/Sources/SWBAndroidPlatform/AndroidSDK.swift index f5342ed9..e7cc3886 100644 --- a/Sources/SWBAndroidPlatform/AndroidSDK.swift +++ b/Sources/SWBAndroidPlatform/AndroidSDK.swift @@ -159,13 +159,15 @@ public import Foundation self.arch = try container.decode(String.self, forKey: .arch) self.triple = try container.decode(String.self, forKey: .triple) self.llvm_triple = try container.decode(LLVMTriple.self, forKey: .llvm_triple) - self.min_os_version = try container.decodeIfPresent(Int.self, forKey: .min_os_version) ?? 
{ - if ndkVersion < Version(27) { - return 21 // min_os_version wasn't present prior to NDKr27, fill it in with 21, which is the appropriate value - } else { - throw DecodingError.valueNotFound(Int.self, .init(codingPath: container.codingPath, debugDescription: "No value associated with key \(CodingKeys.min_os_version) (\"\(CodingKeys.min_os_version.rawValue)\").")) - } - }() + self.min_os_version = + try container.decodeIfPresent(Int.self, forKey: .min_os_version) + ?? { + if ndkVersion < Version(27) { + return 21 // min_os_version wasn't present prior to NDKr27, fill it in with 21, which is the appropriate value + } else { + throw DecodingError.valueNotFound(Int.self, .init(codingPath: container.codingPath, debugDescription: "No value associated with key \(CodingKeys.min_os_version) (\"\(CodingKeys.min_os_version.rawValue)\").")) + } + }() } } @@ -189,7 +191,7 @@ public import Foundation // Also works on non-x86 archs via binfmt support and qemu (or Rosetta on Apple-hosted VMs). "linux-x86_64" default: - nil // unsupported host + nil // unsupported host } } diff --git a/Sources/SWBAndroidPlatform/JavaProperties.swift b/Sources/SWBAndroidPlatform/JavaProperties.swift index 0a6059d7..4eb7ea86 100644 --- a/Sources/SWBAndroidPlatform/JavaProperties.swift +++ b/Sources/SWBAndroidPlatform/JavaProperties.swift @@ -20,10 +20,12 @@ struct JavaProperties { private let properties: [String: String] init(data: Data) throws { - properties = Dictionary(uniqueKeysWithValues: String(decoding: data, as: UTF8.self).split(whereSeparator: { $0.isNewline }).map(String.init).map { - let (key, value) = $0.split("=") - return (key.trimmingCharacters(in: .whitespaces), value.trimmingCharacters(in: .whitespaces)) - }) + properties = Dictionary( + uniqueKeysWithValues: String(decoding: data, as: UTF8.self).split(whereSeparator: { $0.isNewline }).map(String.init).map { + let (key, value) = $0.split("=") + return (key.trimmingCharacters(in: .whitespaces), value.trimmingCharacters(in: .whitespaces)) + } + ) } subscript(_ propertyName: String) -> String? 
{ diff --git a/Sources/SWBAndroidPlatform/Plugin.swift b/Sources/SWBAndroidPlatform/Plugin.swift index f282f26e..72b451e0 100644 --- a/Sources/SWBAndroidPlatform/Plugin.swift +++ b/Sources/SWBAndroidPlatform/Plugin.swift @@ -67,7 +67,7 @@ struct AndroidPlatformSpecsExtension: SpecificationsExtension { findResourceBundle(nameWhenInstalledInToolchain: "SwiftBuild_SWBAndroidPlatform", resourceSearchPaths: resourceSearchPaths, defaultBundle: Bundle.module) } - func specificationDomains() -> [String : [String]] { + func specificationDomains() -> [String: [String]] { ["android": ["linux"]] } } @@ -98,15 +98,18 @@ struct AndroidEnvironmentExtension: EnvironmentExtension { struct AndroidPlatformExtension: PlatformInfoExtension { func additionalPlatforms(context: any PlatformInfoExtensionAdditionalPlatformsContext) throws -> [(path: Path, data: [String: PropertyListItem])] { [ - (.root, [ - "Type": .plString("Platform"), - "Name": .plString("android"), - "Identifier": .plString("android"), - "Description": .plString("android"), - "FamilyName": .plString("Android"), - "FamilyIdentifier": .plString("android"), - "IsDeploymentPlatform": .plString("YES"), - ]) + ( + .root, + [ + "Type": .plString("Platform"), + "Name": .plString("android"), + "Identifier": .plString("android"), + "Description": .plString("android"), + "FamilyName": .plString("Android"), + "FamilyIdentifier": .plString("android"), + "IsDeploymentPlatform": .plString("YES"), + ] + ) ] } } @@ -147,7 +150,7 @@ struct AndroidPlatformExtension: PlatformInfoExtension { let allPossibleTriples = try abis.values.flatMap { abi in try (max(deploymentTargetRange.min, abi.min_os_version)...deploymentTargetRange.max).map { deploymentTarget in var triple = abi.llvm_triple - triple.vendor = "unknown" // Android NDK uses "none", Swift SDKs use "unknown" + triple.vendor = "unknown" // Android NDK uses "none", Swift SDKs use "unknown" guard let env = triple.environment else { throw StubError.error("Android triples must have an environment") } @@ -156,11 +159,12 @@ struct AndroidPlatformExtension: PlatformInfoExtension { } }.map(\.description) - let androidSwiftSDKs = (try? SwiftSDK.findSDKs( - targetTriples: allPossibleTriples, - fs: context.fs, - hostOperatingSystem: context.hostOperatingSystem - )) ?? [] + let androidSwiftSDKs = + (try? SwiftSDK.findSDKs( + targetTriples: allPossibleTriples, + fs: context.fs, + hostOperatingSystem: context.hostOperatingSystem + )) ?? 
[] return try androidSwiftSDKs.map { androidSwiftSDK in let perArchSwiftResourceDirs = try Dictionary(grouping: androidSwiftSDK.targetTriples, by: { try LLVMTriple($0.key).arch }).mapValues { @@ -179,14 +183,21 @@ struct AndroidPlatformExtension: PlatformInfoExtension { customProperties: [ "SWIFT_TARGET_TRIPLE": .plString("$(CURRENT_ARCH)-unknown-$(SWIFT_PLATFORM_TARGET_PREFIX)$(LLVM_TARGET_TRIPLE_SUFFIX)"), "LIBRARY_SEARCH_PATHS": "$(inherited) $(SWIFT_RESOURCE_DIR)/../$(__ANDROID_TRIPLE_$(CURRENT_ARCH))", - ].merging(perArchSwiftResourceDirs.map { - [ - ("SWIFT_LIBRARY_PATH[arch=\($0.key)]", .plString($0.value.join("android").str)), - ("SWIFT_RESOURCE_DIR[arch=\($0.key)]", .plString($0.value.str)), - ] - }.flatMap { $0 }, uniquingKeysWith: { _, new in new }).merging(abis.map { - ("__ANDROID_TRIPLE_\($0.value.llvm_triple.arch)", .plString($0.value.triple)) - }, uniquingKeysWith: { _, new in new })) + ].merging( + perArchSwiftResourceDirs.map { + [ + ("SWIFT_LIBRARY_PATH[arch=\($0.key)]", .plString($0.value.join("android").str)), + ("SWIFT_RESOURCE_DIR[arch=\($0.key)]", .plString($0.value.str)), + ] + }.flatMap { $0 }, + uniquingKeysWith: { _, new in new } + ).merging( + abis.map { + ("__ANDROID_TRIPLE_\($0.value.llvm_triple.arch)", .plString($0.value.triple)) + }, + uniquingKeysWith: { _, new in new } + ) + ) } + [ // Fallback SDK for when there are no Swift SDKs (Android SDK is still usable for C/C++-only code) sdk(androidPlatform: androidPlatform, androidNdk: androidNdk, defaultProperties: defaultProperties) @@ -194,38 +205,45 @@ struct AndroidPlatformExtension: PlatformInfoExtension { } private func sdk(canonicalName: String? = nil, androidPlatform: Platform, androidNdk: AndroidSDK.NDK, defaultProperties: [String: PropertyListItem], customProperties: [String: PropertyListItem] = [:]) -> (path: Path, platform: SWBCore.Platform?, data: [String: PropertyListItem]) { - return (androidNdk.sysroot.path, androidPlatform, [ - "Type": .plString("SDK"), - "Version": .plString(androidNdk.version.description), - "CanonicalName": .plString(canonicalName ?? "android\(androidNdk.version.description)"), - // "android.ndk" is an alias for the "Android SDK without a Swift SDK" scenario in order for tests to deterministically pick a single Android destination regardless of how many Android Swift SDKs may be installed. - "Aliases": .plArray([.plString("android")] + (canonicalName == nil ? 
[.plString("android.ndk")] : [])), - "IsBaseSDK": .plBool(true), - "DefaultProperties": .plDict([ - "PLATFORM_NAME": .plString("android"), - ].merging(defaultProperties, uniquingKeysWith: { _, new in new })), - "CustomProperties": .plDict([ - // Unlike most platforms, the Android version goes on the environment field rather than the system field - // FIXME: Make this configurable in a better way so we don't need to push build settings at the SDK definition level - "LLVM_TARGET_TRIPLE_OS_VERSION": .plString("$(SWIFT_PLATFORM_TARGET_PREFIX)"), - "LLVM_TARGET_TRIPLE_SUFFIX": .plString("-android$($(DEPLOYMENT_TARGET_SETTING_NAME))"), - ].merging(customProperties, uniquingKeysWith: { _, new in new })), - "SupportedTargets": .plDict([ - "android": .plDict([ - "Archs": .plArray(androidNdk.abis.map { .plString($0.value.llvm_triple.arch) }), - "DeploymentTargetSettingName": .plString("ANDROID_DEPLOYMENT_TARGET"), - "DefaultDeploymentTarget": .plString("\(androidNdk.deploymentTargetRange.min)"), - "MinimumDeploymentTarget": .plString("\(androidNdk.deploymentTargetRange.min)"), - "MaximumDeploymentTarget": .plString("\(androidNdk.deploymentTargetRange.max)"), - "LLVMTargetTripleEnvironment": .plString("android"), // FIXME: androideabi for armv7! - "LLVMTargetTripleSys": .plString("linux"), - "LLVMTargetTripleVendor": .plString("none"), - ]) - ]), - "Toolchains": .plArray([ - .plString("android") - ]) - ]) + return ( + androidNdk.sysroot.path, androidPlatform, + [ + "Type": .plString("SDK"), + "Version": .plString(androidNdk.version.description), + "CanonicalName": .plString(canonicalName ?? "android\(androidNdk.version.description)"), + // "android.ndk" is an alias for the "Android SDK without a Swift SDK" scenario in order for tests to deterministically pick a single Android destination regardless of how many Android Swift SDKs may be installed. + "Aliases": .plArray([.plString("android")] + (canonicalName == nil ? [.plString("android.ndk")] : [])), + "IsBaseSDK": .plBool(true), + "DefaultProperties": .plDict( + [ + "PLATFORM_NAME": .plString("android") + ].merging(defaultProperties, uniquingKeysWith: { _, new in new }) + ), + "CustomProperties": .plDict( + [ + // Unlike most platforms, the Android version goes on the environment field rather than the system field + // FIXME: Make this configurable in a better way so we don't need to push build settings at the SDK definition level + "LLVM_TARGET_TRIPLE_OS_VERSION": .plString("$(SWIFT_PLATFORM_TARGET_PREFIX)"), + "LLVM_TARGET_TRIPLE_SUFFIX": .plString("-android$($(DEPLOYMENT_TARGET_SETTING_NAME))"), + ].merging(customProperties, uniquingKeysWith: { _, new in new }) + ), + "SupportedTargets": .plDict([ + "android": .plDict([ + "Archs": .plArray(androidNdk.abis.map { .plString($0.value.llvm_triple.arch) }), + "DeploymentTargetSettingName": .plString("ANDROID_DEPLOYMENT_TARGET"), + "DefaultDeploymentTarget": .plString("\(androidNdk.deploymentTargetRange.min)"), + "MinimumDeploymentTarget": .plString("\(androidNdk.deploymentTargetRange.min)"), + "MaximumDeploymentTarget": .plString("\(androidNdk.deploymentTargetRange.max)"), + "LLVMTargetTripleEnvironment": .plString("android"), // FIXME: androideabi for armv7! 
+ "LLVMTargetTripleSys": .plString("linux"), + "LLVMTargetTripleVendor": .plString("none"), + ]) + ]), + "Toolchains": .plArray([ + .plString("android") + ]), + ] + ) } } @@ -251,7 +269,8 @@ struct AndroidToolchainRegistryExtension: ToolchainRegistryExtension { defaultSettingsWhenPrimary: [:], executableSearchPaths: [toolchainPath.path.join("bin")], testingLibraryPlatformNames: [], - fs: context.fs) + fs: context.fs + ) ] } } diff --git a/Sources/SWBApplePlatform/ActoolInputFileGroupingStrategy.swift b/Sources/SWBApplePlatform/ActoolInputFileGroupingStrategy.swift index daf36099..4658d2da 100644 --- a/Sources/SWBApplePlatform/ActoolInputFileGroupingStrategy.swift +++ b/Sources/SWBApplePlatform/ActoolInputFileGroupingStrategy.swift @@ -49,8 +49,7 @@ import Foundation file.fileType.conformsTo(stringsFileType) && file.absolutePath.basenameWithoutSuffix == stickerPackName } } - } - catch { + } catch { context.error("\(error)", location: .unknown, component: .default) return [] } diff --git a/Sources/SWBApplePlatform/AppIntentsMetadataCompiler.swift b/Sources/SWBApplePlatform/AppIntentsMetadataCompiler.swift index 76bc843c..61ff5ed6 100644 --- a/Sources/SWBApplePlatform/AppIntentsMetadataCompiler.swift +++ b/Sources/SWBApplePlatform/AppIntentsMetadataCompiler.swift @@ -27,7 +27,7 @@ private struct AppIntentsLocalizationPayload: TaskPayload { self.path = path } - func serialize(to serializer: T) where T : Serializer { + func serialize(to serializer: T) where T: Serializer { serializer.serializeAggregate(3) { serializer.serialize(buildVariant) serializer.serialize(architecture) @@ -51,7 +51,7 @@ private struct AppIntentsLocalizationPayload: TaskPayload { self.stringsdata = stringsdata } - func serialize(to serializer: T) where T : Serializer { + func serialize(to serializer: T) where T: Serializer { serializer.serializeAggregate(2) { serializer.serialize(effectivePlatformName) serializer.serialize(stringsdata) @@ -68,9 +68,7 @@ private struct AppIntentsLocalizationPayload: TaskPayload { final public class AppIntentsMetadataCompilerSpec: GenericCommandLineToolSpec, SpecIdentifierType, @unchecked Sendable { public static let identifier = "com.apple.compilers.appintentsmetadata" public func shouldConstructAppIntentsMetadataTask(_ cbc: CommandBuildContext) -> Bool { - return cbc.scope.evaluate(BuiltinMacros.CURRENT_VARIANT) == "normal" && - cbc.producer.canConstructAppIntentsMetadataTask && - !cbc.inputs.filter({ $0.fileType.extensions.contains("swift") }).isEmpty + return cbc.scope.evaluate(BuiltinMacros.CURRENT_VARIANT) == "normal" && cbc.producer.canConstructAppIntentsMetadataTask && !cbc.inputs.filter({ $0.fileType.extensions.contains("swift") }).isEmpty } override public func constructTasks(_ cbc: CommandBuildContext, _ delegate: any TaskGenerationDelegate) async { @@ -185,8 +183,9 @@ final public class AppIntentsMetadataCompilerSpec: GenericCommandLineToolSpec, S return cbc.scope.table.namespace.parseLiteralStringList(stringDataFiles.map(\.path.str)) case BuiltinMacros.LM_COMPILE_TIME_EXTRACTION: if cbc.scope.evaluate(BuiltinMacros.LM_COMPILE_TIME_EXTRACTION), - let toolSpecInfo, - toolSpecInfo.hasFeature(DiscoveredSwiftCompilerToolSpecInfo.FeatureFlag.constExtractCompleteMetadata.rawValue) { + let toolSpecInfo, + toolSpecInfo.hasFeature(DiscoveredSwiftCompilerToolSpecInfo.FeatureFlag.constExtractCompleteMetadata.rawValue) + { return cbc.scope.table.namespace.parseLiteralString("YES") } return cbc.scope.table.namespace.parseLiteralString("NO") @@ -210,17 +209,19 @@ final public class 
AppIntentsMetadataCompilerSpec: GenericCommandLineToolSpec, S let nodeName = (isObject || isStaticLibrary) ? "ExtractAppIntentsMetadata \(cbc.scope.evaluate(BuiltinMacros.TARGET_BUILD_DIR)) \(cbc.scope.evaluate(BuiltinMacros.PRODUCT_MODULE_NAME) + "/Metadata.appintents")" : "ExtractAppIntentsMetadata \(cbc.resourcesDir?.join("Metadata.appintents").str ?? "")" let orderingNode = delegate.createVirtualNode(nodeName) let commandLine = await commandLineFromTemplate(cbc, delegate, optionContext: discoveredCommandLineToolSpecInfo(cbc.producer, cbc.scope, delegate), lookup: lookup).map(\.asString) - delegate.createTask(type: self, - payload: payload, - ruleInfo: defaultRuleInfo(cbc, delegate), - commandLine: commandLine, - environment: environmentFromSpec(cbc, delegate), - workingDirectory: cbc.producer.defaultWorkingDirectory, - inputs: allInputs, - outputs: outputs + [orderingNode], - action: nil, - execDescription: resolveExecutionDescription(cbc, delegate), - enableSandboxing: enableSandboxing) + delegate.createTask( + type: self, + payload: payload, + ruleInfo: defaultRuleInfo(cbc, delegate), + commandLine: commandLine, + environment: environmentFromSpec(cbc, delegate), + workingDirectory: cbc.producer.defaultWorkingDirectory, + inputs: allInputs, + outputs: outputs + [orderingNode], + action: nil, + execDescription: resolveExecutionDescription(cbc, delegate), + enableSandboxing: enableSandboxing + ) } public override var payloadType: (any TaskPayload.Type)? { return AppIntentsLocalizationPayload.self } diff --git a/Sources/SWBApplePlatform/AppIntentsMetadataTaskProducer.swift b/Sources/SWBApplePlatform/AppIntentsMetadataTaskProducer.swift index cf8b9a03..e864c88e 100644 --- a/Sources/SWBApplePlatform/AppIntentsMetadataTaskProducer.swift +++ b/Sources/SWBApplePlatform/AppIntentsMetadataTaskProducer.swift @@ -37,8 +37,9 @@ final class AppIntentsMetadataTaskProducer: PhasedTaskProducer, TaskProducer { return fileTypes.flatMap { fileType in buildFiles.compactMap { buildFile in guard let resolvedBuildFileInfo = try? self.context.resolveBuildFileReference(buildFile), - !buildFilesProcessingContext.isExcluded(resolvedBuildFileInfo.absolutePath, filters: buildFile.platformFilters), - resolvedBuildFileInfo.fileType.conformsTo(fileType) else { + !buildFilesProcessingContext.isExcluded(resolvedBuildFileInfo.absolutePath, filters: buildFile.platformFilters), + resolvedBuildFileInfo.fileType.conformsTo(fileType) + else { return nil } @@ -69,8 +70,9 @@ final class AppIntentsMetadataTaskProducer: PhasedTaskProducer, TaskProducer { let dependencies = transitiveClosure([configuredTarget], successors: self.targetContext.globalProductPlan.dependencies(of:)) for dependency in dependencies.0 { if let standardTarget = dependency.target as? 
StandardTarget, - let bundleProductType = self.context.getSpec(standardTarget.productTypeIdentifier), - bundleProductType.conformsTo(identifier: "com.apple.product-type.bundle") { + let bundleProductType = self.context.getSpec(standardTarget.productTypeIdentifier), + bundleProductType.conformsTo(identifier: "com.apple.product-type.bundle") + { let targetScope = self.targetContext.globalProductPlan.getTargetSettings(dependency).globalScope let dependencyMetadataPath = targetScope.evaluate(BuiltinMacros.TARGET_BUILD_DIR) .join(targetScope.evaluate(BuiltinMacros.UNLOCALIZED_RESOURCES_FOLDER_PATH)) @@ -80,7 +82,7 @@ final class AppIntentsMetadataTaskProducer: PhasedTaskProducer, TaskProducer { } let targetScope = self.targetContext.globalProductPlan.getTargetSettings(dependency).globalScope let machOType = targetScope.evaluate(BuiltinMacros.MACH_O_TYPE) - if machOType == "staticlib" || machOType == "mh_object" { + if machOType == "staticlib" || machOType == "mh_object" { let dependencyMetadataPath = targetScope.evaluate(BuiltinMacros.TARGET_BUILD_DIR) .join(targetScope.evaluate(BuiltinMacros.PRODUCT_MODULE_NAME) + ".appintents") .join("Metadata.appintents/extract.actionsdata") @@ -141,7 +143,6 @@ final class AppIntentsMetadataTaskProducer: PhasedTaskProducer, TaskProducer { let cbc = CommandBuildContext(producer: self.context, scope: scope, inputs: swiftSources + constMetadataFilesToBuild + appShortcutStringsSources, resourcesDir: buildFilesProcessingContext.resourcesDir) - let assistantIntentsStringsSources: [FileToBuild] = self.filterBuildFiles(buildPhaseTarget.resourcesBuildPhase?.buildFiles, identifiers: ["text.plist.strings", "text.json.xcstrings"], buildFilesProcessingContext: buildFilesProcessingContext).filter { ["AssistantIntents.strings", "AssistantIntents.xcstrings"].contains($0.absolutePath.basename) } await self.appendGeneratedTasks(&deferredTasks) { delegate in let shouldConstructAppIntentsMetadataTask = self.context.appIntentsMetadataCompilerSpec.shouldConstructAppIntentsMetadataTask(cbc) @@ -156,10 +157,7 @@ final class AppIntentsMetadataTaskProducer: PhasedTaskProducer, TaskProducer { // Only construct SSU task by default for public SDK clients. Internal default behavior should skip SSU task construction. 
let isSSUEnabled = scope.evaluate(BuiltinMacros.APP_SHORTCUTS_ENABLE_FLEXIBLE_MATCHING) - if isSSUEnabled && - self.context.settings.platform?.familyName == "iOS" && - self.context.productType?.hasInfoPlist == true && - ((!scope.effectiveInputInfoPlistPath().isEmpty && shouldConstructAppIntentsMetadataTask) || isInstallLoc) { + if isSSUEnabled && self.context.settings.platform?.familyName == "iOS" && self.context.productType?.hasInfoPlist == true && ((!scope.effectiveInputInfoPlistPath().isEmpty && shouldConstructAppIntentsMetadataTask) || isInstallLoc) { var infoPlistSources: [FileToBuild] if isInstallLoc { infoPlistSources = self.filterBuildFiles(buildPhaseTarget.resourcesBuildPhase?.buildFiles, identifiers: ["text.plist.strings", "text.json.xcstrings"], buildFilesProcessingContext: buildFilesProcessingContext).filter { $0.absolutePath.basename.hasSuffix("InfoPlist.strings") || $0.absolutePath.basename.hasSuffix("InfoPlist.xcstrings") } diff --git a/Sources/SWBApplePlatform/AppIntentsSSUTrainingCompiler.swift b/Sources/SWBApplePlatform/AppIntentsSSUTrainingCompiler.swift index 19796296..309108b5 100644 --- a/Sources/SWBApplePlatform/AppIntentsSSUTrainingCompiler.swift +++ b/Sources/SWBApplePlatform/AppIntentsSSUTrainingCompiler.swift @@ -46,21 +46,23 @@ final public class AppIntentsSSUTrainingCompilerSpec: GenericCommandLineToolSpec if cbc.scope.evaluate(BuiltinMacros.BUILD_COMPONENTS).contains("installLoc") { guard cbc.inputs[0].fileType.conformsTo(stringsFileType) || cbc.inputs[0].fileType.conformsTo(xcstringsFileType), - cbc.inputs[0].absolutePath.basename.hasSuffix("InfoPlist.strings") || - cbc.inputs[0].absolutePath.basename.hasSuffix("InfoPlist.xcstrings") else { + cbc.inputs[0].absolutePath.basename.hasSuffix("InfoPlist.strings") || cbc.inputs[0].absolutePath.basename.hasSuffix("InfoPlist.xcstrings") + else { assertionFailure("AppIntents YAML Generation task construction was passed context without InfoPlist.strings file.") return false } } else { guard cbc.inputs[0].fileType.conformsTo(plistFileType), - cbc.inputs[0].absolutePath.basename == "Info.plist" else { + cbc.inputs[0].absolutePath.basename == "Info.plist" + else { assertionFailure("AppIntents YAML Generation task construction was passed context without Info.plist file.") return false } } if cbc.inputs.count == 2 { guard cbc.inputs[1].fileType.conformsTo(stringsFileType) || cbc.inputs[1].fileType.conformsTo(xcstringsFileType), - ["AppShortcuts.strings", "AppShortcuts.xcstrings"].contains(cbc.inputs[1].absolutePath.basename) else { + ["AppShortcuts.strings", "AppShortcuts.xcstrings"].contains(cbc.inputs[1].absolutePath.basename) + else { assertionFailure("AppIntents YAML Generation task construction was passed context without AppShortcuts.strings file.") return false } @@ -92,7 +94,7 @@ final public class AppIntentsSSUTrainingCompilerSpec: GenericCommandLineToolSpec } else { inputNodeIdentifier = "ValidateAppShortcutStringsMetadata \(cbc.inputs[1].absolutePath.str)" } - let inputOrderingNode = delegate.createVirtualNode(inputNodeIdentifier) // Create the NL training task only if AppShortcuts.strings is validated + let inputOrderingNode = delegate.createVirtualNode(inputNodeIdentifier) // Create the NL training task only if AppShortcuts.strings is validated inputs.append(inputOrderingNode) } @@ -118,16 +120,17 @@ final public class AppIntentsSSUTrainingCompilerSpec: GenericCommandLineToolSpec } let commandLine = await commandLineFromTemplate(cbc, delegate, optionContext: discoveredCommandLineToolSpecInfo(cbc.producer, 
cbc.scope, delegate), lookup: lookup).map(\.asString) - delegate.createTask(type: self, - ruleInfo: defaultRuleInfo(cbc, delegate), - commandLine: commandLine, - environment: environmentFromSpec(cbc, delegate), - workingDirectory: cbc.producer.defaultWorkingDirectory, - inputs: inputs, - outputs: outputs, - action: nil, - execDescription: resolveExecutionDescription(cbc, delegate), - enableSandboxing: enableSandboxing) + delegate.createTask( + type: self, + ruleInfo: defaultRuleInfo(cbc, delegate), + commandLine: commandLine, + environment: environmentFromSpec(cbc, delegate), + workingDirectory: cbc.producer.defaultWorkingDirectory, + inputs: inputs, + outputs: outputs, + action: nil, + execDescription: resolveExecutionDescription(cbc, delegate), + enableSandboxing: enableSandboxing + ) } } - diff --git a/Sources/SWBApplePlatform/AssetCatalogCompiler.swift b/Sources/SWBApplePlatform/AssetCatalogCompiler.swift index a0e1117c..6bb45d7b 100644 --- a/Sources/SWBApplePlatform/AssetCatalogCompiler.swift +++ b/Sources/SWBApplePlatform/AssetCatalogCompiler.swift @@ -16,7 +16,7 @@ public import SWBCore import SWBTaskConstruction public import SWBMacro -public final class ActoolCompilerSpec : GenericCompilerSpec, SpecIdentifierType, IbtoolCompilerSupport, @unchecked Sendable { +public final class ActoolCompilerSpec: GenericCompilerSpec, SpecIdentifierType, IbtoolCompilerSupport, @unchecked Sendable { public static let identifier = "com.apple.compilers.assetcatalog" private func constructAssetPackOutputSpecificationsTask(catalogInputs inputs: [FileToBuild], _ cbc: CommandBuildContext, _ delegate: any TaskGenerationDelegate) async -> Path? { @@ -44,8 +44,7 @@ public final class ActoolCompilerSpec : GenericCompilerSpec, SpecIdentifierType, let data: [UInt8] do { data = try plist.asBytes(.xml) - } - catch { + } catch { delegate.error("failed to format plist for \(path): \(plist): \(error)") return nil } @@ -158,7 +157,7 @@ public final class ActoolCompilerSpec : GenericCompilerSpec, SpecIdentifierType, if let assetPackInfoFile = await constructAssetPackOutputSpecificationsTask(catalogInputs: catalogInputs, cbc, delegate) { specialArgs += [ "--asset-pack-output-specifications", - assetPackInfoFile.str + assetPackInfoFile.str, ] odrInputs.append(delegate.createNode(assetPackInfoFile)) @@ -170,12 +169,14 @@ public final class ActoolCompilerSpec : GenericCompilerSpec, SpecIdentifierType, return cbc.scope.table.namespace.parseLiteralStringList(catalogInputPaths.map({ $0.str })) case BuiltinMacros.ASSETCATALOG_COMPILER_STICKER_PACK_STRINGS: - return cbc.scope.table.namespace.parseLiteralStringList(stringsInputPaths.map({ stringsFile in - let base = stringsFile.absolutePath.basenameWithoutSuffix - let region = stringsFile.regionVariantName ?? "" - let path = stringsFile.absolutePath.str - return [base, region, path].joined(separator: ":") - })) + return cbc.scope.table.namespace.parseLiteralStringList( + stringsInputPaths.map({ stringsFile in + let base = stringsFile.absolutePath.basenameWithoutSuffix + let region = stringsFile.regionVariantName ?? "" + let path = stringsFile.absolutePath.str + return [base, region, path].joined(separator: ":") + }) + ) default: return nil @@ -239,12 +240,14 @@ public final class ActoolCompilerSpec : GenericCompilerSpec, SpecIdentifierType, // which it currently receives a listing of via the assetcatalog_dependencies file produced by actool. 
let carFiles = [cbc.resourcesDir?.join("Assets.car")].compactMap { $0 }.map(delegate.createNode) - let outputs = evaluatedOutputsResult + (additionalEvaluatedOutputsResult.outputs).map { output in - if let fileTypeIdentifier = output.fileType, let fileType = cbc.producer.lookupFileType(identifier: fileTypeIdentifier) { - delegate.declareOutput(FileToBuild(absolutePath: output.path, fileType: fileType)) + let outputs = + evaluatedOutputsResult + + (additionalEvaluatedOutputsResult.outputs).map { output in + if let fileTypeIdentifier = output.fileType, let fileType = cbc.producer.lookupFileType(identifier: fileTypeIdentifier) { + delegate.declareOutput(FileToBuild(absolutePath: output.path, fileType: fileType)) + } + return delegate.createNode(output.path) } - return delegate.createNode(output.path) - } guard !outputs.isEmpty else { preconditionFailure("ActoolCompilerSpec.constructTasks() invoked with no outputs defined") } let assetSymbolInputs = cbc.inputs @@ -414,13 +417,13 @@ public final class ActoolCompilerSpec : GenericCompilerSpec, SpecIdentifierType, // Only compile asset catalogs during the build action, not installapi or installhdrs. // Never compile asset catalogs in main package targets with synthesized resource bundles since they're compiled in the latter. if buildComponents.contains("build") && !isMainPackageWithResourceBundle { - let variants: [(variant: AssetCatalogVariant, node: PlannedDirectoryTreeNode)] = await [.thinned, .unthinned].asyncMap { variant in - await createAssetCatalogTask(variant: variant).map { (variant, $0) } - }.compactMap { $0 } + let variants: [(variant: AssetCatalogVariant, node: PlannedDirectoryTreeNode)] = await [.thinned, .unthinned].asyncMap { variant in + await createAssetCatalogTask(variant: variant).map { (variant, $0) } + }.compactMap { $0 } - let signaturePath = cbc.scope.evaluate(BuiltinMacros.TARGET_TEMP_DIR).join("assetcatalog_signature") + let signaturePath = cbc.scope.evaluate(BuiltinMacros.TARGET_TEMP_DIR).join("assetcatalog_signature") - delegate.createTask(type: self, ruleInfo: ["LinkAssetCatalogSignature"], commandLine: ["builtin-linkAssetCatalogSignature", signaturePath.str], environment: EnvironmentBindings(), workingDirectory: cbc.producer.defaultWorkingDirectory, inputs: [], outputs: [signaturePath], action: delegate.taskActionCreationDelegate.createLinkAssetCatalogTaskAction(), enableSandboxing: false, alwaysExecuteTask: true, showInLog: false) + delegate.createTask(type: self, ruleInfo: ["LinkAssetCatalogSignature"], commandLine: ["builtin-linkAssetCatalogSignature", signaturePath.str], environment: EnvironmentBindings(), workingDirectory: cbc.producer.defaultWorkingDirectory, inputs: [], outputs: [signaturePath], action: delegate.taskActionCreationDelegate.createLinkAssetCatalogTaskAction(), enableSandboxing: false, alwaysExecuteTask: true, showInLog: false) guard let plistOutputPath = await additionalEvaluatedOutputs(cbc, delegate).generatedInfoPlistContent else { delegate.error("Unable to determine output path for asset catalog Info.plist content") diff --git a/Sources/SWBApplePlatform/AssetCatalogCompilerOutputParser.swift b/Sources/SWBApplePlatform/AssetCatalogCompilerOutputParser.swift index e7413dc2..4ea48586 100644 --- a/Sources/SWBApplePlatform/AssetCatalogCompilerOutputParser.swift +++ b/Sources/SWBApplePlatform/AssetCatalogCompilerOutputParser.swift @@ -13,7 +13,7 @@ public import SWBCore public import SWBUtil -public final class AssetCatalogCompilerOutputParser : GenericOutputParser { +public final class 
AssetCatalogCompilerOutputParser: GenericOutputParser { /// Regex to extract location information from a diagnostic prefix (capture group 0 is the name, 1 is the line number or the object identifier). static let locRegex = RegEx(patternLiteral: "^([^:]+):(?:([^:]+):)? +$") diff --git a/Sources/SWBApplePlatform/CoreDataCompiler.swift b/Sources/SWBApplePlatform/CoreDataCompiler.swift index 9e3bc3ea..99cab72c 100644 --- a/Sources/SWBApplePlatform/CoreDataCompiler.swift +++ b/Sources/SWBApplePlatform/CoreDataCompiler.swift @@ -14,7 +14,7 @@ import SWBUtil public import SWBCore import SWBMacro -public final class CoreDataModelCompilerSpec : GenericCompilerSpec, SpecIdentifierType, @unchecked Sendable { +public final class CoreDataModelCompilerSpec: GenericCompilerSpec, SpecIdentifierType, @unchecked Sendable { public static let identifier = "com.apple.compilers.model.coredata" public override var supportsInstallHeaders: Bool { diff --git a/Sources/SWBApplePlatform/CoreMLCompiler.swift b/Sources/SWBApplePlatform/CoreMLCompiler.swift index 1fd193d2..2fdb0e20 100644 --- a/Sources/SWBApplePlatform/CoreMLCompiler.swift +++ b/Sources/SWBApplePlatform/CoreMLCompiler.swift @@ -92,23 +92,23 @@ fileprivate enum CoreMLIndexingInfo: Serializable, SourceFileIndexingInfo, Encod var dict = [String: PropertyListItem]() switch self { - case .success(let generatedFilePaths, let languageToGenerate, let notice): - if let generatedFilePaths { - dict["COREMLCOMPILER_GENERATED_FILE_PATHS"] = PropertyListItem(generatedFilePaths.map({ $0.str })) - } - dict["COREMLCOMPILER_LANGUAGE_TO_GENERATE"] = PropertyListItem(languageToGenerate) - if let notice { - dict["COREMLCOMPILER_GENERATOR_NOTICE"] = PropertyListItem(notice) - } - case .failure(let error): - dict["COREMLCOMPILER_GENERATOR_ERROR"] = PropertyListItem(error) + case .success(let generatedFilePaths, let languageToGenerate, let notice): + if let generatedFilePaths { + dict["COREMLCOMPILER_GENERATED_FILE_PATHS"] = PropertyListItem(generatedFilePaths.map({ $0.str })) + } + dict["COREMLCOMPILER_LANGUAGE_TO_GENERATE"] = PropertyListItem(languageToGenerate) + if let notice { + dict["COREMLCOMPILER_GENERATOR_NOTICE"] = PropertyListItem(notice) + } + case .failure(let error): + dict["COREMLCOMPILER_GENERATOR_ERROR"] = PropertyListItem(error) } return .plDict(dict) } } -public final class CoreMLCompilerSpec : GenericCompilerSpec, SpecIdentifierType, @unchecked Sendable { +public final class CoreMLCompilerSpec: GenericCompilerSpec, SpecIdentifierType, @unchecked Sendable { public static let identifier = "com.apple.compilers.coreml" public override var supportsInstallHeaders: Bool { @@ -178,8 +178,7 @@ public final class CoreMLCompilerSpec : GenericCompilerSpec, SpecIdentifierType, delegate.warning("\(input.absolutePath.basename): Target's predominant language \"\(languageString)\" is not supported for CoreML code generation. Selecting Objective-C by default. Set COREML_CODEGEN_LANGUAGE to preferred language.") codegenLanguage = "Objective-C" } - } - else { + } else { guard Set(["Swift", "Objective-C", "None"]).contains(languageSettingValue) else { // If the setting is set to an unexpected value, then emit an error and return. delegate.error("\(input.absolutePath.basename): COREML_CODEGEN_LANGUAGE set to unsupported language \"\(languageSettingValue)\". 
Set COREML_CODEGEN_LANGUAGE to preferred language.") @@ -227,10 +226,10 @@ public final class CoreMLCompilerSpec : GenericCompilerSpec, SpecIdentifierType, // Support .mlpackage let inputs = cbc.inputs.map { input -> (any PlannedNode) in - if input.fileType.isWrapper { - return delegate.createDirectoryTreeNode(input.absolutePath) - } - return delegate.createNode(input.absolutePath) + if input.fileType.isWrapper { + return delegate.createDirectoryTreeNode(input.absolutePath) + } + return delegate.createNode(input.absolutePath) } let ruleInfo = ["CoreMLModelCodegen", input.absolutePath.str] @@ -296,7 +295,8 @@ public final class CoreMLCompilerSpec : GenericCompilerSpec, SpecIdentifierType, guard let target = cbc.producer.configuredTarget?.target as? SWBCore.BuildPhaseTarget, let outputPath = headerOutputPath, - target.headersBuildPhase != nil else { continue } + target.headersBuildPhase != nil + else { continue } await cbc.producer.copySpec.constructCopyTasks(CommandBuildContext(producer: cbc.producer, scope: cbc.scope, inputs: [outputFile], output: outputPath, preparesForIndexing: true), delegate, additionalTaskOrderingOptions: .compilationRequirement) } diff --git a/Sources/SWBApplePlatform/DittoTool.swift b/Sources/SWBApplePlatform/DittoTool.swift index 0a765c9a..a05fa1e7 100644 --- a/Sources/SWBApplePlatform/DittoTool.swift +++ b/Sources/SWBApplePlatform/DittoTool.swift @@ -12,7 +12,7 @@ import SWBCore -final class DittoToolSpec : CommandLineToolSpec, SpecIdentifierType, @unchecked Sendable { +final class DittoToolSpec: CommandLineToolSpec, SpecIdentifierType, @unchecked Sendable { static let identifier = "com.apple.tools.ditto" required convenience init(_ parser: SpecParser, _ basedOnSpec: Spec?) { diff --git a/Sources/SWBApplePlatform/EXUtil.swift b/Sources/SWBApplePlatform/EXUtil.swift index 86fb6679..4203c83e 100644 --- a/Sources/SWBApplePlatform/EXUtil.swift +++ b/Sources/SWBApplePlatform/EXUtil.swift @@ -27,14 +27,13 @@ final class ExtensionPointExtractorSpec: GenericCommandLineToolSpec, SpecIdentif let isAppProductType = productType?.conformsTo(identifier: "com.apple.product-type.application") ?? 
false let extensionPointExtractorEnabled = scope.evaluate(BuiltinMacros.EX_ENABLE_EXTENSION_POINT_GENERATION) - let result = ( - isBuild - && isNormalVariant - && extensionPointExtractorEnabled - && !indexEnableBuildArena - && isAppProductType - && isApplePlatform - ) + let result = + (isBuild + && isNormalVariant + && extensionPointExtractorEnabled + && !indexEnableBuildArena + && isAppProductType + && isApplePlatform) return result } @@ -55,16 +54,18 @@ final class ExtensionPointExtractorSpec: GenericCommandLineToolSpec, SpecIdentif let commandLine = await commandLineFromTemplate(cbc, delegate, optionContext: discoveredCommandLineToolSpecInfo(cbc.producer, cbc.scope, delegate)).map(\.asString) - delegate.createTask(type: self, - ruleInfo: defaultRuleInfo(cbc, delegate), - commandLine: commandLine, - environment: environmentFromSpec(cbc, delegate), - workingDirectory: cbc.producer.defaultWorkingDirectory, - inputs: inputs, - outputs: outputs, - action: nil, - execDescription: resolveExecutionDescription(cbc, delegate), - enableSandboxing: enableSandboxing) + delegate.createTask( + type: self, + ruleInfo: defaultRuleInfo(cbc, delegate), + commandLine: commandLine, + environment: environmentFromSpec(cbc, delegate), + workingDirectory: cbc.producer.defaultWorkingDirectory, + inputs: inputs, + outputs: outputs, + action: nil, + execDescription: resolveExecutionDescription(cbc, delegate), + enableSandboxing: enableSandboxing + ) } } @@ -79,12 +80,13 @@ final class AppExtensionPlistGeneratorSpec: GenericCommandLineToolSpec, SpecIden let isAppExtensionProductType = productType?.conformsTo(identifier: "com.apple.product-type.extensionkit-extension") ?? false let extensionPointAttributesGenerationEnabled = !scope.evaluate(BuiltinMacros.EX_DISABLE_APPEXTENSION_ATTRIBUTES_GENERATION) - let result = ( isBuild - && isNormalVariant - && extensionPointAttributesGenerationEnabled - && !indexEnableBuildArena - && (isAppExtensionProductType) - && isApplePlatform ) + let result = + (isBuild + && isNormalVariant + && extensionPointAttributesGenerationEnabled + && !indexEnableBuildArena + && (isAppExtensionProductType) + && isApplePlatform) return result } @@ -106,19 +108,19 @@ final class AppExtensionPlistGeneratorSpec: GenericCommandLineToolSpec, SpecIden let outputPath = cbc.output outputs.append(delegate.createNode(outputPath)) - let commandLine = await commandLineFromTemplate(cbc, delegate, optionContext: discoveredCommandLineToolSpecInfo(cbc.producer, cbc.scope, delegate)).map(\.asString) - delegate.createTask(type: self, - ruleInfo: defaultRuleInfo(cbc, delegate), - commandLine: commandLine, - environment: environmentFromSpec(cbc, delegate), - workingDirectory: cbc.producer.defaultWorkingDirectory, - inputs: inputs, - outputs: outputs, - action: nil, - execDescription: resolveExecutionDescription(cbc, delegate), - enableSandboxing: enableSandboxing + delegate.createTask( + type: self, + ruleInfo: defaultRuleInfo(cbc, delegate), + commandLine: commandLine, + environment: environmentFromSpec(cbc, delegate), + workingDirectory: cbc.producer.defaultWorkingDirectory, + inputs: inputs, + outputs: outputs, + action: nil, + execDescription: resolveExecutionDescription(cbc, delegate), + enableSandboxing: enableSandboxing ) } } diff --git a/Sources/SWBApplePlatform/ExtensionPointExtractorTaskProducer.swift b/Sources/SWBApplePlatform/ExtensionPointExtractorTaskProducer.swift index e842bdb9..ff0d9782 100644 --- a/Sources/SWBApplePlatform/ExtensionPointExtractorTaskProducer.swift +++ 
b/Sources/SWBApplePlatform/ExtensionPointExtractorTaskProducer.swift @@ -33,8 +33,9 @@ final class ExtensionPointExtractorTaskProducer: PhasedTaskProducer, TaskProduce return fileTypes.flatMap { fileType in buildFiles.compactMap { buildFile in guard let resolvedBuildFileInfo = try? self.context.resolveBuildFileReference(buildFile), - !buildFilesProcessingContext.isExcluded(resolvedBuildFileInfo.absolutePath, filters: buildFile.platformFilters), - resolvedBuildFileInfo.fileType.conformsTo(fileType) else { + !buildFilesProcessingContext.isExcluded(resolvedBuildFileInfo.absolutePath, filters: buildFile.platformFilters), + resolvedBuildFileInfo.fileType.conformsTo(fileType) + else { return nil } @@ -78,7 +79,7 @@ final class ExtensionPointExtractorTaskProducer: PhasedTaskProducer, TaskProduce let cbc = CommandBuildContext(producer: self.context, scope: scope, inputs: inputs, resourcesDir: buildFilesProcessingContext.resourcesDir) await self.appendGeneratedTasks(&deferredTasks) { delegate in let domain = self.context.settings.platform?.name ?? "" - guard let spec = self.context.specRegistry.getSpec("com.apple.compilers.extract-appextensionpoints", domain:domain) as? ExtensionPointExtractorSpec else { + guard let spec = self.context.specRegistry.getSpec("com.apple.compilers.extract-appextensionpoints", domain: domain) as? ExtensionPointExtractorSpec else { return } await spec.constructTasks(cbc, delegate) @@ -90,7 +91,6 @@ final class ExtensionPointExtractorTaskProducer: PhasedTaskProducer, TaskProduce } } - final class AppExtensionInfoPlistGeneratorTaskProducer: PhasedTaskProducer, TaskProducer { override var defaultTaskOrderingOptions: TaskOrderingOptions { @@ -109,8 +109,9 @@ final class AppExtensionInfoPlistGeneratorTaskProducer: PhasedTaskProducer, Task return fileTypes.flatMap { fileType in buildFiles.compactMap { buildFile in guard let resolvedBuildFileInfo = try? self.context.resolveBuildFileReference(buildFile), - !buildFilesProcessingContext.isExcluded(resolvedBuildFileInfo.absolutePath, filters: buildFile.platformFilters), - resolvedBuildFileInfo.fileType.conformsTo(fileType) else { + !buildFilesProcessingContext.isExcluded(resolvedBuildFileInfo.absolutePath, filters: buildFile.platformFilters), + resolvedBuildFileInfo.fileType.conformsTo(fileType) + else { return nil } @@ -157,7 +158,7 @@ final class AppExtensionInfoPlistGeneratorTaskProducer: PhasedTaskProducer, Task await self.appendGeneratedTasks(&deferredTasks) { delegate in let domain = self.context.settings.platform?.name ?? "" - guard let spec = self.context.specRegistry.getSpec("com.apple.compilers.appextension-plist-generator",domain: domain) as? AppExtensionPlistGeneratorSpec else { + guard let spec = self.context.specRegistry.getSpec("com.apple.compilers.appextension-plist-generator", domain: domain) as? AppExtensionPlistGeneratorSpec else { return } await spec.constructTasks(cbc, delegate) diff --git a/Sources/SWBApplePlatform/ImageScaleFactorsInputFileGroupingStrategy.swift b/Sources/SWBApplePlatform/ImageScaleFactorsInputFileGroupingStrategy.swift index a9382539..01d0782e 100644 --- a/Sources/SWBApplePlatform/ImageScaleFactorsInputFileGroupingStrategy.swift +++ b/Sources/SWBApplePlatform/ImageScaleFactorsInputFileGroupingStrategy.swift @@ -16,7 +16,7 @@ import SWBUtil // FIXME: Presently we have no good way during grouping to detect whether we're matching a single file against tiffutil or multiple files against tiffutil. 
In principle we could handle that in FilesBasedBuildPhaseTaskProducer once all grouping is completed. But at present this is handled in a hackier manner in TiffUtilToolSpec.constructTasks() - see the FIXME comment therein for more about this. // /// A grouping strategy that groups image files containing scale factors of the form "@x" at the end of their base names. -@_spi(Testing) public final class ImageScaleFactorsInputFileGroupingStrategy : InputFileGroupingStrategy { +@_spi(Testing) public final class ImageScaleFactorsInputFileGroupingStrategy: InputFileGroupingStrategy { /// Name of the tool to which the grouping strategy belongs (used as a part of the returned group identifier). let toolName: String diff --git a/Sources/SWBApplePlatform/InstrumentsPackageBuilderSpec.swift b/Sources/SWBApplePlatform/InstrumentsPackageBuilderSpec.swift index cc0d38c9..65e3af23 100644 --- a/Sources/SWBApplePlatform/InstrumentsPackageBuilderSpec.swift +++ b/Sources/SWBApplePlatform/InstrumentsPackageBuilderSpec.swift @@ -22,9 +22,11 @@ public final class InstrumentsPackageBuilderSpec: GenericCompilerSpec, SpecIdent // Copy the cbc with an additional virtual node as an output for postprocessing mutating tasks to use to find this task. var orderingOutputs: [any PlannedNode] = [] - orderingOutputs.append(contentsOf: evaluatedOutputs(cbc, delegate)?.map({ output in - delegate.createVirtualNode("BuildInstrumentsPackage \(output.path.str)") - }) ?? []) + orderingOutputs.append( + contentsOf: evaluatedOutputs(cbc, delegate)?.map({ output in + delegate.createVirtualNode("BuildInstrumentsPackage \(output.path.str)") + }) ?? [] + ) let dependencyData: DependencyDataStyle? let infoFilePath = cbc.scope.evaluate(BuiltinMacros.INSTRUMENTS_PACKAGE_BUILDER_DEPENDENCY_INFO_FILE) diff --git a/Sources/SWBApplePlatform/IntentsCompiler.swift b/Sources/SWBApplePlatform/IntentsCompiler.swift index ccffd2ff..9fbf1ce8 100644 --- a/Sources/SWBApplePlatform/IntentsCompiler.swift +++ b/Sources/SWBApplePlatform/IntentsCompiler.swift @@ -92,7 +92,7 @@ fileprivate enum IntentsIndexingInfo: Serializable, SourceFileIndexingInfo, Enco } } -public final class IntentsCompilerSpec : GenericCompilerSpec, SpecIdentifierType, @unchecked Sendable { +public final class IntentsCompilerSpec: GenericCompilerSpec, SpecIdentifierType, @unchecked Sendable { public static let identifier = "com.apple.compilers.intents" public override var supportsInstallAPI: Bool { @@ -130,7 +130,8 @@ public final class IntentsCompilerSpec : GenericCompilerSpec, SpecIdentifierType guard let target = cbc.producer.configuredTarget?.target as? SWBCore.BuildPhaseTarget, target.sourcesBuildPhase != nil, - let intentsCodegenVisibility = input.intentsCodegenVisibility else { return } + let intentsCodegenVisibility = input.intentsCodegenVisibility + else { return } if case .noCodegen = intentsCodegenVisibility { return @@ -244,7 +245,8 @@ public final class IntentsCompilerSpec : GenericCompilerSpec, SpecIdentifierType guard let target = cbc.producer.configuredTarget?.target as? 
SWBCore.BuildPhaseTarget, let outputPath = headerOutputPath, - target.headersBuildPhase != nil else { continue } + target.headersBuildPhase != nil + else { continue } await cbc.producer.copySpec.constructCopyTasks(CommandBuildContext(producer: cbc.producer, scope: cbc.scope, inputs: [outputFile], output: outputPath, preparesForIndexing: true), delegate, additionalTaskOrderingOptions: .compilationRequirement) } diff --git a/Sources/SWBApplePlatform/InterfaceBuilderCompiler.swift b/Sources/SWBApplePlatform/InterfaceBuilderCompiler.swift index b8a181bf..c5433f50 100644 --- a/Sources/SWBApplePlatform/InterfaceBuilderCompiler.swift +++ b/Sources/SWBApplePlatform/InterfaceBuilderCompiler.swift @@ -15,7 +15,7 @@ public import SWBCore public import SWBMacro import Synchronization -public class IbtoolCompilerSpec : GenericCompilerSpec, IbtoolCompilerSupport, @unchecked Sendable { +public class IbtoolCompilerSpec: GenericCompilerSpec, IbtoolCompilerSupport, @unchecked Sendable { /// The info object collects information across the build phase so that an ibtool task doesn't try to produce a ~device output which is already being explicitly produced from another input. private final class BuildPhaseInfo: BuildPhaseInfoForToolSpec, Sendable { let allInputFilenames = SWBMutex>([]) @@ -26,7 +26,7 @@ public class IbtoolCompilerSpec : GenericCompilerSpec, IbtoolCompilerSupport, @u guard ftb.fileType.identifier == "file.xib" else { return } - allInputFilenames.withLock{ $0.insert(ftb.absolutePath.basenameWithoutSuffix) } + allInputFilenames.withLock { $0.insert(ftb.absolutePath.basenameWithoutSuffix) } } func filterOutputFiles(_ outputs: [any PlannedNode], inputs: [Path]) -> [any PlannedNode] { @@ -171,7 +171,8 @@ public final class IbtoolCompilerSpecStoryboard: IbtoolCompilerSpec, SpecIdentif sandboxDirectory: cbc.scope.evaluate(BuiltinMacros.TEMP_SANDBOX_DIR), extraSandboxSubdirectories: [], developerDirectory: cbc.scope.evaluate(BuiltinMacros.DEVELOPER_DIR), - casOptions: casOptions) + casOptions: casOptions + ) } else { return nil } diff --git a/Sources/SWBApplePlatform/InterfaceBuilderCompilerOutputParser.swift b/Sources/SWBApplePlatform/InterfaceBuilderCompilerOutputParser.swift index 89c34d05..6dbbc592 100644 --- a/Sources/SWBApplePlatform/InterfaceBuilderCompilerOutputParser.swift +++ b/Sources/SWBApplePlatform/InterfaceBuilderCompilerOutputParser.swift @@ -13,7 +13,7 @@ public import SWBUtil public import SWBCore -public final class InterfaceBuilderCompilerOutputParser : GenericOutputParser { +public final class InterfaceBuilderCompilerOutputParser: GenericOutputParser { /// Regex to extract location information from a diagnostic prefix (capture group 0 is the name, 1 is the line number or the object identifier). static let locRegex = RegEx(patternLiteral: "^([^:]+):(?:([^:]+):)? .*$") diff --git a/Sources/SWBApplePlatform/MetalCompiler.swift b/Sources/SWBApplePlatform/MetalCompiler.swift index 6b5f1631..d4671ffb 100644 --- a/Sources/SWBApplePlatform/MetalCompiler.swift +++ b/Sources/SWBApplePlatform/MetalCompiler.swift @@ -40,13 +40,15 @@ struct MetalSourceFileIndexingInfo: SourceFileIndexingInfo { /// The indexing info is packaged and sent to the client in the property list format defined here. 
public var propertyListItem: PropertyListItem { - return .plDict([ - "outputFilePath": .plString(outputFile.str), - "LanguageDialect": .plString("metal"), - "metalASTCommandArguments": .plArray(commandLine.map { .plString($0.asString) }), - "metalASTBuiltProductsDir": .plString(builtProductsDir.str), - "toolchains": .plArray(toolchains.map {.plString($0)}) - ] as [String: PropertyListItem]) + return .plDict( + [ + "outputFilePath": .plString(outputFile.str), + "LanguageDialect": .plString("metal"), + "metalASTCommandArguments": .plArray(commandLine.map { .plString($0.asString) }), + "metalASTBuiltProductsDir": .plString(builtProductsDir.str), + "toolchains": .plArray(toolchains.map { .plString($0) }), + ] as [String: PropertyListItem] + ) } } @@ -64,11 +66,13 @@ fileprivate struct MetalIndexingPayload: Serializable, Encodable { let workingDir: Path let toolchains: [String] - init(sourceFileIndex: Int, - outputFileIndex: Int, - builtProductsDir: Path, - workingDir: Path, - toolchains: [String]) { + init( + sourceFileIndex: Int, + outputFileIndex: Int, + builtProductsDir: Path, + workingDir: Path, + toolchains: [String] + ) { self.sourceFileIndex = sourceFileIndex self.outputFileIndex = outputFileIndex self.builtProductsDir = builtProductsDir @@ -126,7 +130,7 @@ fileprivate struct MetalTaskPayload: TaskPayload, Encodable { } } -public final class MetalCompilerSpec : GenericCompilerSpec, SpecIdentifierType, @unchecked Sendable { +public final class MetalCompilerSpec: GenericCompilerSpec, SpecIdentifierType, @unchecked Sendable { public static let identifier = "com.apple.compilers.metal" public override func constructTasks(_ cbc: CommandBuildContext, _ delegate: any TaskGenerationDelegate) async { @@ -156,7 +160,7 @@ public final class MetalCompilerSpec : GenericCompilerSpec, SpecIdentifierType, outputFileIndex: outputFileIndex, builtProductsDir: cbc.scope.evaluate(BuiltinMacros.BUILT_PRODUCTS_DIR), workingDir: cbc.scope.evaluate(BuiltinMacros.PROJECT_DIR), - toolchains: cbc.producer.toolchains.map{ $0.identifier } + toolchains: cbc.producer.toolchains.map { $0.identifier } ) }() @@ -165,7 +169,8 @@ public final class MetalCompilerSpec : GenericCompilerSpec, SpecIdentifierType, // Create the task payload. let payload = MetalTaskPayload( serializedDiagnosticsPath: diagFilePath, - indexingPayload: indexingPayload) + indexingPayload: indexingPayload + ) await super.constructTasks(cbc, delegate, specialArgs: [], payload: payload, commandLine: commandLine, additionalTaskOrderingOptions: [.compilationForIndexableSourceFile], toolLookup: nil) } @@ -206,6 +211,6 @@ public final class MetalCompilerSpec : GenericCompilerSpec, SpecIdentifierType, override public var payloadType: (any TaskPayload.Type)? { return MetalTaskPayload.self } } -public final class MetalLinkerSpec : GenericCompilerSpec, SpecIdentifierType, @unchecked Sendable { +public final class MetalLinkerSpec: GenericCompilerSpec, SpecIdentifierType, @unchecked Sendable { public static let identifier = "com.apple.compilers.metal-linker" } diff --git a/Sources/SWBApplePlatform/MiGCompiler.swift b/Sources/SWBApplePlatform/MiGCompiler.swift index 7d36416d..4ecd963d 100644 --- a/Sources/SWBApplePlatform/MiGCompiler.swift +++ b/Sources/SWBApplePlatform/MiGCompiler.swift @@ -20,7 +20,7 @@ public struct DiscoveredMiGToolSpecInfo: DiscoveredCommandLineToolSpecInfo { public var toolVersion: Version? 
} -public final class MigCompilerSpec : CompilerSpec, SpecIdentifierType, @unchecked Sendable { +public final class MigCompilerSpec: CompilerSpec, SpecIdentifierType, @unchecked Sendable { public static let identifier = "com.apple.compilers.mig" required override init(_ parser: SpecParser, _ basedOnSpec: Spec?, isGeneric: Bool) { diff --git a/Sources/SWBApplePlatform/OpenCLCompiler.swift b/Sources/SWBApplePlatform/OpenCLCompiler.swift index 00e3913b..6163f651 100644 --- a/Sources/SWBApplePlatform/OpenCLCompiler.swift +++ b/Sources/SWBApplePlatform/OpenCLCompiler.swift @@ -14,7 +14,7 @@ import SWBUtil import SWBMacro import SWBCore -final class OpenCLCompilerSpec : CompilerSpec, SpecIdentifierType, GCCCompatibleCompilerCommandLineBuilder, @unchecked Sendable { +final class OpenCLCompilerSpec: CompilerSpec, SpecIdentifierType, GCCCompatibleCompilerCommandLineBuilder, @unchecked Sendable { static let identifier = "com.apple.compilers.opencl" private let openCLOutputs: [MacroStringExpression]? @@ -51,7 +51,7 @@ final class OpenCLCompilerSpec : CompilerSpec, SpecIdentifierType, GCCCompatible let openclc = scope.evaluate(BuiltinMacros.OPENCLC) let compilerVersionFlag = "-cl-std=" + scope.evaluate(BuiltinMacros.OPENCL_COMPILER_VERSION) - let preprocessorDefinitionsFlags = scope.evaluate(BuiltinMacros.OPENCL_PREPROCESSOR_DEFINITIONS).map{ "-D" + $0 } + let preprocessorDefinitionsFlags = scope.evaluate(BuiltinMacros.OPENCL_PREPROCESSOR_DEFINITIONS).map { "-D" + $0 } let headerSearchPaths = GCCCompatibleCompilerSpecSupport.headerSearchPathArguments(cbc.producer, scope, usesModules: scope.evaluate(BuiltinMacros.CLANG_ENABLE_MODULES)) let headerSearchPathFlags = headerSearchPaths.searchPathArguments(for: self, scope: scope) let frameworkSearchPaths = GCCCompatibleCompilerSpecSupport.frameworkSearchPathArguments(cbc.producer, scope) @@ -71,7 +71,7 @@ final class OpenCLCompilerSpec : CompilerSpec, SpecIdentifierType, GCCCompatible var commandLine = [await resolveExecutablePath(cbc, Path(openclc), delegate: delegate).str] commandLine += ["-x", "cl", compilerVersionFlag] - optimizationLevelFlag.map{ commandLine.append($0) } + optimizationLevelFlag.map { commandLine.append($0) } commandLine += preprocessorDefinitionsFlags commandLine += headerSearchPathFlags commandLine += frameworkSearchPathFlags @@ -114,7 +114,6 @@ final class OpenCLCompilerSpec : CompilerSpec, SpecIdentifierType, GCCCompatible } commandLine.append(scope.evaluate(BuiltinMacros.OPENCL_AUTO_VECTORIZE_ENABLE) ? 
"-cl-auto-vectorize-enable" : "-cl-auto-vectorize-disable") - let bundleIdentifier = scope.evaluate(BuiltinMacros.PRODUCT_BUNDLE_IDENTIFIER) if !bundleIdentifier.isEmpty { commandLine += ["-gcl-bc-bundle-identifier", bundleIdentifier] diff --git a/Sources/SWBApplePlatform/Plugin.swift b/Sources/SWBApplePlatform/Plugin.swift index 5606fb08..1ab6b5eb 100644 --- a/Sources/SWBApplePlatform/Plugin.swift +++ b/Sources/SWBApplePlatform/Plugin.swift @@ -46,9 +46,11 @@ struct TaskProducersExtension: TaskProducerExtension { } var unorderedPostSetupTaskProducers: [any TaskProducerFactory] { - [StubBinaryTaskProducerFactory(), - AppExtensionInfoPlistGeneratorTaskProducerFactory(), - ExtensionPointExtractorTaskProducerFactory()] + [ + StubBinaryTaskProducerFactory(), + AppExtensionInfoPlistGeneratorTaskProducerFactory(), + ExtensionPointExtractorTaskProducerFactory(), + ] } var unorderedPostBuildPhasesTaskProducers: [any TaskProducerFactory] { @@ -99,7 +101,7 @@ struct StubBinaryTaskProducerFactory: TaskProducerFactory, GlobalTaskProducerFac } } -struct AppIntentsMetadataTaskProducerFactory : TaskProducerFactory { +struct AppIntentsMetadataTaskProducerFactory: TaskProducerFactory { var name: String { "AppIntentsMetadataTaskProducer" } @@ -154,7 +156,7 @@ struct ApplePlatformSpecsExtension: SpecificationsExtension { findResourceBundle(nameWhenInstalledInToolchain: "SwiftBuild_SWBApplePlatform", resourceSearchPaths: resourceSearchPaths, defaultBundle: Bundle.module) } - func specificationDomains() -> [String : [String]] { + func specificationDomains() -> [String: [String]] { var mappings = [ "macosx": ["darwin"], "driverkit": ["darwin"], @@ -246,29 +248,29 @@ struct ApplePlatformInfoExtension: PlatformInfoExtension { } struct AppleSettingsBuilderExtension: SettingsBuilderExtension { - func addSDKSettings(_ sdk: SDK, _ variant: SDKVariant?, _ sparseSDKs: [SDK]) throws -> [String : String] { + func addSDKSettings(_ sdk: SDK, _ variant: SDKVariant?, _ sparseSDKs: [SDK]) throws -> [String: String] { guard variant?.llvmTargetTripleVendor == "apple" else { return [:] } return [ - "PER_ARCH_MODULE_FILE_DIR": "$(PER_ARCH_OBJECT_FILE_DIR)", + "PER_ARCH_MODULE_FILE_DIR": "$(PER_ARCH_OBJECT_FILE_DIR)" ] } - func addBuiltinDefaults(fromEnvironment environment: [String : String], parameters: BuildParameters) throws -> [String : String] { + func addBuiltinDefaults(fromEnvironment environment: [String: String], parameters: BuildParameters) throws -> [String: String] { let appIntentsProtocols = "AppIntent EntityQuery AppEntity TransientEntity AppEnum AppShortcutProviding AppShortcutsProvider AnyResolverProviding AppIntentsPackage DynamicOptionsProvider _IntentValueRepresentable _AssistantIntentsProvider _GenerativeFunctionExtractable IntentValueQuery Resolver" let extensionKitProtocols = "AppExtension ExtensionPointDefining" let constValueProtocols = [appIntentsProtocols, extensionKitProtocols].joined(separator: " ") - return ["SWIFT_EMIT_CONST_VALUE_PROTOCOLS" : constValueProtocols] + return ["SWIFT_EMIT_CONST_VALUE_PROTOCOLS": constValueProtocols] } - func addOverrides(fromEnvironment: [String : String], parameters: BuildParameters) throws -> [String : String] { [:] } - func addProductTypeDefaults(productType: ProductTypeSpec) -> [String : String] { [:] } - func addSDKOverridingSettings(_ sdk: SDK, _ variant: SDKVariant?, _ sparseSDKs: [SDK], specLookupContext: any SWBCore.SpecLookupContext, environment: [String: String]) throws -> [String : String] { [:] } - func addPlatformSDKSettings(_ platform: SWBCore.Platform?, 
_ sdk: SDK, _ sdkVariant: SDKVariant?) -> [String : String] { [:] } + func addOverrides(fromEnvironment: [String: String], parameters: BuildParameters) throws -> [String: String] { [:] } + func addProductTypeDefaults(productType: ProductTypeSpec) -> [String: String] { [:] } + func addSDKOverridingSettings(_ sdk: SDK, _ variant: SDKVariant?, _ sparseSDKs: [SDK], specLookupContext: any SWBCore.SpecLookupContext, environment: [String: String]) throws -> [String: String] { [:] } + func addPlatformSDKSettings(_ platform: SWBCore.Platform?, _ sdk: SDK, _ sdkVariant: SDKVariant?) -> [String: String] { [:] } func xcconfigOverrideData(fromParameters: BuildParameters) -> ByteString { ByteString() } func getTargetTestingSwiftPluginFlags(_ scope: MacroEvaluationScope, toolchainRegistry: ToolchainRegistry, sdkRegistry: SDKRegistry, activeRunDestination: RunDestinationInfo?, project: SWBCore.Project?) -> [String] { [] } func shouldSkipPopulatingValidArchs(platform: SWBCore.Platform, sdk: SDK?) -> Bool { false } func shouldDisableXOJITPreviews(platformName: String, sdk: SDK?) -> Bool { false } - func overridingBuildSettings(_: MacroEvaluationScope, platform: SWBCore.Platform?, productType: ProductTypeSpec) -> [String : String] { [:] } + func overridingBuildSettings(_: MacroEvaluationScope, platform: SWBCore.Platform?, productType: ProductTypeSpec) -> [String: String] { [:] } } diff --git a/Sources/SWBApplePlatform/RealityAssetsCompilerSpec.swift b/Sources/SWBApplePlatform/RealityAssetsCompilerSpec.swift index e41cd491..d7039ffa 100644 --- a/Sources/SWBApplePlatform/RealityAssetsCompilerSpec.swift +++ b/Sources/SWBApplePlatform/RealityAssetsCompilerSpec.swift @@ -87,7 +87,7 @@ package final class RealityAssetsCompilerSpec: GenericCompilerSpec, SpecIdentifi let baseCommandLine = await commandLineFromTemplate(cbc, delegate, optionContext: discoveredCommandLineToolSpecInfo(cbc.producer, cbc.scope, delegate)).map(\.asString) // commandline from template is not usable...create our own for create-schema - var commandLine: [String] = [baseCommandLine[0]] // get executable from template + var commandLine: [String] = [baseCommandLine[0]] // get executable from template commandLine.append("create-schema") commandLine.append("--output-schema") commandLine.append(outputFile) @@ -100,16 +100,18 @@ package final class RealityAssetsCompilerSpec: GenericCompilerSpec, SpecIdentifi let inputs = [targetWithDependenciesPath] + moduleSwiftFilesPaths + dependencySwiftFilesPaths let ruleInfo = ["RealityAssetsSchemaGen", outputFile] - delegate.createTask(type: self, - ruleInfo: ruleInfo, - commandLine: commandLine, - environment: environmentBindings(cbc, delegate), - workingDirectory: cbc.producer.defaultWorkingDirectory, - inputs: inputs, - outputs: [outputPath], - execDescription: "Generate Reality Asset USD schema", - preparesForIndexing: true, - enableSandboxing: true) + delegate.createTask( + type: self, + ruleInfo: ruleInfo, + commandLine: commandLine, + environment: environmentBindings(cbc, delegate), + workingDirectory: cbc.producer.defaultWorkingDirectory, + inputs: inputs, + outputs: [outputPath], + execDescription: "Generate Reality Asset USD schema", + preparesForIndexing: true, + enableSandboxing: true + ) } private func constructRealityAssetCompilerTasks(_ cbc: CommandBuildContext, _ delegate: any TaskGenerationDelegate) async { @@ -193,26 +195,27 @@ package final class RealityAssetsCompilerSpec: GenericCompilerSpec, SpecIdentifi cachingEnabled = false } - let ruleInfo = ["RealityAssetsCompile", cbc.output.str] - 
delegate.createTask(type: self, - dependencyData: nil, - payload: nil, - ruleInfo: ruleInfo, - additionalSignatureData: "", - commandLine: commandLine, - additionalOutput: [], - environment: environmentBindings(cbc, delegate), - workingDirectory: cbc.producer.defaultWorkingDirectory, - inputs: inputs, - outputs: outputs, - mustPrecede: [], - action: action, - execDescription: "Compile Reality Asset \(rkAssetsPath.basename)", - preparesForIndexing: true, - enableSandboxing: !cachingEnabled, - llbuildControlDisabled: false, - additionalTaskOrderingOptions: []) + delegate.createTask( + type: self, + dependencyData: nil, + payload: nil, + ruleInfo: ruleInfo, + additionalSignatureData: "", + commandLine: commandLine, + additionalOutput: [], + environment: environmentBindings(cbc, delegate), + workingDirectory: cbc.producer.defaultWorkingDirectory, + inputs: inputs, + outputs: outputs, + mustPrecede: [], + action: action, + execDescription: "Compile Reality Asset \(rkAssetsPath.basename)", + preparesForIndexing: true, + enableSandboxing: !cachingEnabled, + llbuildControlDisabled: false, + additionalTaskOrderingOptions: [] + ) } public func constructTasks(_ cbc: CommandBuildContext, _ delegate: any TaskGenerationDelegate, moduleWithDependencies: ModuleWithDependencies) async { diff --git a/Sources/SWBApplePlatform/RealityAssetsTaskProducer.swift b/Sources/SWBApplePlatform/RealityAssetsTaskProducer.swift index 230f330f..6b653bf7 100644 --- a/Sources/SWBApplePlatform/RealityAssetsTaskProducer.swift +++ b/Sources/SWBApplePlatform/RealityAssetsTaskProducer.swift @@ -50,8 +50,9 @@ extension BuildPhaseTarget { let buildFilesProcessingContext = BuildFilesProcessingContext(scope) return buildFiles.compactMap { buildFile in guard let resolvedBuildFileInfo = try? context.resolveBuildFileReference(buildFile), - !buildFilesProcessingContext.isExcluded(resolvedBuildFileInfo.absolutePath, filters: buildFile.platformFilters), - resolvedBuildFileInfo.fileType.conformsTo(fileType) else { + !buildFilesProcessingContext.isExcluded(resolvedBuildFileInfo.absolutePath, filters: buildFile.platformFilters), + resolvedBuildFileInfo.fileType.conformsTo(fileType) + else { return nil } @@ -127,8 +128,9 @@ final class RealityAssetsTaskProducer: PhasedTaskProducer, TaskProducer { let regularConfiguredTargetSettings = context.globalProductPlan.getTargetSettings(regularConfiguredTarget) guard let project = regularConfiguredTargetSettings.project, - project.isPackage, - resourcePackageProject == project else { + project.isPackage, + resourcePackageProject == project + else { return false } @@ -185,7 +187,8 @@ final class RealityAssetsTaskProducer: PhasedTaskProducer, TaskProducer { // regularStandardTarget will be non-nil if this target is a resource build with .rkassets, // and we found the "regular" sources target with this configuredTarget as a dependency if let regularConfiguredTarget = findRegularPackageTarget(for: configuredTarget), - let regularStandardTarget = regularConfiguredTarget.target as? StandardTarget { + let regularStandardTarget = regularConfiguredTarget.target as? 
StandardTarget + { // regularStandardTarget has sources build phase with .swift files that need to be // preprocessed into a .usda schema that will be used to compile the .rkassets @@ -212,8 +215,7 @@ final class RealityAssetsTaskProducer: PhasedTaskProducer, TaskProducer { if !context.fs.exists(derivedDataPath) { do { try context.fs.createDirectory(derivedDataPath, recursive: true) - } - catch let error as NSError { + } catch let error as NSError { context.error("Could not create directory for '\(moduleWithDependenciesFile)', \(error.localizedDescription)") return [] } @@ -222,8 +224,7 @@ final class RealityAssetsTaskProducer: PhasedTaskProducer, TaskProducer { do { try moduleWithDependencies.write(moduleWithDependenciesPath, fsProxy: context.fs) - } - catch let error as NSError { + } catch let error as NSError { if error.domain == "org.swift.swift-build" { context.error(error.localizedDescription) } else { diff --git a/Sources/SWBApplePlatform/ReferenceObjectCompiler.swift b/Sources/SWBApplePlatform/ReferenceObjectCompiler.swift index 164d9ba7..4a1f3eae 100644 --- a/Sources/SWBApplePlatform/ReferenceObjectCompiler.swift +++ b/Sources/SWBApplePlatform/ReferenceObjectCompiler.swift @@ -19,7 +19,7 @@ public struct DiscoveredReferenceObjectToolSpecInfo: DiscoveredCommandLineToolSp public var toolVersion: Version? } -public final class ReferenceObjectCompilerSpec : GenericCompilerSpec, SpecIdentifierType, @unchecked Sendable { +public final class ReferenceObjectCompilerSpec: GenericCompilerSpec, SpecIdentifierType, @unchecked Sendable { public static let identifier = "com.apple.compilers.referenceobject" public override var enableSandboxing: Bool { diff --git a/Sources/SWBApplePlatform/ResMergerLinkerSpec.swift b/Sources/SWBApplePlatform/ResMergerLinkerSpec.swift index 931bc66c..3dbc7762 100644 --- a/Sources/SWBApplePlatform/ResMergerLinkerSpec.swift +++ b/Sources/SWBApplePlatform/ResMergerLinkerSpec.swift @@ -14,7 +14,7 @@ import SWBUtil public import SWBCore import SWBMacro -public final class ResMergerLinkerSpec : GenericLinkerSpec, SpecIdentifierType, @unchecked Sendable { +public final class ResMergerLinkerSpec: GenericLinkerSpec, SpecIdentifierType, @unchecked Sendable { public static let identifier = "com.apple.pbx.linkers.resmerger" public override func constructTasks(_ cbc: CommandBuildContext, _ delegate: any TaskGenerationDelegate) async { @@ -35,8 +35,7 @@ public final class ResMergerLinkerSpec : GenericLinkerSpec, SpecIdentifierType, let resmergerSourcesFork = cbc.scope.evaluate(BuiltinMacros.RESMERGER_SOURCES_FORK) if resmergerSourcesFork == "data" { commandLine += ["-srcIs", "DF"] - } - else if resmergerSourcesFork == "resource" { + } else if resmergerSourcesFork == "resource" { commandLine += ["-srcIs", "RSRC"] } @@ -51,7 +50,7 @@ public final class ResMergerLinkerSpec : GenericLinkerSpec, SpecIdentifierType, default: return nil } } - delegate.createTask(type: self, ruleInfo: ["ResMergerCollector", tmpOutputPath.str], commandLine: commandLine, environment: environment, workingDirectory: cbc.producer.defaultWorkingDirectory, inputs: cbc.inputs.map({ $0.absolutePath }), outputs: [tmpOutputPath], action: nil, execDescription: execDescription, enableSandboxing: enableSandboxing) + delegate.createTask(type: self, ruleInfo: ["ResMergerCollector", tmpOutputPath.str], commandLine: commandLine, environment: environment, workingDirectory: cbc.producer.defaultWorkingDirectory, inputs: cbc.inputs.map({ $0.absolutePath }), outputs: [tmpOutputPath], action: nil, execDescription: 
execDescription, enableSandboxing: enableSandboxing) } // Create the task for the product merge. @@ -76,8 +75,7 @@ public final class ResMergerLinkerSpec : GenericLinkerSpec, SpecIdentifierType, let resmergerSourcesFork = cbc.scope.evaluate(BuiltinMacros.RESMERGER_SOURCES_FORK) if resmergerSourcesFork == "data" { commandLine += ["-srcIs", "DF"] - } - else if resmergerSourcesFork == "resource" { + } else if resmergerSourcesFork == "resource" { commandLine += ["-srcIs", "RSRC"] } diff --git a/Sources/SWBApplePlatform/SceneKitToolSpec.swift b/Sources/SWBApplePlatform/SceneKitToolSpec.swift index d80561be..879c321b 100644 --- a/Sources/SWBApplePlatform/SceneKitToolSpec.swift +++ b/Sources/SWBApplePlatform/SceneKitToolSpec.swift @@ -13,7 +13,7 @@ public import SWBCore import SWBMacro -public final class SceneKitToolSpec : GenericCompilerSpec, SpecIdentifierType, @unchecked Sendable { +public final class SceneKitToolSpec: GenericCompilerSpec, SpecIdentifierType, @unchecked Sendable { public static let identifier = "com.apple.compilers.scntool" /// Override construction to handle the custom RESOURCE_FLAG value. diff --git a/Sources/SWBApplePlatform/StoryboardLinker.swift b/Sources/SWBApplePlatform/StoryboardLinker.swift index d8ec7a84..5de4433d 100644 --- a/Sources/SWBApplePlatform/StoryboardLinker.swift +++ b/Sources/SWBApplePlatform/StoryboardLinker.swift @@ -14,7 +14,7 @@ public import SWBUtil public import SWBCore public import SWBMacro -public final class IBStoryboardLinkerCompilerSpec : GenericCompilerSpec, SpecIdentifierType, IbtoolCompilerSupport, @unchecked Sendable { +public final class IBStoryboardLinkerCompilerSpec: GenericCompilerSpec, SpecIdentifierType, IbtoolCompilerSupport, @unchecked Sendable { public static let identifier = "com.apple.xcode.tools.ibtool.storyboard.linker" /// Override to compute the special arguments. @@ -41,10 +41,9 @@ public final class IBStoryboardLinkerCompilerSpec : GenericCompilerSpec, SpecIde outputs = [ "Interface.plist", "Interface-glance.plist", - "Interface-notification.plist" + "Interface-notification.plist", ].map { (resourcesDir.join($0), false) } - } - else { + } else { // Otherwise compute default values based on the inputs. for input in cbc.inputs.map({ $0.absolutePath }) { let subpath = input.relativeSubpath(from: tmpResourcesDir) ?? input.basename diff --git a/Sources/SWBApplePlatform/StringCatalogCompilerOutputParser.swift b/Sources/SWBApplePlatform/StringCatalogCompilerOutputParser.swift index 74605ed4..c7b599a4 100644 --- a/Sources/SWBApplePlatform/StringCatalogCompilerOutputParser.swift +++ b/Sources/SWBApplePlatform/StringCatalogCompilerOutputParser.swift @@ -13,7 +13,7 @@ public import SWBUtil public import SWBCore -public final class StringCatalogCompilerOutputParser : GenericOutputParser { +public final class StringCatalogCompilerOutputParser: GenericOutputParser { // In String Catalogs, the only unique identifier we really have for an "object" is the string key itself, and that could be a really long piece of text. 
// Thus, xcstringstool outputs its diagnostics like this: // /Users/mattseaman/Developer/RocketShip/RocketShip/Localizable.xcstrings: error: Referencing undefined substitution 'arg3' (en: Next meeting at %lld %lld) diff --git a/Sources/SWBApplePlatform/StubBinaryTaskProducer.swift b/Sources/SWBApplePlatform/StubBinaryTaskProducer.swift index c20a6644..c41b3044 100644 --- a/Sources/SWBApplePlatform/StubBinaryTaskProducer.swift +++ b/Sources/SWBApplePlatform/StubBinaryTaskProducer.swift @@ -81,10 +81,10 @@ extension TaskProducerContext { let stubBinaries = [ StubBinaryTaskProducer.watchKitStubBinary(scope, productType: productType), StubBinaryTaskProducer.messagesAppStubBinary(scope), - StubBinaryTaskProducer.messagesAppExtensionStubBinary(scope) + StubBinaryTaskProducer.messagesAppExtensionStubBinary(scope), ] - return stubBinaries.filter{ !$0.sourcePath.isEmpty }.only + return stubBinaries.filter { !$0.sourcePath.isEmpty }.only } } @@ -94,8 +94,10 @@ final class StubBinaryTaskProducer: PhasedTaskProducer, TaskProducer { let (bundleSidecarPath, archiveSidecarPath): (Path?, Path?) = { if productType?.identifier == "com.apple.product-type.application.watchapp2" { - return (scope.evaluate(BuiltinMacros.TARGET_BUILD_DIR).join(scope.evaluate(BuiltinMacros.FULL_PRODUCT_NAME)).join("_WatchKitStub/WK"), - Path(scope.evaluate(BuiltinMacros.WATCHKIT_2_SUPPORT_FOLDER_PATH))) + return ( + scope.evaluate(BuiltinMacros.TARGET_BUILD_DIR).join(scope.evaluate(BuiltinMacros.FULL_PRODUCT_NAME)).join("_WatchKitStub/WK"), + Path(scope.evaluate(BuiltinMacros.WATCHKIT_2_SUPPORT_FOLDER_PATH)) + ) } return (nil, nil) }() @@ -154,16 +156,18 @@ final class GlobalStubBinaryTaskProducer: StandardTaskProducer, TaskProducer { let thinArchs: Bool } - let stubBinaries: Set = Set(targetContexts.compactMap { targetContext in - let scope = targetContext.settings.globalScope + let stubBinaries: Set = Set( + targetContexts.compactMap { targetContext in + let scope = targetContext.settings.globalScope - // The stub is copied only when the "build" component is present. - guard scope.evaluate(BuiltinMacros.BUILD_COMPONENTS).contains("build") else { return nil } + // The stub is copied only when the "build" component is present. 
+ guard scope.evaluate(BuiltinMacros.BUILD_COMPONENTS).contains("build") else { return nil } - guard var tmp = targetContext.stubBinary else { return nil } - tmp.bundleSidecarPath = nil - return StubBinaryWithArchs(stubBinary: tmp, archs: Set(scope.evaluate(BuiltinMacros.ARCHS)), thinArchs: scope.evaluate(BuiltinMacros.THIN_PRODUCT_STUB_BINARY)) - }) + guard var tmp = targetContext.stubBinary else { return nil } + tmp.bundleSidecarPath = nil + return StubBinaryWithArchs(stubBinary: tmp, archs: Set(scope.evaluate(BuiltinMacros.ARCHS)), thinArchs: scope.evaluate(BuiltinMacros.THIN_PRODUCT_STUB_BINARY)) + } + ) var tasks = [any PlannedTask]() await appendGeneratedTasks(&tasks) { delegate in diff --git a/Sources/SWBApplePlatform/XCStringsCompiler.swift b/Sources/SWBApplePlatform/XCStringsCompiler.swift index fd7b0d3d..25e47cca 100644 --- a/Sources/SWBApplePlatform/XCStringsCompiler.swift +++ b/Sources/SWBApplePlatform/XCStringsCompiler.swift @@ -120,7 +120,7 @@ public final class XCStringsCompilerSpec: GenericCompilerSpec, SpecIdentifierTyp self.effectivePlatformName = effectivePlatformName } - func serialize(to serializer: T) where T : SWBUtil.Serializer { + func serialize(to serializer: T) where T: SWBUtil.Serializer { serializer.serializeAggregate(1) { serializer.serialize(effectivePlatformName) } @@ -296,7 +296,8 @@ public final class XCStringsCompilerSpec: GenericCompilerSpec, SpecIdentifierTyp var infos = [TaskGenerateLocalizationInfoOutput(compilableXCStringsPaths: task.inputPaths)] if let payload = task.payload as? SymbolGenPayload, - let xcstringsPath = task.inputPaths.only { + let xcstringsPath = task.inputPaths.only + { let generatedSourceFiles = task.outputPaths.filter { $0.fileExtension == "swift" } var info = TaskGenerateLocalizationInfoOutput() info.effectivePlatformName = payload.effectivePlatformName diff --git a/Sources/SWBApplePlatform/XCStringsInputFileGroupingStrategy.swift b/Sources/SWBApplePlatform/XCStringsInputFileGroupingStrategy.swift index 8532692d..b37f2706 100644 --- a/Sources/SWBApplePlatform/XCStringsInputFileGroupingStrategy.swift +++ b/Sources/SWBApplePlatform/XCStringsInputFileGroupingStrategy.swift @@ -30,7 +30,7 @@ import SWBUtil return "tool:\(toolName) name:\(groupable.absolutePath.basenameWithoutSuffix)" } - public func groupAdditionalFiles(to target: FileToBuildGroup, from source: S, context: any InputFileGroupingStrategyContext) -> [FileToBuildGroup] where S : Sequence, S.Element == FileToBuildGroup { + public func groupAdditionalFiles(to target: FileToBuildGroup, from source: S, context: any InputFileGroupingStrategyContext) -> [FileToBuildGroup] where S: Sequence, S.Element == FileToBuildGroup { // Additionally include .strings and .stringsdict files with the same basename. 
guard let xcstringsBasenameWithoutSuffix = Set(target.files.map({ $0.absolutePath.basenameWithoutSuffix })).only else { diff --git a/Sources/SWBBuildService/BuildDependencyInfo.swift b/Sources/SWBBuildService/BuildDependencyInfo.swift index 07d3509e..9091adba 100644 --- a/Sources/SWBBuildService/BuildDependencyInfo.swift +++ b/Sources/SWBBuildService/BuildDependencyInfo.swift @@ -19,7 +19,6 @@ import SWBMacro // MARK: Creating a BuildDependencyInfo from a BuildRequest - extension BuildDependencyInfo { package init(workspaceContext: WorkspaceContext, buildRequest: BuildRequest, buildRequestContext: BuildRequestContext, operation: BuildDependencyInfoOperation) async throws { @@ -59,8 +58,7 @@ extension BuildDependencyInfo { // I'm not sure how we'd actually encounter this, unless somehow target specialization went awry or we encounter some unforeseen scenario. if seenTargets.contains(target.target) { errors.append("Found multiple identical targets named '\(target.target.targetName)' in project '\(target.target.projectName ?? "nil")' for platform '\(target.target.platformName ?? "nil")") - } - else { + } else { seenTargets.insert(target.target) } } @@ -113,8 +111,7 @@ extension BuildDependencyInfo { let resolvedBuildFile: (reference: Reference, absolutePath: Path, fileType: FileTypeSpec) do { resolvedBuildFile = try buildFileResolver.resolveBuildFileReference(buildFile) - } - catch { + } catch { // FIXME: Figure out how to report an issue in as an error in the data structures. continue } @@ -132,14 +129,11 @@ extension BuildDependencyInfo { if resolvedBuildFile.fileType.conformsTo(identifier: "wrapper.framework") { // TODO: static frameworks? await inputs.addInput(TargetDependencyInfo.Input(inputType: .framework, name: .name(filename), linkType: .searchPath, libraryType: .dynamic)) - } - else if resolvedBuildFile.fileType.conformsTo(identifier: "compiled.mach-o.dylib") { + } else if resolvedBuildFile.fileType.conformsTo(identifier: "compiled.mach-o.dylib") { await inputs.addInput(TargetDependencyInfo.Input(inputType: .library, name: .name(filename), linkType: .searchPath, libraryType: .dynamic)) - } - else if resolvedBuildFile.fileType.conformsTo(identifier: "sourcecode.text-based-dylib-definition") { + } else if resolvedBuildFile.fileType.conformsTo(identifier: "sourcecode.text-based-dylib-definition") { await inputs.addInput(TargetDependencyInfo.Input(inputType: .library, name: .name(filename), linkType: .searchPath, libraryType: .dynamic)) - } - else if resolvedBuildFile.fileType.conformsTo(identifier: "archive.ar") { + } else if resolvedBuildFile.fileType.conformsTo(identifier: "archive.ar") { await inputs.addInput(TargetDependencyInfo.Input(inputType: .library, name: .name(filename), linkType: .searchPath, libraryType: .static)) } // FIXME: Handle wrapper.xcframework @@ -192,7 +186,7 @@ extension BuildDependencyInfo { /// Special `CoreClientDelegate`-conforming struct because our use of `GlobalProductPlan` here should never be running external tools. 
fileprivate struct UnsupportedCoreClientDelegate: CoreClientDelegate { - func executeExternalTool(commandLine: [String], workingDirectory: Path?, environment: [String : String]) async throws -> ExternalToolResult { + func executeExternalTool(commandLine: [String], workingDirectory: Path?, environment: [String: String]) async throws -> ExternalToolResult { throw StubError.error("Running external tools is not supported when computing build dependency target info.") } } @@ -215,7 +209,7 @@ fileprivate final class BuildDependencyInfoGlobalProductPlanDelegate: GlobalProd let cancelled = false - func updateProgress(statusMessage: String, showInLog: Bool) { } + func updateProgress(statusMessage: String, showInLog: Bool) {} // CoreClientTargetDiagnosticProducingDelegate conformance @@ -237,11 +231,11 @@ fileprivate final class BuildDependencyInfoGlobalProductPlanDelegate: GlobalProd .init(rawValue: -1) } - func endActivity(id: ActivityID, signature: ByteString, status: BuildOperationTaskEnded.Status) { } + func endActivity(id: ActivityID, signature: ByteString, status: BuildOperationTaskEnded.Status) {} - func emit(data: [UInt8], for activity: SWBCore.ActivityID, signature: SWBUtil.ByteString) { } + func emit(data: [UInt8], for activity: SWBCore.ActivityID, signature: SWBUtil.ByteString) {} - func emit(diagnostic: SWBUtil.Diagnostic, for activity: SWBCore.ActivityID, signature: SWBUtil.ByteString) { } + func emit(diagnostic: SWBUtil.Diagnostic, for activity: SWBCore.ActivityID, signature: SWBUtil.ByteString) {} let hadErrors = false diff --git a/Sources/SWBBuildService/BuildDescriptionMessages.swift b/Sources/SWBBuildService/BuildDescriptionMessages.swift index 6f89b553..174b4f03 100644 --- a/Sources/SWBBuildService/BuildDescriptionMessages.swift +++ b/Sources/SWBBuildService/BuildDescriptionMessages.swift @@ -57,7 +57,9 @@ fileprivate extension Request { buildRequestContext: buildRequestContext, workspaceContext: workspaceContext, retain: false - ), clientDelegate: clientDelegate, constructionDelegate: operation + ), + clientDelegate: clientDelegate, + constructionDelegate: operation )?.buildDescription guard let buildDescription else { @@ -167,11 +169,14 @@ struct BuildDescriptionSelectConfiguredTargetsForIndexMsg: MessageHandler { let uniqueTargets = OrderedSet(message.targets.map(\.rawValue)) - let targets: Dictionary = Dictionary(buildDescription.allConfiguredTargets.lazy.filter { uniqueTargets.contains($0.target.guid) }.map { - ($0.target.guid, $0) - }, uniquingKeysWith: { - buildRequestContext.selectConfiguredTargetForIndex($0, $1, hasEnabledIndexBuildArena: buildRequest.enableIndexBuildArena, runDestination: message.request.parameters.activeRunDestination) - }) + let targets: Dictionary = Dictionary( + buildDescription.allConfiguredTargets.lazy.filter { uniqueTargets.contains($0.target.guid) }.map { + ($0.target.guid, $0) + }, + uniquingKeysWith: { + buildRequestContext.selectConfiguredTargetForIndex($0, $1, hasEnabledIndexBuildArena: buildRequest.enableIndexBuildArena, runDestination: message.request.parameters.activeRunDestination) + } + ) let configuredTargets = try uniqueTargets.map { target in guard let configuredTarget = targets[target] else { diff --git a/Sources/SWBBuildService/BuildOperationMessages.swift b/Sources/SWBBuildService/BuildOperationMessages.swift index acc6c5da..dd0908be 100644 --- a/Sources/SWBBuildService/BuildOperationMessages.swift +++ b/Sources/SWBBuildService/BuildOperationMessages.swift @@ -87,7 +87,7 @@ final class ActiveBuild: ActiveBuildOperation { func 
beginActivity(ruleInfo: String, executionDescription: String, signature: ByteString, target: ConfiguredTarget?, parentActivity: ActivityID?) -> ActivityID { let activity = ActivityID(rawValue: activeBuild.activeTasks.takeID()) - assert(target == nil) // not supported yet + assert(target == nil) // not supported yet activeBuild.request.send(BuildOperationTaskStarted(id: activity.rawValue, targetID: nil, parentID: parentActivity?.rawValue, info: .init(taskName: executionDescription, signature: .activitySignature(signature), ruleInfo: ruleInfo, executionDescription: executionDescription, commandLineDisplayString: nil, interestingPath: nil, serializedDiagnosticsPaths: []))) return activity } @@ -484,7 +484,8 @@ final class ActiveBuild: ActiveBuildOperation { assert(state == .created) state = .started - if case .cleanBuildFolder(_) = buildRequest.buildCommand {} else { + if case .cleanBuildFolder(_) = buildRequest.buildCommand { + } else { // Once we have reached this point, we are done reporting preparation progress. let statusMessage = workspaceContext.userPreferences.activityTextShorteningLevel == .full ? "Starting" : "Starting build" preparationProgressDelegate!.updateProgress(statusMessage: statusMessage, showInLog: false) @@ -540,7 +541,6 @@ final class ActiveBuild: ActiveBuildOperation { } } - // FIXME: This needs to be factored out as a reusable ObjectIDMapping soon (it's not actually set since it doesn't implement the actual Set protocol). private final class ObjectIDMapping { private struct State { @@ -549,7 +549,7 @@ private final class ObjectIDMapping { var objsToIDs: [Ref: Int] = [:] mutating func takeID() -> Int { - defer{ nextID += 1 } + defer { nextID += 1 } return nextID } } @@ -600,16 +600,21 @@ final class ActiveBuildDiagnosticsHandler: TargetDiagnosticProducingDelegate { func diagnosticsEngine(for target: ConfiguredTarget?) -> DiagnosticProducingDelegateProtocolPrivate { return targetDiagnosticsEngines.withLock { targetDiagnosticsEngines in - .init(targetDiagnosticsEngines.getOrInsert(target, { - let engine = DiagnosticsEngine() - if target == nil { - let request = self.request - engine.addHandler { diag in - sendDiagnosticMessage(request, diag, .global) + .init( + targetDiagnosticsEngines.getOrInsert( + target, + { + let engine = DiagnosticsEngine() + if target == nil { + let request = self.request + engine.addHandler { diag in + sendDiagnosticMessage(request, diag, .global) + } + } + return engine } - } - return engine - })) + ) + ) } } @@ -918,8 +923,8 @@ private final class TaskOutputParserHandler: TaskOutputParserDelegate { /// A task output collector which simply discards any data it receives. private final class DiscardingTaskOutputHandler: TaskOutputDelegate { - var counters: [BuildOperationMetrics.Counter : Int] = [:] - var taskCounters: [BuildOperationMetrics.TaskCounter : Int] = [:] + var counters: [BuildOperationMetrics.Counter: Int] = [:] + var taskCounters: [BuildOperationMetrics.TaskCounter: Int] = [:] private let _diagnosticsEngine = DiagnosticsEngine() var result: TaskResult? 
{ nil } @@ -1040,7 +1045,7 @@ final class OperationDelegate: BuildOperationDelegate { return outputCollector } - func reportPathMap(_ operation: BuildOperation, copiedPathMap: [String : String], generatedFilesPathMap: [String : String]) { + func reportPathMap(_ operation: BuildOperation, copiedPathMap: [String: String], generatedFilesPathMap: [String: String]) { request.send(BuildOperationReportPathMap(copiedPathMap: copiedPathMap, generatedFilesPathMap: generatedFilesPathMap)) } @@ -1163,11 +1168,12 @@ final class OperationDelegate: BuildOperationDelegate { // If we haven't started, show a custom message (to prevent a "Building 0" message). if stats.numCommandsStarted == 0 { - if messageShortening != .full || workspaceContext.userPreferences.enableDebugActivityLogs { + if messageShortening != .full || workspaceContext.userPreferences.enableDebugActivityLogs { request.send(BuildOperationProgressUpdated(targetName: targetName, statusMessage: "Scanning build tasks", percentComplete: percentComplete, showInLog: false)) } } else { - let statusMessage = messageShortening > .legacy + let statusMessage = + messageShortening > .legacy ? activityMessageFractionString(stats.numCommandsStarted, over: stats.numPossibleMaxExecutedCommands) : "Building \(stats.numCommandsStarted) of \(stats.numPossibleMaxExecutedCommands) tasks" request.send(BuildOperationProgressUpdated(targetName: targetName, statusMessage: statusMessage, percentComplete: percentComplete, showInLog: false)) @@ -1331,9 +1337,9 @@ final class OperationDelegate: BuildOperationDelegate { ) }) - self.aggregatedCounters.merge(delegate.counters) { (a, b) in a+b } + self.aggregatedCounters.merge(delegate.counters) { (a, b) in a + b } if !delegate.taskCounters.isEmpty { - self.aggregatedTaskCounters[task.ruleInfo[0], default: [:]].merge(delegate.taskCounters) { (a, b) in a+b } + self.aggregatedTaskCounters[task.ruleInfo[0], default: [:]].merge(delegate.taskCounters) { (a, b) in a + b } } request.send(BuildOperationTaskEnded(id: taskID, signature: .taskIdentifier(ByteString(encodingAsUTF8: taskIdentifier.rawValue)), status: status, signalled: status == .cancelled, metrics: metrics)) diff --git a/Sources/SWBBuildService/BuildService.swift b/Sources/SWBBuildService/BuildService.swift index e1219955..f5d06178 100644 --- a/Sources/SWBBuildService/BuildService.swift +++ b/Sources/SWBBuildService/BuildService.swift @@ -58,11 +58,11 @@ open class BuildService: Service, @unchecked Sendable { /// The cache of core objects. /// /// We make this a heavy cache in debug mode, so that it can be explicitly cleared (via `clearAllCaches`), which helps considerably with memory leak debugging. -#if DEBUG - private let sharedCoreCache = HeavyCache() -#else - private let sharedCoreCache = Cache() -#endif + #if DEBUG + private let sharedCoreCache = HeavyCache() + #else + private let sharedCoreCache = Cache() + #endif /// Async lock to guard access to `sharedCoreCache`, since its `getOrInsert` method can't be given an async closure. private var sharedCoreCacheLock = ActorLock() @@ -111,8 +111,7 @@ open class BuildService: Service, @unchecked Sendable { } } } - } - catch { + } catch { // Couldn't get the contents of the frameworks directory - not sure whether this should be an error or if there are development workflows we want to support where it might be missing. } } @@ -129,11 +128,9 @@ open class BuildService: Service, @unchecked Sendable { if latestModDate.map({ latestModDate in modDate > latestModDate }) ?? 
true { latestModDate = modDate } - } - catch let error as NSError { + } catch let error as NSError { throw StubError.error("Couldn't get timestamp of SWBBuildService.bundle contents: \(error.localizedDescription)") - } - catch { + } catch { throw StubError.error("Couldn't get timestamp of SWBBuildService.bundle contents: \(error)") } } diff --git a/Sources/SWBBuildService/BuildServiceEntryPoint.swift b/Sources/SWBBuildService/BuildServiceEntryPoint.swift index 6c348dfa..9b38c4dc 100644 --- a/Sources/SWBBuildService/BuildServiceEntryPoint.swift +++ b/Sources/SWBBuildService/BuildServiceEntryPoint.swift @@ -22,19 +22,19 @@ import SWBTaskConstruction import SWBTaskExecution #if canImport(System) -import System + import System #else -import SystemPackage + import SystemPackage #endif #if USE_STATIC_PLUGIN_INITIALIZATION -private import SWBAndroidPlatform -private import SWBApplePlatform -private import SWBGenericUnixPlatform -private import SWBQNXPlatform -private import SWBUniversalPlatform -private import SWBWebAssemblyPlatform -private import SWBWindowsPlatform + private import SWBAndroidPlatform + private import SWBApplePlatform + private import SWBGenericUnixPlatform + private import SWBQNXPlatform + private import SWBUniversalPlatform + private import SWBWebAssemblyPlatform + private import SWBWindowsPlatform #endif private struct Options { @@ -56,10 +56,12 @@ private struct Options { while let arg = generator.next() { switch arg { case "--help": - print((OutputByteStream() + print( + (OutputByteStream() <<< "Swift Build Build Service\n" <<< "\n" - <<< " Read the source for help.").bytes.asString) + <<< " Read the source for help.").bytes.asString + ) exit = true default: @@ -76,13 +78,20 @@ extension BuildService { do { try await Service.main { inputFD, outputFD in // Launch the Swift Build service. - try await BuildService.run(inputFD: inputFD, outputFD: outputFD, connectionMode: .outOfProcess, pluginsDirectory: Bundle.main.builtInPlugInsURL, arguments: arguments, pluginLoadingFinished: { - // Already using DYLD_IMAGE_SUFFIX, clear it to avoid propagating ASan to children. - // This must happen after plugin loading. - if let suffix = getEnvironmentVariable("DYLD_IMAGE_SUFFIX"), suffix == "_asan" { - try POSIX.unsetenv("DYLD_IMAGE_SUFFIX") + try await BuildService.run( + inputFD: inputFD, + outputFD: outputFD, + connectionMode: .outOfProcess, + pluginsDirectory: Bundle.main.builtInPlugInsURL, + arguments: arguments, + pluginLoadingFinished: { + // Already using DYLD_IMAGE_SUFFIX, clear it to avoid propagating ASan to children. + // This must happen after plugin loading. + if let suffix = getEnvironmentVariable("DYLD_IMAGE_SUFFIX"), suffix == "_asan" { + try POSIX.unsetenv("DYLD_IMAGE_SUFFIX") + } } - }) + ) } exit(EXIT_SUCCESS) } catch { @@ -132,17 +141,17 @@ extension BuildService { // This MUST be a compile-time check because the module dependencies on the plugins are conditional. // Minimize the amount of code that is conditionally compiled to avoid breaking the build during refactoring. 
#if USE_STATIC_PLUGIN_INITIALIZATION - staticPluginInitializers = [ - SWBAndroidPlatform.initializePlugin, - SWBApplePlatform.initializePlugin, - SWBGenericUnixPlatform.initializePlugin, - SWBQNXPlatform.initializePlugin, - SWBUniversalPlatform.initializePlugin, - SWBWebAssemblyPlatform.initializePlugin, - SWBWindowsPlatform.initializePlugin, - ] + staticPluginInitializers = [ + SWBAndroidPlatform.initializePlugin, + SWBApplePlatform.initializePlugin, + SWBGenericUnixPlatform.initializePlugin, + SWBQNXPlatform.initializePlugin, + SWBUniversalPlatform.initializePlugin, + SWBWebAssemblyPlatform.initializePlugin, + SWBWindowsPlatform.initializePlugin, + ] #else - staticPluginInitializers = [] + staticPluginInitializers = [] #endif if useStaticPluginInitialization { diff --git a/Sources/SWBBuildService/ClientExchangeDelegate.swift b/Sources/SWBBuildService/ClientExchangeDelegate.swift index b391870e..82d231f4 100644 --- a/Sources/SWBBuildService/ClientExchangeDelegate.swift +++ b/Sources/SWBBuildService/ClientExchangeDelegate.swift @@ -33,7 +33,7 @@ final class ClientExchangeDelegate: ClientDelegate { self.session = session } - func executeExternalTool(commandLine: [String], workingDirectory: Path?, environment: [String : String]) async throws -> ExternalToolResult { + func executeExternalTool(commandLine: [String], workingDirectory: Path?, environment: [String: String]) async throws -> ExternalToolResult { // Create a synchronous client exchange which the session uses to handle the response from the client, to make the communication synchronous from the point of view of our caller. let exchange = SynchronousClientExchange(session) diff --git a/Sources/SWBBuildService/DependencyGraphMessages.swift b/Sources/SWBBuildService/DependencyGraphMessages.swift index 5973b33d..af67b24a 100644 --- a/Sources/SWBBuildService/DependencyGraphMessages.swift +++ b/Sources/SWBBuildService/DependencyGraphMessages.swift @@ -58,17 +58,22 @@ private func constructTargetBuildGraph(for targetGUIDs: [TargetGUID], in workspa case .buildRequest: scope = .buildRequest } - let buildGraph = await TargetBuildGraph(workspaceContext: workspaceContext, - buildRequest: BuildRequest(parameters: parameters, - buildTargets: targets.map { BuildRequest.BuildTargetInfo(parameters: parameters, target: $0) }, dependencyScope: scope, - continueBuildingAfterErrors: false, - hideShellScriptEnvironment: true, - useParallelTargets: false, - useImplicitDependencies: includeImplicitDependencies, - useDryRun: false), - buildRequestContext: buildRequestContext, - delegate: delegate, - purpose: .dependencyGraph) + let buildGraph = await TargetBuildGraph( + workspaceContext: workspaceContext, + buildRequest: BuildRequest( + parameters: parameters, + buildTargets: targets.map { BuildRequest.BuildTargetInfo(parameters: parameters, target: $0) }, + dependencyScope: scope, + continueBuildingAfterErrors: false, + hideShellScriptEnvironment: true, + useParallelTargets: false, + useImplicitDependencies: includeImplicitDependencies, + useDryRun: false + ), + buildRequestContext: buildRequestContext, + delegate: delegate, + purpose: .dependencyGraph + ) if delegate.hasErrors { throw StubError.error("unable to get target build graph:\n" + delegate.diagnostics.map { $0.formatLocalizedDescription(.debug) }.joined(separator: "\n")) } diff --git a/Sources/SWBBuildService/DocumentationInfo.swift b/Sources/SWBBuildService/DocumentationInfo.swift index 7f5f2ad6..d318e3eb 100644 --- a/Sources/SWBBuildService/DocumentationInfo.swift +++ 
b/Sources/SWBBuildService/DocumentationInfo.swift @@ -74,9 +74,11 @@ extension BuildDescription { func generateDocumentationInfo(workspaceContext: WorkspaceContext, buildRequestContext: BuildRequestContext, input: TaskGenerateDocumentationInfoInput) -> [DocumentationInfoOutput] { var output: [DocumentationInfoOutput] = [] taskStore.forEachTask { task in - output.append(contentsOf: task.generateDocumentationInfo(input: input).map { info in - DocumentationInfoOutput(outputPath: info.outputPath, targetIdentifier: info.targetIdentifier) - }) + output.append( + contentsOf: task.generateDocumentationInfo(input: input).map { info in + DocumentationInfoOutput(outputPath: info.outputPath, targetIdentifier: info.targetIdentifier) + } + ) } return output } diff --git a/Sources/SWBBuildService/LocalizationInfo.swift b/Sources/SWBBuildService/LocalizationInfo.swift index 1d63532c..6a595cec 100644 --- a/Sources/SWBBuildService/LocalizationInfo.swift +++ b/Sources/SWBBuildService/LocalizationInfo.swift @@ -18,7 +18,7 @@ import SWBBuildSystem /// Errors that might be thrown while generating localization info from a build description. enum LocalizationInfoErrors: Error { - case noBuildDescriptionID // The client didn't set buildDescriptionID + case noBuildDescriptionID // The client didn't set buildDescriptionID case noBuildDescription } @@ -92,17 +92,18 @@ extension BuildDescription { guard let targetGUID = task.forTarget?.target.guid else { // This task is not associated with a target at all. // Ignore for now. - return // equivalent to `continue` since we're in a closure-based loop. + return // equivalent to `continue` since we're in a closure-based loop. } let taskLocalizationOutputs = task.generateLocalizationInfo(input: input) guard !taskLocalizationOutputs.isEmpty else { - return // continue + return // continue } let taskXCStringsPaths = Set(taskLocalizationOutputs.flatMap(\.compilableXCStringsPaths)) - let taskStringsdataPaths: [LocalizationBuildPortion: Set] = taskLocalizationOutputs + let taskStringsdataPaths: [LocalizationBuildPortion: Set] = + taskLocalizationOutputs .map(\.producedStringsdataPaths) .reduce([:], { aggregate, partial in aggregate.merging(partial, uniquingKeysWith: +) }) .mapValues { Set($0) } @@ -119,7 +120,8 @@ extension BuildDescription { outputsByTarget[targetGUID]?.effectivePlatformName = effectivePlatformName } - let taskGeneratedSymbolFiles = taskLocalizationOutputs + let taskGeneratedSymbolFiles = + taskLocalizationOutputs .map(\.generatedSymbolFilesByXCStringsPath) .reduce([:], { aggregate, partial in aggregate.merging(partial, uniquingKeysWith: +) }) .mapValues { Set($0) } diff --git a/Sources/SWBBuildService/Messages.swift b/Sources/SWBBuildService/Messages.swift index ea3baaeb..54650c4d 100644 --- a/Sources/SWBBuildService/Messages.swift +++ b/Sources/SWBBuildService/Messages.swift @@ -45,7 +45,7 @@ private struct GetSpecsDumpMsg: MessageHandler { let conformingTo: String? 
if let idx = message.commandLine.firstIndex(of: "--conforms-to"), idx + 1 < message.commandLine.count { conformingTo = message.commandLine[idx + 1] - } else { + } else { conformingTo = nil } return StringResponse(try request.session(for: message).core.getSpecsDump(conformingTo: conformingTo)) @@ -289,13 +289,15 @@ private struct SetSessionUserPreferencesMsg: MessageHandler { throw MsgParserError.missingWorkspaceContext } - workspaceContext.updateUserPreferences(UserPreferences( - enableDebugActivityLogs: message.enableDebugActivityLogs, - enableBuildDebugging: message.enableBuildDebugging, - enableBuildSystemCaching: message.enableBuildSystemCaching, - activityTextShorteningLevel: message.activityTextShorteningLevel, - usePerConfigurationBuildLocations: message.usePerConfigurationBuildLocations, - allowsExternalToolExecution: message.allowsExternalToolExecution ?? UserPreferences.allowsExternalToolExecutionDefaultValue) + workspaceContext.updateUserPreferences( + UserPreferences( + enableDebugActivityLogs: message.enableDebugActivityLogs, + enableBuildDebugging: message.enableBuildDebugging, + enableBuildSystemCaching: message.enableBuildSystemCaching, + activityTextShorteningLevel: message.activityTextShorteningLevel, + usePerConfigurationBuildLocations: message.usePerConfigurationBuildLocations, + allowsExternalToolExecution: message.allowsExternalToolExecution ?? UserPreferences.allowsExternalToolExecutionDefaultValue + ) ) return VoidResponse() @@ -328,7 +330,7 @@ private struct TransferSessionPIFMsg: MessageHandler { throw StubError.error("incremental PIF transfer did not produce the right contents. Diff: \(diff)") case .incomplete(let missingObjects): - return TransferSessionPIFResponse(missingObjects: missingObjects.map{ TransferSessionPIFResponse.MissingObject(type: $0.type, signature: $0.signature) }) + return TransferSessionPIFResponse(missingObjects: missingObjects.map { TransferSessionPIFResponse.MissingObject(type: $0.type, signature: $0.signature) }) } } @@ -463,11 +465,16 @@ private struct WorkspaceInfoMsg: MessageHandler { throw MsgParserError.missingWorkspaceContext } - return WorkspaceInfoResponse(sessionHandle: session.UID, workspaceInfo: .init(targetInfos: workspaceContext.workspace.projects.flatMap { project in - return project.targets.map { target in - return .init(guid: target.guid, targetName: target.name, projectName: project.name, dynamicTargetVariantGuid: target.dynamicTargetVariantGuid) - } - })) + return WorkspaceInfoResponse( + sessionHandle: session.UID, + workspaceInfo: .init( + targetInfos: workspaceContext.workspace.projects.flatMap { project in + return project.targets.map { target in + return .init(guid: target.guid, targetName: target.name, projectName: project.name, dynamicTargetVariantGuid: target.dynamicTargetVariantGuid) + } + } + ) + ) } } @@ -516,9 +523,9 @@ private struct BuildCancelRequestMsg: MessageHandler { package class InfoOperation { private var isCancelled: LockedValue = .init(false) - package var cancelled: Bool { return isCancelled.withLock{$0} } + package var cancelled: Bool { return isCancelled.withLock { $0 } } package func cancel() { - isCancelled.withLock{$0 = true} + isCancelled.withLock { $0 = true } tasks.withLock { for task in $0 { task.cancel() @@ -554,18 +561,18 @@ final class IndexingOperation: InfoOperation, BuildDescriptionConstructionDelega .init(_diagnosticsEngine) } - var diagnostics: [ConfiguredTarget? 
: [Diagnostic]] { + var diagnostics: [ConfiguredTarget?: [Diagnostic]] { [nil: _diagnosticsEngine.diagnostics] } - package func updateProgress(statusMessage: String, showInLog: Bool) { } + package func updateProgress(statusMessage: String, showInLog: Bool) {} package func beginActivity(ruleInfo: String, executionDescription: String, signature: ByteString, target: ConfiguredTarget?, parentActivity: ActivityID?) -> ActivityID { .init(rawValue: -1) } - package func endActivity(id: ActivityID, signature: ByteString, status: BuildOperationTaskEnded.Status) { } - package func emit(data: [UInt8], for activity: ActivityID, signature: ByteString) { } - package func emit(diagnostic: Diagnostic, for activity: ActivityID, signature: ByteString) { } + package func endActivity(id: ActivityID, signature: ByteString, status: BuildOperationTaskEnded.Status) {} + package func emit(data: [UInt8], for activity: ActivityID, signature: ByteString) {} + package func emit(diagnostic: Diagnostic, for activity: ActivityID, signature: ByteString) {} - package func buildDescriptionCreated(_ buildDescriptionID: BuildDescriptionID) { } + package func buildDescriptionCreated(_ buildDescriptionID: BuildDescriptionID) {} // We don't care about the individual diagnostics here, only if there was at least one error. package private(set) var hadErrors = false @@ -601,11 +608,11 @@ private struct GetIndexingFileSettingsMsg: MessageHandler { func handle(request: Request, message: IndexingFileSettingsRequest) async throws -> VoidResponse { try await handleIndexingInfoRequest(serializationQueue: Self.serializationQueue, request: request, message: message) { message, workspaceContext, buildRequest, buildRequestContext, buildDescription, target, elapsedTimer in -#if DEBUG - // We record the source files we see and report if a particular source file - // is encountered more than once. - var seenPaths = Set() -#endif + #if DEBUG + // We record the source files we see and report if a particular source file + // is encountered more than once. + var seenPaths = Set() + #endif // Collect and return the indexing info. The indexing info is sent as a property list defined by the SourceFileIndexingInfo object, so there is presently no need for client-side changes to handle this info (and thus little chance of revlock issues between the client and the service). In the future we hope to provide strong typing for this data.
@@ -616,19 +623,24 @@ private struct GetIndexingFileSettingsMsg: MessageHandler { let path = info.path let indexingInfo = info.indexingInfo -#if DEBUG - if !seenPaths.insert(path).inserted { - log("Duplicate source file for indexing \(path)") - } -#endif + #if DEBUG + if !seenPaths.insert(path).inserted { + log("Duplicate source file for indexing \(path)") + } + #endif guard case .plDict(let indexingInfoDict) = indexingInfo.propertyListItem else { throw StubError.error("Internal error: expected plDict from \(indexingInfo)") } - return try .plDict(indexingInfoDict.merging(["sourceFilePath": .plString(path.str)], uniquingKeysWith: { _, _ in - throw StubError.error("Internal error: unexpected sourceFilePath key in \(indexingInfo)") - })) + return try .plDict( + indexingInfoDict.merging( + ["sourceFilePath": .plString(path.str)], + uniquingKeysWith: { _, _ in + throw StubError.error("Internal error: unexpected sourceFilePath key in \(indexingInfo)") + } + ) + ) } } @@ -645,7 +657,7 @@ private struct GetIndexingFileSettingsMsg: MessageHandler { "seconds": .plDouble(duration.seconds), "nanoseconds": .plDouble(Double(duration.nanoseconds)), ]), - "data": .plArray(resultArray) + "data": .plArray(resultArray), ]) try workspaceContext.fs.write(payloadsDirectory.join("\(dateString).json"), contents: obj.asJSONFragment()) @@ -672,10 +684,12 @@ private struct GetIndexingHeaderInfoMsg: MessageHandler { } let productName = buildRequestContext.getCachedSettings(target.parameters, target: target.target).globalScope.evaluate(BuiltinMacros.PRODUCT_NAME) - let copiedHeaders = Dictionary(uniqueKeysWithValues: allTargetOutputPaths.intersection(buildDescription.copiedPathMap.keys).compactMap { path -> (String, String)? in - guard let copiedPath = buildDescription.copiedPathMap[path], ProjectHeaderInfo.headerFileExtensions.contains(Path(copiedPath).fileExtension) else { return nil } - return (path, copiedPath) - }) + let copiedHeaders = Dictionary( + uniqueKeysWithValues: allTargetOutputPaths.intersection(buildDescription.copiedPathMap.keys).compactMap { path -> (String, String)? in + guard let copiedPath = buildDescription.copiedPathMap[path], ProjectHeaderInfo.headerFileExtensions.contains(Path(copiedPath).fileExtension) else { return nil } + return (path, copiedPath) + } + ) return IndexingHeaderInfoResponse(targetID: message.targetID, productName: productName, copiedPathMap: copiedHeaders) } @@ -813,20 +827,20 @@ private final class DocumentationOperation: InfoOperation, DocumentationInfoDele .init(_diagnosticsEngine) } - var diagnostics: [ConfiguredTarget? : [Diagnostic]] { + var diagnostics: [ConfiguredTarget?: [Diagnostic]] { [nil: _diagnosticsEngine.diagnostics] } let clientDelegate: any ClientDelegate - package func updateProgress(statusMessage: String, showInLog: Bool) { } + package func updateProgress(statusMessage: String, showInLog: Bool) {} package func beginActivity(ruleInfo: String, executionDescription: String, signature: ByteString, target: ConfiguredTarget?, parentActivity: ActivityID?) 
-> ActivityID { .init(rawValue: -1) } - package func endActivity(id: ActivityID, signature: ByteString, status: BuildOperationTaskEnded.Status) { } - package func emit(data: [UInt8], for activity: ActivityID, signature: ByteString) { } - package func emit(diagnostic: Diagnostic, for activity: ActivityID, signature: ByteString) { } + package func endActivity(id: ActivityID, signature: ByteString, status: BuildOperationTaskEnded.Status) {} + package func emit(data: [UInt8], for activity: ActivityID, signature: ByteString) {} + package func emit(diagnostic: Diagnostic, for activity: ActivityID, signature: ByteString) {} - package func buildDescriptionCreated(_ buildDescriptionID: BuildDescriptionID) { } + package func buildDescriptionCreated(_ buildDescriptionID: BuildDescriptionID) {} package init(clientDelegate: any ClientDelegate, workspace: SWBCore.Workspace) { self.clientDelegate = clientDelegate @@ -850,7 +864,7 @@ private final class LocalizationOperation: InfoOperation, LocalizationInfoDelega .init(_diagnosticsEngine) } - var diagnostics: [ConfiguredTarget? : [Diagnostic]] { + var diagnostics: [ConfiguredTarget?: [Diagnostic]] { [nil: _diagnosticsEngine.diagnostics] } @@ -858,14 +872,14 @@ private final class LocalizationOperation: InfoOperation, LocalizationInfoDelega // We don't care about most of these messages. - package func updateProgress(statusMessage: String, showInLog: Bool) { } + package func updateProgress(statusMessage: String, showInLog: Bool) {} package func beginActivity(ruleInfo: String, executionDescription: String, signature: ByteString, target: ConfiguredTarget?, parentActivity: ActivityID?) -> ActivityID { .init(rawValue: -1) } - package func endActivity(id: ActivityID, signature: ByteString, status: BuildOperationTaskEnded.Status) { } - package func emit(data: [UInt8], for activity: ActivityID, signature: ByteString) { } - package func emit(diagnostic: Diagnostic, for activity: ActivityID, signature: ByteString) { } + package func endActivity(id: ActivityID, signature: ByteString, status: BuildOperationTaskEnded.Status) {} + package func emit(data: [UInt8], for activity: ActivityID, signature: ByteString) {} + package func emit(diagnostic: Diagnostic, for activity: ActivityID, signature: ByteString) {} - package func buildDescriptionCreated(_ buildDescriptionID: BuildDescriptionID) { } + package func buildDescriptionCreated(_ buildDescriptionID: BuildDescriptionID) {} package init(clientDelegate: any ClientDelegate, workspace: SWBCore.Workspace) { self.clientDelegate = clientDelegate @@ -902,15 +916,17 @@ private struct GetLocalizationInfoMsg: MessageHandler { let input = TaskGenerateLocalizationInfoInput(targetIdentifiers: message.targetIdentifiers) let output = try await session.buildDescriptionManager.generateLocalizationInfo(workspaceContext: workspaceContext, buildRequest: buildRequest, buildRequestContext: buildRequestContext, delegate: operation, input: input) - let response = LocalizationInfoResponse(targetInfos: output.map({ infoOutput in - var stringsdataPaths = [LocalizationInfoBuildPortion: Set]() - for (buildPortion, paths) in infoOutput.producedStringsdataPaths { - stringsdataPaths[LocalizationInfoBuildPortion(effectivePlatformName: buildPortion.effectivePlatformName, variant: buildPortion.variant, architecture: buildPortion.architecture)] = paths - } - var payload = LocalizationInfoMessagePayload(targetIdentifier: infoOutput.targetIdentifier, compilableXCStringsPaths: infoOutput.compilableXCStringsPaths, producedStringsdataPaths: stringsdataPaths, 
effectivePlatformName: infoOutput.effectivePlatformName) - payload.generatedSymbolFilesByXCStringsPath = infoOutput.generatedSymbolFilesByXCStringsPath - return payload - })) + let response = LocalizationInfoResponse( + targetInfos: output.map({ infoOutput in + var stringsdataPaths = [LocalizationInfoBuildPortion: Set]() + for (buildPortion, paths) in infoOutput.producedStringsdataPaths { + stringsdataPaths[LocalizationInfoBuildPortion(effectivePlatformName: buildPortion.effectivePlatformName, variant: buildPortion.variant, architecture: buildPortion.architecture)] = paths + } + var payload = LocalizationInfoMessagePayload(targetIdentifier: infoOutput.targetIdentifier, compilableXCStringsPaths: infoOutput.compilableXCStringsPaths, producedStringsdataPaths: stringsdataPaths, effectivePlatformName: infoOutput.effectivePlatformName) + payload.generatedSymbolFilesByXCStringsPath = infoOutput.generatedSymbolFilesByXCStringsPath + return payload + }) + ) return response } catch { return ErrorResponse("could not generate localization info: \(error)") @@ -949,7 +965,7 @@ private struct BuildDescriptionTargetInfoMsg: MessageHandler { let targets = buildDescription.allConfiguredTargets.reduce(into: Set()) { set, configuredTarget in set.insert(configuredTarget.target) } - return StringListResponse(targets.map{$0.guid}) + return StringListResponse(targets.map { $0.guid }) case .failed(let msg): return msg } @@ -966,7 +982,7 @@ private final class PreviewingOperation: InfoOperation, PreviewInfoDelegate { .init(_diagnosticsEngine) } - var diagnostics: [ConfiguredTarget? : [Diagnostic]] { + var diagnostics: [ConfiguredTarget?: [Diagnostic]] { [nil: _diagnosticsEngine.diagnostics] } @@ -976,14 +992,14 @@ private final class PreviewingOperation: InfoOperation, PreviewInfoDelegate { package var hadErrors: Bool { !errorDiagnostics.isEmpty } - package func updateProgress(statusMessage: String, showInLog: Bool) { } + package func updateProgress(statusMessage: String, showInLog: Bool) {} package func beginActivity(ruleInfo: String, executionDescription: String, signature: ByteString, target: ConfiguredTarget?, parentActivity: ActivityID?) 
-> ActivityID { .init(rawValue: -1) } - package func endActivity(id: ActivityID, signature: ByteString, status: BuildOperationTaskEnded.Status) { } - package func emit(data: [UInt8], for activity: ActivityID, signature: ByteString) { } - package func emit(diagnostic: Diagnostic, for activity: ActivityID, signature: ByteString) { } + package func endActivity(id: ActivityID, signature: ByteString, status: BuildOperationTaskEnded.Status) {} + package func emit(data: [UInt8], for activity: ActivityID, signature: ByteString) {} + package func emit(diagnostic: Diagnostic, for activity: ActivityID, signature: ByteString) {} - package func buildDescriptionCreated(_ buildDescriptionID: BuildDescriptionID) { } + package func buildDescriptionCreated(_ buildDescriptionID: BuildDescriptionID) {} package init(clientDelegate: any ClientDelegate, workspace: SWBCore.Workspace) { self.clientDelegate = clientDelegate @@ -1057,7 +1073,7 @@ extension MessageHandler { targetIDs: message.targetIDs, input: message.generatePreviewInfoInput ) - responses.append(contentsOf: output.map{ $0.asMessagePayload() }) + responses.append(contentsOf: output.map { $0.asMessagePayload() }) return PreviewInfoResponse( targetIDs: message.targetIDs, @@ -1065,7 +1081,7 @@ extension MessageHandler { ) } catch { if operation.hadErrors { - let errorStrings = operation.errorDiagnostics.map{$0.formatLocalizedDescription(.debug)} + let errorStrings = operation.errorDiagnostics.map { $0.formatLocalizedDescription(.debug) } return ErrorResponse("could not generate preview info: \(error)\ndiagnostics:\n\(errorStrings.joined(separator: "\n"))") } else { return ErrorResponse("could not generate preview info: \(error)") @@ -1221,7 +1237,6 @@ private struct DeveloperPathHandler: MessageHandler { } } - final package class BuildDependencyInfoOperation: InfoOperation, TargetDependencyResolverDelegate { private let _diagnosticsEngine = DiagnosticsEngine() @@ -1229,11 +1244,11 @@ final package class BuildDependencyInfoOperation: InfoOperation, TargetDependenc .init(_diagnosticsEngine) } - var diagnostics: [ConfiguredTarget? : [Diagnostic]] { + var diagnostics: [ConfiguredTarget?: [Diagnostic]] { [nil: _diagnosticsEngine.diagnostics] } - package func updateProgress(statusMessage: String, showInLog: Bool) { } + package func updateProgress(statusMessage: String, showInLog: Bool) {} // We don't care about the individual diagnostics here, only if there was at least one error. 
package private(set) var hadErrors = false @@ -1268,8 +1283,7 @@ private struct DumpBuildDependencyInfoMsg: MessageHandler { let buildDependencyInfo: BuildDependencyInfo do { buildDependencyInfo = try await BuildDependencyInfo(workspaceContext: workspaceContext, buildRequest: buildRequest, buildRequestContext: buildRequestContext, operation: operation) - } - catch { + } catch { return ErrorResponse(error.localizedDescription) } @@ -1285,15 +1299,13 @@ private struct DumpBuildDependencyInfoMsg: MessageHandler { let encoder = JSONEncoder() encoder.outputFormatting = [.prettyPrinted, .sortedKeys, .withoutEscapingSlashes] jsonData = try encoder.encode(buildDependencyInfo) - } - catch { + } catch { return ErrorResponse("Unable to serialize build dependency info: \(error.localizedDescription)") } do { try localFS.createDirectory(outputPath.dirname, recursive: true) try localFS.write(outputPath, contents: ByteString(jsonData)) - } - catch { + } catch { return ErrorResponse("Unable to write build dependency info: \(error.localizedDescription)") } @@ -1304,10 +1316,8 @@ private struct DumpBuildDependencyInfoMsg: MessageHandler { } } - // MARK: Evaluating macros - /// Returns a `Settings` object for a `MacroEvaluationRequestContext` (creating it if necessary) for a `MacroEvaluationRequestLevel` and `BuildParameters`. private func getSettings(for session: Session, workspaceContext: WorkspaceContext, level: MacroEvaluationRequestLevel, buildParameters: BuildParameters, purpose: SettingsPurpose) throws -> Settings { let buildRequestContext = BuildRequestContext(workspaceContext: workspaceContext) @@ -1367,11 +1377,12 @@ private struct MacroEvaluationMsg: MessageHandler { } return result }() - lookup = lookupOverrides.count > 0 ? { macro in - return lookupOverrides[macro] - } : nil - } - else { + lookup = + lookupOverrides.count > 0 + ? { macro in + return lookupOverrides[macro] + } : nil + } else { lookup = nil } @@ -1395,8 +1406,7 @@ private struct MacroEvaluationMsg: MessageHandler { } else if let macroDefn = macroDefn as? PathListMacroDeclaration { let result: [String] = scope.evaluate(macroDefn, lookup: lookup) return MacroEvaluationResponse(result: .stringList(result)) - } - else { + } else { // This is not a macro string list definition, so evaluate it as a string and return it as a single-element array. 
let result: String = scope.evaluateAsString(macroDefn, lookup: lookup) return MacroEvaluationResponse(result: .stringList([result])) @@ -1473,7 +1483,6 @@ private struct BuildSettingsEditorInfoMsg: MessageHandler { } } - // MARK: Testing & Debugging Commands private struct ExecuteCommandLineToolMsg: MessageHandler { @@ -1495,11 +1504,17 @@ private struct ExecuteCommandLineToolMsg: MessageHandler { request.service.send(message.replyChannel, BoolResponse(false)) return } - let result = await executeInternalTool(core: core, commandLine: message.commandLine, workingDirectory: message.workingDirectory, stdoutHandler: { - request.service.send(message.replyChannel, StringResponse($0)) - }, stderrHandler: { - request.service.send(message.replyChannel, ErrorResponse($0)) - }) + let result = await executeInternalTool( + core: core, + commandLine: message.commandLine, + workingDirectory: message.workingDirectory, + stdoutHandler: { + request.service.send(message.replyChannel, StringResponse($0)) + }, + stderrHandler: { + request.service.send(message.replyChannel, ErrorResponse($0)) + } + ) request.service.send(message.replyChannel, BoolResponse(result)) } } @@ -1668,12 +1683,14 @@ extension Session { fileprivate func withInfoOperation(operation: InfoOperation, qos: SWBQoS, requestForReply: Request, lock: ActorLock, _ work: @escaping @Sendable () async -> any Message) { registerInfoOperation(operation) - operation.addTask(_Concurrency.Task(priority: .init(buildRequestQoS: qos)) { - await lock.withLock { - let message = await work() - unregisterInfoOperation(operation) - requestForReply.reply(message) + operation.addTask( + _Concurrency.Task(priority: .init(buildRequestQoS: qos)) { + await lock.withLock { + let message = await work() + unregisterInfoOperation(operation) + requestForReply.reply(message) + } } - }) + ) } } diff --git a/Sources/SWBBuildService/PlanningOperation.swift b/Sources/SWBBuildService/PlanningOperation.swift index 3f2d7dfa..37b16d4e 100644 --- a/Sources/SWBBuildService/PlanningOperation.swift +++ b/Sources/SWBBuildService/PlanningOperation.swift @@ -48,8 +48,7 @@ package final class PlanningOperation: Sendable { /// Concurrent queue used to dispatch work to the background. private let workQueue: SWBQueue - init(request: Request, session: Session, workspaceContext: WorkspaceContext, buildRequest: BuildRequest, buildRequestContext: BuildRequestContext, delegate: any PlanningOperationDelegate) - { + init(request: Request, session: Session, workspaceContext: WorkspaceContext, buildRequest: BuildRequest, buildRequestContext: BuildRequestContext, delegate: any PlanningOperationDelegate) { self.request = request self.uuid = UUID() self.session = session @@ -94,32 +93,34 @@ package final class PlanningOperation: Sendable { // We now need to request all the provisioning inputs, which we do in parallel. let provisioningInputs: [ConfiguredTarget: ProvisioningTaskInputs] do { - provisioningInputs = try await Dictionary(uniqueKeysWithValues: withThrowingTaskGroup(of: (ConfiguredTarget, ProvisioningTaskInputs).self) { [delegate] group in - return try await delegate.withActivity(ruleInfo: "GatherProvisioningInputs", executionDescription: "Gather provisioning inputs", signature: "gather_provisioning_inputs", target: nil, parentActivity: nil) { [delegate] activity in - for (index, target) in graph.allTargets.enumerated() { - group.addTask { - try _Concurrency.Task.checkCancellation() - - // Dispatch the request for inputs. 
- let inputs = await self.getProvisioningTaskInputs(for: target) - - if self.workspaceContext.userPreferences.enableDebugActivityLogs { - delegate.emit(data: Array("Received inputs for target \(target): \(inputs)\n".utf8), for: activity, signature: "gather_provisioning_inputs") + provisioningInputs = try await Dictionary( + uniqueKeysWithValues: withThrowingTaskGroup(of: (ConfiguredTarget, ProvisioningTaskInputs).self) { [delegate] group in + return try await delegate.withActivity(ruleInfo: "GatherProvisioningInputs", executionDescription: "Gather provisioning inputs", signature: "gather_provisioning_inputs", target: nil, parentActivity: nil) { [delegate] activity in + for (index, target) in graph.allTargets.enumerated() { + group.addTask { + try _Concurrency.Task.checkCancellation() + + // Dispatch the request for inputs. + let inputs = await self.getProvisioningTaskInputs(for: target) + + if self.workspaceContext.userPreferences.enableDebugActivityLogs { + delegate.emit(data: Array("Received inputs for target \(target): \(inputs)\n".utf8), for: activity, signature: "gather_provisioning_inputs") + } + + // Register the result. + let numInputs = index + 1 + let provisioningStatus = messageShortening >= .allDynamicText ? "Provisioning \(activityMessageFractionString(numInputs, over: graph.allTargets.count))" : "Getting \(numInputs) of \(graph.allTargets.count) provisioning task inputs" + delegate.updateProgress(statusMessage: provisioningStatus, showInLog: false) + + return (target, inputs) } - - // Register the result. - let numInputs = index + 1 - let provisioningStatus = messageShortening >= .allDynamicText ? "Provisioning \(activityMessageFractionString(numInputs, over: graph.allTargets.count))" : "Getting \(numInputs) of \(graph.allTargets.count) provisioning task inputs" - delegate.updateProgress(statusMessage: provisioningStatus, showInLog: false) - - return (target, inputs) } + return try await group.collect() } - return try await group.collect() } - }) + ) } catch { - return nil // CancellationError + return nil // CancellationError } if messageShortening != .full || self.workspaceContext.userPreferences.enableDebugActivityLogs { @@ -152,14 +153,13 @@ package final class PlanningOperation: Sendable { /// Create the provisioning task inputs for a configured target. private func getProvisioningTaskInputs(for configuredTarget: ConfiguredTarget) async -> ProvisioningTaskInputs { // We only collect provisioning task inputs for standard targets. - if let target = configuredTarget.target as? SWBCore.StandardTarget - { + if let target = configuredTarget.target as? SWBCore.StandardTarget { // Create the settings and collect data we need to ship back to the client. let settings = buildRequestContext.getCachedSettings(configuredTarget.parameters, target: target) // Exit early if code signing is disabled, or if we don't have a valid SDK. guard let project = settings.project, let sdk = settings.sdk, - settings.globalScope.evaluate(BuiltinMacros.CODE_SIGNING_ALLOWED) + settings.globalScope.evaluate(BuiltinMacros.CODE_SIGNING_ALLOWED) else { return ProvisioningTaskInputs() } @@ -211,16 +211,21 @@ package final class PlanningOperation: Sendable { // Based on the conversation from rdar://problem/40909675, the team prefix should have a trailing period (`.`), but **only** if teamID actually has a value. let parsedTeamIdentifierPrefix = settings.userNamespace.parseLiteralString(teamID.flatMap { $0 + "." } ?? 
"") - return rawEntitlements.byEvaluatingMacros(withScope: settings.globalScope, andDictionaryKeys: true, preserveReferencesToSettings: preserveReferencesToSettings, lookup: { macro in - switch macro { - case BuiltinMacros.CFBundleIdentifier: - return parsedBundleIdentifier - case BuiltinMacros.TeamIdentifierPrefix: - return parsedTeamIdentifierPrefix - default: - return nil + return rawEntitlements.byEvaluatingMacros( + withScope: settings.globalScope, + andDictionaryKeys: true, + preserveReferencesToSettings: preserveReferencesToSettings, + lookup: { macro in + switch macro { + case BuiltinMacros.CFBundleIdentifier: + return parsedBundleIdentifier + case BuiltinMacros.TeamIdentifierPrefix: + return parsedTeamIdentifierPrefix + default: + return nil + } } - }) + ) } // We need to read the entitlements from CODE_SIGN_ENTITLEMENTS_CONTENTS and evaluate build settings in it, to send it to Xcode to generate the provisioning inputs. @@ -228,14 +233,12 @@ package final class PlanningOperation: Sendable { if !entitlementsContentsString.isEmpty { if let rawEntitlementsFromBuildSetting = try? PropertyList.fromString(entitlementsContentsString) { entitlementsFromBuildSetting = processRawEntitlements(rawEntitlementsFromBuildSetting) - } - else { + } else { delegate.emit(.default, .init(behavior: .error, location: .buildSetting(name: "CODE_SIGN_ENTITLEMENTS_CONTENTS"), data: .init("The value of CODE_SIGN_ENTITLEMENTS_CONTENTS could not be parsed as entitlements"))) entitlementsFromBuildSetting = nil } - } - else { + } else { entitlementsFromBuildSetting = nil } @@ -244,13 +247,11 @@ package final class PlanningOperation: Sendable { if let entitlementsFilePath = entitlementsFilePath { if let rawEntitlementsFromFile = try? PropertyList.fromPath(entitlementsFilePath, fs: workspaceContext.fs) { entitlementsFromFile = processRawEntitlements(rawEntitlementsFromFile) - } - else { + } else { // FIXME: We should report an issue if we couldn't read the file. Though presently I think the provisioning inputs generation machinery will do that, it might be clearer to just deal with it ourselves. However, if `CODE_SIGN_ALLOW_ENTITLEMENTS_MODIFICATION` is being used, the file path might point to a generated file which hasn't yet been created, and we won't be able to read it anyways. entitlementsFromFile = nil } - } - else { + } else { entitlementsFromFile = nil } @@ -269,14 +270,11 @@ package final class PlanningOperation: Sendable { } return .plDict(entitlementsFromFileDictionary.addingContents(of: entitlementsFromBuildSettingDictionary)) - } - else if let entitlementsFromFile = entitlementsFromFile { + } else if let entitlementsFromFile = entitlementsFromFile { return entitlementsFromFile - } - else if let entitlementsFromBuildSetting = entitlementsFromBuildSetting { + } else if let entitlementsFromBuildSetting = entitlementsFromBuildSetting { return entitlementsFromBuildSetting - } - else { + } else { // Otherwise, we don't have any entitlements, and can just set it to nil. return nil } @@ -294,16 +292,19 @@ package final class PlanningOperation: Sendable { // Create the outstanding request entry. 
return await withCheckedContinuation { continuation in workQueue.async { - self.provisioningTaskInputRequests[configuredTargetHandle] = ProvisioningTaskInputRequest(configuredTarget: configuredTarget, settingsHandle: settingsHandle, bundleIdentifier: bundleIdentifier, completion: { inputs in - continuation.resume(returning: inputs) - }) + self.provisioningTaskInputRequests[configuredTargetHandle] = ProvisioningTaskInputRequest( + configuredTarget: configuredTarget, + settingsHandle: settingsHandle, + bundleIdentifier: bundleIdentifier, + completion: { inputs in + continuation.resume(returning: inputs) + } + ) self.request.send(message) } } - } - else - { + } else { // Other target classes get an empty provisioning object. return ProvisioningTaskInputs() } @@ -322,8 +323,7 @@ package final class PlanningOperation: Sendable { let settings: Settings do { settings = try self.session.settings(for: subrequest.settingsHandle) - } - catch let e as SessionError { + } catch let e as SessionError { let error: String switch e { case .noSettings(let str): @@ -332,8 +332,7 @@ package final class PlanningOperation: Sendable { error = "No settings in session for handle '\(subrequest.settingsHandle)': Handle is for a different workspace" } fatalError(error) - } - catch { + } catch { fatalError("no settings in session for handle '\(subrequest.settingsHandle)': Unknown error") } // Now evaluate settings in the entitlements plists we got back from the client. This includes some settings we evaluated before sending the request because the provisioning inputs generation may add content referring to those settings beyond what we passed to it. @@ -361,8 +360,7 @@ package final class PlanningOperation: Sendable { // Clean up. do { try self.session.unregisterSettings(for: subrequest.settingsHandle) - } - catch let e as SessionError { + } catch let e as SessionError { let error: String switch e { case .noSettings(let str): @@ -371,8 +369,7 @@ package final class PlanningOperation: Sendable { error = "No settings to unregister in session for handle '\(subrequest.settingsHandle)': Handle is for a different workspace" } fatalError(error) - } - catch { + } catch { fatalError("no settings to unregister in session for handle '\(subrequest.settingsHandle)': Unknown error") } } @@ -397,11 +394,8 @@ extension PlanningOperation: TargetDependencyResolverDelegate { } } - -extension ProvisioningTaskInputsSourceData -{ - init(configurationName: String, sourceData: ProvisioningSourceData, provisioningProfileSupport: ProvisioningProfileSupport, provisioningProfileSpecifier: String, provisioningProfileUUID: String, bundleIdentifier: String, productTypeEntitlements: PropertyListItem, productTypeIdentifier: String, projectEntitlementsFile: String?, projectEntitlements: PropertyListItem?, signingCertificateIdentifier: String, signingRequiresTeam: Bool, teamID: String?, sdkRoot: String, sdkVariant: String?, supportsEntitlements: Bool, wantsBaseEntitlementInjection: Bool, entitlementsDestination: String, localSigningStyle: String, enableCloudSigning: Bool) - { +extension ProvisioningTaskInputsSourceData { + init(configurationName: String, sourceData: ProvisioningSourceData, provisioningProfileSupport: ProvisioningProfileSupport, provisioningProfileSpecifier: String, provisioningProfileUUID: String, bundleIdentifier: String, productTypeEntitlements: PropertyListItem, productTypeIdentifier: String, projectEntitlementsFile: String?, projectEntitlements: PropertyListItem?, signingCertificateIdentifier: String, signingRequiresTeam: Bool, 
teamID: String?, sdkRoot: String, sdkVariant: String?, supportsEntitlements: Bool, wantsBaseEntitlementInjection: Bool, entitlementsDestination: String, localSigningStyle: String, enableCloudSigning: Bool) { self.init(configurationName: configurationName, provisioningProfileSupport: provisioningProfileSupport, provisioningProfileSpecifier: provisioningProfileSpecifier, provisioningProfileUUID: provisioningProfileUUID, provisioningStyle: sourceData.provisioningStyle, teamID: teamID, bundleIdentifier: bundleIdentifier, productTypeEntitlements: productTypeEntitlements, productTypeIdentifier: productTypeIdentifier, projectEntitlementsFile: projectEntitlementsFile, projectEntitlements: projectEntitlements, signingCertificateIdentifier: signingCertificateIdentifier, signingRequiresTeam: signingRequiresTeam, sdkRoot: sdkRoot, sdkVariant: sdkVariant, supportsEntitlements: supportsEntitlements, wantsBaseEntitlementInjection: wantsBaseEntitlementInjection, entitlementsDestination: entitlementsDestination, localSigningStyle: localSigningStyle, enableCloudSigning: enableCloudSigning) } } diff --git a/Sources/SWBBuildService/PreviewInfo.swift b/Sources/SWBBuildService/PreviewInfo.swift index 5b9dbf99..7570a94f 100644 --- a/Sources/SWBBuildService/PreviewInfo.swift +++ b/Sources/SWBBuildService/PreviewInfo.swift @@ -149,9 +149,9 @@ extension BuildDescriptionManager { guard let potentialTargets = targetsByGuid[targetID] else { continue } for configuredTarget in potentialTargets { if case .thunkInfo = input, - let packageProductTarget = configuredTarget.target as? PackageProductTarget, - let dynamicTargetVariantGuid = packageProductTarget.dynamicTargetVariantGuid, - let dynamicTargetVariants = targetsByGuid[dynamicTargetVariantGuid] + let packageProductTarget = configuredTarget.target as? PackageProductTarget, + let dynamicTargetVariantGuid = packageProductTarget.dynamicTargetVariantGuid, + let dynamicTargetVariants = targetsByGuid[dynamicTargetVariantGuid] { targets.append(contentsOf: dynamicTargetVariants) } else { @@ -242,7 +242,7 @@ extension BuildDescription { case 1: let info = infos[0] linkInfo = info - assert(requestingTargetDependencyInfo || compileInfo?.output == info.input) // make sure the tasks match up. + assert(requestingTargetDependencyInfo || compileInfo?.output == info.input) // make sure the tasks match up. case 0: // No link task because the target type is a static library (which doesn't support previews), or previews dylib is enabled continue diff --git a/Sources/SWBBuildService/Session.swift b/Sources/SWBBuildService/Session.swift index 56f06a01..b0c8e1df 100644 --- a/Sources/SWBBuildService/Session.swift +++ b/Sources/SWBBuildService/Session.swift @@ -205,10 +205,8 @@ public final class Session { return startPIFTransfer(workspaceSignature: workspaceSignature) } - // MARK: Support for saving macro evaluation scopes to look up by a handle (UUID). - /// The active Settings objects being vended for use by the client. /// - remark: This is presently only used in `PlanningOperation` to be able to evaluate some settings after receiving provisioning inputs from the client, without having to reconstruct the `ConfiguredTarget` in that asynchronous operation. var registeredSettings = Registry() @@ -235,10 +233,8 @@ public final class Session { return settings } - // MARK: Planning operation support - /// The active planning operations, if any. /// /// The client is responsible for closing these.
@@ -267,7 +263,6 @@ public final class Session { planningOperation.request.send(PlanningOperationDidFinish(sessionHandle: UID, planningOperationHandle: planningOperation.uuid.description)) } - // MARK: Client exchange objects // FIXME: This should just map on the UUID type, not a string. @@ -291,7 +286,6 @@ public final class Session { activeClientExchanges.removeValue(forKey: exchange.uuid.description) } - // MARK: Information operation support /// The active information operations @@ -317,7 +311,7 @@ public final class Session { /// Cancel ongoing information operations func cancelInfoOperations() { activeInfoOperations.forEach { - $0.1.cancel() + $0.1.cancel() } } @@ -329,7 +323,6 @@ public final class Session { } } - // MARK: Build operation support /// The active build operations @@ -337,12 +330,12 @@ public final class Session { /// Returns the normal build operations, excluding the ones that are for the index. private var activeNormalBuilds: [any ActiveBuildOperation] { - return activeBuilds.values.filter{ !$0.buildRequest.enableIndexBuildArena && !$0.onlyCreatesBuildDescription } + return activeBuilds.values.filter { !$0.buildRequest.enableIndexBuildArena && !$0.onlyCreatesBuildDescription } } /// Returns index build operations. private var activeIndexBuilds: [any ActiveBuildOperation] { - return activeBuilds.values.filter{ $0.buildRequest.enableIndexBuildArena && !$0.onlyCreatesBuildDescription } + return activeBuilds.values.filter { $0.buildRequest.enableIndexBuildArena && !$0.onlyCreatesBuildDescription } } /// Registers a build operation with the session @@ -351,7 +344,8 @@ public final class Session { // But we do allow build description creation operations to run concurrently with normal builds. These are important for index queries to function properly even during a build. // We also allow 'prepare-for-index' build operations to run concurrently with a normal build but only one at a time. These are important for functionality in the Xcode editor to work properly, that the user directly interacts with. if !build.onlyCreatesBuildDescription { - let (buildType, existingBuilds) = build.buildRequest.enableIndexBuildArena + let (buildType, existingBuilds) = + build.buildRequest.enableIndexBuildArena ? ("index", activeIndexBuilds) : ("normal", activeNormalBuilds) @@ -380,14 +374,12 @@ public final class Session { } } - /// A client exchange is used to send a request to the client and handle its response. The service creates and discards these, and is responsible for adding and removing them from the session. protocol ClientExchange { /// The stable UUID of the receiver. var uuid: UUID { get } } - // Session Extensions extension Request { diff --git a/Sources/SWBBuildService/Tools.swift b/Sources/SWBBuildService/Tools.swift index 0d6bfe3b..3843ac36 100644 --- a/Sources/SWBBuildService/Tools.swift +++ b/Sources/SWBBuildService/Tools.swift @@ -155,9 +155,9 @@ private class MsgPackDumpTool { // Consume all the objects in the file. enum DecoderItem { /// A dictionary with N elements remaining. - case dict(count: Int, atKey: Bool) + case dict(count: Int, atKey: Bool) /// An array with N elements remaining. - case array(count: Int) + case array(count: Int) } var stack = [DecoderItem]() var indent: String { @@ -193,7 +193,7 @@ private class MsgPackDumpTool { } if decoder.consumedCount == data.count { - continue // at EOF, go back through and finish the indenting + continue // at EOF, go back through and finish the indenting } // Read the next item from the decoder. 
@@ -229,7 +229,6 @@ private class MsgPackDumpTool { } } - /// Utilities for working with headermaps. private class HeadermapTool { /// The parsed command line options. @@ -476,8 +475,6 @@ private class ClangScanTool { } } - - private class SerializedDiagnosticsTool { /// The parsed command line options. struct Options { diff --git a/Sources/SWBBuildSystem/BuildManager.swift b/Sources/SWBBuildSystem/BuildManager.swift index 215fd5eb..ff9dab63 100644 --- a/Sources/SWBBuildSystem/BuildManager.swift +++ b/Sources/SWBBuildSystem/BuildManager.swift @@ -31,13 +31,19 @@ package actor BuildManager { /// Public initializer (so external clients can create new BuildManagers). package init() { - self.cachedBuildSystems = HeavyCache(maximumSize: UserDefaults.buildDescriptionInMemoryCacheSize, evictionPolicy: .default(totalCostLimit: UserDefaults.buildDescriptionInMemoryCostLimit, willEvictCallback: { entry in - // Capture the path to a local variable so that the buildDescription instance isn't retained by OSLog's autoclosure message parameter. - let buildDatabasePath = entry.buildDescription?.buildDatabasePath - #if canImport(os) - OSLog.log("Evicted cached build system for '\(buildDatabasePath?.str ?? "")'") - #endif - })) + self.cachedBuildSystems = HeavyCache( + maximumSize: UserDefaults.buildDescriptionInMemoryCacheSize, + evictionPolicy: .default( + totalCostLimit: UserDefaults.buildDescriptionInMemoryCostLimit, + willEvictCallback: { entry in + // Capture the path to a local variable so that the buildDescription instance isn't retained by OSLog's autoclosure message parameter. + let buildDatabasePath = entry.buildDescription?.buildDatabasePath + #if canImport(os) + OSLog.log("Evicted cached build system for '\(buildDatabasePath?.str ?? "")'") + #endif + } + ) + ) } /// Enqueue a build operation. @@ -57,7 +63,7 @@ package actor BuildManager { buildOnlyThesePaths = nil } - let buildOutputMap: [String:String]? + let buildOutputMap: [String: String]? if let buildOnlyThesePaths { buildOutputMap = { var outputMap: [String: String] = [:] diff --git a/Sources/SWBBuildSystem/BuildOperation.swift b/Sources/SWBBuildSystem/BuildOperation.swift index 8129e0aa..0239583c 100644 --- a/Sources/SWBBuildSystem/BuildOperation.swift +++ b/Sources/SWBBuildSystem/BuildOperation.swift @@ -244,7 +244,7 @@ package final class BuildOperation: BuildSystemOperation { private var wasAbortRequested = false /// Optional map of a subset of files to build and their desired output paths - private let buildOutputMap: [String:String]? + private let buildOutputMap: [String: String]? /// Optional list of a subset of nodes to build private let nodesToBuild: [BuildDescription.BuildNodeToPrepareForIndex]? @@ -265,7 +265,7 @@ package final class BuildOperation: BuildSystemOperation { package let cachedBuildSystems: any BuildSystemCache - package init(_ request: BuildRequest, _ requestContext: BuildRequestContext, _ buildDescription: BuildDescription, environment: [String: String]? = nil, _ delegate: any BuildOperationDelegate, _ clientDelegate: any ClientDelegate, _ cachedBuildSystems: any BuildSystemCache, persistent: Bool = false, serial: Bool = false, buildOutputMap: [String:String]? = nil, nodesToBuild: [BuildDescription.BuildNodeToPrepareForIndex]? = nil, workspace: SWBCore.Workspace, core: Core, userPreferences: UserPreferences) { + package init(_ request: BuildRequest, _ requestContext: BuildRequestContext, _ buildDescription: BuildDescription, environment: [String: String]? 
= nil, _ delegate: any BuildOperationDelegate, _ clientDelegate: any ClientDelegate, _ cachedBuildSystems: any BuildSystemCache, persistent: Bool = false, serial: Bool = false, buildOutputMap: [String: String]? = nil, nodesToBuild: [BuildDescription.BuildNodeToPrepareForIndex]? = nil, workspace: SWBCore.Workspace, core: Core, userPreferences: UserPreferences) { self.uuid = UUID() self.request = request self.requestContext = requestContext @@ -290,7 +290,7 @@ package final class BuildOperation: BuildSystemOperation { buildOutputDelegate = delegate.buildStarted(self) // Report the copied path map. - delegate.reportPathMap(self, copiedPathMap: buildDescription.copiedPathMap, generatedFilesPathMap: buildOutputMap ?? [String:String]()) + delegate.reportPathMap(self, copiedPathMap: buildDescription.copiedPathMap, generatedFilesPathMap: buildOutputMap ?? [String: String]()) // Report the diagnostics from task construction. // @@ -335,9 +335,16 @@ package final class BuildOperation: BuildSystemOperation { switch overrides { case let .table(overrides): if !overrides.isEmpty { - buildOutputDelegate.emit(Diagnostic(behavior: .note, location: .unknown, data: DiagnosticData("Applying \(infix) build settings"), childDiagnostics: overrides.sorted(by: \.0).map { (key, value) in - Diagnostic(behavior: .note, location: .unknown, data: DiagnosticData("\(key)=\(value)")) - })) + buildOutputDelegate.emit( + Diagnostic( + behavior: .note, + location: .unknown, + data: DiagnosticData("Applying \(infix) build settings"), + childDiagnostics: overrides.sorted(by: \.0).map { (key, value) in + Diagnostic(behavior: .note, location: .unknown, data: DiagnosticData("\(key)=\(value)")) + } + ) + ) } case let .file(path): buildOutputDelegate.emit(Diagnostic(behavior: .note, location: .unknown, data: DiagnosticData("Applying \(infix) build settings from path: \(path.str)"))) @@ -354,7 +361,7 @@ package final class BuildOperation: BuildSystemOperation { // If the build has been cancelled before it really began, we can bail out early do { - let isCancelled = await queue.sync{ self.wasCancellationRequested } + let isCancelled = await queue.sync { self.wasCancellationRequested } if !UserDefaults.skipEarlyBuildOperationCancellation && isCancelled { let effectiveStatus = BuildOperationEnded.Status.cancelled delegate.buildComplete(self, status: effectiveStatus, delegate: buildOutputDelegate, metrics: nil) @@ -408,7 +415,7 @@ package final class BuildOperation: BuildSystemOperation { debuggingDataPath = nil } - var buildEnvironment: [String:String] = [:] + var buildEnvironment: [String: String] = [:] if let actualEnvironment = environment { buildEnvironment.addContents(of: actualEnvironment) @@ -542,7 +549,7 @@ package final class BuildOperation: BuildSystemOperation { // Build selected nodes, the build fails if one operation failed. 
var currResult = true for nodeToPrepare in buildOnlyTheseNodes { - let isCancelled = await queue.sync{ self.wasCancellationRequested } + let isCancelled = await queue.sync { self.wasCancellationRequested } if isCancelled { currResult = false break @@ -704,14 +711,14 @@ package final class BuildOperation: BuildSystemOperation { } #if canImport(Darwin) - do { - if let xcbuildDataArchive = getEnvironmentVariable("XCBUILDDATA_ARCHIVE")?.nilIfEmpty.map(Path.init) { - let archive = XCBuildDataArchive(filePath: xcbuildDataArchive) - try archive.appendBuildDataDirectory(from: buildDescription.dir, uuid: uuid) + do { + if let xcbuildDataArchive = getEnvironmentVariable("XCBUILDDATA_ARCHIVE")?.nilIfEmpty.map(Path.init) { + let archive = XCBuildDataArchive(filePath: xcbuildDataArchive) + try archive.appendBuildDataDirectory(from: buildDescription.dir, uuid: uuid) + } + } catch { + self.buildOutputDelegate.error("unable to process build ended event via the BuildOperationExtensionPoint: \(error)") } - } catch { - self.buildOutputDelegate.error("unable to process build ended event via the BuildOperationExtensionPoint: \(error)") - } #endif if let swiftBuildTraceFilePath = getEnvironmentVariable("SWIFTBUILD_TRACE_FILE")?.nilIfEmpty.map(Path.init) ?? getEnvironmentVariable("XCBUILDDATA_ARCHIVE")?.nilIfEmpty.map(Path.init)?.dirname.join(".SWIFTBUILD_TRACE") { @@ -722,7 +729,7 @@ package final class BuildOperation: BuildSystemOperation { let path: String } do { - let traceEntry = SwiftDataTraceEntry( + let traceEntry = SwiftDataTraceEntry( buildDescriptionSignature: buildDescription.signature.asString, isTargetParallelizationEnabled: request.useParallelTargets, name: workspace.name, @@ -743,7 +750,7 @@ package final class BuildOperation: BuildSystemOperation { let effectiveStatus: BuildOperationEnded.Status? switch (isCancelled, isAborted) { case (true, false), (true, true): - effectiveStatus = .cancelled // cancelled always wins over aborted + effectiveStatus = .cancelled // cancelled always wins over aborted case (false, true): effectiveStatus = .failed case (false, false): @@ -755,7 +762,7 @@ package final class BuildOperation: BuildSystemOperation { } func prepareForBuilding() async -> ([String], [String])? { - let warnings = [String]() // Not presently used + let warnings = [String]() // Not presently used var errors = [String]() // Create the module session file if necessary. 
@@ -765,11 +772,9 @@ package final class BuildOperation: BuildSystemOperation { do { try fs.createDirectory(moduleSessionFilePath.dirname, recursive: true) try fs.write(moduleSessionFilePath, contents: fileContents) - } - catch let err as SWBUtil.POSIXError { + } catch let err as SWBUtil.POSIXError { errors.append("unable to write module session file at '\(moduleSessionFilePath.str)': \(err.description)") - } - catch { + } catch { errors.append("unable to write module session file at '\(moduleSessionFilePath.str)': unknown error") } } @@ -814,7 +819,7 @@ package final class BuildOperation: BuildSystemOperation { ] if let pluginPath = info.options.pluginPath { commandLine.append(contentsOf: [ - "-fcas-plugin-path", pluginPath.str + "-fcas-plugin-path", pluginPath.str, ]) } let result: Processes.ExecutionResult = try await clientDelegate.executeExternalTool(commandLine: commandLine) @@ -959,7 +964,7 @@ extension BuildOperation: Hashable { hasher.combine(ObjectIdentifier(self)) } - package static func ==(lhs: BuildOperation, rhs: BuildOperation) -> Bool { + package static func == (lhs: BuildOperation, rhs: BuildOperation) -> Bool { return lhs === rhs } } @@ -969,9 +974,9 @@ extension BuildOperation: Hashable { // these delegates. final class CustomTaskSerializerDelegate: ConfiguredTargetSerializerDelegate { var currentBuildParametersIndex: Int = 0 - var buildParametersIndexes = [BuildParameters : Int]() + var buildParametersIndexes = [BuildParameters: Int]() var currentConfiguredTargetIndex: Int = 0 - var configuredTargetIndexes = [ConfiguredTarget : Int]() + var configuredTargetIndexes = [ConfiguredTarget: Int]() } final class CustomTaskDeserializerDelegate: ConfiguredTargetDeserializerDelegate { @@ -1062,7 +1067,7 @@ private struct OperatorSystemAdaptorDynamicContext: DynamicTaskExecutionDelegate if singleUse { if adaptor.operation.request.recordBuildBacktraces, let reason = reason { // Since this is a single use task, record a backtrace frame describing why it was requested - adaptor.recordBuildBacktraceFrame(identifier: .task(.taskIdentifier(ByteString(encodingAsUTF8: identifier.rawValue))), previousFrameIdentifier: .task(.taskIdentifier(ByteString(encodingAsUTF8: task.identifier.rawValue))), category: .dynamicTaskRequest, kind: reason.backtraceFrameKind, description: reason.description ) + adaptor.recordBuildBacktraceFrame(identifier: .task(.taskIdentifier(ByteString(encodingAsUTF8: identifier.rawValue))), previousFrameIdentifier: .task(.taskIdentifier(ByteString(encodingAsUTF8: task.identifier.rawValue))), category: .dynamicTaskRequest, kind: reason.backtraceFrameKind, description: reason.description) } commandInterface.commandsNeedsSingleUseInput(key: buildKey, inputID: taskID) } else { @@ -1220,9 +1225,9 @@ private class InProcessCommand: SWBLLBuild.ExternalCommand, SWBLLBuild.ExternalD task, dynamicExecutionDelegate: adaptorInterfaceDelegate, executionDelegate: - adaptor.operation, + adaptor.operation, clientDelegate: - adaptor.operation.clientDelegate, + adaptor.operation.clientDelegate, outputDelegate: outputDelegate ) @@ -1283,9 +1288,7 @@ private class BuildValueValidatingInProcessCommand: InProcessCommand, ProducesCu } } - - -private final class InProcessTool: SWBLLBuild.Tool { +private final class InProcessTool: SWBLLBuild.Tool { let actionType: TaskAction.Type let description: BuildDescription let adaptor: OperationSystemAdaptor @@ -1325,12 +1328,14 @@ private final class InProcessTool: SWBLLBuild.Tool { ) guard let dynamicTask = try? 
DynamicTask(from: deserializer), - let spec = DynamicTaskSpecRegistry.spec(for: dynamicTask.toolIdentifier) else { + let spec = DynamicTaskSpecRegistry.spec(for: dynamicTask.toolIdentifier) + else { return nil } guard let executableTask = try? spec.buildExecutableTask(dynamicTask: dynamicTask, context: adaptor.dynamicOperationContext), - let taskAction = try? spec.buildTaskAction(dynamicTaskKey: dynamicTask.taskKey, context: adaptor.dynamicOperationContext) else { + let taskAction = try? spec.buildTaskAction(dynamicTaskKey: dynamicTask.taskKey, context: adaptor.dynamicOperationContext) + else { return nil } @@ -1539,7 +1544,7 @@ internal final class OperationSystemAdaptor: SWBLLBuild.BuildSystemDelegate, Act func cleanupCompilationCache() { let settings = operation.requestContext.getCachedSettings(operation.request.parameters) if settings.globalScope.evaluate(BuiltinMacros.COMPILATION_CACHE_KEEP_CAS_DIRECTORY) { - return // Keep the cache directory. + return // Keep the cache directory. } let cachePath = Path(settings.globalScope.evaluate(BuiltinMacros.COMPILATION_CACHE_CAS_PATH)) @@ -1566,7 +1571,7 @@ internal final class OperationSystemAdaptor: SWBLLBuild.BuildSystemDelegate, Act func cleanupGlobalModuleCache() { let settings = operation.requestContext.getCachedSettings(operation.request.parameters) if settings.globalScope.evaluate(BuiltinMacros.KEEP_GLOBAL_MODULE_CACHE_DIRECTORY) { - return // Keep the cache directory. + return // Keep the cache directory. } let cachePath = settings.globalScope.evaluate(BuiltinMacros.MODULE_CACHE_DIR) @@ -1698,7 +1703,7 @@ internal final class OperationSystemAdaptor: SWBLLBuild.BuildSystemDelegate, Act // Get the task for this command. // // FIXME: Find a better way to maintain command associations. - if let registeredTask = description.taskStore.task(for: identifier) { + if let registeredTask = description.taskStore.task(for: identifier) { return registeredTask } else if let dynamicTask = dynamicTask(for: identifier) { return dynamicTask @@ -2032,9 +2037,10 @@ internal final class OperationSystemAdaptor: SWBLLBuild.BuildSystemDelegate, Act } else { let inputsString = inputDescriptions.joined(separator: ", ") let allInputsAreFiles = inputs.map { ($0.kind == .customTask, $0.key) }.filter { $0 == false && $1.hasPrefix("<") }.isEmpty - let adviceString = inputDescriptions.count > 1 - ? "Did you forget to declare these \(allInputsAreFiles ? "file" : "node")s as outputs of any script phases or custom build rules which produce them?" - : "Did you forget to declare this \(allInputsAreFiles ? "file" : "node") as an output of a script phase or custom build rule which produces it?" + let adviceString = + inputDescriptions.count > 1 + ? "Did you forget to declare these \(allInputsAreFiles ? "file" : "node")s as outputs of any script phases or custom build rules which produce them?" + : "Did you forget to declare this \(allInputsAreFiles ? "file" : "node") as an output of a script phase or custom build rule which produces it?" message = "Build input\(allInputsAreFiles ? " file" : "")\(inputDescriptions.count > 1 ? "s" : "") cannot be found: \(inputsString). \(adviceString)" } // This error happens before the command has started, so we do this here in order to have a command output delegate. 
@@ -2091,7 +2097,8 @@ internal final class OperationSystemAdaptor: SWBLLBuild.BuildSystemDelegate, Act let cmdInfos: [CommandInfo] = commands.compactMap { command in guard let task = lookupTask(TaskIdentifier(command: command)), - let target = task.forTarget else { return nil } + let target = task.forTarget + else { return nil } return CommandInfo(command: command, task: task, target: target) } @@ -2101,9 +2108,11 @@ internal final class OperationSystemAdaptor: SWBLLBuild.BuildSystemDelegate, Act } // Make sure we're stable between runs and then pick the "best" target - guard let selectedTarget = cmdInfos.map(\.target).sorted().one(by: { - self.operation.requestContext.selectConfiguredTargetForIndex($0, $1, hasEnabledIndexBuildArena: true, runDestination: buildRequest.parameters.activeRunDestination) - }) else { + guard + let selectedTarget = cmdInfos.map(\.target).sorted().one(by: { + self.operation.requestContext.selectConfiguredTargetForIndex($0, $1, hasEnabledIndexBuildArena: true, runDestination: buildRequest.parameters.activeRunDestination) + }) + else { // This shouldn't actually be possible - `one` returns `nil` only // the initial array is empty, which it isn't. return nil @@ -2114,10 +2123,12 @@ internal final class OperationSystemAdaptor: SWBLLBuild.BuildSystemDelegate, Act }).richFormattedRuleInfo(workspace: workspace) self.buildOutputDelegate.emit( - Diagnostic(behavior: .warning, - location: .unknown, - data: DiagnosticData("Multiple commands produce '\(output.key)', picked with target '\(selectedTarget.guid)'"), - childDiagnostics: childDiagnostics) + Diagnostic( + behavior: .warning, + location: .unknown, + data: DiagnosticData("Multiple commands produce '\(output.key)', picked with target '\(selectedTarget.guid)'"), + childDiagnostics: childDiagnostics + ) ) return cmdInfos.first(where: { $0.target === selectedTarget })?.command @@ -2203,12 +2214,12 @@ internal final class OperationSystemAdaptor: SWBLLBuild.BuildSystemDelegate, Act case let buildKey as BuildKey.CustomTask: return .task(BuildOperationTaskSignature.taskIdentifier(ByteString(encodingAsUTF8: buildKey.name))) case is BuildKey.DirectoryContents, - is BuildKey.FilteredDirectoryContents, - is BuildKey.DirectoryTreeSignature, - is BuildKey.DirectoryTreeStructureSignature, - is BuildKey.Node, - is BuildKey.Target, - is BuildKey.Stat: + is BuildKey.FilteredDirectoryContents, + is BuildKey.DirectoryTreeSignature, + is BuildKey.DirectoryTreeStructureSignature, + is BuildKey.Node, + is BuildKey.Target, + is BuildKey.Stat: return .genericBuildKey(buildKey.description) default: return nil @@ -2455,7 +2466,7 @@ extension OperationSystemAdaptor: SubtaskProgressReporter { } } -private func ==(lhs: [K: V]?, rhs: [K: V]?) -> Bool { +private func == (lhs: [K: V]?, rhs: [K: V]?) 
-> Bool { switch (lhs, rhs) { case (let lhs?, let rhs?): return lhs == rhs diff --git a/Sources/SWBBuildSystem/BuildOperationExtension.swift b/Sources/SWBBuildSystem/BuildOperationExtension.swift index 9bca060d..02ba82bc 100644 --- a/Sources/SWBBuildSystem/BuildOperationExtension.swift +++ b/Sources/SWBBuildSystem/BuildOperationExtension.swift @@ -25,13 +25,16 @@ package struct BuildOperationExtensionPoint: ExtensionPoint { package static func additionalEnvironmentVariables(pluginManager: any PluginManager, fromEnvironment: @autoclosure () -> [String: String], parameters: @autoclosure () -> BuildParameters) throws -> [String: String] { let (fromEnvironment, parameters) = (fromEnvironment(), parameters()) - return try pluginManager.extensions(of: Self.self).reduce([:], { environment, ext in - try environment.addingContents(of: ext.additionalEnvironmentVariables(fromEnvironment: fromEnvironment, parameters: parameters)) - }) + return try pluginManager.extensions(of: Self.self).reduce( + [:], + { environment, ext in + try environment.addingContents(of: ext.additionalEnvironmentVariables(fromEnvironment: fromEnvironment, parameters: parameters)) + } + ) } } package protocol BuildOperationExtension: Sendable { /// Provides a dictionary of additional environment variables - func additionalEnvironmentVariables(fromEnvironment: [String:String], parameters: BuildParameters) throws -> [String:String] + func additionalEnvironmentVariables(fromEnvironment: [String: String], parameters: BuildParameters) throws -> [String: String] } diff --git a/Sources/SWBBuildSystem/CleanOperation.swift b/Sources/SWBBuildSystem/CleanOperation.swift index 3c5c7054..ab212346 100644 --- a/Sources/SWBBuildSystem/CleanOperation.swift +++ b/Sources/SWBBuildSystem/CleanOperation.swift @@ -160,7 +160,7 @@ package final class CleanOperation: BuildSystemOperation, TargetDependencyResolv } private final class CleanExecutableTask: ExecutableTask { - init(commandLine: [String], workingDirectory: Path, environment: [String:String], configuredTarget: ConfiguredTarget, type: any TaskTypeDescription) { + init(commandLine: [String], workingDirectory: Path, environment: [String: String], configuredTarget: ConfiguredTarget, type: any TaskTypeDescription) { self.commandLine = commandLine.map { .literal(ByteString(encodingAsUTF8: $0)) } self.workingDirectory = workingDirectory self.environment = EnvironmentBindings(environment) @@ -242,7 +242,7 @@ package final class CleanOperation: BuildSystemOperation, TargetDependencyResolv if let reason = error.localizedFailureReason { description += " (\(reason))" } - return NSError(domain: "org.swift.swift-build", code: 0, userInfo: [ NSLocalizedDescriptionKey: "\(message): \(description)" ]) + return NSError(domain: "org.swift.swift-build", code: 0, userInfo: [NSLocalizedDescriptionKey: "\(message): \(description)"]) } private func cleanBuildFolders(buildFolders: Set, buildOutputDelegate: any BuildOutputDelegate) { @@ -283,9 +283,16 @@ package final class CleanOperation: BuildSystemOperation, TargetDependencyResolv } else { message += "." 
} - buildOutputDelegate.emit(Diagnostic(behavior: .error, location: .unknown, data: DiagnosticData(message), childDiagnostics: [ - Diagnostic(behavior: .note, location: .unknown, data: DiagnosticData("To mark this directory as deletable by the build system, run `\(UNIXShellCommandCodec(encodingStrategy: .singleQuotes, encodingBehavior: .fullCommandLine).encode(fs.commandLineArgumentsToApplyCreatedByBuildSystemAttribute(to: buildFolderPath)))` when it is created.")) - ])) + buildOutputDelegate.emit( + Diagnostic( + behavior: .error, + location: .unknown, + data: DiagnosticData(message), + childDiagnostics: [ + Diagnostic(behavior: .note, location: .unknown, data: DiagnosticData("To mark this directory as deletable by the build system, run `\(UNIXShellCommandCodec(encodingStrategy: .singleQuotes, encodingBehavior: .fullCommandLine).encode(fs.commandLineArgumentsToApplyCreatedByBuildSystemAttribute(to: buildFolderPath)))` when it is created.")) + ] + ) + ) } } } diff --git a/Sources/SWBBuildSystem/DependencyCycleFormatter.swift b/Sources/SWBBuildSystem/DependencyCycleFormatter.swift index 88e83e99..33e17a31 100644 --- a/Sources/SWBBuildSystem/DependencyCycleFormatter.swift +++ b/Sources/SWBBuildSystem/DependencyCycleFormatter.swift @@ -25,7 +25,7 @@ private enum TaskType: Equatable { case other } -private func ==(lhs: TaskType, rhs: TaskType) -> Bool { +private func == (lhs: TaskType, rhs: TaskType) -> Bool { switch (lhs, rhs) { case (.beginTargetTask(_), .beginTargetTask(_)): return true case (.endTargetTask(_), .endTargetTask(_)): return true @@ -157,8 +157,7 @@ struct DependencyCycleFormatter { let rulesInCycle: [BuildKey] do { rulesInCycle = try rulesInsideCycle() - } - catch { + } catch { return error.localizedDescription } @@ -186,8 +185,7 @@ struct DependencyCycleFormatter { } else { (cycleOutput, involvesManualTargetOrder) = try formattedMultiTargetCycleOutput(rulesInCycle) } - } - catch { + } catch { return error.localizedDescription } @@ -335,94 +333,93 @@ struct DependencyCycleFormatter { var previousConfiguredTarget: ConfiguredTarget? = nil var previousTargetDependencies = [ResolvedTargetDependency]() var savedTasks = [(rule: BuildKey, task: any ExecutableTask)]() - let cycleMessagePayloads: [DependencyCycleMessagePayload] = (try cycleRules.compactMap({ rule in - // The custom rule explaining the reason for the target dependency, if we create one for this rule. - var messagePayloads: [DependencyCycleMessagePayload]? = nil - - // Get the task for the rule, if any. - let task: (any ExecutableTask)? = try { - if let task = executableTask(for: rule) { - return task - } else if let customRule = rule as? BuildKey.CustomTask, let task = try executableTaskForCustomRule(customRule) { - return task - } else { - return nil - } - }() - - if let task { - if let configuredTarget = task.forTarget { - // We're only going to report anything if we've crossed a target boundary. - // In the future we can adjust this if we find multi-target cycles where tasks within a target are relevant, either materially or to provide useful context to humans. - if configuredTarget != previousConfiguredTarget { - // We only create a message if we have saved tasks. - if !savedTasks.isEmpty, let previousConfiguredTarget { - // If both this and the saved task are gate tasks, then we almost certainly have a target dependency, either explicit, implicit, or due to ordering. - // It would be weird if there is a task without a target between the previous task and this task, which is why we use .first here. 
If we find a scenario where that happens, we can revise this. (We use .first rather than .only so we don't drop something on the floor that we are capable of reporting.) - if task.isGate, let previousTask = savedTasks.first?.task, previousTask.isGate { - // Resolve the dependency between the previous target and this one. - let resolvedTargetDependency = previousTargetDependencies.filter({ $0.target == configuredTarget }).only - - // We only note that manual target order is in use if we see that we've crossed a target boundary but we couldn't find an expressed target dependency. Otherwise it (in theory) doesn't matter whether manual target order is in use, because the target dependencies in the cycle should be the same even if it weren't. - if resolvedTargetDependency == nil, !buildDescription.targetsBuildInParallel { - involvesManualTargetOrder = true - } + let cycleMessagePayloads: [DependencyCycleMessagePayload] = + (try cycleRules.compactMap({ rule in + // The custom rule explaining the reason for the target dependency, if we create one for this rule. + var messagePayloads: [DependencyCycleMessagePayload]? = nil + + // Get the task for the rule, if any. + let task: (any ExecutableTask)? = try { + if let task = executableTask(for: rule) { + return task + } else if let customRule = rule as? BuildKey.CustomTask, let task = try executableTaskForCustomRule(customRule) { + return task + } else { + return nil + } + }() + + if let task { + if let configuredTarget = task.forTarget { + // We're only going to report anything if we've crossed a target boundary. + // In the future we can adjust this if we find multi-target cycles where tasks within a target are relevant, either materially or to provide useful context to humans. + if configuredTarget != previousConfiguredTarget { + // We only create a message if we have saved tasks. + if !savedTasks.isEmpty, let previousConfiguredTarget { + // If both this and the saved task are gate tasks, then we almost certainly have a target dependency, either explicit, implicit, or due to ordering. + // It would be weird if there is a task without a target between the previous task and this task, which is why we use .first here. If we find a scenario where that happens, we can revise this. (We use .first rather than .only so we don't drop something on the floor that we are capable of reporting.) + if task.isGate, let previousTask = savedTasks.first?.task, previousTask.isGate { + // Resolve the dependency between the previous target and this one. + let resolvedTargetDependency = previousTargetDependencies.filter({ $0.target == configuredTarget }).only + + // We only note that manual target order is in use if we see that we've crossed a target boundary but we couldn't find an expressed target dependency. Otherwise it (in theory) doesn't matter whether manual target order is in use, because the target dependencies in the cycle should be the same even if it weren't. + if resolvedTargetDependency == nil, !buildDescription.targetsBuildInParallel { + involvesManualTargetOrder = true + } - // Create a new custom task with the message describing the dependency between these targets. - // Note that we would only not have a previous target here if we're still in the first target of the cycle. But we should find the dependency again later on. - // FIXME: This doesn't handle the case where the workspace has multiple targets with the same name (in different projects). 
- let targetName = configuredTarget.target.name - let previousTargetName = previousConfiguredTarget.target.name - let message: String - switch resolvedTargetDependency?.reason { - case .explicit?: - message = "Target '\(previousTargetName)' has an explicit dependency on Target '\(targetName)'" - case let .implicitBuildPhaseLinkage(filename, _, buildPhase)?: - message = "Target '\(previousTargetName)' has an implicit dependency on Target '\(targetName)' because '\(previousTargetName)' references the file '\(filename)' in the build phase '\(buildPhase)'" - case let .implicitBuildSetting(settingName, options)?: - message = "Target '\(previousTargetName)' has an implicit dependency on Target '\(targetName)' because '\(previousTargetName)' defines the option '\(options.joined(separator: " "))' in the build setting '\(settingName)'" - case let .impliedByTransitiveDependencyViaRemovedTargets(intermediateTargetName: intermediateTargetName): - message = "Target '\(previousTargetName)' has a dependency on Target '\(targetName)' via its transitive dependency through '\(intermediateTargetName)'" - case nil: - if !buildDescription.targetsBuildInParallel { - message = "Target '\(previousTargetName)' is ordered after Target '\(targetName)' in a “Target Dependencies” build phase" + (buildRequest.schemeCommand != nil ? " or in the scheme" : "") - } else { - message = "Target '\(previousTargetName)' depends on Target '\(targetName)', but the reason for the dependency could not be determined" + // Create a new custom task with the message describing the dependency between these targets. + // Note that we would only not have a previous target here if we're still in the first target of the cycle. But we should find the dependency again later on. + // FIXME: This doesn't handle the case where the workspace has multiple targets with the same name (in different projects). + let targetName = configuredTarget.target.name + let previousTargetName = previousConfiguredTarget.target.name + let message: String + switch resolvedTargetDependency?.reason { + case .explicit?: + message = "Target '\(previousTargetName)' has an explicit dependency on Target '\(targetName)'" + case let .implicitBuildPhaseLinkage(filename, _, buildPhase)?: + message = "Target '\(previousTargetName)' has an implicit dependency on Target '\(targetName)' because '\(previousTargetName)' references the file '\(filename)' in the build phase '\(buildPhase)'" + case let .implicitBuildSetting(settingName, options)?: + message = "Target '\(previousTargetName)' has an implicit dependency on Target '\(targetName)' because '\(previousTargetName)' defines the option '\(options.joined(separator: " "))' in the build setting '\(settingName)'" + case let .impliedByTransitiveDependencyViaRemovedTargets(intermediateTargetName: intermediateTargetName): + message = "Target '\(previousTargetName)' has a dependency on Target '\(targetName)' via its transitive dependency through '\(intermediateTargetName)'" + case nil: + if !buildDescription.targetsBuildInParallel { + message = "Target '\(previousTargetName)' is ordered after Target '\(targetName)' in a “Target Dependencies” build phase" + (buildRequest.schemeCommand != nil ? 
" or in the scheme" : "") + } else { + message = "Target '\(previousTargetName)' depends on Target '\(targetName)', but the reason for the dependency could not be determined" + } } + + messagePayloads = [DependencyCycleMessagePayload(configuredTarget, .message(message))] } - messagePayloads = [DependencyCycleMessagePayload(configuredTarget, .message(message))] + // If the two tasks are not gate tasks, then we're crossing a target boundary for a different reason and we need to create a message from the tasks and the intervening node. + else { + messagePayloads = savedTasks.enumerated().map({ index, saved in DependencyCycleMessagePayload(index == 0 ? previousConfiguredTarget : nil, .rule(saved.rule)) }) + [DependencyCycleMessagePayload(configuredTarget, .rule(rule))] + } } - // If the two tasks are not gate tasks, then we're crossing a target boundary for a different reason and we need to create a message from the tasks and the intervening node. - else { - messagePayloads = savedTasks.enumerated().map({ index, saved in DependencyCycleMessagePayload( index == 0 ? previousConfiguredTarget : nil, .rule(saved.rule)) }) + - [DependencyCycleMessagePayload(configuredTarget, .rule(rule))] - } + // Record the current target as the new previous target. Also zero out previousTargetDependencies since we're in a new target. + previousConfiguredTarget = configuredTarget + previousTargetDependencies = [] } - // Record the current target as the new previous target. Also zero out previousTargetDependencies since we're in a new target. - previousConfiguredTarget = configuredTarget - previousTargetDependencies = [] + // Save this task rule and task. + savedTasks = [(rule, task)] + } else { + // If this task doesn't have a target, then add this task rule and task to the saved list so we have tasks going back to the last task with a target. + savedTasks.append((rule, task)) } - // Save this task rule and task. - savedTasks = [(rule, task)] - } - else { - // If this task doesn't have a target, then add this task rule and task to the saved list so we have tasks going back to the last task with a target. - savedTasks.append((rule, task)) - } - - // If this task has target dependencies, then record them as the previous target's dependencies. - // Note that only (some?) gate tasks populate this property; see the comment in ExecutableTask.targetDependencies. - if !task.targetDependencies.isEmpty { - previousTargetDependencies = task.targetDependencies + // If this task has target dependencies, then record them as the previous target's dependencies. + // Note that only (some?) gate tasks populate this property; see the comment in ExecutableTask.targetDependencies. + if !task.targetDependencies.isEmpty { + previousTargetDependencies = task.targetDependencies + } } - } - return messagePayloads - }) as [[DependencyCycleMessagePayload]]).reduce([], +) + return messagePayloads + }) as [[DependencyCycleMessagePayload]]).reduce([], +) // Create the formatted output for the cycle. let messages: [(target: ConfiguredTarget?, message: String)] = try cycleMessagePayloads.compactMap { payload in @@ -462,14 +459,16 @@ struct DependencyCycleFormatter { // For target dependencies, remove the task that led to it. 
var effectiveRules = cycleRules for rule in cycleRules.enumerated().dropLast(4) { - let nodePairs = [("entry", "end"), - ("begin-compiling", "modules-ready"), - ("begin-linking", "linker-inputs-ready")] + let nodePairs = [ + ("entry", "end"), + ("begin-compiling", "modules-ready"), + ("begin-linking", "linker-inputs-ready"), + ] for (begin, end) in nodePairs { // Check if the successor build keys match a target dependency: // target entry node => target entry gate task => target end node => target end gate task - if let cmd = self.executableTask(for: cycleRules[rule.offset + 2]), cmd.taskType == .beginTargetTask(targetName: ""), cycleRules[rule.offset + 1].key.hasSuffix("-\(begin)>") { + if let cmd = self.executableTask(for: cycleRules[rule.offset + 2]), cmd.taskType == .beginTargetTask(targetName: ""), cycleRules[rule.offset + 1].key.hasSuffix("-\(begin)>") { if let cmd = self.executableTask(for: cycleRules[rule.offset + 4]), cmd.taskType == .endTargetTask(targetName: ""), cycleRules[rule.offset + 3].key.hasSuffix("-\(end)>") { // If they do, replace the current task, because only the target's name is relevant but not the concrete task. if let targetName = self.executableTask(for: rule.element)?.forTarget?.target.name { @@ -594,15 +593,15 @@ struct DependencyCycleFormatter { } else { rawDescription = String(bytes: rule.keyData, encoding: .utf8) ?? rule.key } - default: - rawDescription = rule.key + default: + rawDescription = rule.key } return "\(rule.kind): \(rawDescription)" } /// Look up and return an `ExecutableTask` from the `BuildDescription` if `buildKey` is a command. private func executableTask(for buildKey: BuildKey) -> (any ExecutableTask)? { - return buildKey.kind == .command ? buildDescription.taskStore.task(for: TaskIdentifier(rawValue: buildKey.key)) : nil + return buildKey.kind == .command ? buildDescription.taskStore.task(for: TaskIdentifier(rawValue: buildKey.key)) : nil } /// Grab target names from the rules involved in the cycle. diff --git a/Sources/SWBBuildSystem/SandboxViolations.swift b/Sources/SWBBuildSystem/SandboxViolations.swift index be69d624..b92e3b19 100644 --- a/Sources/SWBBuildSystem/SandboxViolations.swift +++ b/Sources/SWBBuildSystem/SandboxViolations.swift @@ -15,7 +15,7 @@ import SWBUtil package import SWBCore #if os(macOS) -import OSLog + import OSLog #endif extension ExecutableTask { @@ -31,45 +31,46 @@ extension ExecutableTask { package func extractSandboxViolationMessages_ASYNC_UNSAFE(startTime: Date) -> [String] { var res: [String] = [] #if os(macOS) - withUnsafeCurrentTask { task in - if task != nil { - preconditionFailure("This function should not be invoked from the Swift Concurrency thread pool as it may lead to deadlock via thread starvation.") + withUnsafeCurrentTask { task in + if task != nil { + preconditionFailure("This function should not be invoked from the Swift Concurrency thread pool as it may lead to deadlock via thread starvation.") + } } - } - if let store = try? OSLogStore.local() { - let query = String("((processID == 0 AND senderImagePath CONTAINS[c] \"/Sandbox\") OR (process == \"sandboxd\" AND subsystem == \"com.apple.sandbox.reporting\")) AND (eventMessage CONTAINS[c] %@)") - let endTime = Date() - let duration = -DateInterval(start: startTime, end: endTime).duration + if let store = try? 
OSLogStore.local() { + let query = String("((processID == 0 AND senderImagePath CONTAINS[c] \"/Sandbox\") OR (process == \"sandboxd\" AND subsystem == \"com.apple.sandbox.reporting\")) AND (eventMessage CONTAINS[c] %@)") + let endTime = Date() + let duration = -DateInterval(start: startTime, end: endTime).duration - let position = store.position(timeIntervalSinceEnd: duration) + let position = store.position(timeIntervalSinceEnd: duration) - let sentinel = identifier.sandboxProfileSentinel + let sentinel = identifier.sandboxProfileSentinel - if let entries = try? store.getEntries(with: [], at: position, matching: NSPredicate(format: query, sentinel)) { - for entry in entries { - if entry is (any OSLogEntryWithPayload) { - let fullViolation = entry.composedMessage - if let strippedViolation = fullViolation.components(separatedBy: "\n").first { - // strip the guid from the emitted diagnostic - res.append(strippedViolation) - } else { - // this should never happen - res.append("Failed to parse sandbox violation: \(fullViolation)") + if let entries = try? store.getEntries(with: [], at: position, matching: NSPredicate(format: query, sentinel)) { + for entry in entries { + if entry is (any OSLogEntryWithPayload) { + let fullViolation = entry.composedMessage + if let strippedViolation = fullViolation.components(separatedBy: "\n").first { + // strip the guid from the emitted diagnostic + res.append(strippedViolation) + } else { + // this should never happen + res.append("Failed to parse sandbox violation: \(fullViolation)") + } } - } - if let entryWithPayload = entry as? (any OSLogEntryWithPayload), - entryWithPayload.components.count == 5, - entryWithPayload.components[3].argumentCategory == .string, - let violationMessage = entryWithPayload.components[3].argumentStringValue { - res.append(violationMessage) + if let entryWithPayload = entry as? 
(any OSLogEntryWithPayload), + entryWithPayload.components.count == 5, + entryWithPayload.components[3].argumentCategory == .string, + let violationMessage = entryWithPayload.components[3].argumentStringValue + { + res.append(violationMessage) + } } } } - } #else - res.append("Cannot obtain list of violations on non-macOS platforms") + res.append("Cannot obtain list of violations on non-macOS platforms") #endif return res } diff --git a/Sources/SWBCAS/CASFSNode.swift b/Sources/SWBCAS/CASFSNode.swift index 970d68d4..58346af6 100644 --- a/Sources/SWBCAS/CASFSNode.swift +++ b/Sources/SWBCAS/CASFSNode.swift @@ -13,9 +13,9 @@ public import SWBUtil import Foundation #if canImport(System) -import System + import System #else -import SystemPackage + import SystemPackage #endif /// A CAS object representing a filesystem node @@ -49,7 +49,7 @@ public struct CASFSNode: Sendable { self.destination = destination } - func serialize(to serializer: T) where T : SWBUtil.Serializer { + func serialize(to serializer: T) where T: SWBUtil.Serializer { serializer.beginAggregate(2) serializer.serialize(name) serializer.serialize(destination) diff --git a/Sources/SWBCAS/ToolchainCASPlugin.swift b/Sources/SWBCAS/ToolchainCASPlugin.swift index 8caee630..0ef9ee08 100644 --- a/Sources/SWBCAS/ToolchainCASPlugin.swift +++ b/Sources/SWBCAS/ToolchainCASPlugin.swift @@ -112,64 +112,85 @@ public final class ToolchainCAS: @unchecked Sendable, CASProtocol, ActionCachePr public func cache(objectID: ToolchainDataID, forKeyID key: ToolchainDataID) async throws { let keyDigest = api.llcas_objectid_get_digest(cCas, key.id) let cancellationHandler = CancellationHandler(api: api) - try await withTaskCancellationHandler(operation: { - try await withCheckedThrowingContinuation { continuation in - let box = ContextBox(continuation: continuation, llcas_string_dispose: api.llcas_string_dispose) - var cancellationToken: llcas_cancellable_t? = nil - api.llcas_actioncache_put_for_digest_async(cCas, keyDigest, objectID.id, false, Unmanaged.passRetained(box).toOpaque(), { ctx, failed, error in - let context = Unmanaged>.fromOpaque(ctx!).takeRetainedValue() - if failed { - var detailedError: String? - if let error = error { - detailedError = String(cString: error) - context.llcas_string_dispose(error) - } - context.continuation.resume(throwing: ToolchainCASPluginError.cacheInsertionFailed(detailedError)) - } else { - context.continuation.resume(returning: ()) + try await withTaskCancellationHandler( + operation: { + try await withCheckedThrowingContinuation { continuation in + let box = ContextBox(continuation: continuation, llcas_string_dispose: api.llcas_string_dispose) + var cancellationToken: llcas_cancellable_t? = nil + api.llcas_actioncache_put_for_digest_async( + cCas, + keyDigest, + objectID.id, + false, + Unmanaged.passRetained(box).toOpaque(), + { ctx, failed, error in + let context = Unmanaged>.fromOpaque(ctx!).takeRetainedValue() + if failed { + var detailedError: String? 
+ if let error = error { + detailedError = String(cString: error) + context.llcas_string_dispose(error) + } + context.continuation.resume(throwing: ToolchainCASPluginError.cacheInsertionFailed(detailedError)) + } else { + context.continuation.resume(returning: ()) + } + }, + &cancellationToken + ) + if let cancellationToken { + cancellationHandler.registerCancellationToken(cancellationToken) } - }, &cancellationToken) - if let cancellationToken { - cancellationHandler.registerCancellationToken(cancellationToken) } + }, + onCancel: { + cancellationHandler.cancel() } - }, onCancel: { - cancellationHandler.cancel() - }) + ) } public func lookupCachedObject(for keyID: ToolchainDataID) async throws -> ToolchainDataID? { let keyDigest = api.llcas_objectid_get_digest(cCas, keyID.id) let cancellationHandler = CancellationHandler(api: api) - return try await withTaskCancellationHandler(operation: { - return try await withCheckedThrowingContinuation { (continuation: CheckedContinuation) in - let box = ContextBox(continuation: continuation, llcas_string_dispose: api.llcas_string_dispose) - var cancellationToken: llcas_cancellable_t? = nil - api.llcas_actioncache_get_for_digest_async(cCas, keyDigest, false, Unmanaged.passRetained(box).toOpaque(), { ctx, lookupResult, objectID, error in - let context = Unmanaged>.fromOpaque(ctx!).takeRetainedValue() - switch lookupResult { - case LLCAS_LOOKUP_RESULT_SUCCESS: - context.continuation.resume(returning: ToolchainDataID(id: objectID)) - case LLCAS_LOOKUP_RESULT_NOTFOUND: - context.continuation.resume(returning: nil) - case LLCAS_LOOKUP_RESULT_ERROR: - var detailedError: String? - if let error { - detailedError = String(cString: error) - context.llcas_string_dispose(error) - } - context.continuation.resume(throwing: ToolchainCASPluginError.cacheLookupFailed(detailedError)) - default: - context.continuation.resume(throwing: ToolchainCASPluginError.cacheLookupFailed(nil)) + return try await withTaskCancellationHandler( + operation: { + return try await withCheckedThrowingContinuation { (continuation: CheckedContinuation) in + let box = ContextBox(continuation: continuation, llcas_string_dispose: api.llcas_string_dispose) + var cancellationToken: llcas_cancellable_t? = nil + api.llcas_actioncache_get_for_digest_async( + cCas, + keyDigest, + false, + Unmanaged.passRetained(box).toOpaque(), + { ctx, lookupResult, objectID, error in + let context = Unmanaged>.fromOpaque(ctx!).takeRetainedValue() + switch lookupResult { + case LLCAS_LOOKUP_RESULT_SUCCESS: + context.continuation.resume(returning: ToolchainDataID(id: objectID)) + case LLCAS_LOOKUP_RESULT_NOTFOUND: + context.continuation.resume(returning: nil) + case LLCAS_LOOKUP_RESULT_ERROR: + var detailedError: String? 
+ if let error { + detailedError = String(cString: error) + context.llcas_string_dispose(error) + } + context.continuation.resume(throwing: ToolchainCASPluginError.cacheLookupFailed(detailedError)) + default: + context.continuation.resume(throwing: ToolchainCASPluginError.cacheLookupFailed(nil)) + } + }, + &cancellationToken + ) + if let cancellationToken { + cancellationHandler.registerCancellationToken(cancellationToken) } - }, &cancellationToken) - if let cancellationToken { - cancellationHandler.registerCancellationToken(cancellationToken) } + }, + onCancel: { + cancellationHandler.cancel() } - }, onCancel: { - cancellationHandler.cancel() - }) + ) } public func getOnDiskSize() throws -> Int64 { @@ -195,7 +216,7 @@ public final class ToolchainCAS: @unchecked Sendable, CASProtocol, ActionCachePr public func setOnDiskSizeLimit(_ limit: Int64) throws { var error: UnsafeMutablePointer? = nil - guard let llcas_cas_set_ondisk_size_limit = api.llcas_cas_set_ondisk_size_limit else { + guard let llcas_cas_set_ondisk_size_limit = api.llcas_cas_set_ondisk_size_limit else { throw ToolchainCASPluginError.casSizeOperationUnsupported } if llcas_cas_set_ondisk_size_limit(cCas, limit, &error) { diff --git a/Sources/SWBCAS/plugin_api_t.swift b/Sources/SWBCAS/plugin_api_t.swift index abdba805..8d669bcf 100644 --- a/Sources/SWBCAS/plugin_api_t.swift +++ b/Sources/SWBCAS/plugin_api_t.swift @@ -18,45 +18,47 @@ extension plugin_api_t { func loadRequired(_ symbol: String) throws -> T { guard let sym: T = Library.lookup(handle, symbol) else { throw ToolchainCASPluginError.missingRequiredSymbol(symbol) - } - return sym + } + return sym } func loadOptional(_ symbol: String) -> T? { guard let sym: T = Library.lookup(handle, symbol) else { return nil - } - return sym + } + return sym } - self.init(llcas_get_plugin_version: try loadRequired("llcas_get_plugin_version"), - llcas_string_dispose: try loadRequired("llcas_string_dispose"), - llcas_cancellable_cancel: loadOptional("llcas_cancellable_cancel"), - llcas_cancellable_dispose: loadOptional("llcas_cancellable_dispose"), - llcas_cas_options_create: try loadRequired("llcas_cas_options_create"), - llcas_cas_options_dispose: try loadRequired("llcas_cas_options_dispose"), - llcas_cas_options_set_client_version: try loadRequired("llcas_cas_options_set_client_version"), - llcas_cas_options_set_ondisk_path: try loadRequired("llcas_cas_options_set_ondisk_path"), - llcas_cas_options_set_option: try loadRequired("llcas_cas_options_set_option"), - llcas_cas_create: try loadRequired("llcas_cas_create"), - llcas_cas_dispose: try loadRequired("llcas_cas_dispose"), - llcas_cas_get_ondisk_size: loadOptional("llcas_cas_get_ondisk_size"), - llcas_cas_set_ondisk_size_limit: loadOptional("llcas_cas_set_ondisk_size_limit"), - llcas_cas_prune_ondisk_data: loadOptional("llcas_cas_prune_ondisk_data"), - llcas_cas_get_hash_schema_name: try loadRequired("llcas_cas_get_hash_schema_name"), - llcas_digest_parse: try loadRequired("llcas_digest_parse"), - llcas_digest_print: try loadRequired("llcas_digest_print"), - llcas_cas_get_objectid: try loadRequired("llcas_cas_get_objectid"), - llcas_objectid_get_digest: try loadRequired("llcas_objectid_get_digest"), - llcas_cas_contains_object: try loadRequired("llcas_cas_contains_object"), - llcas_cas_load_object: try loadRequired("llcas_cas_load_object"), - llcas_cas_load_object_async: try loadRequired("llcas_cas_load_object_async"), - llcas_cas_store_object: try loadRequired("llcas_cas_store_object"), - llcas_loaded_object_get_data: try 
loadRequired("llcas_loaded_object_get_data"), - llcas_loaded_object_get_refs: try loadRequired("llcas_loaded_object_get_refs"), - llcas_object_refs_get_count: try loadRequired("llcas_object_refs_get_count"), - llcas_object_refs_get_id: try loadRequired("llcas_object_refs_get_id"), - llcas_actioncache_get_for_digest: try loadRequired("llcas_actioncache_get_for_digest"), - llcas_actioncache_get_for_digest_async: try loadRequired("llcas_actioncache_get_for_digest_async"), - llcas_actioncache_put_for_digest: try loadRequired("llcas_actioncache_put_for_digest"), - llcas_actioncache_put_for_digest_async: try loadRequired("llcas_actioncache_put_for_digest_async")) + self.init( + llcas_get_plugin_version: try loadRequired("llcas_get_plugin_version"), + llcas_string_dispose: try loadRequired("llcas_string_dispose"), + llcas_cancellable_cancel: loadOptional("llcas_cancellable_cancel"), + llcas_cancellable_dispose: loadOptional("llcas_cancellable_dispose"), + llcas_cas_options_create: try loadRequired("llcas_cas_options_create"), + llcas_cas_options_dispose: try loadRequired("llcas_cas_options_dispose"), + llcas_cas_options_set_client_version: try loadRequired("llcas_cas_options_set_client_version"), + llcas_cas_options_set_ondisk_path: try loadRequired("llcas_cas_options_set_ondisk_path"), + llcas_cas_options_set_option: try loadRequired("llcas_cas_options_set_option"), + llcas_cas_create: try loadRequired("llcas_cas_create"), + llcas_cas_dispose: try loadRequired("llcas_cas_dispose"), + llcas_cas_get_ondisk_size: loadOptional("llcas_cas_get_ondisk_size"), + llcas_cas_set_ondisk_size_limit: loadOptional("llcas_cas_set_ondisk_size_limit"), + llcas_cas_prune_ondisk_data: loadOptional("llcas_cas_prune_ondisk_data"), + llcas_cas_get_hash_schema_name: try loadRequired("llcas_cas_get_hash_schema_name"), + llcas_digest_parse: try loadRequired("llcas_digest_parse"), + llcas_digest_print: try loadRequired("llcas_digest_print"), + llcas_cas_get_objectid: try loadRequired("llcas_cas_get_objectid"), + llcas_objectid_get_digest: try loadRequired("llcas_objectid_get_digest"), + llcas_cas_contains_object: try loadRequired("llcas_cas_contains_object"), + llcas_cas_load_object: try loadRequired("llcas_cas_load_object"), + llcas_cas_load_object_async: try loadRequired("llcas_cas_load_object_async"), + llcas_cas_store_object: try loadRequired("llcas_cas_store_object"), + llcas_loaded_object_get_data: try loadRequired("llcas_loaded_object_get_data"), + llcas_loaded_object_get_refs: try loadRequired("llcas_loaded_object_get_refs"), + llcas_object_refs_get_count: try loadRequired("llcas_object_refs_get_count"), + llcas_object_refs_get_id: try loadRequired("llcas_object_refs_get_id"), + llcas_actioncache_get_for_digest: try loadRequired("llcas_actioncache_get_for_digest"), + llcas_actioncache_get_for_digest_async: try loadRequired("llcas_actioncache_get_for_digest_async"), + llcas_actioncache_put_for_digest: try loadRequired("llcas_actioncache_put_for_digest"), + llcas_actioncache_put_for_digest_async: try loadRequired("llcas_actioncache_put_for_digest_async") + ) } } diff --git a/Sources/SWBCore/Apple/DeviceFamily.swift b/Sources/SWBCore/Apple/DeviceFamily.swift index 768fe138..e9fedfcd 100644 --- a/Sources/SWBCore/Apple/DeviceFamily.swift +++ b/Sources/SWBCore/Apple/DeviceFamily.swift @@ -96,9 +96,10 @@ public struct DeviceFamilies: Hashable, Sendable { public init(families: [DeviceFamily]) throws { self.list = families - self.explicitTargetDeviceName = list.count == 1 - ? 
list.filter { $0.identifier == nil }.only?.name - : nil + self.explicitTargetDeviceName = + list.count == 1 + ? list.filter { $0.identifier == nil }.only?.name + : nil let familiesWithNumericIdentifiers = families.compactMap { family -> (Int, DeviceFamily)? in guard let identifier = family.identifier else { @@ -131,7 +132,7 @@ public struct DeviceFamilies: Hashable, Sendable { /// Convenience property which returns the target device identifiers as strings public var targetDeviceIdentifierStrings: Set { - return Set(list.compactMap({$0.identifier?.toString()})) + return Set(list.compactMap({ $0.identifier?.toString() })) } } diff --git a/Sources/SWBCore/Apple/InterfaceBuilderShared.swift b/Sources/SWBCore/Apple/InterfaceBuilderShared.swift index 3d68ac80..be8f4124 100644 --- a/Sources/SWBCore/Apple/InterfaceBuilderShared.swift +++ b/Sources/SWBCore/Apple/InterfaceBuilderShared.swift @@ -22,51 +22,58 @@ extension SDKVariant { // Special case / optimization: by default, don't include iPad assets for Mac Catalyst apps/appexts deploying to 14.0+ if they include Mac assets return skipIt - || (name == MacCatalystInfo.sdkVariantName - && productType?.onlyPreferredAssets == true - && ((try? Version(scope.evaluate(BuiltinMacros.IPHONEOS_DEPLOYMENT_TARGET))) ?? Version()) >= Version(14) - && targetDeviceName == "ipad" - && targetDeviceIdentifierStrings.contains("6")) + || (name == MacCatalystInfo.sdkVariantName + && productType?.onlyPreferredAssets == true + && ((try? Version(scope.evaluate(BuiltinMacros.IPHONEOS_DEPLOYMENT_TARGET))) ?? Version()) >= Version(14) + && targetDeviceName == "ipad" + && targetDeviceIdentifierStrings.contains("6")) } public func evaluateTargetedDeviceFamilyBuildSetting(_ scope: MacroEvaluationScope, _ productType: ProductTypeSpec?) -> (filteredDeviceIdentifiers: Set, effectiveDeviceIdentifiers: Set, effectiveDeviceNames: [String], unexpectedValues: [String]) { var unexpectedValues = [String]() // int values, filtered to ones supported by the current platform - let filteredDeviceIdentifiers = Set(scope.targetedDeviceFamily.sorted().compactMap { string -> Int? in - guard !string.isEmpty else { return nil } - if let int = Int(string), int != 0 { - // Platforms with an explicit target device name don't use identifiers at all - if deviceFamilies.explicitTargetDeviceName != nil { - return nil - } - - // Only add this value to the list if it's actually supported by this platform. - // That is, we filter out values not associated with the current platform in order to - // allow TARGETED_DEVICE_FAMILY to contain all possible values in cross platform targets. - if deviceFamilies.targetDeviceIdentifiers.contains(int) { - if let targetDeviceName = deviceFamilies.targetDeviceName(for: int), !targetDeviceName.isEmpty, shouldSkip(targetDeviceName: targetDeviceName, productType: productType, in: scope, usingDefaultTargetDeviceIdentifiers: false) { + let filteredDeviceIdentifiers = Set( + scope.targetedDeviceFamily.sorted().compactMap { string -> Int? in + guard !string.isEmpty else { return nil } + if let int = Int(string), int != 0 { + // Platforms with an explicit target device name don't use identifiers at all + if deviceFamilies.explicitTargetDeviceName != nil { return nil } - return int - } + // Only add this value to the list if it's actually supported by this platform. + // That is, we filter out values not associated with the current platform in order to + // allow TARGETED_DEVICE_FAMILY to contain all possible values in cross platform targets. 
+ if deviceFamilies.targetDeviceIdentifiers.contains(int) { + if let targetDeviceName = deviceFamilies.targetDeviceName(for: int), !targetDeviceName.isEmpty, shouldSkip(targetDeviceName: targetDeviceName, productType: productType, in: scope, usingDefaultTargetDeviceIdentifiers: false) { + return nil + } - return nil - } else { - unexpectedValues.append(string) - return nil + return int + } + + return nil + } else { + unexpectedValues.append(string) + return nil + } } - }) + ) // int values we're actually going to use (if we got no values compatible, we use the default set) - let effectiveDeviceIdentifiers = !filteredDeviceIdentifiers.isEmpty ? filteredDeviceIdentifiers : Set(deviceFamilies.targetDeviceIdentifiers.compactMap { int -> Int? in - // perform any shouldSkip filtering of values from the default deviceFamilies.targetDeviceIdentifiers set - if let targetDeviceName = deviceFamilies.targetDeviceName(for: int), !targetDeviceName.isEmpty, shouldSkip(targetDeviceName: targetDeviceName, productType: productType, in: scope, usingDefaultTargetDeviceIdentifiers: true) { - return nil - } - return int - }) + let effectiveDeviceIdentifiers = + !filteredDeviceIdentifiers.isEmpty + ? filteredDeviceIdentifiers + : Set( + deviceFamilies.targetDeviceIdentifiers.compactMap { int -> Int? in + // perform any shouldSkip filtering of values from the default deviceFamilies.targetDeviceIdentifiers set + if let targetDeviceName = deviceFamilies.targetDeviceName(for: int), !targetDeviceName.isEmpty, shouldSkip(targetDeviceName: targetDeviceName, productType: productType, in: scope, usingDefaultTargetDeviceIdentifiers: true) { + return nil + } + return int + } + ) // string values we're actually going to use (for platforms with an explicit name we use that, otherwise the effective IDs mapped to strings) let effectiveDeviceNames = { () -> [String] in diff --git a/Sources/SWBCore/BuildDependencyInfo.swift b/Sources/SWBCore/BuildDependencyInfo.swift index f6f915b2..2805b34b 100644 --- a/Sources/SWBCore/BuildDependencyInfo.swift +++ b/Sources/SWBCore/BuildDependencyInfo.swift @@ -11,9 +11,9 @@ //===----------------------------------------------------------------------===// #if canImport(System) -import struct System.FilePath + import struct System.FilePath #else -import struct SystemPackage.FilePath + import struct SystemPackage.FilePath #endif import struct Foundation.Data @@ -24,7 +24,6 @@ import SWBUtil // MARK: Data structures - /// Hierarchy of data structures containing the dependencies for all targets in a build. /// /// These structures can be encoded to and decoded from JSON. The JSON is an API used by clients, and the data structures may become such an API eventually if we decide to share them directly with clients. @@ -33,7 +32,6 @@ import SWBUtil /// /// Presently the main way to instantiate these structures is to use `init(workspaceContext:buildRequest:buildRequestContext:operation:)`, which is defined below after the data structures. - /// The input and output dependencies for all targets in a build. 
package struct BuildDependencyInfo: Codable { package init(targets: [BuildDependencyInfo.TargetDependencyInfo], errors: [String]) { @@ -199,10 +197,8 @@ package struct BuildDependencyInfo: Codable { } - // MARK: Encoding and decoding - extension BuildDependencyInfo.TargetDependencyInfo { package func encode(to encoder: any Encoder) throws { @@ -297,14 +293,11 @@ extension BuildDependencyInfo.TargetDependencyInfo.Input.NameType { let container = try decoder.container(keyedBy: CodingKeys.self) if let path = try container.decodeIfPresent(String.self, forKey: .path) { self = .absolutePath(path) - } - else if let name = try container.decodeIfPresent(String.self, forKey: .name) { + } else if let name = try container.decodeIfPresent(String.self, forKey: .name) { self = .name(name) - } - else if let stem = try container.decodeIfPresent(String.self, forKey: .stem) { + } else if let stem = try container.decodeIfPresent(String.self, forKey: .stem) { self = .stem(stem) - } - else { + } else { throw StubError.error("unknown type for input name") } } @@ -317,10 +310,8 @@ extension BuildDependencyInfo.TargetDependencyInfo.Input.NameType { } - // MARK: Custom string definitions for better debugging - extension BuildDependencyInfo.TargetDependencyInfo.Target: CustomStringConvertible { package var description: String { return "\(type(of: self))" diff --git a/Sources/SWBCore/BuildFileResolution.swift b/Sources/SWBCore/BuildFileResolution.swift index 8c4ab089..09165354 100644 --- a/Sources/SWBCore/BuildFileResolution.swift +++ b/Sources/SWBCore/BuildFileResolution.swift @@ -86,8 +86,7 @@ extension BuildFileResolution { if let productRefTarget = productRef.target, let parameters = configuredTarget?.parameters { settingsForRef = settingsForProductReferenceTarget(productRefTarget, parameters: parameters) specLookupContext = SpecLookupCtxt(specRegistry: settingsForRef.platform?.specRegistry ?? workspaceContext.core.specRegistry, platform: settingsForRef.platform) - } - else { + } else { // If the product reference doesn't have a producing target, or we don't have a configured target, then... that's very weird. settingsForRef = settings specLookupContext = self @@ -100,8 +99,8 @@ extension BuildFileResolution { // Resolve the path and file type. let absolutePath: Path, fileType: FileTypeSpec? switch reference { - // Variant groups always resolve the path and file type of the first reference. - // FIXME: This is historical, and should be cleaned up by making the input model more explicit. This also isn't exactly what Xcode would do, which is very risky. It is possible that we should extend Xcode to pass this information down with the top-level variant group itself. (This FIXME is from 2017 and was ported from TaskProducer.) + // Variant groups always resolve the path and file type of the first reference. + // FIXME: This is historical, and should be cleaned up by making the input model more explicit. This also isn't exactly what Xcode would do, which is very risky. It is possible that we should extend Xcode to pass this information down with the top-level variant group itself. (This FIXME is from 2017 and was ported from TaskProducer.) 
case let asVariantGroup as VariantGroup where !asVariantGroup.children.isEmpty: absolutePath = settingsForRef.filePathResolver.resolveAbsolutePath(asVariantGroup.children[0]) fileType = specLookupContext.lookupFileType(reference: asVariantGroup.children[0]) @@ -114,4 +113,3 @@ extension BuildFileResolution { } } - diff --git a/Sources/SWBCore/BuildParameters.swift b/Sources/SWBCore/BuildParameters.swift index 8b8fced8..66caa40b 100644 --- a/Sources/SWBCore/BuildParameters.swift +++ b/Sources/SWBCore/BuildParameters.swift @@ -138,7 +138,7 @@ public struct BuildParameters: Hashable, SerializableCodable, Sendable { hasher.combine(precomputedHash) } - public static func ==(lhs: BuildParameters, rhs: BuildParameters) -> Bool { + public static func == (lhs: BuildParameters, rhs: BuildParameters) -> Bool { // Compare all properties except the signature which isn't stable. if lhs.action != rhs.action { return false } if lhs.configuration != rhs.configuration { return false } @@ -181,7 +181,7 @@ public struct BuildParameters: Hashable, SerializableCodable, Sendable { } enum CodingKeys: String, CodingKey { - case action, configuration, packageConfigurationOverride, activeRunDestination, activeArchitecture, arena, overrides, commandLineOverrides, commandLineConfigOverridesPath, commandLineConfigOverrides, environmentConfigOverridesPath, environmentConfigOverrides, toolchainOverride + case action, configuration, packageConfigurationOverride, activeRunDestination, activeArchitecture, arena, overrides, commandLineOverrides, commandLineConfigOverridesPath, commandLineConfigOverrides, environmentConfigOverridesPath, environmentConfigOverrides, toolchainOverride } public func encode(to encoder: any Encoder) throws { var container = encoder.container(keyedBy: CodingKeys.self) @@ -253,7 +253,8 @@ extension BuildParameters { environmentConfigOverridesPath: environmentConfigOverridesPath, environmentConfigOverrides: environmentConfigOverrides, toolchainOverride: toolchainOverride, - arena: arena) + arena: arena + ) } // Returns these `BuildParameters` after filtering out any overrides. @@ -270,7 +271,8 @@ extension BuildParameters { environmentConfigOverridesPath: nil, environmentConfigOverrides: [:], toolchainOverride: toolchainOverride, - arena: arena) + arena: arena + ) } // Returns these `BuildParameters` after modifying `activeRunDestination` and `activeArchitecture`. @@ -287,7 +289,8 @@ extension BuildParameters { environmentConfigOverridesPath: environmentConfigOverridesPath, environmentConfigOverrides: environmentConfigOverrides, toolchainOverride: toolchainOverride, - arena: arena) + arena: arena + ) } /// Removes any of the potentially imposed settings **unless** those have been specified via explicit overrides which have come in via the initial build request. 
@@ -315,7 +318,8 @@ extension BuildParameters { environmentConfigOverridesPath: environmentConfigOverridesPath, environmentConfigOverrides: environmentConfigOverrides, toolchainOverride: toolchainOverride, - arena: arena) + arena: arena + ) } } diff --git a/Sources/SWBCore/BuildRequest.swift b/Sources/SWBCore/BuildRequest.swift index 622ead43..f599de82 100644 --- a/Sources/SWBCore/BuildRequest.swift +++ b/Sources/SWBCore/BuildRequest.swift @@ -116,12 +116,15 @@ extension SWBCore.BuildCommand { case let .singleFileBuild(buildOnlyTheseFiles): self = .singleFileBuild(buildOnlyTheseFiles: buildOnlyTheseFiles.map(Path.init)) case let .prepareForIndexing(buildOnlyTheseTargets, enableIndexBuildArena): - self = try .prepareForIndexing(buildOnlyTheseTargets: buildOnlyTheseTargets?.map { - guard let target = workspace.target(for: $0) else { - throw MsgParserError.missingTarget(guid: $0) - } - return target - } ?? nil, enableIndexBuildArena: enableIndexBuildArena) + self = try .prepareForIndexing( + buildOnlyTheseTargets: buildOnlyTheseTargets?.map { + guard let target = workspace.target(for: $0) else { + throw MsgParserError.missingTarget(guid: $0) + } + return target + } ?? nil, + enableIndexBuildArena: enableIndexBuildArena + ) case .migrate: throw MsgParserError.swiftMigrationNoLongerAvailable case let .cleanBuildFolder(style): @@ -336,7 +339,7 @@ extension BuildRequest { case .buildRequest: dependencyScope = .buildRequest } - try self.init(parameters: parameters, buildTargets: payload.configuredTargets.map{ try BuildRequest.BuildTargetInfo(from: $0, defaultParameters: parameters, workspace: workspace) }, dependencyScope: dependencyScope, continueBuildingAfterErrors: payload.continueBuildingAfterErrors, hideShellScriptEnvironment: payload.hideShellScriptEnvironment, useParallelTargets: payload.useParallelTargets, useImplicitDependencies: payload.useImplicitDependencies, useDryRun: payload.useDryRun, enableStaleFileRemoval: nil, showNonLoggedProgress: payload.showNonLoggedProgress, recordBuildBacktraces: payload.recordBuildBacktraces, generatePrecompiledModulesReport: payload.generatePrecompiledModulesReport, buildDescriptionID: payload.buildDescriptionID.map(BuildDescriptionID.init), qos: qos, schedulerLaneWidthOverride: payload.schedulerLaneWidthOverride, buildPlanDiagnosticsDirPath: payload.buildPlanDiagnosticsDirPath, buildCommand: buildCommand, schemeCommand: payload.schemeCommand?.coreRepresentation, containerPath: payload.containerPath, jsonRepresentation: payload.jsonRepresentation) + try self.init(parameters: parameters, buildTargets: payload.configuredTargets.map { try BuildRequest.BuildTargetInfo(from: $0, defaultParameters: parameters, workspace: workspace) }, dependencyScope: dependencyScope, continueBuildingAfterErrors: payload.continueBuildingAfterErrors, hideShellScriptEnvironment: payload.hideShellScriptEnvironment, useParallelTargets: payload.useParallelTargets, useImplicitDependencies: payload.useImplicitDependencies, useDryRun: payload.useDryRun, enableStaleFileRemoval: nil, showNonLoggedProgress: payload.showNonLoggedProgress, recordBuildBacktraces: payload.recordBuildBacktraces, generatePrecompiledModulesReport: payload.generatePrecompiledModulesReport, buildDescriptionID: payload.buildDescriptionID.map(BuildDescriptionID.init), qos: qos, schedulerLaneWidthOverride: payload.schedulerLaneWidthOverride, buildPlanDiagnosticsDirPath: payload.buildPlanDiagnosticsDirPath, buildCommand: buildCommand, schemeCommand: payload.schemeCommand?.coreRepresentation, containerPath: 
payload.containerPath, jsonRepresentation: payload.jsonRepresentation) } /// Whether the build request _explicitly_ contains the specified `target`. @@ -382,7 +385,7 @@ extension BuildRequest { private extension BuildRequest.BuildTargetInfo { init(from payload: ConfiguredTargetMessagePayload, defaultParameters: BuildParameters, workspace: SWBCore.Workspace) throws { guard let target = workspace.target(for: payload.guid) else { throw MsgParserError.missingTarget(guid: payload.guid) } - try self.init(parameters: payload.parameters.map{ try BuildParameters(from: $0) } ?? defaultParameters, target: target) + try self.init(parameters: payload.parameters.map { try BuildParameters(from: $0) } ?? defaultParameters, target: target) } } diff --git a/Sources/SWBCore/BuildRequestContext.swift b/Sources/SWBCore/BuildRequestContext.swift index d971d45c..68a6e328 100644 --- a/Sources/SWBCore/BuildRequestContext.swift +++ b/Sources/SWBCore/BuildRequestContext.swift @@ -94,12 +94,14 @@ public final class BuildRequestContext: Sendable { "XCTest", "XCTestCore", "XCUIAutomation", - "XCUnit" + "XCUnit", ] - suffixes.append(contentsOf: frameworkNames.flatMap { name in - [Path("\(name).framework/\(name)"), Path("/\(name).framework/Versions/A/\(name)")] - }) + suffixes.append( + contentsOf: frameworkNames.flatMap { name in + [Path("\(name).framework/\(name)"), Path("/\(name).framework/Versions/A/\(name)")] + } + ) for platformExtension in workspaceContext.core.pluginManager.extensions(of: PlatformInfoExtensionPoint.self) { suffixes.append(contentsOf: platformExtension.additionalKnownTestLibraryPathSuffixes()) @@ -110,7 +112,7 @@ public final class BuildRequestContext: Sendable { extension BuildRequestContext { /// Certain file types allow multiple files with the same name, in which case we unique the output file. - private static let fileTypesWhichUseUniquing = [ "sourcecode.c.c", "sourcecode.c.objc", "sourcecode.cpp.cpp", "sourcecode.cpp.objcpp", "sourcecode.asm" ] + private static let fileTypesWhichUseUniquing = ["sourcecode.c.c", "sourcecode.c.objc", "sourcecode.cpp.cpp", "sourcecode.cpp.objcpp", "sourcecode.asm"] private func computeOutputParameters(for input: FileToBuild, command: BuildCommand, settings: Settings, lookup: @escaping (MacroDeclaration) -> (MacroExpression?)) -> (Path, String) { let outputDir = settings.globalScope.evaluate(BuiltinMacros.PER_ARCH_OBJECT_FILE_DIR, lookup: lookup) @@ -150,7 +152,7 @@ extension BuildRequestContext { let currentPlatformFilter = PlatformFilter(settings.globalScope) // FIXME: It is a bit unfortunate that we need to compute all this for the `uniquingSuffix` behavior. - var sourceCodeFileToBuildableReference = [Path:Reference]() + var sourceCodeFileToBuildableReference = [Path: Reference]() if let target = target.target as? StandardTarget { if let buildableReferences = try! target.sourcesBuildPhase?.buildFiles.compactMap({ (buildFile) -> Reference? 
in guard currentPlatformFilter.matches(buildFile.platformFilters) else { return nil } @@ -196,8 +198,9 @@ extension BuildRequestContext { } func platformAndSDKVariant(for target: ConfiguredTarget) -> PlatformAndSDKVariant { if hasEnabledIndexBuildArena, - let activeRunDestination = target.parameters.activeRunDestination, - let platform = workspaceContext.core.platformRegistry.lookup(name: activeRunDestination.platform) { + let activeRunDestination = target.parameters.activeRunDestination, + let platform = workspaceContext.core.platformRegistry.lookup(name: activeRunDestination.platform) + { // Configured targets include their platform in parameters, we can use it directly and avoid the expense of `getCachedSettings()` calls. // If in future `ConfiguredTarget` carries along an instance of its Settings, we can avoid this check and go back to using `Settings` without the cost of `getCachedSettings`. return PlatformAndSDKVariant(platform: platform, sdkVariant: activeRunDestination.sdkVariant) diff --git a/Sources/SWBCore/BuildRuleAction.swift b/Sources/SWBCore/BuildRuleAction.swift index ff61bfe5..1bf27866 100644 --- a/Sources/SWBCore/BuildRuleAction.swift +++ b/Sources/SWBCore/BuildRuleAction.swift @@ -13,7 +13,6 @@ import SWBUtil public import SWBMacro - /// Encapsulates a type of action for a build rule. Concrete types of actions involve the ability to invoke a build tool or to run a custom shell script. public protocol BuildRuleAction: AnyObject, CustomStringConvertible, Sendable { var inputFileGroupingStrategies: [any InputFileGroupingStrategy] { get } @@ -43,7 +42,6 @@ public protocol BuildRuleAction: AnyObject, CustomStringConvertible, Sendable { var name: String { get } } - /// A build rule action that creates a build task to invoke a build tool. public final class BuildRuleTaskAction: BuildRuleAction { public let toolSpec: CommandLineToolSpec @@ -89,7 +87,6 @@ public final class BuildRuleTaskAction: BuildRuleAction { } } - /// A build rule action that creates a build task to run a script. public final class BuildRuleScriptAction: BuildRuleAction { public struct OutputFileInfo: Sendable { diff --git a/Sources/SWBCore/BuildRuleCondition.swift b/Sources/SWBCore/BuildRuleCondition.swift index 14440810..0aeef60a 100644 --- a/Sources/SWBCore/BuildRuleCondition.swift +++ b/Sources/SWBCore/BuildRuleCondition.swift @@ -78,15 +78,13 @@ public final class BuildRuleFileTypeCondition: BuildRuleCondition { public var description: String { if fileTypes.count == 1, let fileType = fileTypes.first { return "<\(type(of: self)):\(fileType)>" - } - else { + } else { let allFileTypes = fileTypes.map({ $0.identifier }).joined(separator: ":") return "<\(type(of: self)):\(allFileTypes)>" } } } - /// A condition of a build rule that uses file name pattern as the match criterion. public final class BuildRuleFileNameCondition: BuildRuleCondition { let namePatterns: [MacroStringExpression] diff --git a/Sources/SWBCore/BuildRuleSet.swift b/Sources/SWBCore/BuildRuleSet.swift index 65e2d518..6a5db1fe 100644 --- a/Sources/SWBCore/BuildRuleSet.swift +++ b/Sources/SWBCore/BuildRuleSet.swift @@ -99,13 +99,16 @@ public final class DisambiguatingBuildRuleSet: BuildRuleSet { } public func match(_ candidate: FileToBuild, _ scope: MacroEvaluationScope) -> MatchResult { - let actions = Dictionary(grouping: rules.compactMap { (condition, action) -> (action: any BuildRuleAction, priority: BuildRuleConditionMatchPriority)? 
in - let priority = condition.match(candidate, scope) - guard priority != .none else { - return nil - } - return (action, priority) - }, by: { $0.priority }).mapValues { $0.map { $0.0 } } + let actions = Dictionary( + grouping: rules.compactMap { (condition, action) -> (action: any BuildRuleAction, priority: BuildRuleConditionMatchPriority)? in + let priority = condition.match(candidate, scope) + guard priority != .none else { + return nil + } + return (action, priority) + }, + by: { $0.priority } + ).mapValues { $0.map { $0.0 } } let priorityLevels = [BuildRuleConditionMatchPriority.normal, .low] @@ -113,7 +116,8 @@ public final class DisambiguatingBuildRuleSet: BuildRuleSet { for priority in priorityLevels { // NOTE: There might be multiple matches for the same action because our input data structure pairs an action multiple times, based on the number of input conditions it accepts. Due to type system limitations w.r.t. Hashable, we can't have an OrderedSet of BuildRuleActions, but we can use a generic Hashable Pair of data to convey identifier and name pairs. if let matches = actions[priority].map({ OrderedSet($0.map({ Pair($0.identifier, $0.name) })) }), - matches.count > 1 { + matches.count > 1 + { let identifiers = matches.map { $0.first } diff --git a/Sources/SWBCore/ClangModuleVerifier/ModuleVerifierFramework.swift b/Sources/SWBCore/ClangModuleVerifier/ModuleVerifierFramework.swift index 614c3939..7c5585d5 100644 --- a/Sources/SWBCore/ClangModuleVerifier/ModuleVerifierFramework.swift +++ b/Sources/SWBCore/ClangModuleVerifier/ModuleVerifierFramework.swift @@ -128,7 +128,7 @@ public struct ModuleVerifierFramework { return headers } - private static func modules(ofKind kind:ModuleMapKind, rootPath: Path, fs: any FSProxy, frameworkName: String) throws -> [ModuleVerifierModuleMap] { + private static func modules(ofKind kind: ModuleMapKind, rootPath: Path, fs: any FSProxy, frameworkName: String) throws -> [ModuleVerifierModuleMap] { var moduleMaps: [ModuleVerifierModuleMap] = [] for path in ModuleVerifierModuleMap.paths(for: kind) { @@ -210,12 +210,12 @@ extension ModuleVerifierFramework { } return filteredHeaders.sorted { (lhp, rhp) -> Bool in lhp.include(language: language) < rhp.include(language: language) - }.map {$0.include(language: language)}.joined(separator: "\n").appending("\n") + }.map { $0.include(language: language) }.joined(separator: "\n").appending("\n") } } extension String { - fileprivate func hasSuffix(_ strings:[String]) -> Bool { + fileprivate func hasSuffix(_ strings: [String]) -> Bool { for string in strings { if self.hasSuffix(string) { return true diff --git a/Sources/SWBCore/ClangModuleVerifier/ModuleVerifierHeader.swift b/Sources/SWBCore/ClangModuleVerifier/ModuleVerifierHeader.swift index 9bb47f05..6a604ae2 100644 --- a/Sources/SWBCore/ClangModuleVerifier/ModuleVerifierHeader.swift +++ b/Sources/SWBCore/ClangModuleVerifier/ModuleVerifierHeader.swift @@ -19,7 +19,7 @@ enum HeaderKind: String { } @_spi(Testing) -public struct ModuleVerifierHeader : Hashable { +public struct ModuleVerifierHeader: Hashable { @_spi(Testing) public var file: Path @_spi(Testing) public var unresolvedFile: Path @_spi(Testing) public var framework: String @@ -50,7 +50,7 @@ public struct ModuleVerifierHeader : Hashable { var subFolder: [String] = [] while true { let component = tail.basename - if component.isEmpty || component == "Headers" || component == "PrivateHeaders" { + if component.isEmpty || component == "Headers" || component == "PrivateHeaders" { break } tail = tail.dirname @@ 
-60,7 +60,7 @@ public struct ModuleVerifierHeader : Hashable { } } -extension ModuleVerifierHeader : Comparable { +extension ModuleVerifierHeader: Comparable { @_spi(Testing) public static func < (lhs: ModuleVerifierHeader, rhs: ModuleVerifierHeader) -> Bool { lhs.include(language: .c) < rhs.include(language: .c) diff --git a/Sources/SWBCore/ClangModuleVerifier/ModuleVerifierModuleMap.swift b/Sources/SWBCore/ClangModuleVerifier/ModuleVerifierModuleMap.swift index a5a0be65..b54f8677 100644 --- a/Sources/SWBCore/ClangModuleVerifier/ModuleVerifierModuleMap.swift +++ b/Sources/SWBCore/ClangModuleVerifier/ModuleVerifierModuleMap.swift @@ -19,7 +19,7 @@ import class Foundation.NSRegularExpression @_spi(Testing) public enum ModuleMapKind: String { case publicModule = "public" - case privateModule = "private" + case privateModule = "private" } @_spi(Testing) @@ -53,7 +53,7 @@ public struct ModuleVerifierModuleMap: Hashable { let regularExpression = try! NSRegularExpression(pattern: pattern, options: .dotMatchesLineSeparators) // The range conversions are all real awkward, . let wholeString = NSRange(moduleContents.startIndex..., in: moduleContents) - return regularExpression.matches(in: moduleContents, range: wholeString).map {match in + return regularExpression.matches(in: moduleContents, range: wholeString).map { match in return String(moduleContents[Range(match.range(at: 1), in: moduleContents)!]) } } @@ -117,7 +117,7 @@ public struct ModuleVerifierModuleMap: Hashable { ] let lines = moduleContents.components(separatedBy: "\n") - let moduleNames:[String] = lines.filter { line in + let moduleNames: [String] = lines.filter { line in let line = line.trimmingCharacters(in: .whitespacesAndNewlines) return line.starts(withOneOf: moduleNamesPrefix) && !line.contains(oneOf: notModuleNameFilters) }.compactMap { line in @@ -141,7 +141,7 @@ extension ModuleVerifierModuleMap: Comparable { } extension String { - fileprivate func contains(oneOf strings:[String]) -> Bool { + fileprivate func contains(oneOf strings: [String]) -> Bool { for string in strings { if self.contains(string) { return true @@ -150,7 +150,7 @@ extension String { return false } - fileprivate func starts(withOneOf strings:[String]) -> Bool { + fileprivate func starts(withOneOf strings: [String]) -> Bool { for string in strings { if self.hasPrefix(string) { return true @@ -159,7 +159,7 @@ extension String { return false } - fileprivate func remove(strings:[String], options mask: CompareOptions = []) -> String { + fileprivate func remove(strings: [String], options mask: CompareOptions = []) -> String { var finalString = self for string in strings { diff --git a/Sources/SWBCore/ClangModuleVerifier/ModuleVerifierModuleMapFileVerifier.swift b/Sources/SWBCore/ClangModuleVerifier/ModuleVerifierModuleMapFileVerifier.swift index 6a87a358..812b0229 100644 --- a/Sources/SWBCore/ClangModuleVerifier/ModuleVerifierModuleMapFileVerifier.swift +++ b/Sources/SWBCore/ClangModuleVerifier/ModuleVerifierModuleMapFileVerifier.swift @@ -29,10 +29,12 @@ public struct ModuleVerifierModuleMapFileVerifier { } if let publicModuleMap = framework.publicModuleMap { - let publicDiagnostics = self.moduleMapCheck(moduleMapKind: .publicModule, - path: publicModuleMap.path, - hasHeaders: framework.hasPublicHeaders, - modulesCount: publicModuleMap.modulesHaveContents ? 
publicModuleMap.modules.count : 0) + let publicDiagnostics = self.moduleMapCheck( + moduleMapKind: .publicModule, + path: publicModuleMap.path, + hasHeaders: framework.hasPublicHeaders, + modulesCount: publicModuleMap.modulesHaveContents ? publicModuleMap.modules.count : 0 + ) if publicDiagnostics.count > 0 { verifyPublic = false @@ -41,10 +43,12 @@ public struct ModuleVerifierModuleMapFileVerifier { } if let privateModuleMap = framework.privateModuleMap { - let privateDiagnostics = self.moduleMapCheck(moduleMapKind: .privateModule, - path: privateModuleMap.path, - hasHeaders: framework.hasPrivateHeaders, - modulesCount: privateModuleMap.modules.count) + let privateDiagnostics = self.moduleMapCheck( + moduleMapKind: .privateModule, + path: privateModuleMap.path, + hasHeaders: framework.hasPrivateHeaders, + modulesCount: privateModuleMap.modules.count + ) if privateDiagnostics.count > 0 { verifyPrivate = false @@ -55,7 +59,7 @@ public struct ModuleVerifierModuleMapFileVerifier { return (verifyPublic, verifyPrivate, diagnostics) } - private static func moduleMapCheck(moduleMapKind:ModuleMapKind, path: Path, hasHeaders: Bool, modulesCount: Int) -> [Diagnostic] { + private static func moduleMapCheck(moduleMapKind: ModuleMapKind, path: Path, hasHeaders: Bool, modulesCount: Int) -> [Diagnostic] { // Xcode currently does not properly support a framework with only private headers being modularized // In order to make it work an empty module map has to be supplied which will throw a bunch of errors unless we bail early if moduleMapKind == .publicModule && !hasHeaders && modulesCount == 0 { diff --git a/Sources/SWBCore/ClangModuleVerifier/ModuleVerifierTarget.swift b/Sources/SWBCore/ClangModuleVerifier/ModuleVerifierTarget.swift index d8e5bbe0..5c9e73eb 100644 --- a/Sources/SWBCore/ClangModuleVerifier/ModuleVerifierTarget.swift +++ b/Sources/SWBCore/ClangModuleVerifier/ModuleVerifierTarget.swift @@ -66,10 +66,12 @@ extension ModuleVerifierTargetSet { } public static func combinations(languages: [ModuleVerifierLanguage], targets: [String], targetVariants: [String], standards: [ModuleVerifierLanguage.Standard]) -> [ModuleVerifierTargetSet] { - return ModuleVerifierTargetSet.combinations(languages: languages, - targets: ModuleVerifierTarget.targets(from: targets), - targetVariants: ModuleVerifierTarget.targets(from: targetVariants), - standards: standards) + return ModuleVerifierTargetSet.combinations( + languages: languages, + targets: ModuleVerifierTarget.targets(from: targets), + targetVariants: ModuleVerifierTarget.targets(from: targetVariants), + standards: standards + ) } private static func combinations(languages: [ModuleVerifierLanguage], targets: [ModuleVerifierTarget], targetVariants: [ModuleVerifierTarget], standards: [ModuleVerifierLanguage.Standard]) -> [ModuleVerifierTargetSet] { @@ -109,8 +111,10 @@ extension ModuleVerifierTargetSet { extension ModuleVerifierTargetSet { public static func verifyTargets(targets: [String], targetVariants: [String]) -> [Diagnostic] { - return ModuleVerifierTargetSet.verifyTargets(targets: ModuleVerifierTarget.targets(from: targets), - targetVariants: ModuleVerifierTarget.targets(from: targetVariants)) + return ModuleVerifierTargetSet.verifyTargets( + targets: ModuleVerifierTarget.targets(from: targets), + targetVariants: ModuleVerifierTarget.targets(from: targetVariants) + ) } static func verifyTargets(targets: [ModuleVerifierTarget], targetVariants: [ModuleVerifierTarget]) -> [Diagnostic] { @@ -149,8 +153,10 @@ extension ModuleVerifierTargetSet { return 
diagnostics } - private static func verifyMatchingTargetForVariant(partitionedTargets: [String: [ModuleVerifierTarget]], - partitionedTargetVariants: [String: [ModuleVerifierTarget]]) -> [Diagnostic] { + private static func verifyMatchingTargetForVariant( + partitionedTargets: [String: [ModuleVerifierTarget]], + partitionedTargetVariants: [String: [ModuleVerifierTarget]] + ) -> [Diagnostic] { var diagnostics: [Diagnostic] = [] for (architecture, targetVariants) in partitionedTargetVariants { diff --git a/Sources/SWBCore/ClangModuleVerifierOutputParser.swift b/Sources/SWBCore/ClangModuleVerifierOutputParser.swift index 524507fd..06cb457c 100644 --- a/Sources/SWBCore/ClangModuleVerifierOutputParser.swift +++ b/Sources/SWBCore/ClangModuleVerifierOutputParser.swift @@ -95,7 +95,8 @@ extension ModuleVerifierFilenameMap { let fixits = diag.fixIts.map { updateFixit($0) } var location = diag.location if case .path(let path, fileLocation: let fileLoc) = location, - let mappedFilename = map(filename: path.str) { + let mappedFilename = map(filename: path.str) + { location = .path(Path(mappedFilename), fileLocation: fileLoc) } return diag.with(location: location, fixIts: fixits, childDiagnostics: childDiagnostics) diff --git a/Sources/SWBCore/ClangSerializedDiagnostics.swift b/Sources/SWBCore/ClangSerializedDiagnostics.swift index dd538341..9cbbf5bf 100644 --- a/Sources/SWBCore/ClangSerializedDiagnostics.swift +++ b/Sources/SWBCore/ClangSerializedDiagnostics.swift @@ -41,7 +41,7 @@ extension Diagnostic.Behavior { case .warning: self = .warning case .error, - .fatal: + .fatal: self = .error } } diff --git a/Sources/SWBCore/CommandLineArgument.swift b/Sources/SWBCore/CommandLineArgument.swift index 0a4cc458..b946a964 100644 --- a/Sources/SWBCore/CommandLineArgument.swift +++ b/Sources/SWBCore/CommandLineArgument.swift @@ -43,7 +43,7 @@ public enum CommandLineArgument: Equatable, Hashable, ExpressibleByStringLiteral self = .literal(ByteString(stringLiteral: value)) } - public func serialize<T>(to serializer: T) where T : SWBUtil.Serializer { + public func serialize<T>(to serializer: T) where T: SWBUtil.Serializer { switch self { case .literal(let byteString): // FIXME: pack in one byte diff --git a/Sources/SWBCore/ConfiguredTarget.swift b/Sources/SWBCore/ConfiguredTarget.swift index 5ab5c0c5..f318634a 100644 --- a/Sources/SWBCore/ConfiguredTarget.swift +++ b/Sources/SWBCore/ConfiguredTarget.swift @@ -73,7 +73,7 @@ public final class ConfiguredTarget: Hashable, CustomStringConvertible, Serializ components.append(("runDestination", runDestString)) } - let string = components.map({ "\($0.key): \($0.value)"}).joined(separator: " ") + let string = components.map({ "\($0.key): \($0.value)" }).joined(separator: " ") // Construct the description. return "<\(type(of: self)) \(string)>" @@ -107,7 +107,7 @@ public final class ConfiguredTarget: Hashable, CustomStringConvertible, Serializ } } if specializeGuidForActiveRunDestination { - let discriminator = self.parameters.activeRunDestination.map{ "\($0.platform)-\($0.sdkVariant ?? "")" } ?? "" + let discriminator = self.parameters.activeRunDestination.map { "\($0.platform)-\($0.sdkVariant ?? "")" } ?? "" parameters.append(discriminator) } return .init(id: ["target", target.name, target.guid, parameters.joined(separator: ":")].joined(separator: "-")) @@ -127,12 +127,11 @@ public final class ConfiguredTarget: Hashable, CustomStringConvertible, Serializ // Make sure each build parameters struct is serialized only once.
if let index = delegate.buildParametersIndexes[parameters] { // We already have an index into the build parameters list, so serialize it. - serializer.serialize(1) // Placeholder indicating the next element is an index + serializer.serialize(1) // Placeholder indicating the next element is an index serializer.serialize(index) - } - else { + } else { // These parameters have not been serialized before, so serialize them and add them to our delegate's index map. - serializer.serialize(0) // Placeholder indicating the next element is a serialized BuildParameters + serializer.serialize(0) // Placeholder indicating the next element is a serialized BuildParameters serializer.serialize(parameters) delegate.buildParametersIndexes[parameters] = delegate.currentBuildParametersIndex delegate.currentBuildParametersIndex += 1 @@ -171,7 +170,7 @@ public final class ConfiguredTarget: Hashable, CustomStringConvertible, Serializ } } - public static func ==(lhs: ConfiguredTarget, rhs: ConfiguredTarget) -> Bool { + public static func == (lhs: ConfiguredTarget, rhs: ConfiguredTarget) -> Bool { // Fast path common case. // // FIXME: We key on this a lot -- we need to move this to using reference equality: Change ConfiguredTarget to use reference equality diff --git a/Sources/SWBCore/Core.swift b/Sources/SWBCore/Core.swift index b01421d2..cae14793 100644 --- a/Sources/SWBCore/Core.swift +++ b/Sources/SWBCore/Core.swift @@ -40,7 +40,7 @@ public final class Core: Sendable { /// Get a configured instance of the core. /// /// - returns: An initialized Core instance on which all discovery and loading will have been completed. If there are errors during that process, they will be logged to `stderr` and no instance will be returned. Otherwise, the initialized object is returned. - public static func getInitializedCore(_ delegate: any CoreDelegate, pluginManager: MutablePluginManager, developerPath: DeveloperPath? = nil, resourceSearchPaths: [Path] = [], inferiorProductsPath: Path? = nil, extraPluginRegistration: @PluginExtensionSystemActor (_ pluginManager: MutablePluginManager, _ pluginPaths: [Path]) -> Void = { _, _ in }, additionalContentPaths: [Path] = [], environment: [String:String] = [:], buildServiceModTime: Date, connectionMode: ServiceHostConnectionMode) async -> Core? { + public static func getInitializedCore(_ delegate: any CoreDelegate, pluginManager: MutablePluginManager, developerPath: DeveloperPath? = nil, resourceSearchPaths: [Path] = [], inferiorProductsPath: Path? = nil, extraPluginRegistration: @PluginExtensionSystemActor (_ pluginManager: MutablePluginManager, _ pluginPaths: [Path]) -> Void = { _, _ in }, additionalContentPaths: [Path] = [], environment: [String: String] = [:], buildServiceModTime: Date, connectionMode: ServiceHostConnectionMode) async -> Core? { // Enable macro expression interning during loading. return await MacroNamespace.withExpressionInterningEnabled { () -> Core? 
in let hostOperatingSystem: OperatingSystem @@ -97,9 +97,16 @@ public final class Core: Sendable { if UserDefaults.enablePluginManagerLogging { let plugins = core.pluginManager.pluginsByIdentifier - delegate.emit(Diagnostic(behavior: .note, location: .unknown, data: DiagnosticData("Loaded \(plugins.count) plugins"), childDiagnostics: plugins.sorted(byKey: <).map { (identifier, plugin) in - Diagnostic(behavior: .note, location: .path(plugin.path), data: DiagnosticData("Loaded plugin: \(identifier) from \(plugin.path.str)")) - })) + delegate.emit( + Diagnostic( + behavior: .note, + location: .unknown, + data: DiagnosticData("Loaded \(plugins.count) plugins"), + childDiagnostics: plugins.sorted(byKey: <).map { (identifier, plugin) in + Diagnostic(behavior: .note, location: .path(plugin.path), data: DiagnosticData("Loaded plugin: \(identifier) from \(plugin.path.str)")) + } + ) + ) } for diagnostic in core.pluginManager.loadingDiagnostics { @@ -203,7 +210,7 @@ public final class Core: Sendable { public let additionalContentPaths: [Path] /// Additional override environment variables - public let environment: [String:String] + public let environment: [String: String] /// The Xcode application version, as a string. public let xcodeVersionString: String @@ -222,7 +229,7 @@ public final class Core: Sendable { public let connectionMode: ServiceHostConnectionMode - @_spi(Testing) public init(delegate: any CoreDelegate, hostOperatingSystem: OperatingSystem, pluginManager: any PluginManager, developerPath: DeveloperPath, resourceSearchPaths: [Path], inferiorProductsPath: Path?, additionalContentPaths: [Path], environment: [String:String], buildServiceModTime: Date, connectionMode: ServiceHostConnectionMode) async throws { + @_spi(Testing) public init(delegate: any CoreDelegate, hostOperatingSystem: OperatingSystem, pluginManager: any PluginManager, developerPath: DeveloperPath, resourceSearchPaths: [Path], inferiorProductsPath: Path?, additionalContentPaths: [Path], environment: [String: String], buildServiceModTime: Date, connectionMode: ServiceHostConnectionMode) async throws { self.delegate = delegate self.hostOperatingSystem = hostOperatingSystem self.pluginManager = pluginManager @@ -401,7 +408,8 @@ public final class Core: Sendable { } let libclangVersion: Version? if let versionString = libclang?.getVersionString(), - let match = try? #/\(clang-(?<clang>[0-9]+(?:\.[0-9]+){0,})\)/#.firstMatch(in: versionString) { + let match = try? #/\(clang-(?<clang>[0-9]+(?:\.[0-9]+){0,})\)/#.firstMatch(in: versionString) + { libclangVersion = try? Version(String(match.clang)) } else { libclangVersion = nil @@ -461,7 +469,6 @@ public final class Core: Sendable { _platformRegistry.initialize(to: await PlatformRegistry(delegate: self.registryDelegate, searchPaths: searchPaths, hostOperatingSystem: hostOperatingSystem, fs: fs)) } - private func initializeToolchainRegistry() async { self._toolchainRegistry.initialize(to: await ToolchainRegistry(delegate: self.registryDelegate, searchPaths: self.toolchainPaths, fs: localFS, hostOperatingSystem: hostOperatingSystem)) } @@ -549,7 +556,7 @@ public final class Core: Sendable { /// Dump information on the registered spec proxies. public func getSpecsDump(conformingTo: String?) -> String { var result = "" - for (domain,domainRegistry) in specRegistry.proxiesByDomain.sorted(byKey: <) { + for (domain, domainRegistry) in specRegistry.proxiesByDomain.sorted(byKey: <) { let domainName = domain.isEmpty ?
"(default)" : domain result += "-- Domain: \(domainName) --\n" @@ -589,7 +596,8 @@ public final class Core: Sendable { } let specs: [SpecDump] = specRegistry.domains.flatMap { domain -> [SpecDump] in - let allSpecs = specRegistry.findSpecs(BuildSettingsSpec.self, domain: domain, includeInherited: false) + let allSpecs = + specRegistry.findSpecs(BuildSettingsSpec.self, domain: domain, includeInherited: false) + specRegistry.findSpecs(BuildSettingsExtensionSpec.self, domain: domain, includeInherited: false) + specRegistry.findSpecs(BuildSystemSpec.self, domain: domain, includeInherited: false) + specRegistry.findSpecs(CommandLineToolSpec.self, domain: domain, includeInherited: false) @@ -599,7 +607,8 @@ public final class Core: Sendable { path: spec.proxyPath.str, options: spec.flattenedBuildOptions.values.sorted(by: \.name).map { option in .init(name: option.name, displayName: option.localizedName != option.name ? option.localizedName : nil, categoryName: option.localizedCategoryName, description: option.localizedDescription) - }) + } + ) } } @@ -610,7 +619,7 @@ public final class Core: Sendable { /// Dump information on the registered toolchains. public func getToolchainsDump() async -> String { var result = "" - for (_,toolchain) in toolchainRegistry.toolchainsByIdentifier.sorted(byKey: <) { + for (_, toolchain) in toolchainRegistry.toolchainsByIdentifier.sorted(byKey: <) { result += "\(toolchain)\n" } return result @@ -659,13 +668,15 @@ extension Core: PlatformInfoLookup { // If we found a match, look up the SDK -- we'll deterministically get the latest version of that SDK, // and it should have only one variant whose platform ID matches our platform. if let platformName = platformNames.only, let platform = platformRegistry.lookup(name: platformName) { - let potentialSDKNames = [platform.sdkCanonicalName].compactMap { $0 } + sdkRegistry.supportedSDKCanonicalNameSuffixes().compactMap { - if let sdkBaseName = platform.sdkCanonicalName { - return "\(sdkBaseName).\($0)" - } else { - return nil + let potentialSDKNames = + [platform.sdkCanonicalName].compactMap { $0 } + + sdkRegistry.supportedSDKCanonicalNameSuffixes().compactMap { + if let sdkBaseName = platform.sdkCanonicalName { + return "\(sdkBaseName).\($0)" + } else { + return nil + } } - } if let sdk = potentialSDKNames.compactMap({ try? sdkRegistry.lookup($0, activeRunDestination: nil) }).first { return sdk.variants.values.filter { sdk.targetBuildVersionPlatform(sdkVariant: $0) == buildPlatform }.only } @@ -714,7 +725,7 @@ extension Core { /// The delegate used to convey information to registry subsystems about the core, including a channel for those registries to report diagnostics. This struct is created by the core itself and refers to the core. It exists as a struct separate from core to avoid creating an ownership cycle between the core and the registry objects. /// /// Although primarily used by registries during the loading of the core, this delegate is persisted since registries may need to report additional information after loading. For example, new toolchains may be downloaded. 
-struct CoreRegistryDelegate : PlatformRegistryDelegate, SDKRegistryDelegate, SpecRegistryDelegate, ToolchainRegistryDelegate, SpecRegistryProvider, Sendable { +struct CoreRegistryDelegate: PlatformRegistryDelegate, SDKRegistryDelegate, SpecRegistryDelegate, ToolchainRegistryDelegate, SpecRegistryProvider, Sendable { unowned let core: Core var diagnosticsEngine: DiagnosticProducingDelegateProtocolPrivate { diff --git a/Sources/SWBCore/CustomTaskTypeDescription.swift b/Sources/SWBCore/CustomTaskTypeDescription.swift index ee15c9a8..9210ce81 100644 --- a/Sources/SWBCore/CustomTaskTypeDescription.swift +++ b/Sources/SWBCore/CustomTaskTypeDescription.swift @@ -60,6 +60,6 @@ public final class CustomTaskTypeDescription: TaskTypeDescription { } public func generateLocalizationInfo(for task: any ExecutableTask, input: TaskGenerateLocalizationInfoInput) -> [TaskGenerateLocalizationInfoOutput] { - [] + [] } } diff --git a/Sources/SWBCore/Dependencies.swift b/Sources/SWBCore/Dependencies.swift index de31a838..0cff938b 100644 --- a/Sources/SWBCore/Dependencies.swift +++ b/Sources/SWBCore/Dependencies.swift @@ -97,7 +97,8 @@ public struct ModuleDependenciesContext: Sendable, SerializableCodable { Diagnostic( behavior: .warning, location: .unknown, - data: DiagnosticData("The current toolchain does not support \(BuiltinMacros.VALIDATE_MODULE_DEPENDENCIES.name)")) + data: DiagnosticData("The current toolchain does not support \(BuiltinMacros.VALIDATE_MODULE_DEPENDENCIES.name)") + ) } /// Compute missing module dependencies. @@ -130,31 +131,36 @@ public struct ModuleDependenciesContext: Sendable, SerializableCodable { let fixIt = fixItContext?.makeFixIt(newModules: missingDependencies.map { $0.0 }) let fixIts = fixIt.map { [$0] } ?? [] - let importDiags: [Diagnostic] = missingDependencies + let importDiags: [Diagnostic] = + missingDependencies .flatMap { dep in dep.1.map { return Diagnostic( behavior: behavior, location: $0, data: DiagnosticData("Missing entry in \(BuiltinMacros.MODULE_DEPENDENCIES.name): \(dep.0.asBuildSettingEntryQuotedIfNeeded)"), - fixIts: fixIts) + fixIts: fixIts + ) } } let message = "Missing entries in \(BuiltinMacros.MODULE_DEPENDENCIES.name): \(missingDependencies.map { $0.0.asBuildSettingEntryQuotedIfNeeded }.sorted().joined(separator: " "))" - let location: Diagnostic.Location = fixIt.map { - Diagnostic.Location.path($0.sourceRange.path, line: $0.sourceRange.endLine, column: $0.sourceRange.endColumn) - } ?? Diagnostic.Location.buildSetting(BuiltinMacros.MODULE_DEPENDENCIES) - - missingDiagnostics = [Diagnostic( - behavior: behavior, - location: location, - data: DiagnosticData(message), - fixIts: fixIts, - childDiagnostics: importDiags)] - } - else { + let location: Diagnostic.Location = + fixIt.map { + Diagnostic.Location.path($0.sourceRange.path, line: $0.sourceRange.endLine, column: $0.sourceRange.endColumn) + } ?? Diagnostic.Location.buildSetting(BuiltinMacros.MODULE_DEPENDENCIES) + + missingDiagnostics = [ + Diagnostic( + behavior: behavior, + location: location, + data: DiagnosticData(message), + fixIts: fixIts, + childDiagnostics: importDiags + ) + ] + } else { missingDiagnostics = [] } @@ -162,13 +168,15 @@ public struct ModuleDependenciesContext: Sendable, SerializableCodable { if !unusedDependencies.isEmpty { let message = "Unused entries in \(BuiltinMacros.MODULE_DEPENDENCIES.name): \(unusedDependencies.map { $0.name }.sorted().joined(separator: " "))" // TODO location & fixit - unusedDiagnostics = [Diagnostic( - behavior: validateUnused == .yesError ? 
.error : .warning, - location: .unknown, - data: DiagnosticData(message), - fixIts: [])] - } - else { + unusedDiagnostics = [ + Diagnostic( + behavior: validateUnused == .yesError ? .error : .warning, + location: .unknown, + data: DiagnosticData(message), + fixIts: [] + ) + ] + } else { unusedDiagnostics = [] } @@ -189,20 +197,18 @@ public struct ModuleDependenciesContext: Sendable, SerializableCodable { let thisTargetCondition = MacroCondition(parameter: BuiltinMacros.targetNameCondition, valuePattern: target.name) // TODO: if you have an assignment in a project-xcconfig and another assignment in target-settings, this would find the project-xcconfig assignment, but updating that might have no effect depending on the target-settings assignment - if let assignment = (settings.globalScope.table.lookupMacro(BuiltinMacros.MODULE_DEPENDENCIES)?.sequence.first { - $0.location != nil && ($0.conditions?.conditions == [thisTargetCondition] || ($0.conditions?.conditions.isEmpty ?? true)) - }), - let location = assignment.location + if let assignment = + (settings.globalScope.table.lookupMacro(BuiltinMacros.MODULE_DEPENDENCIES)?.sequence.first { + $0.location != nil && ($0.conditions?.conditions == [thisTargetCondition] || ($0.conditions?.conditions.isEmpty ?? true)) + }), + let location = assignment.location { self.init(sourceRange: .init(path: location.path, startLine: location.endLine, startColumn: location.endColumn, endLine: location.endLine, endColumn: location.endColumn), modificationStyle: .appendToExistingAssignment) - } - else if let xcconfig = settings.constructionComponents.targetXcconfig { + } else if let xcconfig = settings.constructionComponents.targetXcconfig { self.init(sourceRange: .init(path: xcconfig.path, startLine: xcconfig.finalLineNumber, startColumn: xcconfig.finalColumnNumber, endLine: xcconfig.finalLineNumber, endColumn: xcconfig.finalColumnNumber), modificationStyle: .insertNewAssignment(targetNameCondition: nil)) - } - else if let xcconfig = settings.constructionComponents.projectXcconfig { + } else if let xcconfig = settings.constructionComponents.projectXcconfig { self.init(sourceRange: .init(path: xcconfig.path, startLine: xcconfig.finalLineNumber, startColumn: xcconfig.finalColumnNumber, endLine: xcconfig.finalLineNumber, endColumn: xcconfig.finalColumnNumber), modificationStyle: .insertNewAssignment(targetNameCondition: target.name)) - } - else { + } else { return nil } } @@ -299,7 +305,8 @@ public struct HeaderDependenciesContext: Sendable, SerializableCodable { Diagnostic( behavior: .warning, location: .unknown, - data: DiagnosticData("The current toolchain does not support \(BuiltinMacros.VALIDATE_HEADER_DEPENDENCIES.name)")) + data: DiagnosticData("The current toolchain does not support \(BuiltinMacros.VALIDATE_HEADER_DEPENDENCIES.name)") + ) } /// Compute missing module dependencies. @@ -314,16 +321,14 @@ public struct HeaderDependenciesContext: Sendable, SerializableCodable { // TODO: What if the basename doesn't uniquely identify the header? 
HeaderDependency(name: $0.basename, accessLevel: .Private, optional: false) } - } - else { + } else { missing = [] } let unused: [HeaderDependency] if validateUnused != .no { unused = declared.filter { !$0.optional && !declaredNames.contains($0.name) } - } - else { + } else { unused = [] } @@ -344,17 +349,20 @@ public struct HeaderDependenciesContext: Sendable, SerializableCodable { let message = "Missing entries in \(BuiltinMacros.HEADER_DEPENDENCIES.name): \(missingDependencies.map { $0.asBuildSettingEntryQuotedIfNeeded }.sorted().joined(separator: " "))" - let location: Diagnostic.Location = fixIt.map { - Diagnostic.Location.path($0.sourceRange.path, line: $0.sourceRange.endLine, column: $0.sourceRange.endColumn) - } ?? Diagnostic.Location.buildSetting(BuiltinMacros.HEADER_DEPENDENCIES) - - missingDiagnostics = [Diagnostic( - behavior: behavior, - location: location, - data: DiagnosticData(message), - fixIts: fixIts)] - } - else { + let location: Diagnostic.Location = + fixIt.map { + Diagnostic.Location.path($0.sourceRange.path, line: $0.sourceRange.endLine, column: $0.sourceRange.endColumn) + } ?? Diagnostic.Location.buildSetting(BuiltinMacros.HEADER_DEPENDENCIES) + + missingDiagnostics = [ + Diagnostic( + behavior: behavior, + location: location, + data: DiagnosticData(message), + fixIts: fixIts + ) + ] + } else { missingDiagnostics = [] } @@ -362,13 +370,15 @@ public struct HeaderDependenciesContext: Sendable, SerializableCodable { if !unusedDependencies.isEmpty { let message = "Unused entries in \(BuiltinMacros.HEADER_DEPENDENCIES.name): \(unusedDependencies.map { $0.name }.sorted().joined(separator: " "))" // TODO location & fixit - unusedDiagnostics = [Diagnostic( - behavior: validateUnused == .yesError ? .error : .warning, - location: .unknown, - data: DiagnosticData(message), - fixIts: [])] - } - else { + unusedDiagnostics = [ + Diagnostic( + behavior: validateUnused == .yesError ? .error : .warning, + location: .unknown, + data: DiagnosticData(message), + fixIts: [] + ) + ] + } else { unusedDiagnostics = [] } @@ -388,20 +398,18 @@ public struct HeaderDependenciesContext: Sendable, SerializableCodable { guard let target = settings.target else { return nil } let thisTargetCondition = MacroCondition(parameter: BuiltinMacros.targetNameCondition, valuePattern: target.name) - if let assignment = (settings.globalScope.table.lookupMacro(BuiltinMacros.HEADER_DEPENDENCIES)?.sequence.first { - $0.location != nil && ($0.conditions?.conditions == [thisTargetCondition] || ($0.conditions?.conditions.isEmpty ?? true)) - }), - let location = assignment.location + if let assignment = + (settings.globalScope.table.lookupMacro(BuiltinMacros.HEADER_DEPENDENCIES)?.sequence.first { + $0.location != nil && ($0.conditions?.conditions == [thisTargetCondition] || ($0.conditions?.conditions.isEmpty ?? 
true)) + }), + let location = assignment.location { self.init(sourceRange: .init(path: location.path, startLine: location.endLine, startColumn: location.endColumn, endLine: location.endLine, endColumn: location.endColumn), modificationStyle: .appendToExistingAssignment) - } - else if let xcconfig = settings.constructionComponents.targetXcconfig { + } else if let xcconfig = settings.constructionComponents.targetXcconfig { self.init(sourceRange: .init(path: xcconfig.path, startLine: xcconfig.finalLineNumber, startColumn: xcconfig.finalColumnNumber, endLine: xcconfig.finalLineNumber, endColumn: xcconfig.finalColumnNumber), modificationStyle: .insertNewAssignment(targetNameCondition: nil)) - } - else if let xcconfig = settings.constructionComponents.projectXcconfig { + } else if let xcconfig = settings.constructionComponents.projectXcconfig { self.init(sourceRange: .init(path: xcconfig.path, startLine: xcconfig.finalLineNumber, startColumn: xcconfig.finalColumnNumber, endLine: xcconfig.finalLineNumber, endColumn: xcconfig.finalColumnNumber), modificationStyle: .insertNewAssignment(targetNameCondition: target.name)) - } - else { + } else { return nil } } diff --git a/Sources/SWBCore/DependencyResolution.swift b/Sources/SWBCore/DependencyResolution.swift index 80343091..1d126d9b 100644 --- a/Sources/SWBCore/DependencyResolution.swift +++ b/Sources/SWBCore/DependencyResolution.swift @@ -100,7 +100,6 @@ struct SpecializationParameters: Hashable, CustomStringConvertible { return macros } - enum SpecializationSource: CustomStringConvertible { case synthesized case workspace @@ -155,9 +154,9 @@ struct SpecializationParameters: Hashable, CustomStringConvertible { sourceString = "imposed by \(source)" } - return "Specialization parameters \(sourceString): platform '\(platform?.identifier ?? "nil")' sdkVariant '\(sdkVariant?.name ?? "nil")' supportedPlatforms: '\(supportedPlatforms?.joined(separator: " ") ?? "nil")' toolchain: '\(toolchain?.joined(separator: " ") ?? "nil")'" + + return "Specialization parameters \(sourceString): platform '\(platform?.identifier ?? "nil")' sdkVariant '\(sdkVariant?.name ?? "nil")' supportedPlatforms: '\(supportedPlatforms?.joined(separator: " ") ?? "nil")' toolchain: '\(toolchain?.joined(separator: " ") ?? "nil")'" // Hide the suffix if it is not present. - (canonicalNameSuffix != nil ? " suffix: \(String(describing: canonicalNameSuffix))" : "") + + (canonicalNameSuffix != nil ? " suffix: \(String(describing: canonicalNameSuffix))" : "") } private func effectiveToolchainOverride(originalParameters: BuildParameters, workspaceContext: WorkspaceContext) -> [String]? { @@ -186,10 +185,7 @@ struct SpecializationParameters: Hashable, CustomStringConvertible { /// Check if a configured target can be used when this specialization is required. 
func isCompatible(with configuredTarget: ConfiguredTarget, settings: Settings, workspaceContext: WorkspaceContext) -> Bool { let toolchain = effectiveToolchainOverride(originalParameters: configuredTarget.parameters, workspaceContext: workspaceContext) - return (platform == nil || platform === settings.platform) && - (sdkVariant == nil || sdkVariant?.name == settings.sdkVariant?.name) && - (toolchain == nil || toolchain == settings.toolchains.map(\.identifier)) && - (canonicalNameSuffix == nil || canonicalNameSuffix?.nilIfEmpty == settings.sdk?.canonicalNameSuffix) + return (platform == nil || platform === settings.platform) && (sdkVariant == nil || sdkVariant?.name == settings.sdkVariant?.name) && (toolchain == nil || toolchain == settings.toolchains.map(\.identifier)) && (canonicalNameSuffix == nil || canonicalNameSuffix?.nilIfEmpty == settings.sdk?.canonicalNameSuffix) } /// Return an effective set of specialization parameters based on a specific target-dependency pair. @@ -251,8 +247,7 @@ struct SpecializationParameters: Hashable, CustomStringConvertible { let overridingSdk: SDK? if let sdkRoot { overridingSdk = try? workspaceContext.core.sdkRegistry.lookup(nameOrPath: sdkRoot.value, basePath: Path("/"), activeRunDestination: parameters.activeRunDestination) - } - else { + } else { overridingSdk = nil } // This seems like an unfortunate way to get from the SDK to its platform. But SettingsBuilder.computeBoundProperties() creates a scope to evaluate the PLATFORM_NAME defined in the SDK's default properties, so maybe there isn't a clearly better way. @@ -265,8 +260,7 @@ struct SpecializationParameters: Hashable, CustomStringConvertible { if let overridingSdk = overridingSdk { let platformNames = workspaceContext.core.platformRegistry.platforms.map { $0.name } diagnostics.append(Diagnostic(behavior: .warning, location: .unknown, data: DiagnosticData("Could not find a platform name for workspace specialization parameter for overriding SDK '\(overridingSdk.canonicalName)' among loaded platforms '\(platformNames.joined(separator: " "))'s."))) - } - else if let sdkRoot = sdkRoot { + } else if let sdkRoot = sdkRoot { diagnostics.append(Diagnostic(behavior: .warning, location: .unknown, data: DiagnosticData("Could not find an SDK for workspace specialization parameters for overriding SDKROOT '\(sdkRoot.value)' from \(sdkRoot.source)."))) } // Otherwise there was no overriding SDK provided, and there is no active run destination (or somehow there's a destination without a platform). This is valid, but it's not clear to me what this means for specialization parameters. @@ -294,8 +288,7 @@ struct SpecializationParameters: Hashable, CustomStringConvertible { let platformNames = workspaceContext.core.platformRegistry.platforms.map { $0.name } diagnostics.append(Diagnostic(behavior: .warning, location: .unknown, data: DiagnosticData("Could not find a platform named '\(platformName)' from loaded platforms '\(platformNames.joined(separator: " "))' for workspace specialization parameters."))) } - } - else { + } else { platform = nil } let defaultVariant: SDKVariant? 
@@ -368,14 +361,14 @@ extension BuildRequestContext { for overrideSource in overrideSources { switch overrideSource { case .environmentConfigOverrides(dict: let dict), - .commandLineConfigOverrides(dict: let dict), - .commandLineOverrides(dict: let dict), - .buildParametersOverrides(dict: let dict): + .commandLineConfigOverrides(dict: let dict), + .commandLineOverrides(dict: let dict), + .buildParametersOverrides(dict: let dict): if let value = dict[macroName] { return (value, overrideSource) } case .environmentConfigOverridesPath, - .commandLineConfigOverridesPath: + .commandLineConfigOverridesPath: // These cases were handled before this loop. continue } @@ -500,7 +493,7 @@ extension SpecializationParameters { // Keep this dictionary empty so that `LinkageDependencyResolver` fallbacks to using the build parameters of the configured targets, which are the relevant ones. self.buildParametersByTarget = [:] } else { - var buildParametersByTarget = [Target:BuildParameters]() + var buildParametersByTarget = [Target: BuildParameters]() for targetInfo in buildRequest.buildTargets { buildParametersByTarget[targetInfo.target] = targetInfo.parameters } @@ -517,15 +510,20 @@ extension SpecializationParameters { var hostBuildParameters: PlatformBuildParameters? = nil for platform in workspaceContext.core.platformRegistry.platforms { // Find the corresponding SDK for this platform - let potentialSDKNames = [platform.sdkCanonicalName].compactMap { $0 } + workspaceContext.core.sdkRegistry.supportedSDKCanonicalNameSuffixes().compactMap { - if let sdkBaseName = platform.sdkCanonicalName { - return "\(sdkBaseName).\($0)" - } else { - return nil + let potentialSDKNames = + [platform.sdkCanonicalName].compactMap { $0 } + + workspaceContext.core.sdkRegistry.supportedSDKCanonicalNameSuffixes().compactMap { + if let sdkBaseName = platform.sdkCanonicalName { + return "\(sdkBaseName).\($0)" + } else { + return nil + } } - } - guard let matchingSDK = potentialSDKNames - .compactMap({ try? workspaceContext.core.sdkRegistry.lookup($0, activeRunDestination: nil) }).first else { + guard + let matchingSDK = + potentialSDKNames + .compactMap({ try? 
workspaceContext.core.sdkRegistry.lookup($0, activeRunDestination: nil) }).first + else { continue } @@ -537,7 +535,7 @@ extension SpecializationParameters { let specializationParams = SpecializationParameters.default(workspaceContext: workspaceContext, buildRequestContext: buildRequestContext, parameters: buildParams) platformBuildParameters.append(PlatformBuildParameters(buildParams: buildParams, specializationParams: specializationParams)) - if runDestination.platform == hostOS && runDestination.sdkVariant == matchingSDK.defaultVariant?.name { + if runDestination.platform == hostOS && runDestination.sdkVariant == matchingSDK.defaultVariant?.name { hostBuildParameters = platformBuildParameters.last } } @@ -550,11 +548,13 @@ extension SpecializationParameters { self.hostParametersForIndex = nil } - self.dynamicallyBuildingTargets = Set(buildRequest.buildTargets.filter { - workspaceContext.workspace.project(for: $0.target).isPackage && buildRequestContext.getCachedSettings($0.parameters, target: $0.target).globalScope.evaluate(BuiltinMacros.PACKAGE_BUILD_DYNAMICALLY) - }.map { - $0.target - }) + self.dynamicallyBuildingTargets = Set( + buildRequest.buildTargets.filter { + workspaceContext.workspace.project(for: $0.target).isPackage && buildRequestContext.getCachedSettings($0.parameters, target: $0.target).globalScope.evaluate(BuiltinMacros.PACKAGE_BUILD_DYNAMICALLY) + }.map { + $0.target + } + ) } /// Add the superimposed overrides in `specialization` to be imposed on the target in `configuredTarget` and other instances of that target which match this `configuredTarget`. @@ -868,8 +868,7 @@ extension SpecializationParameters { } else { imposedSdkVariant = imposedPlatform?.defaultSDKVariant } - } - else { + } else { imposedSdkVariant = nil } @@ -897,14 +896,21 @@ extension SpecializationParameters { // having a build description, severely hampering semantic functionality. let behavior: Diagnostic.Behavior = buildRequest.enableIndexBuildArena ? .warning : .error - delegate.emit(Diagnostic(behavior: behavior, location: .unknown, data: data, childDiagnostics: { - switch specialization.source { - case .synthesized, .workspace: - [] - case let .target(name): - [Diagnostic(behavior: .note, location: .unknown, data: DiagnosticData("Consider changing target '\(name)' to build using an \(specializationSDKOptions.joined(separator: " ")) SDK."))] - } - }())) + delegate.emit( + Diagnostic( + behavior: behavior, + location: .unknown, + data: data, + childDiagnostics: { + switch specialization.source { + case .synthesized, .workspace: + [] + case let .target(name): + [Diagnostic(behavior: .note, location: .unknown, data: DiagnosticData("Consider changing target '\(name)' to build using an \(specializationSDKOptions.joined(separator: " ")) SDK."))] + } + }() + ) + ) } // Since we are imposing a platform, we also need to impose internal-ness (from either the client or `SPECIALIZATION_SDK_OPTIONS`). @@ -947,14 +953,16 @@ extension SpecializationParameters { imposedToolchain = nil } - let fromPackage = workspaceContext.workspace.project(for: forTarget).isPackage + let fromPackage = workspaceContext.workspace.project(for: forTarget).isPackage let imposedSwiftCompileCache: Bool? 
if fromPackage { - imposedSwiftCompileCache = settings.globalScope.evaluate(BuiltinMacros.SWIFT_ENABLE_COMPILE_CACHE) || buildRequest.buildTargets.contains { buildTargetInfo in - let buildTargetSettings = buildRequestContext.getCachedSettings(buildTargetInfo.parameters, target: buildTargetInfo.target) - return buildTargetSettings.globalScope.evaluate(BuiltinMacros.SWIFT_ENABLE_COMPILE_CACHE) - } + imposedSwiftCompileCache = + settings.globalScope.evaluate(BuiltinMacros.SWIFT_ENABLE_COMPILE_CACHE) + || buildRequest.buildTargets.contains { buildTargetInfo in + let buildTargetSettings = buildRequestContext.getCachedSettings(buildTargetInfo.parameters, target: buildTargetInfo.target) + return buildTargetSettings.globalScope.evaluate(BuiltinMacros.SWIFT_ENABLE_COMPILE_CACHE) + } } else { imposedSwiftCompileCache = nil } @@ -967,8 +975,7 @@ extension SpecializationParameters { // Ideally, this code shouldn't live here, but there are issues tracked in (rdar://80907686) that we need to work through. if settings.enableTargetPlatformSpecialization || settings.enableBuildRequestOverrides || fromPackage { return lookupConfiguredTarget(forTarget, parameters: filteredSpecialization.imposed(on: parameters, workspaceContext: workspaceContext), superimposedProperties: filteredSpecialization.superimposedProperties) - } - else { + } else { let nonimposedParameters = fromPackage ? parameters : parameters.withoutImposedOverrides(buildRequest, core: workspaceContext.core) return lookupConfiguredTarget(forTarget, parameters: filteredSpecialization.imposed(on: nonimposedParameters, workspaceContext: workspaceContext), superimposedProperties: filteredSpecialization.superimposedProperties) } @@ -1083,8 +1090,7 @@ extension DependencyResolver { for n in 0..(maximumParallelism: Int, _ items: S, _ transform: @Sendable @escaping (Element) async -> [T]) async -> [T] where S: Sequence { if disableConcurrentDependencyResolution { return await items.asyncMap(transform).flatMap { $0 } - } - else { + } else { return await items.concurrentMap(maximumParallelism: maximumParallelism, transform).flatMap { $0 } } } diff --git a/Sources/SWBCore/EnvironmentBindings.swift b/Sources/SWBCore/EnvironmentBindings.swift index ad20efef..f42388ba 100644 --- a/Sources/SWBCore/EnvironmentBindings.swift +++ b/Sources/SWBCore/EnvironmentBindings.swift @@ -53,7 +53,7 @@ public struct EnvironmentBindings: Sendable { } extension EnvironmentBindings: Equatable { - public static func ==(lhs: EnvironmentBindings, rhs: EnvironmentBindings) -> Bool { + public static func == (lhs: EnvironmentBindings, rhs: EnvironmentBindings) -> Bool { return lhs.bindingsDictionary == rhs.bindingsDictionary } } diff --git a/Sources/SWBCore/ExecutableTask.swift b/Sources/SWBCore/ExecutableTask.swift index ad6fbf31..d9ea0ceb 100644 --- a/Sources/SWBCore/ExecutableTask.swift +++ b/Sources/SWBCore/ExecutableTask.swift @@ -105,7 +105,7 @@ extension ExecutableTask { /// /// Clients should expect this is cheap to compute, but not necessarily O(1). 
public var commandLineAsStrings: AnySequence<String> { - return AnySequence(commandLine.lazy.map{ $0.asString }) + return AnySequence(commandLine.lazy.map { $0.asString }) } public func generateIndexingInfo(input: TaskGenerateIndexingInfoInput) -> [TaskGenerateIndexingInfoOutput] { @@ -145,8 +145,7 @@ extension ExecutableTask { if let selfName = self.forTarget?.target.name, let otherName = other.forTarget?.target.name { // If both tasks' targets have names, then we order them by those names return selfName < otherName - } - else { + } else { // Otherwise only one has a name, so we order the one without a name last. So all tasks without a // target will get ordered to the end (usually these are all gate tasks). return self.forTarget?.target.name != nil diff --git a/Sources/SWBCore/Extensions/SettingsBuilderExtension.swift b/Sources/SWBCore/Extensions/SettingsBuilderExtension.swift index 6a5ca157..520f307d 100644 --- a/Sources/SWBCore/Extensions/SettingsBuilderExtension.swift +++ b/Sources/SWBCore/Extensions/SettingsBuilderExtension.swift @@ -25,16 +25,16 @@ public struct SettingsBuilderExtensionPoint: ExtensionPoint { public protocol SettingsBuilderExtension: Sendable { /// Provides a table of additional build properties overrides - func addOverrides(fromEnvironment: [String:String], parameters: BuildParameters) throws -> [String:String] + func addOverrides(fromEnvironment: [String: String], parameters: BuildParameters) throws -> [String: String] /// Provides a table of additional build settings builtin defaults - func addBuiltinDefaults(fromEnvironment: [String:String], parameters: BuildParameters) throws -> [String:String] + func addBuiltinDefaults(fromEnvironment: [String: String], parameters: BuildParameters) throws -> [String: String] /// Provides a table of default settings for a product type. func addProductTypeDefaults(productType: ProductTypeSpec) -> [String: String] /// Provides a table of additional SDK settings - func addSDKSettings(_ sdk: SDK, _ variant: SDKVariant?, _ sparseSDKs: [SDK]) throws -> [String : String] + func addSDKSettings(_ sdk: SDK, _ variant: SDKVariant?, _ sparseSDKs: [SDK]) throws -> [String: String] /// Provides a table of overriding SDK settings func addSDKOverridingSettings(_ sdk: SDK, _ variant: SDKVariant?, _ sparseSDKs: [SDK], specLookupContext: any SpecLookupContext, environment: [String: String]) throws -> [String: String] diff --git a/Sources/SWBCore/FileSystemSignatureBasedCache.swift b/Sources/SWBCore/FileSystemSignatureBasedCache.swift index 4ba2551c..6af259a1 100--- a/Sources/SWBCore/FileSystemSignatureBasedCache.swift +++ b/Sources/SWBCore/FileSystemSignatureBasedCache.swift @@ -30,11 +30,14 @@ final class FileSystemSignatureBasedCache: Sendable { /// Gets the object with the given path and signature, creating it if necessary.
public func get(at path: Path, filesSignature: FilesSignature) throws -> Value { - return try cache.getOrInsert(Key(path: path, filesSignature: filesSignature), body: { () -> Result in - return Result { - try block(path) + return try cache.getOrInsert( + Key(path: path, filesSignature: filesSignature), + body: { () -> Result in + return Result { + try block(path) + } } - }).get() + ).get() } } diff --git a/Sources/SWBCore/FileToBuild.swift b/Sources/SWBCore/FileToBuild.swift index 00e8222a..88bcafad 100644 --- a/Sources/SWBCore/FileToBuild.swift +++ b/Sources/SWBCore/FileToBuild.swift @@ -15,7 +15,7 @@ public import SWBProtocol public import SWBMacro /// Represents a file to be passed as input to some part of the build machinery. May be a source file originally sent down with the PIF, or might be a temporary file. Once a build rule action has been determined, it is assigned to the FileToBuild so it doesn’t have to be looked up again. Note that the term “file” here is used in the loosest sense — the path can refer to any file system entity. -public struct FileToBuild : Hashable, Sendable { +public struct FileToBuild: Hashable, Sendable { /// Absolute path of the referenced file. public let absolutePath: Path @@ -100,7 +100,7 @@ public struct FileToBuild : Hashable, Sendable { hasher.combine(headerVisibility) } - public static func ==(lhs: FileToBuild, rhs: FileToBuild) -> Bool { + public static func == (lhs: FileToBuild, rhs: FileToBuild) -> Bool { // QUESTION: Does the header visibility really make two files of the same type and the same path different? [Swift Build] Should FileToBuild.== be considering the headerVisibility property? return lhs.absolutePath == rhs.absolutePath && lhs.fileType === rhs.fileType && lhs.headerVisibility == rhs.headerVisibility } @@ -112,7 +112,7 @@ public struct FileToBuild : Hashable, Sendable { } /// Represents a group of one or more files to be processed by a single build task. Once added to a group, a file cannot be removed from it. -public final class FileToBuildGroup : Hashable, Equatable, CustomStringConvertible { +public final class FileToBuildGroup: Hashable, Equatable, CustomStringConvertible { /// The identifier for this group. public let identifier: String? @@ -132,12 +132,11 @@ public final class FileToBuildGroup : Hashable, Equatable, CustomStringConvertib hasher.combine(files) } - public static func ==(lhs: FileToBuildGroup, rhs: FileToBuildGroup) -> Bool { + public static func == (lhs: FileToBuildGroup, rhs: FileToBuildGroup) -> Bool { return lhs.files == rhs.files } - public var description: String - { + public var description: String { // Since only the files are relevant for equality, that's all we emit here. let filesDescription = files.map({ $0.descriptionForGroup }).joined(separator: ", ") return "<\(type(of: self)):[\(filesDescription)]>" @@ -148,8 +147,8 @@ public protocol RegionVariable { var regionVariantName: String? { get } } -extension Path: RegionVariable { } -extension FileToBuild: RegionVariable { } +extension Path: RegionVariable {} +extension FileToBuild: RegionVariable {} extension FileToBuildGroup: RegionVariable { /// Returns the region variant name for a group of files. 
diff --git a/Sources/SWBCore/LibSwiftDriver/LibSwiftDriver.swift b/Sources/SWBCore/LibSwiftDriver/LibSwiftDriver.swift index c3ef9d63..fede4384 100644 --- a/Sources/SWBCore/LibSwiftDriver/LibSwiftDriver.swift +++ b/Sources/SWBCore/LibSwiftDriver/LibSwiftDriver.swift @@ -18,7 +18,7 @@ import TSCBasic public import SWBUtil -public protocol SwiftGlobalExplicitDependencyGraph : AnyObject { +public protocol SwiftGlobalExplicitDependencyGraph: AnyObject { /// Register a collection of driver jobs in the graph, de-duplicating them in the process /// - Parameters: /// - jobs: A collection of explicit-dependency build jobs from a given Swift Driver invocation @@ -54,8 +54,11 @@ private struct GlobalExplicitDependencyTracker { /// The collection of *all* explicit module dependency build jobs found so far fileprivate private(set) var plannedExplicitDependencyJobs: [LibSwiftDriver.PlannedBuild.PlannedSwiftDriverJob] = [] - mutating func addExplicitDependencyBuildJobs(_ jobs: [SwiftDriverJob], workingDirectory: Path, - producerMap: inout [Path: LibSwiftDriver.JobKey]) throws -> Set<LibSwiftDriver.JobKey> { + mutating func addExplicitDependencyBuildJobs( + _ jobs: [SwiftDriverJob], + workingDirectory: Path, + producerMap: inout [Path: LibSwiftDriver.JobKey] + ) throws -> Set<LibSwiftDriver.JobKey> { // Filter out "new" unique jobs and populate the `producerMap` var jobKeys: Set<LibSwiftDriver.JobKey> = [] var jobsWithIndices: [(SwiftDriverJob, LibSwiftDriver.JobIndex)] = [] @@ -137,7 +140,7 @@ public final class SwiftModuleDependencyGraph: SwiftGlobalExplicitDependencyGrap public init() {} public func waitForCompletion() async { - await registryQueue.sync { } + await registryQueue.sync {} } /// Plans a build and stores it for a given unique identifier. @@ -301,8 +304,11 @@ public final class SwiftModuleDependencyGraph: SwiftGlobalExplicitDependencyGrap } } - public func addExplicitDependencyBuildJobs(_ jobs: [SwiftDriverJob], workingDirectory: Path, - producerMap: inout [Path: LibSwiftDriver.JobKey]) throws -> Set<LibSwiftDriver.JobKey> { + public func addExplicitDependencyBuildJobs( + _ jobs: [SwiftDriverJob], + workingDirectory: Path, + producerMap: inout [Path: LibSwiftDriver.JobKey] + ) throws -> Set<LibSwiftDriver.JobKey> { try registryQueue.blocking_sync { try globalExplicitDependencyTracker.addExplicitDependencyBuildJobs(jobs, workingDirectory: workingDirectory, producerMap: &producerMap) } @@ -353,17 +359,21 @@ public final class SwiftModuleDependencyGraph: SwiftGlobalExplicitDependencyGrap summaryCSV.writeRow([moduleID, "\(jobs.count)"]) summaryMessage += "\(moduleID): \(jobs.count == 1 ?
"1 variant" : "\(jobs.count) variants")\n" - let mergeResult = nWayMerge(jobs.map { $0.commandLine.filter { - if ["pcm", "dia", "d"].contains(Path($0).fileExtension) { - // Filter differences in module paths, they are a function of the other args - return false - } else if $0.hasPrefix("llvmcas://") { - // Filter differences in CAS URLs, they are a function of the other args - return false - } else { - return true + let mergeResult = nWayMerge( + jobs.map { + $0.commandLine.filter { + if ["pcm", "dia", "d"].contains(Path($0).fileExtension) { + // Filter differences in module paths, they are a function of the other args + return false + } else if $0.hasPrefix("llvmcas://") { + // Filter differences in CAS URLs, they are a function of the other args + return false + } else { + return true + } + }.map { $0.asString } } - }.map { $0.asString } }).filter { + ).filter { if $0.elementOf.count == jobs.count { // Don't report args common to all variants return false @@ -417,10 +427,12 @@ class Executor: DriverExecutor { self.workingDirectory = workingDirectory } - func execute(job: Job, forceResponseFiles: Bool, recordedInputModificationDates: [TypedVirtualPath : TimePoint]) throws -> ProcessResult { - let useResponseFiles : ResponseFileHandling = forceResponseFiles ? .forced : .heuristic - let arguments: [String] = try resolver.resolveArgumentList(for: job, - useResponseFiles: useResponseFiles) + func execute(job: Job, forceResponseFiles: Bool, recordedInputModificationDates: [TypedVirtualPath: TimePoint]) throws -> ProcessResult { + let useResponseFiles: ResponseFileHandling = forceResponseFiles ? .forced : .heuristic + let arguments: [String] = try resolver.resolveArgumentList( + for: job, + useResponseFiles: useResponseFiles + ) try job.verifyInputsNotModified(since: recordedInputModificationDates, fileSystem: fileSystem) @@ -439,14 +451,14 @@ class Executor: DriverExecutor { } } - func execute(workload: DriverExecutorWorkload, delegate: any JobExecutionDelegate, numParallelJobs: Int, forceResponseFiles: Bool, recordedInputModificationDates: [TypedVirtualPath : TimePoint]) throws { + func execute(workload: DriverExecutorWorkload, delegate: any JobExecutionDelegate, numParallelJobs: Int, forceResponseFiles: Bool, recordedInputModificationDates: [TypedVirtualPath: TimePoint]) throws { guard self.plannedBuild == nil else { throw StubError.error("Unexpected extra workload from Swift driver.") } self.plannedBuild = try LibSwiftDriver.PlannedBuild(workload: workload, argsResolver: self.resolver, explicitModulesResolver: self.explicitModulesResolver, jobExecutionDelegate: delegate, globalExplicitDependencyJobGraph: explicitDependencyGraph, workingDirectory: workingDirectory, eagerCompilationEnabled: eagerCompilationEnabled) } - func checkNonZeroExit(args: String..., environment: [String : String]) throws -> String { + func checkNonZeroExit(args: String..., environment: [String: String]) throws -> String { try Process.checkNonZeroExit(arguments: args, environmentBlock: .init(environment)) } @@ -469,19 +481,19 @@ class Executor: DriverExecutor { public final class LibSwiftDriver { public typealias JobIndex = Int /// Type to fetch dependencies of planned jobs - public enum JobKey : Comparable, Hashable, Serializable { + public enum JobKey: Comparable, Hashable, Serializable { case explicitDependencyJob(_ index: JobIndex) case targetJob(_ index: JobIndex) - public func serialize(to serializer: T) where T : Serializer { + public func serialize(to serializer: T) where T: Serializer { 
serializer.beginAggregate(2) switch self { - case .explicitDependencyJob(let index): - serializer.serialize(0) - serializer.serialize(index) - case .targetJob(let index): - serializer.serialize(1) - serializer.serialize(index) + case .explicitDependencyJob(let index): + serializer.serialize(0) + serializer.serialize(index) + case .targetJob(let index): + serializer.serialize(1) + serializer.serialize(index) } serializer.endAggregate() } @@ -490,14 +502,14 @@ public final class LibSwiftDriver { try deserializer.beginAggregate(2) let code: Int = try deserializer.deserialize() switch code { - case 0: - let index: JobIndex = try deserializer.deserialize() - self = .explicitDependencyJob(index) - case 1: - let index: JobIndex = try deserializer.deserialize() - self = .targetJob(index) - default: - throw DeserializerError.incorrectType("Unexpected type code for LibSwiftDriver.JobKey: \(code)") + case 0: + let index: JobIndex = try deserializer.deserialize() + self = .explicitDependencyJob(index) + case 1: + let index: JobIndex = try deserializer.deserialize() + self = .targetJob(index) + default: + throw DeserializerError.incorrectType("Unexpected type code for LibSwiftDriver.JobKey: \(code)") } } } @@ -614,7 +626,7 @@ public final class LibSwiftDriver { let fallbackDiagnostics: [SWBUtil.Diagnostic] if driver.diagnosticEngine.hasErrors { #if canImport(os) - OSLog.log("Driver threw error \(error) but emitted errors to build log.") + OSLog.log("Driver threw error \(error) but emitted errors to build log.") #endif fallbackDiagnostics = [] } else { @@ -641,9 +653,11 @@ public final class LibSwiftDriver { return nil } do { - guard let job = jobs.filter({ job in - job.primarySwiftSourceFiles.contains(where: { $0.typedFile.file.absolutePath?.pathString == inputPath.str }) - }).only else { + guard + let job = jobs.filter({ job in + job.primarySwiftSourceFiles.contains(where: { $0.typedFile.file.absolutePath?.pathString == inputPath.str }) + }).only + else { return nil } return try shim.resolver.resolveArgumentList(for: job, useResponseFiles: .heuristic) @@ -655,7 +669,7 @@ public final class LibSwiftDriver { if diagnosticsEngine.hasErrors { #if canImport(os) - OSLog.log("Driver threw error \(error) but emitted errors to build log.") + OSLog.log("Driver threw error \(error) but emitted errors to build log.") #endif } else { outputDelegate.error("Driver threw \(error) without emitting errors.") @@ -684,7 +698,7 @@ public final class LibSwiftDriver { if diagnosticsEngine.hasErrors { #if canImport(os) - OSLog.log("Driver threw error \(error) but emitted errors to build log.") + OSLog.log("Driver threw error \(error) but emitted errors to build log.") #endif } else { outputDelegate.error("Driver threw \(error) without emitting errors.") @@ -718,9 +732,11 @@ extension SwiftModuleDependencyGraph { guard let oracle = oracleRegistry[key] else { throw StubError.error("can't find created dependency scanning oracle from compiler location \(compilerLocation)") } - let cas = try oracle.getOrCreateCAS(pluginPath: try toAbsolutePath(casOptions.pluginPath?.str), - onDiskPath: try toAbsolutePath(casOptions.casPath.str), - pluginOptions: pluginOpts) + let cas = try oracle.getOrCreateCAS( + pluginPath: try toAbsolutePath(casOptions.pluginPath?.str), + onDiskPath: try toAbsolutePath(casOptions.casPath.str), + pluginOptions: pluginOpts + ) return SwiftCASDatabases(cas) } } diff --git a/Sources/SWBCore/LibSwiftDriver/PlannedBuild.swift b/Sources/SWBCore/LibSwiftDriver/PlannedBuild.swift index a5bc4e9c..fcb83f89 100644 --- 
a/Sources/SWBCore/LibSwiftDriver/PlannedBuild.swift +++ b/Sources/SWBCore/LibSwiftDriver/PlannedBuild.swift @@ -11,9 +11,9 @@ //===----------------------------------------------------------------------===// #if os(Windows) -private import Foundation + private import Foundation #else -public import Foundation + public import Foundation #endif import SwiftDriver @@ -38,7 +38,7 @@ public struct SwiftDriverJob: Serializable, CustomDebugStringConvertible { public enum Kind: Serializable { case target, explicitModule(uniqueID: Int) - public func serialize<T>(to serializer: T) where T : Serializer { + public func serialize<T>(to serializer: T) where T: Serializer { serializer.serializeAggregate(2) { switch self { case .target: @@ -120,7 +120,7 @@ public struct SwiftDriverJob: Serializable, CustomDebugStringConvertible { }.sorted() } - public func serialize<T>(to serializer: T) where T : Serializer { + public func serialize<T>(to serializer: T) where T: Serializer { serializer.serializeAggregate(10) { serializer.serialize(kind) serializer.serialize(ruleInfoType) @@ -154,7 +154,6 @@ public struct SwiftDriverJob: Serializable, CustomDebugStringConvertible { } } - extension LibSwiftDriver { public final class PlannedBuild { @@ -198,7 +197,7 @@ extension LibSwiftDriver { self.signature = md5.signature } - public func serialize<T>(to serializer: T) where T : Serializer { + public func serialize<T>(to serializer: T) where T: Serializer { serializer.serializeAggregate(5) { serializer.serialize(key) serializer.serialize(driverJob) @@ -229,10 +228,10 @@ extension LibSwiftDriver { let keyStrConverter: (_ key: JobKey) -> String = { key in let keyStr: String switch key { - case .explicitDependencyJob(let index): - keyStr = ("explicit(\(index))") - case .targetJob(let index): - keyStr = ("target(\(index))") + case .explicitDependencyJob(let index): + keyStr = ("explicit(\(index))") + case .targetJob(let index): + keyStr = ("target(\(index))") } return keyStr @@ -306,7 +305,7 @@ extension LibSwiftDriver { } } - private static func evaluateWorkload(_ workload: SwiftDriver.DriverExecutorWorkload, argsResolver: ArgsResolver, explicitModulesResolver: ArgsResolver, globalExplicitDependencyJobGraph: (any SwiftGlobalExplicitDependencyGraph)?, workingDirectory: Path, eagerCompilationEnabled: Bool) throws -> (plannedTargetJobs: [PlannedSwiftDriverJob], originalTargetJobs: [SwiftDriver.Job], incrementalCompilationState: IncrementalCompilationState?, producerMap: [Path: JobKey], explicitModuleBuildJobKeys: Set<JobKey>, compilationRequirementsIndices: Range<Int>, compilationIndices: Range<Int>, afterCompilationIndices: Range<Int>, verificationIndices: Range<Int>) { + private static func evaluateWorkload(_ workload: SwiftDriver.DriverExecutorWorkload, argsResolver: ArgsResolver, explicitModulesResolver: ArgsResolver, globalExplicitDependencyJobGraph: (any SwiftGlobalExplicitDependencyGraph)?, workingDirectory: Path, eagerCompilationEnabled: Bool) throws -> (plannedTargetJobs: [PlannedSwiftDriverJob], originalTargetJobs: [SwiftDriver.Job], incrementalCompilationState: IncrementalCompilationState?, producerMap: [Path: JobKey], explicitModuleBuildJobKeys: Set<JobKey>, compilationRequirementsIndices: Range<Int>, compilationIndices: Range<Int>, afterCompilationIndices: Range<Int>, verificationIndices: Range<Int>) { var wrappedJobs: [SwiftDriverJob] = [] var originalTargetJobs: [SwiftDriver.Job] = [] let incrementalCompilationState: IncrementalCompilationState?
@@ -334,30 +333,39 @@ extension LibSwiftDriver { /* 2 */ \.categorizer.isGeneratePch, /* 3 */ \.categorizer.isCompile, /* 4 */ \.categorizer.isVerification, - /* 5 */ { _ in true } + /* 5 */ { _ in true }, ]) + explicitModuleBuildJobKeys = try addExplicitDependencyBuildJobs( + groupedJobs[0], + to: explicitDependencyJobGraph, + workingDirectory: workingDirectory, + producing: &producerMap + ) - explicitModuleBuildJobKeys = try addExplicitDependencyBuildJobs(groupedJobs[0], - to: explicitDependencyJobGraph, - workingDirectory: workingDirectory, - producing: &producerMap) - - compilationRequirementsIndices = try addTargetJobs(groupedJobs[2] + groupedJobs[1] + (eagerCompilationEnabled ? [] : groupedJobs[3]), - to: &wrappedJobs, - producing: &producerMap) + compilationRequirementsIndices = try addTargetJobs( + groupedJobs[2] + groupedJobs[1] + (eagerCompilationEnabled ? [] : groupedJobs[3]), + to: &wrappedJobs, + producing: &producerMap + ) - compilationIndices = try addTargetJobs(eagerCompilationEnabled ? groupedJobs[3] : [], - to: &wrappedJobs, - producing: &producerMap) + compilationIndices = try addTargetJobs( + eagerCompilationEnabled ? groupedJobs[3] : [], + to: &wrappedJobs, + producing: &producerMap + ) - verificationIndices = try addTargetJobs(groupedJobs[4], - to: &wrappedJobs, - producing: &producerMap) + verificationIndices = try addTargetJobs( + groupedJobs[4], + to: &wrappedJobs, + producing: &producerMap + ) - afterCompilationIndices = try addTargetJobs(groupedJobs[5], - to: &wrappedJobs, - producing: &producerMap) + afterCompilationIndices = try addTargetJobs( + groupedJobs[5], + to: &wrappedJobs, + producing: &producerMap + ) originalTargetJobs = jobsToWrap.filter { !SwiftDriverJobCategorizer($0).isExplicitDependencyBuild @@ -390,12 +398,17 @@ extension LibSwiftDriver { } @discardableResult - private static func addExplicitDependencyBuildJobs(_ explicitDependencyDriverJobs: [SwiftDriverJob], - to explicitDependencyJobGraph: any SwiftGlobalExplicitDependencyGraph, - workingDirectory: Path, producing producerMap: inout [Path: JobKey]) throws -> Set { - return try explicitDependencyJobGraph.addExplicitDependencyBuildJobs(explicitDependencyDriverJobs, - workingDirectory: workingDirectory, - producerMap: &producerMap) + private static func addExplicitDependencyBuildJobs( + _ explicitDependencyDriverJobs: [SwiftDriverJob], + to explicitDependencyJobGraph: any SwiftGlobalExplicitDependencyGraph, + workingDirectory: Path, + producing producerMap: inout [Path: JobKey] + ) throws -> Set { + return try explicitDependencyJobGraph.addExplicitDependencyBuildJobs( + explicitDependencyDriverJobs, + workingDirectory: workingDirectory, + producerMap: &producerMap + ) } @discardableResult @@ -407,22 +420,26 @@ extension LibSwiftDriver { jobs.append(job) } - return initialCount ..< jobs.count + return initialCount.. SwiftDriver.Job { switch plannedDriverJob.key { - case .targetJob(let index): - guard let job = driverTargetJobs[safe: index] else { - throw StubError.error("Data inconsistency between planned driver jobs and actual jobs. Job \(plannedDriverJob.driverJob) is unknown.") - } - return job - case .explicitDependencyJob(_): + case .targetJob(let index): + guard let job = driverTargetJobs[safe: index] else { + throw StubError.error("Data inconsistency between planned driver jobs and actual jobs. 
Job \(plannedDriverJob.driverJob) is unknown.") + } + return job + case .explicitDependencyJob(_): throw StubError.error("Querying SwiftDriver.job unsupported for an Explicit Dependency Build job.") } } @@ -497,13 +514,13 @@ extension LibSwiftDriver { public func dependencies(for job: PlannedSwiftDriverJob) -> [PlannedSwiftDriverJob] { return job.dependencies.compactMap { dependencyKey in switch dependencyKey { - case .targetJob(_): - return plannedTargetJob(for: dependencyKey) - case .explicitDependencyJob(_): - guard let explicitDependencyJobGraph = globalExplicitDependencyJobGraph else { - fatalError("Explicit Module build job detected without a globalExplicitDependencyJobGraph.") - } - return explicitDependencyJobGraph.plannedExplicitDependencyBuildJob(for: dependencyKey) + case .targetJob(_): + return plannedTargetJob(for: dependencyKey) + case .explicitDependencyJob(_): + guard let explicitDependencyJobGraph = globalExplicitDependencyJobGraph else { + fatalError("Explicit Module build job detected without a globalExplicitDependencyJobGraph.") + } + return explicitDependencyJobGraph.plannedExplicitDependencyBuildJob(for: dependencyKey) } } } @@ -550,7 +567,7 @@ extension LibSwiftDriver { try await dispatchQueue.sync { let driverJob = try self.driverJob(for: job) guard let reproJob = self.jobExecutionDelegate?.getReproducerJob(job: driverJob, output: try VirtualPath(path: dir.str)) else { - return nil + return nil } return try self.argsResolver.resolveArgumentList(for: reproJob, useResponseFiles: .heuristic) } @@ -601,11 +618,11 @@ private extension TSCBasic.ProcessResult.ExitStatus { case let .exit(code): self = .terminated(code: code) case let .uncaughtSignal(signal): -#if os(Windows) - self = .abnormal(exception: UInt32(signal)) -#else - self = .signalled(signal: signal) -#endif + #if os(Windows) + self = .abnormal(exception: UInt32(signal)) + #else + self = .signalled(signal: signal) + #endif } } } @@ -663,19 +680,16 @@ public extension SwiftDriverJobCategorizer { } var isEmitModule: Bool { - containsInputs { ext, _ in ext == "swift" } && - containsOutputs { ext, basename in ext == "swiftmodule" && !basename.contains("~partial") } + containsInputs { ext, _ in ext == "swift" } && containsOutputs { ext, basename in ext == "swiftmodule" && !basename.contains("~partial") } } var isCompile: Bool { // Compile jobs may not contain .o file if build for IS_ZIPPERED - containsInputs { ext, _ in ext == "swift" } && - containsOutputs { ext, _ in ext == "d" || ext == "o" } + containsInputs { ext, _ in ext == "swift" } && containsOutputs { ext, _ in ext == "d" || ext == "o" } } var isGeneratePch: Bool { - containsInputs { ext, _ in ext == "h" } && - containsOutputs { ext, _ in ext == "pch" } + containsInputs { ext, _ in ext == "h" } && containsOutputs { ext, _ in ext == "pch" } } // FIXME: Track the SwiftDriver.Job.Kind of SWBCore.SwiftDriverJobs that are not for explicit modules, and use that to classify verification tasks. 
@@ -702,8 +716,7 @@ public extension SwiftDriverJob { private extension Array { func grouped(by conditionsInOrder: [(Element) -> Bool]) throws -> [[Element]] { var result: [[Element]] = .init(repeating: [], count: conditionsInOrder.count) - arrayLoop: - for element in self { + arrayLoop: for element in self { for (index, condition) in conditionsInOrder.enumerated() { if condition(element) { result[index].append(element) diff --git a/Sources/SWBCore/LibclangVendored/Libclang.swift b/Sources/SWBCore/LibclangVendored/Libclang.swift index 06125dd9..b69934df 100644 --- a/Sources/SWBCore/LibclangVendored/Libclang.swift +++ b/Sources/SWBCore/LibclangVendored/Libclang.swift @@ -17,7 +17,7 @@ import SWBLibc /// A wrapper for a libclang library. public final class Libclang { public enum Error: Swift.Error { - case unableToGetDriverActions(String) + case unableToGetDriverActions(String) } fileprivate let lib: libclang_t @@ -66,11 +66,14 @@ public final class Libclang { // let envp = CStringArray(environment.map{ "\($0.0)=\($0.1)" }) var error: String? = nil let success = libclang_driver_get_actions( - lib, CInt(args.cArray.count - 1), args.cArray, nil, + lib, + CInt(args.cArray.count - 1), + args.cArray, + nil, workingDirectory, /*callback: */ { (argc, argv) in let argc = Int(argc) - result.append((0 ..< argc).map{ String(cString: argv![$0]!) }) + result.append((0..?) -> Void in let errString = String(cString: string!) @@ -79,7 +82,8 @@ public final class Libclang { } else { error = errString } - }) + } + ) guard success else { assert(error != nil) throw Error.unableToGetDriverActions(error!) @@ -211,12 +215,20 @@ public final class DependencyScanner { var result: FileDependencies? // The count is `- 1` here, because CStringArray appends a trailing nullptr. let success = libclang_scanner_scan_dependencies( - scanner, CInt(args.cArray.count - 1), args.cArray, workingDirectory, - /*lookup_output:*/ { (cmoduleName: UnsafePointer?, ccontextHash: UnsafePointer?, - ckind: clang_output_kind_t, coutput: UnsafeMutablePointer?, - maxLen: size_t) -> size_t in + scanner, + CInt(args.cArray.count - 1), + args.cArray, + workingDirectory, + /*lookup_output:*/ { + ( + cmoduleName: UnsafePointer?, + ccontextHash: UnsafePointer?, + ckind: clang_output_kind_t, + coutput: UnsafeMutablePointer?, + maxLen: size_t + ) -> size_t in guard let kind = ModuleOutputKind(ckind) else { - return 0 // Unknown output + return 0 // Unknown output } let moduleName = String(cString: cmoduleName!) let contextHash: String = String(cString: ccontextHash!) 
@@ -229,7 +241,7 @@ public final class DependencyScanner { } } else { assert(kind != .moduleFile, "moduleFile is a required output") - return 0 // Empty + return 0 // Empty } }, /*modules_callback:*/ { (modules: clang_module_dependency_set_t, topologicallySorted: Bool) -> Void in @@ -243,14 +255,16 @@ public final class DependencyScanner { /*callback:*/ { (fileDeps: clang_file_dependencies_t) -> Void in var commands: [Command] = [] for i in 0...fromCStringArray(fileDeps.commands[i].file_deps), - module_deps: Array.fromCStringArray(fileDeps.commands[i].module_deps), - cache_key: fileDeps.commands[i].cache_key.map{String(cString:$0)}, - executable: fileDeps.commands[i].executable.map({String(cString:$0)}), - build_arguments: Array.fromCStringArray(fileDeps.commands[i].build_arguments) - )) + commands.append( + Command( + context_hash: String(cString: fileDeps.commands[i].context_hash), + file_deps: Array.fromCStringArray(fileDeps.commands[i].file_deps), + module_deps: Array.fromCStringArray(fileDeps.commands[i].module_deps), + cache_key: fileDeps.commands[i].cache_key.map { String(cString: $0) }, + executable: fileDeps.commands[i].executable.map({ String(cString: $0) }), + build_arguments: Array.fromCStringArray(fileDeps.commands[i].build_arguments) + ) + ) } result = FileDependencies(includeTreeID: fileDeps.include_tree_id.map { String(cString: $0) }, commands: commands) }, @@ -265,7 +279,8 @@ public final class DependencyScanner { }, /*error_callback:*/ { (string: UnsafePointer?) -> Void in error = String(cString: string!) - }) + } + ) guard success, let fileDeps = result else { if let diagnostics = diagnostics { throw Error.dependencyScanDiagnostics(diagnostics) @@ -280,12 +295,15 @@ public final class DependencyScanner { public func diagnoseInvalidNegativeStatCacheEntries() -> [String] { var entries: [String] = [] - libclang_scanner_diagnose_invalid_negative_stat_cache_entries(scanner, { cString in - guard let cString else { - return + libclang_scanner_diagnose_invalid_negative_stat_cache_entries( + scanner, + { cString in + guard let cString else { + return + } + entries.append(String(cString: cString)) } - entries.append(String(cString: cString)) - }) + ) return entries } @@ -317,9 +335,11 @@ fileprivate struct ClangDiagnosticSet { public init(_ libclang: Libclang, filePath: String) throws { var error: UnsafePointer! defer { error?.deallocate() } - guard let diagnosticSet = filePath.withCString({ path in - return libclang_read_diagnostics(libclang.lib, path, &error) - }) else { + guard + let diagnosticSet = filePath.withCString({ path in + return libclang_read_diagnostics(libclang.lib, path, &error) + }) + else { throw Error.error(String(cString: error)) } @@ -494,9 +514,14 @@ public final class ClangCASDatabases { } self.libclang = libclang var error: Error? = nil - guard let dbs = libclang_casdatabases_create(options.options, { cerror in - error = .creationFailed(String(cString: cerror!)) - }) else { + guard + let dbs = libclang_casdatabases_create( + options.options, + { cerror in + error = .creationFailed(String(cString: cerror!)) + } + ) + else { throw error! } self.dbs = dbs @@ -508,9 +533,12 @@ public final class ClangCASDatabases { public func getOndiskSize() throws -> Int64? { var error: ClangCASDatabases.Error? 
= nil - let ret = libclang_casdatabases_get_ondisk_size(dbs, { c_error in - error = .operationFailed(String(cString: c_error!)) - }) + let ret = libclang_casdatabases_get_ondisk_size( + dbs, + { c_error in + error = .operationFailed(String(cString: c_error!)) + } + ) if let error { throw error } @@ -522,9 +550,13 @@ public final class ClangCASDatabases { public func setOndiskSizeLimit(_ limit: Int64?) throws { var error: ClangCASDatabases.Error? = nil - libclang_casdatabases_set_ondisk_size_limit(dbs, limit ?? 0, { c_error in - error = .operationFailed(String(cString: c_error!)) - }) + libclang_casdatabases_set_ondisk_size_limit( + dbs, + limit ?? 0, + { c_error in + error = .operationFailed(String(cString: c_error!)) + } + ) if let error { throw error } @@ -532,9 +564,12 @@ public final class ClangCASDatabases { public func pruneOndiskData() throws { var error: ClangCASDatabases.Error? = nil - libclang_casdatabases_prune_ondisk_data(dbs, { c_error in - error = .operationFailed(String(cString: c_error!)) - }) + libclang_casdatabases_prune_ondisk_data( + dbs, + { c_error in + error = .operationFailed(String(cString: c_error!)) + } + ) if let error { throw error } @@ -550,9 +585,14 @@ public final class ClangCASDatabases { /// Query the CAS for the associated outputs of a cache key. public func getCachedCompilation(cacheKey: String, globally: Bool) throws -> ClangCASCachedCompilation? { var error: ClangCASDatabases.Error? = nil - let c_cachedComp = libclang_cas_get_cached_compilation(dbs, cacheKey, globally, { c_error in - error = .operationFailed(String(cString: c_error!)) - }) + let c_cachedComp = libclang_cas_get_cached_compilation( + dbs, + cacheKey, + globally, + { c_error in + error = .operationFailed(String(cString: c_error!)) + } + ) if let error { throw error } @@ -569,7 +609,7 @@ public final class ClangCASDatabases { public func getCachedCompilation(cacheKey: String) async throws -> ClangCASCachedCompilation? { let libclang = self.libclang return try await withCheckedThrowingContinuation { (continuation: CheckedContinuation) in - libclang_cas_get_cached_compilation_async(dbs, cacheKey, /*globally*/true) { c_cachedComp, c_error in + libclang_cas_get_cached_compilation_async(dbs, cacheKey, /*globally*/ true) { c_cachedComp, c_error in if let c_error { continuation.resume(throwing: Error.operationFailed(String(cString: c_error))) return diff --git a/Sources/SWBCore/LinkageDependencyResolver.swift b/Sources/SWBCore/LinkageDependencyResolver.swift index da34444e..658510b1 100644 --- a/Sources/SWBCore/LinkageDependencyResolver.swift +++ b/Sources/SWBCore/LinkageDependencyResolver.swift @@ -247,7 +247,7 @@ actor LinkageDependencyResolver { } // If we're resolving implicit dependencies, build up the names of products of these targets, so we don't try to resolve implicit dependencies for any of them. - let productNamesOfExplicitDependencies = Set(immediateDependencies.compactMap{ ($0.target as? StandardTarget)?.productReference.name }) + let productNamesOfExplicitDependencies = Set(immediateDependencies.compactMap { ($0.target as? StandardTarget)?.productReference.name }) // Get information about the configured target which we need to determine its implicit dependencies. let buildFileFilter = LinkageDependencyBuildFileFilteringContext(scope: configuredTargetSettings.globalScope) @@ -297,7 +297,6 @@ actor LinkageDependencyResolver { continue } - // Look for a target which generates a product with the stem of this name. 
// // The purpose of this logic (at present) is to be able to resolve implicit dependencies when linking against the binary inside of an arbitrary bundle. For example, this can be used for the Xcode workspace itself to deal with linking against the binary inside a .ideplugin. @@ -371,9 +370,11 @@ actor LinkageDependencyResolver { } } - let moduleNamesOfExplicitDependencies = Set(immediateDependencies.compactMap{ - buildRequestContext.getCachedSettings($0.parameters, target: $0.target).globalScope.evaluate(BuiltinMacros.PRODUCT_MODULE_NAME) - }) + let moduleNamesOfExplicitDependencies = Set( + immediateDependencies.compactMap { + buildRequestContext.getCachedSettings($0.parameters, target: $0.target).globalScope.evaluate(BuiltinMacros.PRODUCT_MODULE_NAME) + } + ) for moduleDependencyName in (configuredTargetSettings.moduleDependencies.map { $0.name }) { if !moduleNamesOfExplicitDependencies.contains(moduleDependencyName), let implicitDependency = await implicitDependency(forModuleName: moduleDependencyName, from: configuredTarget, imposedParameters: imposedParameters, source: .moduleDependency(name: moduleDependencyName, buildSetting: BuiltinMacros.MODULE_DEPENDENCIES)) { @@ -575,19 +576,27 @@ actor LinkageDependencyResolver { let location: Diagnostic.Location switch source { case let .frameworkLinkerFlag(_, _, buildSetting), - let .libraryLinkerFlag(_, _, buildSetting): + let .libraryLinkerFlag(_, _, buildSetting): location = .buildSetting(buildSetting) case let .productReference(_, buildFile, buildPhase), - let .productNameStem(_, buildFile, buildPhase): + let .productNameStem(_, buildFile, buildPhase): location = .buildFile(buildFileGUID: buildFile.guid, buildPhaseGUID: buildPhase.guid, targetGUID: configuredTarget.target.guid) case let .moduleDependency(_, buildSetting): location = .buildSettings([buildSetting]) } - delegate.emit(.overrideTarget(configuredTarget), SWBUtil.Diagnostic(behavior: .warning, location: location, data: DiagnosticData("Multiple targets match implicit dependency for \(source.valueForDisplay). Consider adding an explicit dependency on the intended target to resolve this ambiguity.", component: .targetIntegrity), childDiagnostics: candidateConfiguredTargets.map({ dependency -> Diagnostic in - let project = workspaceContext.workspace.project(for: dependency.target) - return Diagnostic(behavior: .note, location: .unknown, data: DiagnosticData("Target '\(dependency.target.name)' (in project '\(project.name)')")) - }))) + delegate.emit( + .overrideTarget(configuredTarget), + SWBUtil.Diagnostic( + behavior: .warning, + location: location, + data: DiagnosticData("Multiple targets match implicit dependency for \(source.valueForDisplay). Consider adding an explicit dependency on the intended target to resolve this ambiguity.", component: .targetIntegrity), + childDiagnostics: candidateConfiguredTargets.map({ dependency -> Diagnostic in + let project = workspaceContext.workspace.project(for: dependency.target) + return Diagnostic(behavior: .note, location: .unknown, data: DiagnosticData("Target '\(dependency.target.name)' (in project '\(project.name)')")) + }) + ) + ) } } diff --git a/Sources/SWBCore/MacCatalystInfo.swift b/Sources/SWBCore/MacCatalystInfo.swift index 15f829bc..54a1d07c 100644 --- a/Sources/SWBCore/MacCatalystInfo.swift +++ b/Sources/SWBCore/MacCatalystInfo.swift @@ -24,5 +24,5 @@ public struct MacCatalystInfo { /// Name of the SDK variant used for the `SDK_VARIANT` build setting. 
public static let sdkVariantName = "iosmac" - private init() { } + private init() {} } diff --git a/Sources/SWBCore/MacroConfigFileLoader.swift b/Sources/SWBCore/MacroConfigFileLoader.swift index 705f9940..5520f41e 100644 --- a/Sources/SWBCore/MacroConfigFileLoader.swift +++ b/Sources/SWBCore/MacroConfigFileLoader.swift @@ -82,7 +82,7 @@ final class MacroConfigFileLoader: Sendable { } var normalizedPath: Path { return normalizedPathCache.getValue(self) } - private var normalizedPathCache = LazyCache{ (ancestorInclude: AncestorInclude) -> Path in + private var normalizedPathCache = LazyCache { (ancestorInclude: AncestorInclude) -> Path in return ancestorInclude.path.normalize() } @@ -214,7 +214,7 @@ final class MacroConfigFileLoader: Sendable { } } - let cyclePathStr = (ancestorIncludes.paths[startingCycleIndex...] + [pathToInclude]).map{ $0.descriptionWithBasename }.joined(separator: " -> ") + let cyclePathStr = (ancestorIncludes.paths[startingCycleIndex...] + [pathToInclude]).map { $0.descriptionWithBasename }.joined(separator: " -> ") // To maintain compatibility, we need to warn here. rdar://45532351 to consider putting this behind an option to expose it's level of diagnostic. handleDiagnostic(MacroConfigFileDiagnostic(kind: .cyclicIncludeFileDirective, level: .warning, message: "Skipping the inclusion of '\(pathToInclude.normalizedPath.basename)' from '\(parser.path.normalize().basename)' as it would create a cycle.\nCycle Path: \(cyclePathStr)\nCycle Details:\n\(cycleDetailsStr)", lineNumber: parser.lineNumber), parser: parser) @@ -255,7 +255,7 @@ final class MacroConfigFileLoader: Sendable { // If we have any conditions, we also construct a condition set. var conditionSet: MacroConditionSet? if !conditions.isEmpty { - conditionSet = MacroConditionSet(conditions: conditions.map{ MacroCondition(parameter: table.namespace.declareConditionParameter($0.0), valuePattern: $0.1) }) + conditionSet = MacroConditionSet(conditions: conditions.map { MacroCondition(parameter: table.namespace.declareConditionParameter($0.0), valuePattern: $0.1) }) } // Parse the value in a manner consistent with the macro definition. @@ -283,7 +283,7 @@ final class MacroConfigFileLoader: Sendable { let diagnostics = ConfigDiagnostics() let nestedConfigs = NestedConfigurations() let ancestorIncludes = AncestorIncludes() - path.map{ nestedConfigs.paths.append($0) } + path.map { nestedConfigs.paths.append($0) } if let path { ancestorIncludes.paths.append(AncestorInclude(path: path, fs: fs)) diff --git a/Sources/SWBCore/MacroEvaluationExtensions.swift b/Sources/SWBCore/MacroEvaluationExtensions.swift index db4c6c09..e484498b 100644 --- a/Sources/SWBCore/MacroEvaluationExtensions.swift +++ b/Sources/SWBCore/MacroEvaluationExtensions.swift @@ -14,8 +14,7 @@ public import SWBUtil public import SWBMacro import Foundation -public extension PropertyListItem -{ +public extension PropertyListItem { // TODO: In principle we could push 'preserveReferencesToSettings' down to the core evaluation methods, but that's a lot more work and not immediately needed. I only added it here because evaluating macros in a whole property list structure is nontrivial. // /// Method which returns a new property list with macros evaluated in all string values. By default, macros in dictionary keys are *not* evaluated, but they optionally can be. 
@@ -24,11 +23,9 @@ public extension PropertyListItem /// - parameter preserveReferencesToSettings: If not nil, then any macros in this set will not be evaluated but will be preserved for later potential evaluation. Macros in this set take precedence over the later `lookup` parameter. /// - parameter lookup: A block used to override evaluation of macros in the scope. /// - returns: The receiver with settings evaluated as directed. - func byEvaluatingMacros(withScope scope: MacroEvaluationScope, andDictionaryKeys: Bool = false, preserveReferencesToSettings: Set? = nil, lookup: ((MacroDeclaration) -> MacroExpression?)? = nil) -> PropertyListItem - { + func byEvaluatingMacros(withScope scope: MacroEvaluationScope, andDictionaryKeys: Bool = false, preserveReferencesToSettings: Set? = nil, lookup: ((MacroDeclaration) -> MacroExpression?)? = nil) -> PropertyListItem { // Helper function to evaluate macros in a string, - func stringByEvaluatingMacros(_ string: String, lookup: ((MacroDeclaration) -> MacroExpression?)? = nil) -> String - { + func stringByEvaluatingMacros(_ string: String, lookup: ((MacroDeclaration) -> MacroExpression?)? = nil) -> String { // If preserveSettingsReferences is not nil, then we create a lookup block to preserve settings references during evaluation. We define this block here rather than in the outermost call so we can track whether we actually preserved any references and skip the .replacingOccurrences(of:) call below if we don't need to do it. var anyReferencesWerePreserved = false let preservingLookup: ((MacroDeclaration) -> MacroExpression?)? @@ -41,8 +38,7 @@ public extension PropertyListItem } return lookup?(macro) ?? nil } - } - else { + } else { preservingLookup = lookup } let parsedString = scope.table.namespace.parseString(string) @@ -68,8 +64,7 @@ public extension PropertyListItem case .plDict(let value): // Expand macros in values of a dictionary, and if requested, in keys. var result = [String: PropertyListItem]() - for (key, item) in value - { + for (key, item) in value { let newKey = andDictionaryKeys ? stringByEvaluatingMacros(key, lookup: lookup) : key let newValue = item.byEvaluatingMacros(withScope: scope, andDictionaryKeys: andDictionaryKeys, preserveReferencesToSettings: preserveReferencesToSettings, lookup: lookup) result[newKey] = newValue @@ -122,9 +117,12 @@ extension MacroEvaluationScope { /// Get the value of `$(TARGET_BUILD_DIR)` unmodified by `$(TARGET_BUILD_SUBPATH)`. /// This is used to retrieve the "original" target build directory for test targets using a host bundle. public var unmodifiedTargetBuildDir: Path { - return evaluate(BuiltinMacros.TARGET_BUILD_DIR, lookup: { - return ($0 == BuiltinMacros.TARGET_BUILD_SUBPATH) ? self.table.namespace.parseLiteralString("") : nil - }) + return evaluate( + BuiltinMacros.TARGET_BUILD_DIR, + lookup: { + return ($0 == BuiltinMacros.TARGET_BUILD_SUBPATH) ? 
self.table.namespace.parseLiteralString("") : nil + } + ) } public func subscopeBindingArchAndTriple(arch: String) -> MacroEvaluationScope { diff --git a/Sources/SWBCore/OnDemandResources.swift b/Sources/SWBCore/OnDemandResources.swift index a3eef722..cfb008fc 100644 --- a/Sources/SWBCore/OnDemandResources.swift +++ b/Sources/SWBCore/OnDemandResources.swift @@ -28,7 +28,7 @@ public struct ODRAssetPackInfo: Sendable { var infoPlistContent: [String: any PropertyListItemConvertible] = [ "CFBundleIdentifier": identifier, "Tags": tags.sorted(), - ] + ] if let priority { infoPlistContent["Priority"] = priority @@ -58,12 +58,10 @@ public struct ODRAssetPackInfo: Sendable { let productOnDemandResourcesDirectory: Path if !assetPackFolderPath.isEmpty { productOnDemandResourcesDirectory = targetBuildDir.join(assetPackFolderPath) - } - else if scope.evaluate(BuiltinMacros.EMBED_ASSET_PACKS_IN_PRODUCT_BUNDLE) && !unlocalizedResourcesFolderPath.isEmpty { + } else if scope.evaluate(BuiltinMacros.EMBED_ASSET_PACKS_IN_PRODUCT_BUNDLE) && !unlocalizedResourcesFolderPath.isEmpty { let productResourcesDir = targetBuildDir.join(unlocalizedResourcesFolderPath) productOnDemandResourcesDirectory = productResourcesDir.join(onDemandResourcesSubdirectoryName) - } - else { + } else { productOnDemandResourcesDirectory = targetBuildDir.join(onDemandResourcesSubdirectoryName) } @@ -97,21 +95,25 @@ public struct AssetPackOutputSpecificationsPlist: PropertyListItemConvertible { public var entries: Set public init(assetPacks: [ODRAssetPackInfo]) { - entries = Set(assetPacks.map { - Entry(identifier: $0.identifier, tags: $0.tags, path: $0.path) - }) + entries = Set( + assetPacks.map { + Entry(identifier: $0.identifier, tags: $0.tags, path: $0.path) + } + ) } public var propertyListItem: PropertyListItem { - return .init(entries - .sorted { $0.identifier < $1.identifier } - .map { - [ - "bundle-id": .plString($0.identifier), - "tags": PropertyListItem($0.tags.sorted()), - "bundle-path": .plString($0.path.str), - ] - }) + return .init( + entries + .sorted { $0.identifier < $1.identifier } + .map { + [ + "bundle-id": .plString($0.identifier), + "tags": PropertyListItem($0.tags.sorted()), + "bundle-path": .plString($0.path.str), + ] + } + ) } } @@ -166,7 +168,7 @@ public struct AssetPackManifestPlist: Hashable, PropertyListItemConvertible { return PropertyListItem([ "strategy": "modtime", "hash": hash, - ]) + ]) } } } @@ -194,7 +196,7 @@ public struct AssetPackManifestPlist: Hashable, PropertyListItemConvertible { "primaryContentHash": primaryContentHash, "uncompressedSize": uncompressedSize, "URL": url, - ] + ] if let p = downloadPriority { result["downloadPriority"] = p @@ -212,7 +214,7 @@ public struct AssetPackManifestPlist: Hashable, PropertyListItemConvertible { public var propertyListItem: PropertyListItem { return PropertyListItem([ - "resources": resources.sorted { $0.assetPackBundleIdentifier < $1.assetPackBundleIdentifier }, - ]) + "resources": resources.sorted { $0.assetPackBundleIdentifier < $1.assetPackBundleIdentifier } + ]) } } diff --git a/Sources/SWBCore/OptimizationRemarks.swift b/Sources/SWBCore/OptimizationRemarks.swift index a07b53db..114ba287 100644 --- a/Sources/SWBCore/OptimizationRemarks.swift +++ b/Sources/SWBCore/OptimizationRemarks.swift @@ -13,298 +13,298 @@ public import SWBUtil #if canImport(LLVM_C) && !SKIP_LLVM_REMARKS -public import Foundation -private import LLVM_C - -/// The type of a remark. 
-public enum OptimizationRemarkType { - case unknown, passed, missed, analysis, analysisFPCommute, analysisAliasing, failure - - fileprivate init(_ type: LLVMRemarkType) { - switch type { - case LLVMRemarkTypePassed: - self = .passed - case LLVMRemarkTypeMissed: - self = .missed - case LLVMRemarkTypeAnalysis: - self = .analysis - case LLVMRemarkTypeAnalysisFPCommute: - self = .analysisFPCommute - case LLVMRemarkTypeAnalysisAliasing: - self = .analysisAliasing - case LLVMRemarkTypeFailure: - self = .failure - default: - self = .unknown - } - } -} - -/** - A remark entry used to give more insight into the compiler. - */ -public struct OptimizationRemark { - /// The debug location that the remark is referring to. - public struct DebugLoc { - /// The path to the source file. Can be relative or absolute. - public let sourceFilePath: String - /// The line in the source file. - public let sourceLine: UInt - /// The column in the source file. - public let sourceColumn: UInt - - fileprivate init(_ rawDebugLoc: LLVMRemarkDebugLocRef) { - sourceFilePath = String(LLVMRemarkDebugLocGetSourceFilePath(rawDebugLoc)) - sourceLine = UInt(LLVMRemarkDebugLocGetSourceLine(rawDebugLoc)) - sourceColumn = UInt(LLVMRemarkDebugLocGetSourceColumn(rawDebugLoc)) + public import Foundation + private import LLVM_C + + /// The type of a remark. + public enum OptimizationRemarkType { + case unknown, passed, missed, analysis, analysisFPCommute, analysisAliasing, failure + + fileprivate init(_ type: LLVMRemarkType) { + switch type { + case LLVMRemarkTypePassed: + self = .passed + case LLVMRemarkTypeMissed: + self = .missed + case LLVMRemarkTypeAnalysis: + self = .analysis + case LLVMRemarkTypeAnalysisFPCommute: + self = .analysisFPCommute + case LLVMRemarkTypeAnalysisAliasing: + self = .analysisAliasing + case LLVMRemarkTypeFailure: + self = .failure + default: + self = .unknown + } } } /** - An argument is an arbitrary key-value entry that adds more information to the remark. - - Depending on the key, the value can be interpreted as a string, integer, etc. - - The debug location associated with the argument may refer to a separate location in the code. + A remark entry used to give more insight into the compiler. */ - public struct Arg { - public let key: String - public let value: String - public let debugLoc: DebugLoc? + public struct OptimizationRemark { + /// The debug location that the remark is referring to. + public struct DebugLoc { + /// The path to the source file. Can be relative or absolute. + public let sourceFilePath: String + /// The line in the source file. + public let sourceLine: UInt + /// The column in the source file. + public let sourceColumn: UInt + + fileprivate init(_ rawDebugLoc: LLVMRemarkDebugLocRef) { + sourceFilePath = String(LLVMRemarkDebugLocGetSourceFilePath(rawDebugLoc)) + sourceLine = UInt(LLVMRemarkDebugLocGetSourceLine(rawDebugLoc)) + sourceColumn = UInt(LLVMRemarkDebugLocGetSourceColumn(rawDebugLoc)) + } + } - fileprivate init(_ rawArg: LLVMRemarkArgRef) { - key = String(LLVMRemarkArgGetKey(rawArg)) - value = String(LLVMRemarkArgGetValue(rawArg)) - if let rawDebugLoc = LLVMRemarkArgGetDebugLoc(rawArg) { - debugLoc = OptimizationRemark.DebugLoc(rawDebugLoc) - } else { - debugLoc = nil + /** + * An argument is an arbitrary key-value entry that adds more information to the remark. + * + * Depending on the key, the value can be interpreted as a string, integer, etc. + * + * The debug location associated with the argument may refer to a separate location in the code. 
+ **/ + public struct Arg { + public let key: String + public let value: String + public let debugLoc: DebugLoc? + + fileprivate init(_ rawArg: LLVMRemarkArgRef) { + key = String(LLVMRemarkArgGetKey(rawArg)) + value = String(LLVMRemarkArgGetValue(rawArg)) + if let rawDebugLoc = LLVMRemarkArgGetDebugLoc(rawArg) { + debugLoc = OptimizationRemark.DebugLoc(rawDebugLoc) + } else { + debugLoc = nil + } } } - } - /// The type of the remark. - public let type: OptimizationRemarkType + /// The type of the remark. + public let type: OptimizationRemarkType - /// The name of the pass that emitted the remark. - public let passName: String + /// The name of the pass that emitted the remark. + public let passName: String - /// The per-pass unique identifier of the remark. For example, a missing definition found by the inliner will have a remark name: "NoDefinition". - public let remarkName: String + /// The per-pass unique identifier of the remark. For example, a missing definition found by the inliner will have a remark name: "NoDefinition". + public let remarkName: String - /// The name of the function where the remark occurred. For C++, the mangled name is used. For Swift, the demangled name is used. - public let functionName: String + /// The name of the function where the remark occurred. For C++, the mangled name is used. For Swift, the demangled name is used. + public let functionName: String - /// An optional debug source location that the remark is referring to. - public let debugLoc: DebugLoc? + /// An optional debug source location that the remark is referring to. + public let debugLoc: DebugLoc? - /// The hotness of the code referenced by the remark, determined by compiler heuristics like PGO. - public let hotness: UInt + /// The hotness of the code referenced by the remark, determined by compiler heuristics like PGO. + public let hotness: UInt - /// A list of key-value entries adding more information to the remark. - public let args: [Arg] + /// A list of key-value entries adding more information to the remark. + public let args: [Arg] - fileprivate init(_ rawEntry: LLVMRemarkEntryRef) { - type = OptimizationRemarkType(LLVMRemarkEntryGetType(rawEntry)) - passName = String(LLVMRemarkEntryGetPassName(rawEntry)) - remarkName = String(LLVMRemarkEntryGetRemarkName(rawEntry)) - functionName = String(LLVMRemarkEntryGetFunctionName(rawEntry)) - if let rawDebugLoc = LLVMRemarkEntryGetDebugLoc(rawEntry) { - debugLoc = DebugLoc(rawDebugLoc) - } else { - debugLoc = nil - } - hotness = UInt(LLVMRemarkEntryGetHotness(rawEntry)) - var mutableArgs: [Arg] = [] - let numArgs = LLVMRemarkEntryGetNumArgs(rawEntry) - if numArgs != 0 { - mutableArgs.reserveCapacity(Int(numArgs)) - var nextRawArg = LLVMRemarkEntryGetFirstArg(rawEntry)! 
- mutableArgs.append(Arg(nextRawArg)) - while let rawArg = LLVMRemarkEntryGetNextArg(nextRawArg, rawEntry) { - mutableArgs.append(Arg(rawArg)) - nextRawArg = rawArg + fileprivate init(_ rawEntry: LLVMRemarkEntryRef) { + type = OptimizationRemarkType(LLVMRemarkEntryGetType(rawEntry)) + passName = String(LLVMRemarkEntryGetPassName(rawEntry)) + remarkName = String(LLVMRemarkEntryGetRemarkName(rawEntry)) + functionName = String(LLVMRemarkEntryGetFunctionName(rawEntry)) + if let rawDebugLoc = LLVMRemarkEntryGetDebugLoc(rawEntry) { + debugLoc = DebugLoc(rawDebugLoc) + } else { + debugLoc = nil + } + hotness = UInt(LLVMRemarkEntryGetHotness(rawEntry)) + var mutableArgs: [Arg] = [] + let numArgs = LLVMRemarkEntryGetNumArgs(rawEntry) + if numArgs != 0 { + mutableArgs.reserveCapacity(Int(numArgs)) + var nextRawArg = LLVMRemarkEntryGetFirstArg(rawEntry)! + mutableArgs.append(Arg(nextRawArg)) + while let rawArg = LLVMRemarkEntryGetNextArg(nextRawArg, rawEntry) { + mutableArgs.append(Arg(rawArg)) + nextRawArg = rawArg + } + args = mutableArgs + } else { + args = [] } - args = mutableArgs - } else { - args = [] } } -} - -extension OptimizationRemarkType: CustomStringConvertible { - public var description: String { - switch self { - case .unknown: - return "Unknown" - case .passed: - return "Passed" - case .missed: - return "Missed" - case .analysis: - return "Analysis" - case .analysisFPCommute: - return "Analysis (FP commute)" - case .analysisAliasing: - return "Analysis (aliasing)" - case .failure: - return "Failure" + + extension OptimizationRemarkType: CustomStringConvertible { + public var description: String { + switch self { + case .unknown: + return "Unknown" + case .passed: + return "Passed" + case .missed: + return "Missed" + case .analysis: + return "Analysis" + case .analysisFPCommute: + return "Analysis (FP commute)" + case .analysisAliasing: + return "Analysis (aliasing)" + case .failure: + return "Failure" + } } } -} -extension OptimizationRemark.DebugLoc: CustomStringConvertible { - public var description: String { - return "\(sourceFilePath):\(sourceLine):\(sourceColumn)" + extension OptimizationRemark.DebugLoc: CustomStringConvertible { + public var description: String { + return "\(sourceFilePath):\(sourceLine):\(sourceColumn)" + } } -} -extension OptimizationRemark.Arg: CustomStringConvertible { - public var description: String { - var desc = "\(key) : \(value)" - if let loc = debugLoc { - desc += " at \(loc)" + extension OptimizationRemark.Arg: CustomStringConvertible { + public var description: String { + var desc = "\(key) : \(value)" + if let loc = debugLoc { + desc += " at \(loc)" + } + return desc } - return desc } -} -extension OptimizationRemark: CustomStringConvertible { - public var description: String { - var desc = "\(type) remark: \(passName):\(remarkName) in \(functionName)" - if let loc = debugLoc { - desc += " at \(loc)" - } - if hotness > 0 { - desc += " with hotness \(hotness)" - } - if !args.isEmpty { - desc += "\nArgs:\n" - for arg in args { - desc += "- \(arg)\n" + extension OptimizationRemark: CustomStringConvertible { + public var description: String { + var desc = "\(type) remark: \(passName):\(remarkName) in \(functionName)" + if let loc = debugLoc { + desc += " at \(loc)" + } + if hotness > 0 { + desc += " with hotness \(hotness)" } + if !args.isEmpty { + desc += "\nArgs:\n" + for arg in args { + desc += "- \(arg)\n" + } + } + return desc } - return desc - } - public var message: String { - return args.reduce(String(), { result, arg in result + arg.value }) 
+ public var message: String { + return args.reduce(String(), { result, arg in result + arg.value }) + } } -} -extension String { - fileprivate init(_ rawString: LLVMRemarkStringRef) { - self = String(decoding: Data(bytesNoCopy: UnsafeMutableRawPointer(mutating: LLVMRemarkStringGetData(rawString)), count: Int(LLVMRemarkStringGetLen(rawString)), deallocator: Data.Deallocator.none), as: UTF8.self) + extension String { + fileprivate init(_ rawString: LLVMRemarkStringRef) { + self = String(decoding: Data(bytesNoCopy: UnsafeMutableRawPointer(mutating: LLVMRemarkStringGetData(rawString)), count: Int(LLVMRemarkStringGetLen(rawString)), deallocator: Data.Deallocator.none), as: UTF8.self) + } } -} -public enum OptimizationRemarkParserFormat { - case bitstream, yaml -} + public enum OptimizationRemarkParserFormat { + case bitstream, yaml + } -public enum OptimizationRemarkParsingError: LocalizedError { - case parsing(_ message: String) + public enum OptimizationRemarkParsingError: LocalizedError { + case parsing(_ message: String) - public var errorDescription: String? { - switch self { - case .parsing(let message): - return message - } - } -} - -fileprivate class OptimizationRemarkParser { - private var rawParser: LLVMRemarkParserRef - - init(data: UnsafeRawBufferPointer, format: OptimizationRemarkParserFormat = .bitstream) throws { - let rawParser: LLVMRemarkParserRef - switch format { - case .bitstream: - rawParser = LLVMRemarkParserCreateBitstream(data.baseAddress, UInt64(data.count)) - case .yaml: - rawParser = LLVMRemarkParserCreateYAML(data.baseAddress, UInt64(data.count)) + public var errorDescription: String? { + switch self { + case .parsing(let message): + return message + } } - self.rawParser = rawParser } - deinit { - LLVMRemarkParserDispose(rawParser) - } + fileprivate class OptimizationRemarkParser { + private var rawParser: LLVMRemarkParserRef - private func getError() -> String? { - if LLVMRemarkParserHasError(rawParser) == 0 { - return nil + init(data: UnsafeRawBufferPointer, format: OptimizationRemarkParserFormat = .bitstream) throws { + let rawParser: LLVMRemarkParserRef + switch format { + case .bitstream: + rawParser = LLVMRemarkParserCreateBitstream(data.baseAddress, UInt64(data.count)) + case .yaml: + rawParser = LLVMRemarkParserCreateYAML(data.baseAddress, UInt64(data.count)) + } + self.rawParser = rawParser } - if let rawMessage = LLVMRemarkParserGetErrorMessage(rawParser) { - return String(cString: rawMessage) + + deinit { + LLVMRemarkParserDispose(rawParser) } - return "no error message" - } - public func readRemark() throws -> OptimizationRemark? { - guard let entry = LLVMRemarkParserGetNext(rawParser) else { - // If it's an error, throw it. - if let error = getError() { - throw OptimizationRemarkParsingError.parsing(error) + private func getError() -> String? { + if LLVMRemarkParserHasError(rawParser) == 0 { + return nil } - // If there is no error and no remark, it's the end of the file. - return nil + if let rawMessage = LLVMRemarkParserGetErrorMessage(rawParser) { + return String(cString: rawMessage) + } + return "no error message" } - defer { LLVMRemarkEntryDispose(entry) } + public func readRemark() throws -> OptimizationRemark? { + guard let entry = LLVMRemarkParserGetNext(rawParser) else { + // If it's an error, throw it. + if let error = getError() { + throw OptimizationRemarkParsingError.parsing(error) + } + // If there is no error and no remark, it's the end of the file. + return nil + } - // We have a remark entry. 
- return OptimizationRemark(entry) - } + defer { LLVMRemarkEntryDispose(entry) } - public func forEach(_ body: (OptimizationRemark) -> Void) throws { - while let remark = try readRemark() { - body(remark) + // We have a remark entry. + return OptimizationRemark(entry) } - } - public func readRemarks() throws -> [OptimizationRemark] { - var result: [OptimizationRemark] = [] - try forEach { remark in - result.append(remark) + public func forEach(_ body: (OptimizationRemark) -> Void) throws { + while let remark = try readRemark() { + body(remark) + } + } + + public func readRemarks() throws -> [OptimizationRemark] { + var result: [OptimizationRemark] = [] + try forEach { remark in + result.append(remark) + } + return result } - return result - } -} - -extension Diagnostic { - public init?(_ remark: OptimizationRemark, workingDirectory: Path) { - guard let debugLoc = remark.debugLoc else { return nil } // skip if no debug location - let path = Path(debugLoc.sourceFilePath) - // Paths can be both absolute and relative to the working directory. - let absolutePath = path.makeAbsolute(relativeTo: workingDirectory) ?? path - self.init(behavior: .remark, location: .path(absolutePath, line: Int(debugLoc.sourceLine), column: Int(debugLoc.sourceColumn)), data: DiagnosticData(remark.message)) } - public static func remarks(forObjectPath objectFilePath: Path, fs: any FSProxy, workingDirectory: Path) throws -> [Diagnostic] { - let object = try MachO(data: fs.read(objectFilePath)) - var result: [Diagnostic] = [] - for slice in try object.slices() { - guard let bitstream = try slice.remarks() else { continue; } // skip if no remarks section - - try bitstream.withUnsafeBytes { (ptr: UnsafeRawBufferPointer) in - let parser = try OptimizationRemarkParser(data: ptr, format: .bitstream) - try parser.forEach { remark in - guard let diagnostic = Diagnostic(remark, workingDirectory: workingDirectory) else { return } - result.append(diagnostic) + extension Diagnostic { + public init?(_ remark: OptimizationRemark, workingDirectory: Path) { + guard let debugLoc = remark.debugLoc else { return nil } // skip if no debug location + let path = Path(debugLoc.sourceFilePath) + // Paths can be both absolute and relative to the working directory. + let absolutePath = path.makeAbsolute(relativeTo: workingDirectory) ?? 
path + self.init(behavior: .remark, location: .path(absolutePath, line: Int(debugLoc.sourceLine), column: Int(debugLoc.sourceColumn)), data: DiagnosticData(remark.message)) + } + + public static func remarks(forObjectPath objectFilePath: Path, fs: any FSProxy, workingDirectory: Path) throws -> [Diagnostic] { + let object = try MachO(data: fs.read(objectFilePath)) + var result: [Diagnostic] = [] + for slice in try object.slices() { + guard let bitstream = try slice.remarks() else { continue; } // skip if no remarks section + + try bitstream.withUnsafeBytes { (ptr: UnsafeRawBufferPointer) in + let parser = try OptimizationRemarkParser(data: ptr, format: .bitstream) + try parser.forEach { remark in + guard let diagnostic = Diagnostic(remark, workingDirectory: workingDirectory) else { return } + result.append(diagnostic) + } } } + return result } - return result } -} #else -extension Diagnostic { - public static func remarks(forObjectPath objectFilePath: Path, fs: any FSProxy, workingDirectory: Path) throws -> [Diagnostic] { - throw StubError.error("Swift Build was not compiled with support for LLVM optimization remarks") + extension Diagnostic { + public static func remarks(forObjectPath objectFilePath: Path, fs: any FSProxy, workingDirectory: Path) throws -> [Diagnostic] { + throw StubError.error("Swift Build was not compiled with support for LLVM optimization remarks") + } } -} #endif diff --git a/Sources/SWBCore/PlannedTask.swift b/Sources/SWBCore/PlannedTask.swift index 6ef5a5ac..1ff23d75 100644 --- a/Sources/SWBCore/PlannedTask.swift +++ b/Sources/SWBCore/PlannedTask.swift @@ -168,7 +168,7 @@ public final class ConstructedTask: PlannedTask, Sendable { self.additionalSignatureData = builder.additionalSignatureData self.inputs = builder.inputs self.outputs = builder.outputs - self.mustPrecede = builder.mustPrecede.map{ UnownedPlannedTask($0) } + self.mustPrecede = builder.mustPrecede.map { UnownedPlannedTask($0) } self.execTask = execTask self.alwaysExecuteTask = builder.alwaysExecuteTask self.priority = builder.priority @@ -189,7 +189,7 @@ public final class ConstructedTask: PlannedTask, Sendable { // MARK: Forwarding methods - public var type: any TaskTypeDescription { return execTask.type} + public var type: any TaskTypeDescription { return execTask.type } public var forTarget: ConfiguredTarget? { return execTask.forTarget } public var ruleInfo: [String] { return execTask.ruleInfo } public var commandLine: [ByteString] { return execTask.commandLine.map(\.asByteString) } @@ -198,7 +198,6 @@ public final class ConstructedTask: PlannedTask, Sendable { public var llbuildControlDisabled: Bool { return execTask.llbuildControlDisabled } } - public final class GateTask: PlannedTask, Sendable { /// A static task type description for gate tasks. private final class GateTaskTypeDescription: TaskTypeDescription { @@ -282,7 +281,7 @@ public final class GateTask: PlannedTask, Sendable { self.additionalSignatureData = builder.additionalSignatureData self.inputs = builder.inputs self.outputs = builder.outputs - self.mustPrecede = builder.mustPrecede.map{ UnownedPlannedTask($0) } + self.mustPrecede = builder.mustPrecede.map { UnownedPlannedTask($0) } // FIXME: It seems unfortunate that GateTask has to take an ExecutableTask, but I think that the BuildDescription expects that every PlannedTask will have an ExecutableTask. 
self.execTask = execTask } diff --git a/Sources/SWBCore/PlannedTaskAction.swift b/Sources/SWBCore/PlannedTaskAction.swift index d762996f..b2bb6d86 100644 --- a/Sources/SWBCore/PlannedTaskAction.swift +++ b/Sources/SWBCore/PlannedTaskAction.swift @@ -14,12 +14,10 @@ import Foundation public import SWBUtil public import SWBMacro - /// A task action encapsulates concrete work to be done for a task during a build. /// /// Task actions are primarily used to capture state and execution logic for in-process tasks. -public protocol PlannedTaskAction -{ +public protocol PlannedTaskAction { } public struct AuxiliaryFileTaskActionContext { @@ -103,8 +101,7 @@ public struct InfoPlistProcessorTaskActionContext: PlatformBuildContext, Seriali serializer.endAggregate() } - public init(from deserializer: any Deserializer) throws - { + public init(from deserializer: any Deserializer) throws { // Get the platform registry to use to look up the platform from the deserializer's delegate. guard let delegate = deserializer.delegate as? (any InfoPlistProcessorTaskActionContextDeserializerDelegate) else { throw DeserializerError.invalidDelegate("delegate must be a BuildDescriptionDeserializerDelegate") } @@ -118,8 +115,7 @@ public struct InfoPlistProcessorTaskActionContext: PlatformBuildContext, Seriali throw DeserializerError.deserializationFailed("Platform lookup failed for identifier: \(platformIdentifier)") } platform = p - } - else { + } else { platform = nil } @@ -144,7 +140,7 @@ public struct InfoPlistProcessorTaskActionContext: PlatformBuildContext, Seriali sdk = loadedSDK } else { sdk = nil - _ = deserializer.deserializeNil() // skip past SDK variant name + _ = deserializer.deserializeNil() // skip past SDK variant name sdkVariant = nil } @@ -251,11 +247,11 @@ public struct FileCopyTaskActionContext { return ( compileAndLink: partialTargetValues.map { partialTargetValue in - ( - compile: stubPartialCompilerCommandLine + ["-target", "\(partialTargetValue)-\(targetTripleOSVersion)\(llvmTargetTripleSuffix)", "-o", tempDir.join("\(partialTargetValue).o").str], - link: stubPartialLinkerCommandLine + ["-target", "\(partialTargetValue)-\(targetTripleOSVersion)\(llvmTargetTripleSuffix)", "-o", tempDir.join("\(partialTargetValue)").str, tempDir.join("\(partialTargetValue).o").str] - ) - }, + ( + compile: stubPartialCompilerCommandLine + ["-target", "\(partialTargetValue)-\(targetTripleOSVersion)\(llvmTargetTripleSuffix)", "-o", tempDir.join("\(partialTargetValue).o").str], + link: stubPartialLinkerCommandLine + ["-target", "\(partialTargetValue)-\(targetTripleOSVersion)\(llvmTargetTripleSuffix)", "-o", tempDir.join("\(partialTargetValue)").str, tempDir.join("\(partialTargetValue).o").str] + ) + }, lipo: stubPartialLipoCommandLine + ["-output", frameworkPath.join(isDeepBundle ? 
"Versions/A" : nil).join(frameworkPath.basenameWithoutSuffix).str] + partialTargetValues.map { partialTargetValue in tempDir.join("\(partialTargetValue)").str } ) } @@ -264,14 +260,21 @@ public struct FileCopyTaskActionContext { extension FileCopyTaskActionContext { public init(_ cbc: CommandBuildContext) { let compilerPath = cbc.producer.clangSpec.resolveExecutablePath(cbc, forLanguageOfFileType: cbc.producer.lookupFileType(languageDialect: .c)) - let linkerPath = cbc.producer.ldLinkerSpec.resolveExecutablePath(cbc.producer, cbc.producer.ldLinkerSpec.computeLinkerPath(cbc, usedCXX: false, lookup: { macro in - switch macro { - case BuiltinMacros.LINKER_DRIVER: - return cbc.scope.namespace.parseString("clang") - default: - return nil - } - })) + let linkerPath = cbc.producer.ldLinkerSpec.resolveExecutablePath( + cbc.producer, + cbc.producer.ldLinkerSpec.computeLinkerPath( + cbc, + usedCXX: false, + lookup: { macro in + switch macro { + case BuiltinMacros.LINKER_DRIVER: + return cbc.scope.namespace.parseString("clang") + default: + return nil + } + } + ) + ) let lipoPath = cbc.producer.lipoSpec.resolveExecutablePath(cbc.producer, Path(cbc.producer.lipoSpec.computeExecutablePath(cbc))) // If we couldn't find clang, skip the special stub binary handling. We may be using an Open Source toolchain which only has Swift. Also skip it for installLoc builds. @@ -283,12 +286,15 @@ extension FileCopyTaskActionContext { let scope = cbc.scope let partialTargetValues = scope.evaluate(BuiltinMacros.ARCHS).map { arch in - return scope.evaluate(scope.namespace.parseString("$(CURRENT_ARCH)-$(LLVM_TARGET_TRIPLE_VENDOR)"), lookup: { - if $0 == BuiltinMacros.CURRENT_ARCH { - return scope.namespace.parseLiteralString(arch) + return scope.evaluate( + scope.namespace.parseString("$(CURRENT_ARCH)-$(LLVM_TARGET_TRIPLE_VENDOR)"), + lookup: { + if $0 == BuiltinMacros.CURRENT_ARCH { + return scope.namespace.parseLiteralString(arch) + } + return nil } - return nil - }) + ) } let llvmTargetTripleOSVersion = scope.evaluate(BuiltinMacros.LLVM_TARGET_TRIPLE_OS_VERSION) let llvmTargetTripleSuffix = scope.evaluate(BuiltinMacros.LLVM_TARGET_TRIPLE_SUFFIX) @@ -307,13 +313,13 @@ extension FileCopyTaskActionContext { llvmTargetTripleSuffix: llvmTargetTripleSuffix, platformName: platformName, swiftPlatformTargetPrefix: swiftPlatformTargetPrefix, - isMacCatalyst: isMacCatalyst) + isMacCatalyst: isMacCatalyst + ) } } /// Protocol for objects that can create task actions for in-process tasks. -public protocol TaskActionCreationDelegate -{ +public protocol TaskActionCreationDelegate { func createAuxiliaryFileTaskAction(_ context: AuxiliaryFileTaskActionContext) -> any PlannedTaskAction func createBuildDependencyInfoTaskAction() -> any PlannedTaskAction func createBuildDirectoryTaskAction() -> any PlannedTaskAction diff --git a/Sources/SWBCore/PlatformRegistry.swift b/Sources/SWBCore/PlatformRegistry.swift index 16910753..29f16342 100644 --- a/Sources/SWBCore/PlatformRegistry.swift +++ b/Sources/SWBCore/PlatformRegistry.swift @@ -169,7 +169,7 @@ public final class Platform: Sendable { } var result = [String](values) - result.sort(by:) { (lhs,rhs) in + result.sort(by:) { (lhs, rhs) in // Compare as versions. let lhsNums = lhs.split(separator: ".").compactMap { Int($0) } let rhsNums = rhs.split(separator: ".").compactMap { Int($0) } @@ -369,14 +369,16 @@ public final class PlatformRegistry { /// The default deployment targets for all installed platforms. 
var defaultDeploymentTargets: [String: Version] { - Dictionary(uniqueKeysWithValues: Dictionary(grouping: platforms, by: { $0.defaultSDKVariant?.deploymentTargetSettingName }) - .sorted(by: \.key) - .compactMap { (key, value) in - guard let key, let value = Set(value.compactMap { $0.defaultSDKVariant?.defaultDeploymentTarget }).only else { - return nil + Dictionary( + uniqueKeysWithValues: Dictionary(grouping: platforms, by: { $0.defaultSDKVariant?.deploymentTargetSettingName }) + .sorted(by: \.key) + .compactMap { (key, value) in + guard let key, let value = Set(value.compactMap { $0.defaultSDKVariant?.defaultDeploymentTarget }).only else { + return nil + } + return (key, value) } - return (key, value) - }) + ) } @_spi(Testing) public init(delegate: any PlatformRegistryDelegate, searchPaths: [Path], hostOperatingSystem: OperatingSystem, fs: any FSProxy) async { @@ -594,7 +596,7 @@ public final class PlatformRegistry { } var executableSearchPaths: [Path] = [ - path.join("usr").join("bin"), + path.join("usr").join("bin") ] var sdkSearchPaths: [Path] = [ @@ -611,7 +613,7 @@ public final class PlatformRegistry { executableSearchPaths.append(contentsOf: [ path.join("usr").join("local").join("bin"), path.join("Developer").join("usr").join("bin"), - path.join("Developer").join("usr").join("local").join("bin") + path.join("Developer").join("usr").join("local").join("bin"), ]) // FIXME: Need to parse other fields. It would also be nice to diagnose unused keys like we do for Spec data (and we might want to just use the spec parser here). @@ -663,7 +665,6 @@ public final class PlatformRegistry { return false } - /// Load the extended platform info. @_spi(Testing) public func loadExtendedInfo(_ namespace: MacroNamespace) { precondition(!hasLoadedExtendedInfo) @@ -682,12 +683,19 @@ public final class PlatformRegistry { } } else if !deploymentTargets.isEmpty { platform._deploymentTargetMacro.initialize(to: nil) - delegate.emit(Diagnostic(behavior: .error, location: .unknown, data: DiagnosticData("Multiple deployment targets for platform '\(platform.name)'"), childDiagnostics: baseSDKs.sorted(by: \.canonicalName).compactMap { baseSDK in - guard let deploymentTargetSettingName = baseSDK.defaultVariant?.deploymentTargetSettingName else { - return nil - } - return Diagnostic(behavior: .note, location: .unknown, data: DiagnosticData("\(deploymentTargetSettingName), defined by SDK '\(baseSDK.canonicalName)'")) - })) + delegate.emit( + Diagnostic( + behavior: .error, + location: .unknown, + data: DiagnosticData("Multiple deployment targets for platform '\(platform.name)'"), + childDiagnostics: baseSDKs.sorted(by: \.canonicalName).compactMap { baseSDK in + guard let deploymentTargetSettingName = baseSDK.defaultVariant?.deploymentTargetSettingName else { + return nil + } + return Diagnostic(behavior: .note, location: .unknown, data: DiagnosticData("\(deploymentTargetSettingName), defined by SDK '\(baseSDK.canonicalName)'")) + } + ) + ) } else { platform._deploymentTargetMacro.initialize(to: nil) } @@ -733,12 +741,14 @@ extension Platform { /// Determines the default architecture to use for the index arena build, preferring `preferredArch` if valid on this platform or the first "Standard" architecture otherwise. @_spi(Testing) public func determineDefaultArchForIndexArena(preferredArch: String?, using core: Core) -> String? 
{ if let preferredArch, - core.specRegistry.getSpec(preferredArch, domain: name) is ArchitectureSpec { + core.specRegistry.getSpec(preferredArch, domain: name) is ArchitectureSpec + { return preferredArch } if let standardArch = core.specRegistry.getSpec("Standard", domain: name) as? ArchitectureSpec, - let realArchs = standardArch.realArchs?.stringRep { + let realArchs = standardArch.realArchs?.stringRep + { return realArchs.split(" ").0 } diff --git a/Sources/SWBCore/ProjectModel/BuildConfiguration.swift b/Sources/SWBCore/ProjectModel/BuildConfiguration.swift index a7f85a78..d920e5b6 100644 --- a/Sources/SWBCore/ProjectModel/BuildConfiguration.swift +++ b/Sources/SWBCore/ProjectModel/BuildConfiguration.swift @@ -14,8 +14,7 @@ import SWBProtocol import SWBUtil public import SWBMacro -public final class BuildConfiguration: ProjectModelItem, Encodable, Sendable -{ +public final class BuildConfiguration: ProjectModelItem, Encodable, Sendable { public let name: String /// The (cached) parsed build settings table. @@ -40,7 +39,7 @@ public final class BuildConfiguration: ProjectModelItem, Encodable, Sendable public let impartedBuildProperties: ImpartedBuildProperties - private enum CodingKeys : String, CodingKey { + private enum CodingKeys: String, CodingKey { case name case namespace case baseConfigurationFileReferenceGUID @@ -57,7 +56,7 @@ public final class BuildConfiguration: ProjectModelItem, Encodable, Sendable case .string(let value): settings[setting.key] = .plString(value) case .stringList(let value): - settings[setting.key] = .plArray(value.map{ .plString($0) }) + settings[setting.key] = .plArray(value.map { .plString($0) }) } } return settings @@ -113,7 +112,7 @@ public final class BuildConfiguration: ProjectModelItem, Encodable, Sendable // We also construct a condition set if we have conditions. var conditionSet: MacroConditionSet? if let conditions { - conditionSet = MacroConditionSet(conditions: conditions.map{ MacroCondition(parameter: namespace.declareConditionParameter($0.0), valuePattern: $0.1) }) + conditionSet = MacroConditionSet(conditions: conditions.map { MacroCondition(parameter: namespace.declareConditionParameter($0.0), valuePattern: $0.1) }) } // Parse the value in a manner consistent with the macro definition. @@ -122,7 +121,7 @@ public final class BuildConfiguration: ProjectModelItem, Encodable, Sendable case .plString(let stringValue): expr = namespace.parseForMacro(macro, value: stringValue) case .plArray(let contents): - let asStringList = contents.map{ item -> String in + let asStringList = contents.map { item -> String in guard case let .plString(string) = item else { fatalError("unexpected build configuration data") } return string } diff --git a/Sources/SWBCore/ProjectModel/BuildFile.swift b/Sources/SWBCore/ProjectModel/BuildFile.swift index 68d3db75..49330e0d 100644 --- a/Sources/SWBCore/ProjectModel/BuildFile.swift +++ b/Sources/SWBCore/ProjectModel/BuildFile.swift @@ -15,8 +15,7 @@ public import SWBUtil public import SWBMacro /// Reference subclasses which can be represented by a build file must adopt the BuildFileRepresentable protocol. -public protocol BuildFileRepresentable: AnyObject -{ +public protocol BuildFileRepresentable: AnyObject { } public typealias HeaderVisibility = SWBProtocol.BuildFile.HeaderVisibility @@ -91,20 +90,19 @@ public final class BuildFile: ProjectModelItem { /// Whether to skip the "no rule to process file..." warning for this file. 
public let shouldWarnIfNoRuleToProcess: Bool - init(_ model: SWBProtocol.BuildFile, _ pifLoader: PIFLoader) - { + init(_ model: SWBProtocol.BuildFile, _ pifLoader: PIFLoader) { guid = model.guid codeSignOnCopy = model.codeSignOnCopy removeHeadersOnCopy = model.removeHeadersOnCopy shouldLinkWeakly = model.shouldLinkWeakly headerVisibility = model.headerVisibility - additionalArgs = model.additionalArgs.map{ pifLoader.userNamespace.parseStringList($0) } + additionalArgs = model.additionalArgs.map { pifLoader.userNamespace.parseStringList($0) } decompress = model.decompress migCodegenFiles = model.migCodegenFiles assetTags = model.assetTags intentsCodegenVisibility = model.intentsCodegenVisibility resourceRule = model.resourceRule - platformFilters = Set(model.platformFilters.map{ SWBCore.PlatformFilter($0, pifLoader) }) + platformFilters = Set(model.platformFilters.map { SWBCore.PlatformFilter($0, pifLoader) }) shouldWarnIfNoRuleToProcess = model.shouldWarnIfNoRuleToProcess switch model.buildableItemGUID { @@ -133,12 +131,14 @@ public final class BuildFile: ProjectModelItem { case .plString(let stringValue)?: self.additionalArgs = pifLoader.userNamespace.parseStringList(stringValue) case .plArray(let arrayValue)?: - self.additionalArgs = try pifLoader.userNamespace.parseStringList(arrayValue.map { (plItem) -> String in - guard case .plString(let string) = plItem else { - throw PIFParsingError.incorrectTypeInArray(keyName: PIFKey_BuildFile_additionalCompilerOptions, objectType: Self.self, expectedType: "String") - } - return string - }.joined(separator: " ")) + self.additionalArgs = try pifLoader.userNamespace.parseStringList( + arrayValue.map { (plItem) -> String in + guard case .plString(let string) = plItem else { + throw PIFParsingError.incorrectTypeInArray(keyName: PIFKey_BuildFile_additionalCompilerOptions, objectType: Self.self, expectedType: "String") + } + return string + }.joined(separator: " ") + ) case .none: self.additionalArgs = nil break @@ -162,13 +162,20 @@ public final class BuildFile: ProjectModelItem { if let value: ResourceRule = try Self.parseOptionalValueForKeyAsStringEnum(PIFKey_BuildFile_resourceRule, pifDict: pifDict) { resourceRule = value } else { - resourceRule = .process // default to `.process` for backwards compatibility + resourceRule = .process // default to `.process` for backwards compatibility } // Parse the platformFilters data. - platformFilters = try Set(Self.parseOptionalValueForKeyAsArrayOfProjectModelItems(PIFKey_platformFilters, pifDict: pifDict, pifLoader: pifLoader, construct: { - try PlatformFilter(fromDictionary: $0, withPIFLoader: pifLoader) - }) ?? []) + platformFilters = try Set( + Self.parseOptionalValueForKeyAsArrayOfProjectModelItems( + PIFKey_platformFilters, + pifDict: pifDict, + pifLoader: pifLoader, + construct: { + try PlatformFilter(fromDictionary: $0, withPIFLoader: pifLoader) + } + ) ?? [] + ) if let targetReferenceGUID = try Self.parseOptionalValueForKeyAsString(PIFKey_BuildFile_targetReference, pifDict: pifDict) { buildableItem = .targetProduct(guid: targetReferenceGUID) @@ -198,8 +205,7 @@ public final class BuildFile: ProjectModelItem { self.resourceRule = .process } - public var description: String - { + public var description: String { // It would be convenient to emit something to identify the reference here. 
return "\(type(of: self))<\(guid)>" } diff --git a/Sources/SWBCore/ProjectModel/BuildPhase.swift b/Sources/SWBCore/ProjectModel/BuildPhase.swift index c533ffd6..447919fe 100644 --- a/Sources/SWBCore/ProjectModel/BuildPhase.swift +++ b/Sources/SWBCore/ProjectModel/BuildPhase.swift @@ -16,9 +16,7 @@ public import SWBMacro // MARK: Build phase abstract class - -public class BuildPhase: ProjectModelItem, @unchecked Sendable -{ +public class BuildPhase: ProjectModelItem, @unchecked Sendable { /// The name of the build phase, typically for reporting issues. public var name: String { fatalError("abstract \(type(of: self)) build phase asked for its name") } @@ -78,29 +76,24 @@ public class BuildPhase: ProjectModelItem, @unchecked Sendable } } - public var description: String - { + public var description: String { return "\(type(of: self))<>" } } - // MARK: Build phase with build files abstract class - -public class BuildPhaseWithBuildFiles: BuildPhase, @unchecked Sendable -{ +public class BuildPhaseWithBuildFiles: BuildPhase, @unchecked Sendable { public let buildFiles: [BuildFile] - init(_ model: SWBProtocol.BuildPhaseWithBuildFiles, _ pifLoader: PIFLoader) - { - buildFiles = model.buildFiles.map{ BuildFile($0, pifLoader) } + init(_ model: SWBProtocol.BuildPhaseWithBuildFiles, _ pifLoader: PIFLoader) { + buildFiles = model.buildFiles.map { BuildFile($0, pifLoader) } super.init(model, pifLoader) } override init(fromDictionary pifDict: ProjectModelItemPIF, withPIFLoader pifLoader: PIFLoader) throws { // Build files are required. - self.buildFiles = try Self.parseValueForKeyAsArrayOfProjectModelItems(PIFKey_BuildPhase_buildFiles, pifDict: pifDict, pifLoader: pifLoader, construct: { try BuildFile(fromDictionary: $0, withPIFLoader: pifLoader) } ) + self.buildFiles = try Self.parseValueForKeyAsArrayOfProjectModelItems(PIFKey_BuildPhase_buildFiles, pifDict: pifDict, pifLoader: pifLoader, construct: { try BuildFile(fromDictionary: $0, withPIFLoader: pifLoader) }) try super.init(fromDictionary: pifDict, withPIFLoader: pifLoader) } @@ -117,8 +110,7 @@ public class BuildPhaseWithBuildFiles: BuildPhase, @unchecked Sendable return buildFiles.filter({ platformFilter.matches($0.platformFilters) }) } - override public var description: String - { + override public var description: String { return "\(type(of: self))<\(buildFiles.count) files>" } @@ -130,48 +122,33 @@ public class BuildPhaseWithBuildFiles: BuildPhase, @unchecked Sendable } } - // MARK: Sources build phase class - -public final class SourcesBuildPhase: BuildPhaseWithBuildFiles, @unchecked Sendable -{ +public final class SourcesBuildPhase: BuildPhaseWithBuildFiles, @unchecked Sendable { public override var name: String { return "Compile Sources" } } - // MARK: Frameworks build phase class - -public final class FrameworksBuildPhase: BuildPhaseWithBuildFiles, @unchecked Sendable -{ +public final class FrameworksBuildPhase: BuildPhaseWithBuildFiles, @unchecked Sendable { public override var name: String { return "Link Binary" } } - // MARK: Headers build phase class - -public final class HeadersBuildPhase: BuildPhaseWithBuildFiles, @unchecked Sendable -{ +public final class HeadersBuildPhase: BuildPhaseWithBuildFiles, @unchecked Sendable { public override var name: String { return "Copy Headers" } } - // MARK: Resources build phase class - -public final class ResourcesBuildPhase: BuildPhaseWithBuildFiles, @unchecked Sendable -{ +public final class ResourcesBuildPhase: BuildPhaseWithBuildFiles, @unchecked Sendable { public override var name: String { 
return "Copy Bundle Resources" } } - // MARK: Copy files build phase class - -public final class CopyFilesBuildPhase: BuildPhaseWithBuildFiles, @unchecked Sendable -{ +public final class CopyFilesBuildPhase: BuildPhaseWithBuildFiles, @unchecked Sendable { public override var name: String { return "Copy Files" } /// The destination subfolder to copy to. This will commonly be a location relative to `$(TARGET_BUILD_DIR)/$(WRAPPER_NAME)`, but can also be a special indicator such as `` or ``. @@ -181,8 +158,7 @@ public final class CopyFilesBuildPhase: BuildPhaseWithBuildFiles, @unchecked Sen /// If `true` then the contents of this phase will only be copied when `$(DEPLOYMENT_POSTPROCESSING)` is enabled. public let runOnlyForDeploymentPostprocessing: Bool - init(_ model: SWBProtocol.CopyFilesBuildPhase, _ pifLoader: PIFLoader) - { + init(_ model: SWBProtocol.CopyFilesBuildPhase, _ pifLoader: PIFLoader) { var destinationSubfolder = model.destinationSubfolder var destinationSubpath = model.destinationSubpath @@ -224,12 +200,9 @@ public final class CopyFilesBuildPhase: BuildPhaseWithBuildFiles, @unchecked Sen } } - // MARK: Shell script build phase class - -public final class ShellScriptBuildPhase: BuildPhase, @unchecked Sendable -{ +public final class ShellScriptBuildPhase: BuildPhase, @unchecked Sendable { public override var name: String { return _name.isEmpty ? "Run Script" : _name } /// The name of the shell script phase. @@ -279,17 +252,16 @@ public final class ShellScriptBuildPhase: BuildPhase, @unchecked Sendable /// Whether the script phase should also run for `installhdrs` and `installapi` builds, regardless of what `INSTALLHDRS_SCRIPT_PHASE` is set to. public let alwaysRunForInstallHdrs: Bool - init(_ model: SWBProtocol.ShellScriptBuildPhase, _ pifLoader: PIFLoader) - { + init(_ model: SWBProtocol.ShellScriptBuildPhase, _ pifLoader: PIFLoader) { _name = model.name // FIXME: This should be a string list, but this matches PBXBuild. Also, this shouldn't ever be a Path. 
shellPath = pifLoader.userNamespace.parseString(model.shellPath.str) scriptContents = model.scriptContents originalObjectID = model.originalObjectID - inputFilePaths = model.inputFilePaths.map{ pifLoader.userNamespace.parseString($0) } - inputFileListPaths = model.inputFileListPaths.map{ pifLoader.userNamespace.parseString($0) } - outputFilePaths = model.outputFilePaths.map{ pifLoader.userNamespace.parseString($0) } - outputFileListPaths = model.outputFileListPaths.map{ pifLoader.userNamespace.parseString($0) } + inputFilePaths = model.inputFilePaths.map { pifLoader.userNamespace.parseString($0) } + inputFileListPaths = model.inputFileListPaths.map { pifLoader.userNamespace.parseString($0) } + outputFilePaths = model.outputFilePaths.map { pifLoader.userNamespace.parseString($0) } + outputFileListPaths = model.outputFileListPaths.map { pifLoader.userNamespace.parseString($0) } emitEnvironment = model.emitEnvironment sandboxingOverride = model.sandboxingOverride runOnlyForDeploymentPostprocessing = model.runOnlyForDeploymentPostprocessing @@ -374,29 +346,20 @@ public final class ShellScriptBuildPhase: BuildPhase, @unchecked Sendable } } - // MARK: Rez build phase class - -public final class RezBuildPhase: BuildPhaseWithBuildFiles, @unchecked Sendable -{ +public final class RezBuildPhase: BuildPhaseWithBuildFiles, @unchecked Sendable { public override var name: String { return "Build Carbon Resources" } } - // MARK: AppleScript build phase class - -public final class AppleScriptBuildPhase: BuildPhaseWithBuildFiles, @unchecked Sendable -{ +public final class AppleScriptBuildPhase: BuildPhaseWithBuildFiles, @unchecked Sendable { public override var name: String { return "Compile AppleScript Files" } } - // MARK: Java archive build phase class - -public final class JavaArchiveBuildPhase: BuildPhaseWithBuildFiles, @unchecked Sendable -{ +public final class JavaArchiveBuildPhase: BuildPhaseWithBuildFiles, @unchecked Sendable { public override var name: String { return "Build Java Resources" } } diff --git a/Sources/SWBCore/ProjectModel/BuildRule.swift b/Sources/SWBCore/ProjectModel/BuildRule.swift index ce87fe12..677733b4 100644 --- a/Sources/SWBCore/ProjectModel/BuildRule.swift +++ b/Sources/SWBCore/ProjectModel/BuildRule.swift @@ -43,7 +43,6 @@ public final class BuildRule: ProjectModelItem { self.inputSpecifier = .fileType(identifier: identifier) } - switch model.actionSpecifier { case .compiler(let identifier): self.actionSpecifier = .compiler(identifier: identifier) @@ -53,9 +52,12 @@ public final class BuildRule: ProjectModelItem { inputs: inputInfo.map { pifLoader.userNamespace.parseString($0) }, inputFileLists: inputFileLists.map { pifLoader.userNamespace.parseString($0) }, outputs: outputInfo.map { - BuildRuleOutputInfo(path: pifLoader.userNamespace.parseString($0.path), additionalCompilerFlags: $0.additionalCompilerFlags.map { - pifLoader.userNamespace.parseStringList($0) - } ?? pifLoader.userNamespace.parseLiteralStringList([])) + BuildRuleOutputInfo( + path: pifLoader.userNamespace.parseString($0.path), + additionalCompilerFlags: $0.additionalCompilerFlags.map { + pifLoader.userNamespace.parseStringList($0) + } ?? 
pifLoader.userNamespace.parseLiteralStringList([]) + ) }, outputFileLists: outputFileLists.map { pifLoader.userNamespace.parseString($0) }, dependencyInfo: DependencyInfoFormat.fromPIF(dependencyInfo, pifLoader: pifLoader), @@ -99,7 +101,6 @@ public final class BuildRule: ProjectModelItem { let outputFileListPaths = (try Self.parseOptionalValueForKeyAsArrayOfStrings(PIFKey_BuildRule_outputFileListPaths, pifDict: pifDict) ?? []).map { return pifLoader.userNamespace.parseString($0) } - // The output file compiler flags are optional. let outputFilesCompilerFlags: [MacroStringListExpression]? if let data = pifDict[PIFKey_BuildRule_outputFilesCompilerFlags] { @@ -111,12 +112,14 @@ public final class BuildRule: ProjectModelItem { guard case let .plArray(value) = flagData else { throw PIFParsingError.incorrectType(keyName: PIFKey_BuildRule_outputFilesCompilerFlags, objectType: Self.self, expectedType: "Array", destinationType: nil) } - return try pifLoader.userNamespace.parseStringList(value.map { item -> String in - guard case let .plString(value) = item else { - throw PIFParsingError.incorrectType(keyName: PIFKey_BuildRule_outputFilesCompilerFlags, objectType: Self.self, expectedType: "String", destinationType: nil) + return try pifLoader.userNamespace.parseStringList( + value.map { item -> String in + guard case let .plString(value) = item else { + throw PIFParsingError.incorrectType(keyName: PIFKey_BuildRule_outputFilesCompilerFlags, objectType: Self.self, expectedType: "String", destinationType: nil) + } + return value } - return value - }) + ) } } else { outputFilesCompilerFlags = nil @@ -181,8 +184,7 @@ public final class BuildRule: ProjectModelItem { } } - public var description: String - { + public var description: String { return "\(type(of: self))<\(fileTypeIdentifier)->\(compilerSpecificationIdentifier)>" } } @@ -219,17 +221,18 @@ public enum BuildRuleActionSpecifier: Sendable { /// - outputFileLists: The list of xcfilelists that contains a list of additional output file paths /// - dependencyInfo: The dependency info. /// - runOncePerArchitecture: Run once per architecture/variant. - case shellScript(contents: String, - inputs: [MacroStringExpression], - inputFileLists: [MacroStringExpression], - outputs: [BuildRuleOutputInfo], - outputFileLists: [MacroStringExpression], - dependencyInfo: DependencyInfoFormat?, - runOncePerArchitecture: Bool) + case shellScript( + contents: String, + inputs: [MacroStringExpression], + inputFileLists: [MacroStringExpression], + outputs: [BuildRuleOutputInfo], + outputFileLists: [MacroStringExpression], + dependencyInfo: DependencyInfoFormat?, + runOncePerArchitecture: Bool + ) } // MARK: Build rule constant strings - -let BuildRule_FileTypeIsPatternIdentifier = "pattern.proxy" -let BuildRule_CompilerIsShellScriptIdentifier = "com.apple.compilers.proxy.script" +let BuildRule_FileTypeIsPatternIdentifier = "pattern.proxy" +let BuildRule_CompilerIsShellScriptIdentifier = "com.apple.compilers.proxy.script" diff --git a/Sources/SWBCore/ProjectModel/DependencyInfoFormat.swift b/Sources/SWBCore/ProjectModel/DependencyInfoFormat.swift index d7331fbc..3cbd871f 100644 --- a/Sources/SWBCore/ProjectModel/DependencyInfoFormat.swift +++ b/Sources/SWBCore/ProjectModel/DependencyInfoFormat.swift @@ -24,7 +24,6 @@ public enum DependencyInfoFormat: Sendable { case makefiles([MacroStringExpression]) } - extension DependencyInfoFormat { public static func fromPIF(_ dependencyInfo: SWBProtocol.DependencyInfo?, pifLoader: PIFLoader) -> DependencyInfoFormat? 
{ guard let dependencyInfo else { @@ -37,7 +36,7 @@ extension DependencyInfoFormat { case .makefile(let path): return .makefile(pifLoader.userNamespace.parseString(path)) case .makefiles(let paths): - return .makefiles(paths.map{ pifLoader.userNamespace.parseString($0) }) + return .makefiles(paths.map { pifLoader.userNamespace.parseString($0) }) } } } diff --git a/Sources/SWBCore/ProjectModel/FilePathResolver.swift b/Sources/SWBCore/ProjectModel/FilePathResolver.swift index d78b65d0..5950a13c 100644 --- a/Sources/SWBCore/ProjectModel/FilePathResolver.swift +++ b/Sources/SWBCore/ProjectModel/FilePathResolver.swift @@ -15,8 +15,7 @@ public import SWBMacro import SWBProtocol /// A FilePathResolver is used to resolve the absolute paths for Reference objects. -public final class FilePathResolver: Sendable -{ +public final class FilePathResolver: Sendable { /// The MacroEvaluationScope used to expand source trees. private let scope: MacroEvaluationScope @@ -29,8 +28,7 @@ public final class FilePathResolver: Sendable /// Private table use to cache paths already evaluated for build settings in the MacroEvaluationScope. private let buildSettingCache = Cache() - public init(scope: MacroEvaluationScope, projectDir: Path? = nil) - { + public init(scope: MacroEvaluationScope, projectDir: Path? = nil) { self.scope = scope self.projectDir = projectDir ?? scope.evaluate(BuiltinMacros.PROJECT_DIR) @@ -42,11 +40,9 @@ public final class FilePathResolver: Sendable } /// Resolve and return the absolute path for a Reference. - public func resolveAbsolutePath(_ reference: Reference) -> Path - { + public func resolveAbsolutePath(_ reference: Reference) -> Path { // If this is a FileGroup, look it up in the cache. - if let fileGroup = reference as? FileGroup - { + if let fileGroup = reference as? FileGroup { return fileGroupCache.getOrInsert(fileGroup) { return computeAbsolutePath(fileGroup) } @@ -56,8 +52,7 @@ public final class FilePathResolver: Sendable } /// Computes the absolute path for a Reference and returns it. This method does no memoizing of the result, so resolveAbsolutePath() is the preferred client method. - private func computeAbsolutePath(_ reference: Reference) -> Path - { + private func computeAbsolutePath(_ reference: Reference) -> Path { // Evaluate the path for the reference. switch reference { @@ -91,8 +86,7 @@ public final class FilePathResolver: Sendable } /// Resolve and return the absolute path for a Reference's source tree. This method will always return an absolute path even if sourceTree is .Absolute, so that it can be prepended to a path even if that path should be absolute but is not. So the caller should use the result appropriately based on what it will be prepended to. - func resolveSourceTree(_ sourceTree: SourceTree, forReference reference: Reference) -> Path - { + func resolveSourceTree(_ sourceTree: SourceTree, forReference reference: Reference) -> Path { // Resolve the source tree. // Note: If the Reference's path is relative, then we want to always append it to something, so we always compute a value for the source tree here, defaulting to $(PROJECT_DIR) if no other value can be determined switch sourceTree @@ -124,8 +118,7 @@ public final class FilePathResolver: Sendable var pathForSetting = Path(pathString) // If the path for the setting is not absolute, then we append it to the value of $(PROJECT_DIR). 
- if !pathForSetting.isAbsolute - { + if !pathForSetting.isAbsolute { pathForSetting = projectDir.join(pathForSetting) } @@ -136,8 +129,7 @@ public final class FilePathResolver: Sendable } /// Resolve and return the path for a Reference's path property, evaluating any build settings in it if necessary. - func resolvePath(_ refPath: MacroStringExpression, forReference reference: Reference) -> Path - { + func resolvePath(_ refPath: MacroStringExpression, forReference reference: Reference) -> Path { return Path(scope.evaluate(refPath)) } diff --git a/Sources/SWBCore/ProjectModel/FileTextEncoding.swift b/Sources/SWBCore/ProjectModel/FileTextEncoding.swift index ee43d556..f9adbbce 100644 --- a/Sources/SWBCore/ProjectModel/FileTextEncoding.swift +++ b/Sources/SWBCore/ProjectModel/FileTextEncoding.swift @@ -20,17 +20,17 @@ public import class Foundation.NSNumber public import class Foundation.NSString #if canImport(Darwin) -import class CoreFoundation.CFString -import var CoreFoundation.kCFStringEncodingInvalidId -import func CoreFoundation.CFStringConvertEncodingToNSStringEncoding -import func CoreFoundation.CFStringConvertEncodingToIANACharSetName -import func CoreFoundation.CFStringConvertIANACharSetNameToEncoding -import func CoreFoundation.CFStringConvertNSStringEncodingToEncoding + import class CoreFoundation.CFString + import var CoreFoundation.kCFStringEncodingInvalidId + import func CoreFoundation.CFStringConvertEncodingToNSStringEncoding + import func CoreFoundation.CFStringConvertEncodingToIANACharSetName + import func CoreFoundation.CFStringConvertIANACharSetNameToEncoding + import func CoreFoundation.CFStringConvertNSStringEncodingToEncoding #endif #if canImport(Darwin) -import struct CoreFoundation.ObjCBool -public import struct Foundation.StringEncodingDetectionOptionsKey + import struct CoreFoundation.ObjCBool + public import struct Foundation.StringEncodingDetectionOptionsKey #endif // The naming convention in Foundation is rather unfortunate. @@ -39,11 +39,11 @@ public import struct Foundation.StringEncodingDetectionOptionsKey public extension FileTextEncoding { init?(stringEncoding: String.Encoding) { #if canImport(Darwin) - let cfencoding = CFStringConvertNSStringEncodingToEncoding(stringEncoding.rawValue) - if cfencoding != kCFStringEncodingInvalidId, let name = CFStringConvertEncodingToIANACharSetName(cfencoding).map(String.init) { - self.init(name) - return - } + let cfencoding = CFStringConvertNSStringEncodingToEncoding(stringEncoding.rawValue) + if cfencoding != kCFStringEncodingInvalidId, let name = CFStringConvertEncodingToIANACharSetName(cfencoding).map(String.init) { + self.init(name) + return + } #endif return nil } @@ -51,10 +51,10 @@ public extension FileTextEncoding { /// Convert the given encoding to an `NSStringEncoding`. var stringEncoding: String.Encoding? { #if canImport(Darwin) - let cfencoding = CFStringConvertIANACharSetNameToEncoding(rawValue.asCFString) - if cfencoding != kCFStringEncodingInvalidId { - return String.Encoding(rawValue: CFStringConvertEncodingToNSStringEncoding(cfencoding)) - } + let cfencoding = CFStringConvertIANACharSetNameToEncoding(rawValue.asCFString) + if cfencoding != kCFStringEncodingInvalidId { + return String.Encoding(rawValue: CFStringConvertEncodingToNSStringEncoding(cfencoding)) + } #endif return nil } @@ -75,13 +75,13 @@ public extension FileTextEncoding { } #if canImport(Darwin) - var convertedString: NSString? 
- var usedLossyConversion: ObjCBool = true - let stringEncoding = String.Encoding(rawValue: NSString.stringEncoding(for: Data(bytes), encodingOptions: [.allowLossyKey: NSNumber(value: false)], convertedString: &convertedString, usedLossyConversion: &usedLossyConversion)) - if let convertedString = convertedString as String?, let discoveredEncoding = FileTextEncoding(stringEncoding: stringEncoding), !usedLossyConversion.boolValue { - // Always detect ASCII as UTF-8, because we want to prefer Unicode encodings - return (convertedString, stringEncoding == .ascii ? FileTextEncoding.utf8 : discoveredEncoding) - } + var convertedString: NSString? + var usedLossyConversion: ObjCBool = true + let stringEncoding = String.Encoding(rawValue: NSString.stringEncoding(for: Data(bytes), encodingOptions: [.allowLossyKey: NSNumber(value: false)], convertedString: &convertedString, usedLossyConversion: &usedLossyConversion)) + if let convertedString = convertedString as String?, let discoveredEncoding = FileTextEncoding(stringEncoding: stringEncoding), !usedLossyConversion.boolValue { + // Always detect ASCII as UTF-8, because we want to prefer Unicode encodings + return (convertedString, stringEncoding == .ascii ? FileTextEncoding.utf8 : discoveredEncoding) + } #endif return nil @@ -89,7 +89,7 @@ public extension FileTextEncoding { } fileprivate extension String { - init?(_ bytes: C, encoding: FileTextEncoding) where C.Index : SignedInteger, C.Element == UInt8 { + init?(_ bytes: C, encoding: FileTextEncoding) where C.Index: SignedInteger, C.Element == UInt8 { switch encoding { case .utf8: self.init(decodingBytes: bytes, as: Unicode.UTF8.self) diff --git a/Sources/SWBCore/ProjectModel/PIFLoader.swift b/Sources/SWBCore/ProjectModel/PIFLoader.swift index dc167904..9f190408 100644 --- a/Sources/SWBCore/ProjectModel/PIFLoader.swift +++ b/Sources/SWBCore/ProjectModel/PIFLoader.swift @@ -117,7 +117,6 @@ struct PIFObjectReference: Hashable { return vers } - /// A top-level object which can be represented independently in the PIF format. /// /// These objects represent the granularity at which the PIF can be incrementally loaded or replaced. @@ -271,7 +270,7 @@ public final class PIFLoader { } // Return the ProjectModelItem for the requested GUID. If one cannot be returned, that's an internal error. - func lookupReference(for guid: String ) -> Reference? { + func lookupReference(for guid: String) -> Reference? { return knownReferences[guid] } @@ -295,7 +294,6 @@ public final class PIFLoader { } } - /// An incremental PIF loader. /// /// This is a PIF loader implementation which keeps track of available objects and dynamically negotiates the minimal set of objects which need to be transferred to load the complete PIF. @@ -306,18 +304,30 @@ public final class PIFLoader { // // FIXME: This API doesn't feel right, it seems bogus that both the incremental loader and the regular loader are both maintaining maps of objects keyed by the same thing. The separation currently is between the incremental loader, which maintains a set of objects across multiple load requests and can communicate back to the client (via `LoadingSession`) about what new objects are required, and the bare `PIFLoader` which is only responsible for loading a single, complete graph. 
public final class IncrementalPIFLoader { - @_spi(Testing) public static let loadsRequested = Statistic("IncrementalPIFLoader.loadsRequested", - "The total number of top-level objects requested.") - static let objectsRequested = Statistic("IncrementalPIFLoader.objectsRequested", - "The number of PIF objects which were requested.") - @_spi(Testing) public static let objectsLoaded = Statistic("IncrementalPIFLoader.objectsLoaded", - "The number of PIF objects which were loaded, in any form.") - static let objectsCachedInMemory = Statistic("IncrementalPIFLoader.objectsCachedInMemory", - "The number of PIF objects which hit the in-memory cache.") - static let objectsCachedOnDisk = Statistic("IncrementalPIFLoader.objectsCachedOnDisk", - "The number of PIF objects which hit the on-disk cache.") - @_spi(Testing) public static let objectsTransferred = Statistic("IncrementalPIFLoader.objectsTransferred", - "The number of PIF objects which were transferred.") + @_spi(Testing) public static let loadsRequested = Statistic( + "IncrementalPIFLoader.loadsRequested", + "The total number of top-level objects requested." + ) + static let objectsRequested = Statistic( + "IncrementalPIFLoader.objectsRequested", + "The number of PIF objects which were requested." + ) + @_spi(Testing) public static let objectsLoaded = Statistic( + "IncrementalPIFLoader.objectsLoaded", + "The number of PIF objects which were loaded, in any form." + ) + static let objectsCachedInMemory = Statistic( + "IncrementalPIFLoader.objectsCachedInMemory", + "The number of PIF objects which hit the in-memory cache." + ) + static let objectsCachedOnDisk = Statistic( + "IncrementalPIFLoader.objectsCachedOnDisk", + "The number of PIF objects which hit the on-disk cache." + ) + @_spi(Testing) public static let objectsTransferred = Statistic( + "IncrementalPIFLoader.objectsTransferred", + "The number of PIF objects which were transferred." + ) /// A session for incrementally loading an individual object. /// @@ -357,7 +367,7 @@ public final class IncrementalPIFLoader { /// The complete set of missing objects for the unloaded data. public var missingObjects: AnySequence<(signature: String, type: PIFObjectType)> { - return AnySequence(_missingObjects.map{ (signature: $0.signature, type: $0.type) }) + return AnySequence(_missingObjects.map { (signature: $0.signature, type: $0.type) }) } var _missingObjects: Set = [] @@ -382,7 +392,7 @@ public final class IncrementalPIFLoader { // Create the loader for this instance and load the object. let loader = PIFLoader(provider: ObjectProviderAdaptor(session: self), userNamespace: incrementalLoader.userNamespace) - let result = try MacroNamespace.withExpressionInterningEnabled{ try loader.loadReference(signature: workspaceSignature, type: Workspace.self) } + let result = try MacroNamespace.withExpressionInterningEnabled { try loader.loadReference(signature: workspaceSignature, type: Workspace.self) } // Merge back in all the loaded objects. 
for (key, value) in loader.loadedObjects { diff --git a/Sources/SWBCore/ProjectModel/PlatformFilter.swift b/Sources/SWBCore/ProjectModel/PlatformFilter.swift index 16d089ce..408683e1 100644 --- a/Sources/SWBCore/ProjectModel/PlatformFilter.swift +++ b/Sources/SWBCore/ProjectModel/PlatformFilter.swift @@ -51,13 +51,13 @@ public final class PlatformFilter: ProjectModelItem, Hashable, Codable { hasher.combine(environment) } - public static func ==(lhs: PlatformFilter, rhs: PlatformFilter) -> Bool { + public static func == (lhs: PlatformFilter, rhs: PlatformFilter) -> Bool { return lhs.platform == rhs.platform && lhs.environment == rhs.environment } } extension PlatformFilter: Comparable { - public static func <(lhs: PlatformFilter, rhs: PlatformFilter) -> Bool { + public static func < (lhs: PlatformFilter, rhs: PlatformFilter) -> Bool { return lhs.comparisonString < rhs.comparisonString } @@ -68,16 +68,18 @@ extension PlatformFilter: Comparable { extension PlatformFilter { static func fromBuildConditionParameterString(_ string: String) -> Set { - return Set(string.components(separatedBy: ";").compactMap { - let parameters = $0.components(separatedBy: "-") - switch parameters.count { - case 1: - return PlatformFilter(platform: parameters[0]) - case 2: - return PlatformFilter(platform: parameters[0], environment: parameters[1]) - default: - return nil + return Set( + string.components(separatedBy: ";").compactMap { + let parameters = $0.components(separatedBy: "-") + switch parameters.count { + case 1: + return PlatformFilter(platform: parameters[0]) + case 2: + return PlatformFilter(platform: parameters[0], environment: parameters[1]) + default: + return nil + } } - }) + ) } } diff --git a/Sources/SWBCore/ProjectModel/Project.swift b/Sources/SWBCore/ProjectModel/Project.swift index 67c4b47f..27e0bda7 100644 --- a/Sources/SWBCore/ProjectModel/Project.swift +++ b/Sources/SWBCore/ProjectModel/Project.swift @@ -13,14 +13,13 @@ import SWBProtocol public import SWBUtil -public final class Project: ProjectModelItem, PIFObject, Hashable, Encodable -{ +public final class Project: ProjectModelItem, PIFObject, Hashable, Encodable { static func referencedObjects(for data: EncodedPIFValue) throws -> [PIFObjectReference] { // Any errors here will be diagnosed in the loader. switch data { case .json(let data): guard case let .plArray(projects)? 
= data["targets"] else { return [] } - return projects.compactMap{ + return projects.compactMap { guard case let .plString(signature) = $0 else { return nil } return PIFObjectReference(signature: signature, type: .target) } @@ -31,7 +30,7 @@ public final class Project: ProjectModelItem, PIFObject, Hashable, Encodable // FIXME: This sucks, we are doing the protocol decode twice: Don't require duplicate binary PIF decode in incremental PIF transfer let deserializer = MsgPackDeserializer(data) let model: SWBProtocol.Project = try deserializer.deserialize() - return model.targetSignatures.map{ PIFObjectReference(signature: $0, type: .target) } + return model.targetSignatures.map { PIFObjectReference(signature: $0, type: .target) } } } @@ -72,7 +71,7 @@ public final class Project: ProjectModelItem, PIFObject, Hashable, Encodable public let appPreferencesBuildSettings: [String: PropertyListItem] public let isPackage: Bool - private enum CodingKeys : String, CodingKey { + private enum CodingKeys: String, CodingKey { case name case signature case guid @@ -95,9 +94,9 @@ public final class Project: ProjectModelItem, PIFObject, Hashable, Encodable self.isPackage = model.isPackage self.xcodeprojPath = model.xcodeprojPath self.sourceRoot = model.sourceRoot - self.targets = try model.targetSignatures.map{ try pifLoader.loadReference(signature: $0, type: Target.self) } + self.targets = try model.targetSignatures.map { try pifLoader.loadReference(signature: $0, type: Target.self) } self.groupTree = try Reference.create(model.groupTree, pifLoader, isRoot: true) as! FileGroup - self.buildConfigurations = model.buildConfigurations.map{ BuildConfiguration($0, pifLoader) } + self.buildConfigurations = model.buildConfigurations.map { BuildConfiguration($0, pifLoader) } self.defaultConfigurationName = model.defaultConfigurationName self.developmentRegion = model.developmentRegion self.classPrefix = model.classPrefix @@ -155,8 +154,7 @@ public final class Project: ProjectModelItem, PIFObject, Hashable, Encodable try validateTargets() } - public var description: String - { + public var description: String { return "\(type(of: self))<\(guid):\(xcodeprojPath.str):\(targets.count) targets>" } @@ -202,7 +200,7 @@ public final class Project: ProjectModelItem, PIFObject, Hashable, Encodable hasher.combine(ObjectIdentifier(self)) } - public static func ==(lhs: Project, rhs: Project) -> Bool { + public static func == (lhs: Project, rhs: Project) -> Bool { return ObjectIdentifier(lhs) == ObjectIdentifier(rhs) } diff --git a/Sources/SWBCore/ProjectModel/ProjectModelItem.swift b/Sources/SWBCore/ProjectModel/ProjectModelItem.swift index 11bcf70e..2d73a46b 100644 --- a/Sources/SWBCore/ProjectModel/ProjectModelItem.swift +++ b/Sources/SWBCore/ProjectModel/ProjectModelItem.swift @@ -14,16 +14,12 @@ public import Foundation import SWBProtocol public import SWBUtil - // MARK: Type definitions - public typealias ProjectModelItemPIF = [String: PropertyListItem] - // MARK: ProjectModelItem protocol - public protocol ProjectModelItem: AnyObject, CustomStringConvertible, Sendable { } @@ -69,11 +65,9 @@ extension PIFParsingError: LocalizedError { } } -extension ProjectModelItem -{ +extension ProjectModelItem { // Static methods for parsing a property list to load a PIF. - /// Parses the value for an optional key in a PIF dictionary as a String. /// - returns: A string value if the key is present, `nil` if it is absent. 
@_spi(Testing) public static func parseOptionalValueForKeyAsString(_ key: String, pifDict: ProjectModelItemPIF) throws -> String? { @@ -157,12 +151,13 @@ extension ProjectModelItem // FIXME // @available(*, deprecated, message: "This is a shim method, and should be removed. It's used for the binary PIF representation used only by Swift Build's unit tests; we should transition everything to the JSON based API we currently use in production, and to a unified API at that (rather than one based on actual Swift types, one based on raw property lists, and different APIs for the public API and for the tests).") static func parseOptionalValueForKeyAsByteString(_ key: String, pifDict: ProjectModelItemPIF) throws -> ByteString? { - return try (parseOptionalValueForKeyAsArrayOfPropertyListItems(key, pifDict: pifDict)?.map { (plItem) -> UInt8 in - guard case .plInt(let value) = plItem, let byte = UInt8(exactly: value) else { - throw PIFParsingError.incorrectTypeInArray(keyName: key, objectType: self, expectedType: "UInt8") - } - return byte - }).map { ByteString($0) } + return try + (parseOptionalValueForKeyAsArrayOfPropertyListItems(key, pifDict: pifDict)?.map { (plItem) -> UInt8 in + guard case .plInt(let value) = plItem, let byte = UInt8(exactly: value) else { + throw PIFParsingError.incorrectTypeInArray(keyName: key, objectType: self, expectedType: "UInt8") + } + return byte + }).map { ByteString($0) } } /// Parses the value for an optional key in a PIF dictionary as an Array of Strings. @@ -203,7 +198,7 @@ extension ProjectModelItem } /// Parses the value for a required key in a PIF dictionary as an object of the appropriate concrete subclass of ProjectModelItem, and recursively parses any arrays or dictionaries appropriately. - @_spi(Testing) public static func parseValueForKeyAsProjectModelItem( _ key: String, pifDict: ProjectModelItemPIF, pifLoader: PIFLoader, construct: (ProjectModelItemPIF) throws -> T) throws -> T { + @_spi(Testing) public static func parseValueForKeyAsProjectModelItem(_ key: String, pifDict: ProjectModelItemPIF, pifLoader: PIFLoader, construct: (ProjectModelItemPIF) throws -> T) throws -> T { return try require(key) { try parseOptionalValueForKeyAsProjectModelItem(key, pifDict: pifDict, pifLoader: pifLoader, construct: construct) } } @@ -231,17 +226,13 @@ extension ProjectModelItem } } - // MARK: Wrapper for an unowned ProjectModelItem - /// Wrapper for `ProjectModelItem`-conforming objects so they can be placed in arrays and dictionaries without creating string reference loops. -struct UnownedProjectModelItem: Hashable -{ +struct UnownedProjectModelItem: Hashable { unowned let value: any ProjectModelItem - init(_ value: any ProjectModelItem) - { + init(_ value: any ProjectModelItem) { self.value = value } @@ -252,12 +243,10 @@ struct UnownedProjectModelItem: Hashable } /// Two `UnownedProjectModelItem`s are equal if their wrapped items are the same object. 
-func ==(lhs: UnownedProjectModelItem, rhs: UnownedProjectModelItem) -> Bool -{ +func == (lhs: UnownedProjectModelItem, rhs: UnownedProjectModelItem) -> Bool { return ObjectIdentifier(lhs.value) == ObjectIdentifier(rhs.value) } - // MARK: PIF value constant definitions public protocol PIFStringEnum { diff --git a/Sources/SWBCore/ProjectModel/Reference.swift b/Sources/SWBCore/ProjectModel/Reference.swift index 7971142d..a6810daf 100644 --- a/Sources/SWBCore/ProjectModel/Reference.swift +++ b/Sources/SWBCore/ProjectModel/Reference.swift @@ -18,9 +18,7 @@ public typealias SourceTree = SWBProtocol.SourceTree // MARK: Reference abstract class - -public class Reference: ProjectModelItem, Hashable, Equatable, @unchecked Sendable -{ +public class Reference: ProjectModelItem, Hashable, Equatable, @unchecked Sendable { /// The global ID within the PIF for this reference. public let guid: String @@ -30,7 +28,7 @@ public class Reference: ProjectModelItem, Hashable, Equatable, @unchecked Sendab case let model as SWBProtocol.VariantGroup: return try VariantGroup(model, pifLoader, isRoot: isRoot) case let model as SWBProtocol.FileGroup: return try FileGroup(model, pifLoader, isRoot: isRoot) case let model as SWBProtocol.ProductReference: return try ProductReference(model, pifLoader) - // NOTE: This comes last because it is both a concrete and base type. + // NOTE: This comes last because it is both a concrete and base type. case let model as SWBProtocol.FileReference: return try FileReference(model, pifLoader, isRoot: isRoot) default: fatalError("unexpected model: \(model)") @@ -39,7 +37,7 @@ public class Reference: ProjectModelItem, Hashable, Equatable, @unchecked Sendab init(_ model: SWBProtocol.Reference, _ pifLoader: PIFLoader) throws { #if canImport(Darwin) - assert(type(of: model) !== SWBProtocol.Reference.self, "unexpected concrete type") + assert(type(of: model) !== SWBProtocol.Reference.self, "unexpected concrete type") #endif self.guid = model.guid @@ -55,8 +53,7 @@ public class Reference: ProjectModelItem, Hashable, Equatable, @unchecked Sendab try pifLoader.registerReference(self, for: guid) } - init(guid: String) - { + init(guid: String) { self.guid = guid } @@ -64,23 +61,19 @@ public class Reference: ProjectModelItem, Hashable, Equatable, @unchecked Sendab hasher.combine(ObjectIdentifier(self)) } - public static func ==(lhs: Reference, rhs: Reference) -> Bool - { + public static func == (lhs: Reference, rhs: Reference) -> Bool { return lhs === rhs } - public var description: String - { + public var description: String { return "\(type(of: self))<\(guid):ABSTRACT>" } } // MARK: GroupTreeReference abstract class - /// A GroupTreeReference is a reference which exists as part of a project's group tree. ProductReference objects are not part of the group tree. -public class GroupTreeReference: Reference, @unchecked Sendable -{ +public class GroupTreeReference: Reference, @unchecked Sendable { /// The parent of this reference in the group tree. The root group's parent pointer will always be nil, but all other references should have backpointers once the PIF is fully loaded. /// /// The parent relationships are currently only used by the FilePath resolver. 
@@ -105,7 +98,7 @@ public class GroupTreeReference: Reference, @unchecked Sendable init(_ model: SWBProtocol.GroupTreeReference, _ pifLoader: PIFLoader, isRoot: Bool) throws { #if canImport(Darwin) - assert(type(of: model) !== SWBProtocol.GroupTreeReference.self, "unexpected concrete reference") + assert(type(of: model) !== SWBProtocol.GroupTreeReference.self, "unexpected concrete reference") #endif self.sourceTree = model.sourceTree self.path = pifLoader.userNamespace.parseString(model.path) @@ -174,13 +167,10 @@ public class GroupTreeReference: Reference, @unchecked Sendable } } - // MARK: FileReference class - /// A FileReference is the most common kind of Reference, representing a single concrete file on disk. -public final class FileReference: GroupTreeReference, BuildFileRepresentable, @unchecked Sendable -{ +public final class FileReference: GroupTreeReference, BuildFileRepresentable, @unchecked Sendable { public let fileTypeIdentifier: String public let regionVariantName: String? /// The file text encoding for the reference. Will only be non-nil if the file type in Xcode indicated that it is a text type file. @@ -209,19 +199,15 @@ public final class FileReference: GroupTreeReference, BuildFileRepresentable, @u try super.init(fromDictionary: pifDict, withPIFLoader: pifLoader, isRoot: isRoot) } - override public var description: String - { + override public var description: String { return "\(type(of: self))<\(guid):\(fileTypeIdentifier):\(sourceTree.debugDescription):\(path.stringRep)>" } } - // MARK: VersionGroup class - /// A VersionGroup is a file reference which contains multiple versions of itself as children. It is possible (but unlikely, and possibly no longer actively promoted) that individual children may be represented by build files. -public final class VersionGroup: GroupTreeReference, BuildFileRepresentable, @unchecked Sendable -{ +public final class VersionGroup: GroupTreeReference, BuildFileRepresentable, @unchecked Sendable { /// The children of this version group, if any. public let children: [GroupTreeReference] @@ -244,13 +230,10 @@ public final class VersionGroup: GroupTreeReference, BuildFileRepresentable, @un } } - // MARK: FileGroup class - /// A FileGroup is an abstract grouper of references and groups. It is commonly used either for conceptual grouping for the benefit of users working with a project (and in this case has no meaningful impact on the build), or to represent a directory on disk which its children are relative to. In the future it might be able to contribute other meaningful content to the build by, for example, describing build settings which apply to all of its children. -public final class FileGroup: GroupTreeReference, @unchecked Sendable -{ +public final class FileGroup: GroupTreeReference, @unchecked Sendable { public let name: String public let children: [GroupTreeReference] @@ -274,19 +257,15 @@ public final class FileGroup: GroupTreeReference, @unchecked Sendable for child in children { child._parent.initialize(to: .init(value: self)) } } - override public var description: String - { + override public var description: String { return "\(type(of: self))<\(guid):\(name):\(sourceTree.debugDescription):\(path.stringRep)>" } } - // MARK: VariantGroup class - /// A VariantGroup represents multiple files which conceptually are different 'variants' (e.g., localized variants) of a single file. This includes more complex groupings, such as a .xib file and its associated localized .strings files. 
-public final class VariantGroup: GroupTreeReference, BuildFileRepresentable, @unchecked Sendable -{ +public final class VariantGroup: GroupTreeReference, BuildFileRepresentable, @unchecked Sendable { /// The name of this variant group - primarily for debugging purposes. public let name: String /// The children of this variant group, if any. @@ -313,19 +292,15 @@ public final class VariantGroup: GroupTreeReference, BuildFileRepresentable, @un for child in children { child._parent.initialize(to: .init(value: self)) } } - override public var description: String - { + override public var description: String { return "\(type(of: self))<\(guid):\(name)>" } } - // MARK: ProductReference class - /// A ProductReference represents the product of a StandardTarget object. It acts as a placeholder so that product can be represented in other targets, but it contains no meaningful information itself; rather, it vends information about itself by querying its target for that information. A ProductReference object is not part of a product's group tree and has no parent property; rather, it is owned by and has an unowned backpointer to its target. -public final class ProductReference: Reference, BuildFileRepresentable -{ +public final class ProductReference: Reference, BuildFileRepresentable { /// The name of this reference - primarily for debugging purposes. public let name: String diff --git a/Sources/SWBCore/ProjectModel/Target.swift b/Sources/SWBCore/ProjectModel/Target.swift index 1a79679a..5656f90f 100644 --- a/Sources/SWBCore/ProjectModel/Target.swift +++ b/Sources/SWBCore/ProjectModel/Target.swift @@ -58,7 +58,7 @@ public final class CustomTask: ProjectModelItem, Sendable { guard let keyAndValuePLs = environmentItemPL.arrayValue else { throw PIFParsingError.incorrectTypeInArray(keyName: PIFKey_CustomTask_environment, objectType: Self.self, expectedType: "Array") } - guard keyAndValuePLs.count == 2 else { + guard keyAndValuePLs.count == 2 else { throw StubError.error("Expected a key/value pair when deserializing environment") } guard let key = keyAndValuePLs[0].stringValue, let value = keyAndValuePLs[1].stringValue else { @@ -77,11 +77,9 @@ public final class CustomTask: ProjectModelItem, Sendable { } } - // MARK: Target Dependency Info -public final class TargetDependency: ProjectModelItem, Encodable, Sendable -{ +public final class TargetDependency: ProjectModelItem, Encodable, Sendable { /// The GUID that maps back to the target GUID. public let guid: String @@ -106,31 +104,36 @@ public final class TargetDependency: ProjectModelItem, Encodable, Sendable init(_ model: SWBProtocol.TargetDependency, _ pifLoader: PIFLoader) { self.guid = model.guid self.name = model.name - self.platformFilters = Set(model.platformFilters.map{ SWBCore.PlatformFilter($0, pifLoader) }) + self.platformFilters = Set(model.platformFilters.map { SWBCore.PlatformFilter($0, pifLoader) }) } init(fromDictionary pifDict: ProjectModelItemPIF, withPIFLoader pifLoader: PIFLoader) throws { // FIXME: Falling back to an empty string GUID does not seem correct, but this can happen for dependencies on targets from missing project references. Should we instead allow `guid` to be nil to represent this case? self.guid = try Self.parseOptionalValueForKeyAsString(PIFKey_guid, pifDict: pifDict) ?? 
"" self.name = try Self.parseOptionalValueForKeyAsString(PIFKey_name, pifDict: pifDict) - self.platformFilters = Set(try Self.parseOptionalValueForKeyAsArrayOfProjectModelItems(PIFKey_platformFilters, pifDict: pifDict, pifLoader: pifLoader, construct: { - try PlatformFilter(fromDictionary: $0, withPIFLoader: pifLoader) - }) ?? []) + self.platformFilters = Set( + try Self.parseOptionalValueForKeyAsArrayOfProjectModelItems( + PIFKey_platformFilters, + pifDict: pifDict, + pifLoader: pifLoader, + construct: { + try PlatformFilter(fromDictionary: $0, withPIFLoader: pifLoader) + } + ) ?? [] + ) } } - // MARK: Target abstract class /// The Target abstract class defines properties common to all types of targets. -public class Target: ProjectModelItem, PIFObject, Hashable, Encodable, @unchecked Sendable -{ +public class Target: ProjectModelItem, PIFObject, Hashable, Encodable, @unchecked Sendable { static func referencedObjects(for data: EncodedPIFValue) -> [PIFObjectReference] { return [] } /// Parses a ProjectModelItemPIF dictionary as an object of the appropriate subclass of Target. - static func construct(from data: EncodedPIFValue, signature: PIFObject.Signature, loader: PIFLoader ) throws -> Self { + static func construct(from data: EncodedPIFValue, signature: PIFObject.Signature, loader: PIFLoader) throws -> Self { switch data { case .json(let data): return try construct(from: data, signature: signature, loader: loader) @@ -169,7 +172,7 @@ public class Target: ProjectModelItem, PIFObject, Hashable, Encodable, @unchecke public let guid: String public let name: String - public var type: TargetType { preconditionFailure( "\(Swift.type(of: self)) should never be asked directly for its type" ) } + public var type: TargetType { preconditionFailure("\(Swift.type(of: self)) should never be asked directly for its type") } public let buildConfigurations: [BuildConfiguration] public let hasImpartedBuildProperties: Bool @@ -201,9 +204,9 @@ public class Target: ProjectModelItem, PIFObject, Hashable, Encodable, @unchecke self.signature = signature self.guid = model.guid self.name = model.name - self.buildConfigurations = model.buildConfigurations.map{ BuildConfiguration($0, pifLoader) } + self.buildConfigurations = model.buildConfigurations.map { BuildConfiguration($0, pifLoader) } self.customTasks = model.customTasks.map { CustomTask($0, pifLoader) } - self.dependencies = model.dependencies.map{ TargetDependency($0, pifLoader) } + self.dependencies = model.dependencies.map { TargetDependency($0, pifLoader) } self.dynamicTargetVariantGuid = model.dynamicTargetVariantGuid self.approvedByUser = model.approvedByUser self.hasImpartedBuildProperties = buildConfigurations.filter { !$0.impartedBuildProperties.isEmpty }.isEmpty @@ -233,9 +236,14 @@ public class Target: ProjectModelItem, PIFObject, Hashable, Encodable, @unchecke // The list of dependencies is required. // // The target dependencies are resolved lazily (see `TargetDependencyResolver`) since they may cross target (and even project) boundaries. 
- dependencies = try Self.parseValueForKeyAsArrayOfProjectModelItems(PIFKey_Target_dependencies, pifDict: pifDict, pifLoader: pifLoader, construct: { - return try TargetDependency(fromDictionary: $0, withPIFLoader: pifLoader) - }) + dependencies = try Self.parseValueForKeyAsArrayOfProjectModelItems( + PIFKey_Target_dependencies, + pifDict: pifDict, + pifLoader: pifLoader, + construct: { + return try TargetDependency(fromDictionary: $0, withPIFLoader: pifLoader) + } + ) dynamicTargetVariantGuid = try Self.parseOptionalValueForKeyAsString(PIFKey_Target_dynamicTargetVariantGuid, pifDict: pifDict) approvedByUser = try Self.parseValueForKeyAsBool(PIFKey_Target_approvedByUser, pifDict: pifDict, defaultValue: true) @@ -245,8 +253,7 @@ public class Target: ProjectModelItem, PIFObject, Hashable, Encodable, @unchecke } /// Parses a ProjectModelItemPIF dictionary as an object of the appropriate subclass of Target. - static func construct(from data: ProjectModelItemPIF, signature: PIFObject.Signature, loader: PIFLoader ) throws -> Self - { + static func construct(from data: ProjectModelItemPIF, signature: PIFObject.Signature, loader: PIFLoader) throws -> Self { // Workaround inability to express protocol completely. func autocast(_ some: Target) -> T { return some as! T @@ -272,8 +279,7 @@ public class Target: ProjectModelItem, PIFObject, Hashable, Encodable, @unchecke } } - public var description: String - { + public var description: String { return "\(Swift.type(of: self))<\(guid):\(name)>" } @@ -312,17 +318,15 @@ public class Target: ProjectModelItem, PIFObject, Hashable, Encodable, @unchecke hasher.combine(ObjectIdentifier(self)) } - public static func ==(lhs: Target, rhs: Target) -> Bool { + public static func == (lhs: Target, rhs: Target) -> Bool { return lhs === rhs } } // MARK: BuildPhaseTarget class - /// A BuildPhaseTarget is a kind of target which can contain build phases. -public class BuildPhaseTarget: Target, @unchecked Sendable -{ +public class BuildPhaseTarget: Target, @unchecked Sendable { /// List of build phases in the target. public let buildPhases: [BuildPhase] @@ -336,7 +340,7 @@ public class BuildPhaseTarget: Target, @unchecked Sendable public let resourcesBuildPhase: ResourcesBuildPhase? init(_ model: SWBProtocol.BuildPhaseTarget, _ pifLoader: PIFLoader, signature: String) { - buildPhases = model.buildPhases.map{ BuildPhase.create($0, pifLoader) } + buildPhases = model.buildPhases.map { BuildPhase.create($0, pifLoader) } // Populate the convenience build phase properties. var warnings: [String] = [] var sourcesBuildPhase: SourcesBuildPhase? = nil @@ -345,28 +349,28 @@ public class BuildPhaseTarget: Target, @unchecked Sendable var resourcesBuildPhase: ResourcesBuildPhase? 
= nil for buildPhase in self.buildPhases { switch buildPhase { - case let sourcesPhase as SourcesBuildPhase: - if sourcesBuildPhase != nil { - warnings.append("target has multiple \(buildPhase.name) build phases, which may cause it to build incorrectly - all but one should be deleted") - } - sourcesBuildPhase = sourcesPhase - case let frameworksPhase as FrameworksBuildPhase: - if frameworksBuildPhase != nil { - warnings.append("target has multiple \(buildPhase.name) build phases, which may cause it to build incorrectly - all but one should be deleted") - } - frameworksBuildPhase = frameworksPhase - case let headersPhase as HeadersBuildPhase: - if headersBuildPhase != nil { - warnings.append("target has multiple \(buildPhase.name) build phases, which may cause it to build incorrectly - all but one should be deleted") - } - headersBuildPhase = headersPhase - case let resourcesPhase as ResourcesBuildPhase: - if resourcesBuildPhase != nil { - warnings.append("target has multiple \(buildPhase.name) build phases, which may cause it to build incorrectly - all but one should be deleted") - } - resourcesBuildPhase = resourcesPhase - default: - break + case let sourcesPhase as SourcesBuildPhase: + if sourcesBuildPhase != nil { + warnings.append("target has multiple \(buildPhase.name) build phases, which may cause it to build incorrectly - all but one should be deleted") + } + sourcesBuildPhase = sourcesPhase + case let frameworksPhase as FrameworksBuildPhase: + if frameworksBuildPhase != nil { + warnings.append("target has multiple \(buildPhase.name) build phases, which may cause it to build incorrectly - all but one should be deleted") + } + frameworksBuildPhase = frameworksPhase + case let headersPhase as HeadersBuildPhase: + if headersBuildPhase != nil { + warnings.append("target has multiple \(buildPhase.name) build phases, which may cause it to build incorrectly - all but one should be deleted") + } + headersBuildPhase = headersPhase + case let resourcesPhase as ResourcesBuildPhase: + if resourcesBuildPhase != nil { + warnings.append("target has multiple \(buildPhase.name) build phases, which may cause it to build incorrectly - all but one should be deleted") + } + resourcesBuildPhase = resourcesPhase + default: + break } } self.sourcesBuildPhase = sourcesBuildPhase @@ -376,8 +380,7 @@ public class BuildPhaseTarget: Target, @unchecked Sendable super.init(model, pifLoader, signature: signature, errors: [], warnings: warnings) } - @_spi(Testing) public override init(fromDictionary pifDict: ProjectModelItemPIF, signature: String, withPIFLoader pifLoader: PIFLoader, errors: [String] = [], warnings: [String] = []) throws - { + @_spi(Testing) public override init(fromDictionary pifDict: ProjectModelItemPIF, signature: String, withPIFLoader pifLoader: PIFLoader, errors: [String] = [], warnings: [String] = []) throws { // The list of build phases is required. buildPhases = try Self.parseValueForKeyAsArrayOfPropertyListItems(PIFKey_Target_buildPhases, pifDict: pifDict).map { plItem in guard case .plDict(let dictValue) = plItem else { @@ -400,32 +403,31 @@ public class BuildPhaseTarget: Target, @unchecked Sendable var frameworksBuildPhase: FrameworksBuildPhase? = nil var headersBuildPhase: HeadersBuildPhase? = nil var resourcesBuildPhase: ResourcesBuildPhase? 
= nil - for buildPhase in self.buildPhases - { + for buildPhase in self.buildPhases { switch buildPhase { - case let sourcesPhase as SourcesBuildPhase: - if sourcesBuildPhase != nil { - newWarnings.append("target has multiple \(buildPhase.name) build phases, which may cause it to build incorrectly - all but one should be deleted") - } - sourcesBuildPhase = sourcesPhase - case let frameworksPhase as FrameworksBuildPhase: - if frameworksBuildPhase != nil { - newWarnings.append("target has multiple \(buildPhase.name) build phases, which may cause it to build incorrectly - all but one should be deleted") - } - frameworksBuildPhase = frameworksPhase - case let headersPhase as HeadersBuildPhase: - if headersBuildPhase != nil { - newWarnings.append("target has multiple \(buildPhase.name) build phases, which may cause it to build incorrectly - all but one should be deleted") - } - headersBuildPhase = headersPhase - case let resourcesPhase as ResourcesBuildPhase: - if resourcesBuildPhase != nil { - newWarnings.append("target has multiple \(buildPhase.name) build phases, which may cause it to build incorrectly - all but one should be deleted") - } - resourcesBuildPhase = resourcesPhase - default: - break + case let sourcesPhase as SourcesBuildPhase: + if sourcesBuildPhase != nil { + newWarnings.append("target has multiple \(buildPhase.name) build phases, which may cause it to build incorrectly - all but one should be deleted") + } + sourcesBuildPhase = sourcesPhase + case let frameworksPhase as FrameworksBuildPhase: + if frameworksBuildPhase != nil { + newWarnings.append("target has multiple \(buildPhase.name) build phases, which may cause it to build incorrectly - all but one should be deleted") + } + frameworksBuildPhase = frameworksPhase + case let headersPhase as HeadersBuildPhase: + if headersBuildPhase != nil { + newWarnings.append("target has multiple \(buildPhase.name) build phases, which may cause it to build incorrectly - all but one should be deleted") + } + headersBuildPhase = headersPhase + case let resourcesPhase as ResourcesBuildPhase: + if resourcesBuildPhase != nil { + newWarnings.append("target has multiple \(buildPhase.name) build phases, which may cause it to build incorrectly - all but one should be deleted") + } + resourcesBuildPhase = resourcesPhase + default: + break } } self.sourcesBuildPhase = sourcesBuildPhase @@ -437,16 +439,13 @@ public class BuildPhaseTarget: Target, @unchecked Sendable } } - // MARK: StandardTarget class - /// Source data needed to compute provisioning task inputs for a target+configuration pair. public typealias ProvisioningSourceData = SWBProtocol.ProvisioningSourceData /// A StandardTarget is the most common type of target: A target which has build phases describing its input files, and which generates a product. 
-public final class StandardTarget: BuildPhaseTarget, @unchecked Sendable -{ +public final class StandardTarget: BuildPhaseTarget, @unchecked Sendable { public enum SourceCodeLanguage: CustomStringConvertible, Sendable { case undefined case swift @@ -511,7 +510,7 @@ public final class StandardTarget: BuildPhaseTarget, @unchecked Sendable } init(_ model: SWBProtocol.StandardTarget, _ pifLoader: PIFLoader, signature: String) throws { - buildRules = model.buildRules.map{ BuildRule($0, pifLoader) } + buildRules = model.buildRules.map { BuildRule($0, pifLoader) } productTypeIdentifier = model.productTypeIdentifier productReference = try ProductReference(model.productReference, pifLoader) isPackageTarget = model.isPackageTarget @@ -528,8 +527,7 @@ public final class StandardTarget: BuildPhaseTarget, @unchecked Sendable _provisioningSourceData.initialize(to: provisioningSourceData) } - @_spi(Testing) public override init(fromDictionary pifDict: ProjectModelItemPIF, signature: String, withPIFLoader pifLoader: PIFLoader, errors: [String] = [], warnings: [String] = []) throws - { + @_spi(Testing) public override init(fromDictionary pifDict: ProjectModelItemPIF, signature: String, withPIFLoader pifLoader: PIFLoader, errors: [String] = [], warnings: [String] = []) throws { // The product type identifier is required. productTypeIdentifier = try Self.parseValueForKeyAsString(PIFKey_Target_productTypeIdentifier, pifDict: pifDict) @@ -580,8 +578,7 @@ public final class StandardTarget: BuildPhaseTarget, @unchecked Sendable foundConfigurationNames.insert(sourceData.configurationName) } // If any configurations didn't have provisioning source data objects, then create default ones for those configurations. - for configurationName in configurationNames - { + for configurationName in configurationNames { let sourceData = ProvisioningSourceData(configurationName: configurationName, provisioningStyle: .automatic, bundleIdentifierFromInfoPlist: "") provisioningSourceData.append(sourceData) } @@ -605,28 +602,21 @@ public final class StandardTarget: BuildPhaseTarget, @unchecked Sendable } } - // MARK: AggregateTarget class - /// An AggregateTarget is a special kind of target primarily intended to group together other targets it depends on, and which does not have a defined product. However, it may also have build phases, which will be run after all of its dependencies have finished building. -public final class AggregateTarget: BuildPhaseTarget, @unchecked Sendable -{ +public final class AggregateTarget: BuildPhaseTarget, @unchecked Sendable { public override var type: TargetType { return TargetType.aggregate } } - - // MARK: PackageProductTarget class - /// A PackageProductTarget is a custom target used by the Swift package manager to encapsulate the semantics of products. /// /// This target is currently only expected to have target dependencies and an optional frameworks build phase, which should reference other (static library) targets package product targets. /// /// The behavior of this target is "as if" the dependencies of the package pass through to downstream things which link the target. -public final class PackageProductTarget: Target, @unchecked Sendable -{ +public final class PackageProductTarget: Target, @unchecked Sendable { public override var type: TargetType { return TargetType.packageProduct } /// The frameworks build phase which encodes the link dependencies. 
@@ -636,7 +626,7 @@ public final class PackageProductTarget: Target, @unchecked Sendable public let productReference: ProductReference init(_ model: SWBProtocol.PackageProductTarget, _ pifLoader: PIFLoader, signature: String) { - self.frameworksBuildPhase = model.frameworksBuildPhase.map{ BuildPhase.create($0, pifLoader) as! FrameworksBuildPhase } + self.frameworksBuildPhase = model.frameworksBuildPhase.map { BuildPhase.create($0, pifLoader) as! FrameworksBuildPhase } self.productReference = ProductReference(guid: "\(model.guid):ProductReference", name: model.name) super.init(model, pifLoader, signature: signature) self.productReference.target = self @@ -651,13 +641,10 @@ public final class PackageProductTarget: Target, @unchecked Sendable } } - // MARK: ExternalTarget class - /// An ExternalTarget represents the use of an external build system (most commonly, but not limited to, make). It is very different from other kinds of targets in that it has no build phases, and does have a defined product. -public final class ExternalTarget: Target, @unchecked Sendable -{ +public final class ExternalTarget: Target, @unchecked Sendable { public override var type: TargetType { return TargetType.external } public let toolPath: MacroStringExpression public let arguments: MacroStringListExpression @@ -672,8 +659,7 @@ public final class ExternalTarget: Target, @unchecked Sendable super.init(model, pifLoader, signature: signature) } - @_spi(Testing) public override init(fromDictionary pifDict: ProjectModelItemPIF, signature: String, withPIFLoader pifLoader: PIFLoader, errors: [String] = [], warnings: [String] = []) throws - { + @_spi(Testing) public override init(fromDictionary pifDict: ProjectModelItemPIF, signature: String, withPIFLoader pifLoader: PIFLoader, errors: [String] = [], warnings: [String] = []) throws { // The tool path is required. toolPath = try pifLoader.userNamespace.parseString(Self.parseValueForKeyAsString(PIFKey_ExternalTarget_toolPath, pifDict: pifDict)) diff --git a/Sources/SWBCore/ProjectModel/Workspace.swift b/Sources/SWBCore/ProjectModel/Workspace.swift index f2c68c93..24d6bc7a 100644 --- a/Sources/SWBCore/ProjectModel/Workspace.swift +++ b/Sources/SWBCore/ProjectModel/Workspace.swift @@ -55,14 +55,13 @@ extension WorkspaceErrors: CustomStringConvertible { } } -public final class Workspace: ProjectModelItem, PIFObject, ReferenceLookupContext, Encodable -{ +public final class Workspace: ProjectModelItem, PIFObject, ReferenceLookupContext, Encodable { static func referencedObjects(for data: EncodedPIFValue) throws -> [PIFObjectReference] { switch data { case .json(let data): // The only direct references are to projects. guard case let .plArray(projects)? 
= data["projects"] else { return [] } - return projects.compactMap{ + return projects.compactMap { guard case let .plString(signature) = $0 else { return nil } return PIFObjectReference(signature: signature, type: .project) } @@ -73,7 +72,7 @@ public final class Workspace: ProjectModelItem, PIFObject, ReferenceLookupContex // FIXME: This sucks, we are doing the protocol decode twice: Don't require duplicate binary PIF decode in incremental PIF transfer let deserializer = MsgPackDeserializer(data) let model: SWBProtocol.Workspace = try deserializer.deserialize() - return model.projectSignatures.map{ PIFObjectReference(signature: $0, type: .project) } + return model.projectSignatures.map { PIFObjectReference(signature: $0, type: .project) } } } @@ -147,7 +146,7 @@ public final class Workspace: ProjectModelItem, PIFObject, ReferenceLookupContex self.guid = model.guid self.name = model.name self.path = model.path - self.projects = try model.projectSignatures.map{ try pifLoader.loadReference(signature: $0, type: Project.self) } + self.projects = try model.projectSignatures.map { try pifLoader.loadReference(signature: $0, type: Project.self) } // The PIFLoader creates the user namespace which is used during loading, but the workspace ultimately owns it. self.userNamespace = pifLoader.userNamespace @@ -180,8 +179,7 @@ public final class Workspace: ProjectModelItem, PIFObject, ReferenceLookupContex } /// Create the workspace from a PIF property list. - @_spi(Testing) public init(fromDictionary pifDict: ProjectModelItemPIF, signature: String, withPIFLoader pifLoader: PIFLoader) throws - { + @_spi(Testing) public init(fromDictionary pifDict: ProjectModelItemPIF, signature: String, withPIFLoader pifLoader: PIFLoader) throws { self.signature = signature // The PIFLoader creates the user namespace which is used during loading, but the workspace ultimately owns it. @@ -227,7 +225,7 @@ public final class Workspace: ProjectModelItem, PIFObject, ReferenceLookupContex /// /// The targets will appear in the order they are within each nested project. public var allTargets: AnySequence { - return AnySequence(projects.lazy.flatMap{ $0.targets }) + return AnySequence(projects.lazy.flatMap { $0.targets }) } /// Find project with the given project GUID. @@ -255,8 +253,10 @@ public final class Workspace: ProjectModelItem, PIFObject, ReferenceLookupContex /// /// - Precondition: workspace.contains(target) public func project(for target: Target) -> Project { - guard let project = projectsByTarget[target] - ?? (self.target(for: target.guid).map({ projectsByTarget[$0] }) ?? nil) else { + guard + let project = projectsByTarget[target] + ?? (self.target(for: target.guid).map({ projectsByTarget[$0] }) ?? nil) + else { preconditionFailure("workspace '\(name)' does not contain target '\(target.name)'") } return project @@ -326,7 +326,7 @@ public final class Workspace: ProjectModelItem, PIFObject, ReferenceLookupContex /// Resolve the reference for the given GUID. // // FIXME: This is fairly unfortunate, from a performance perspective, we could have pre-bound these things at load time; except that we reuse loaded objects during incremental PIF loading, so we need to guarantee that they are self-contained and don't contain external references. For now, we manage this by deferring the lookup until runtime. See: - public func lookupReference(for guid: String ) -> Reference? { + public func lookupReference(for guid: String) -> Reference? 
{ return referencesByGUID[guid] } @@ -349,16 +349,12 @@ public final class Workspace: ProjectModelItem, PIFObject, ReferenceLookupContex } } - -extension Workspace: CustomStringConvertible -{ - public var description: String - { +extension Workspace: CustomStringConvertible { + public var description: String { return "\(type(of: self))<\(guid):\(name):\(path.str):\(projects.count) projects>" } } - public struct WorkspaceDiff: CustomStringConvertible, Sendable { public var leftProjects: [Project] = [] public var rightProjects: [Project] = [] @@ -370,25 +366,20 @@ public struct WorkspaceDiff: CustomStringConvertible, Sendable { public var rightReferences: [Reference] = [] public var hasChanges: Bool { - return !leftProjects.isEmpty || - !rightProjects.isEmpty || - !leftTargets.isEmpty || - !rightTargets.isEmpty || - !leftReferences.isEmpty || - !rightReferences.isEmpty + return !leftProjects.isEmpty || !rightProjects.isEmpty || !leftTargets.isEmpty || !rightTargets.isEmpty || !leftReferences.isEmpty || !rightReferences.isEmpty } public var description: String { return """ - \(type(of: self))< - leftProjects: \(leftProjects), - rightProjects: \(rightProjects), - leftTargets: \(leftTargets), - rightTargets: \(rightTargets), - leftReferences: \(leftReferences), - rightReferences: \(rightReferences) - > - """ + \(type(of: self))< + leftProjects: \(leftProjects), + rightProjects: \(rightProjects), + leftTargets: \(leftTargets), + rightTargets: \(rightTargets), + leftReferences: \(leftReferences), + rightReferences: \(rightReferences) + > + """ } } diff --git a/Sources/SWBCore/ProjectModel/WorkspaceHeaderIndex.swift b/Sources/SWBCore/ProjectModel/WorkspaceHeaderIndex.swift index ab4dd792..ff13bb7f 100644 --- a/Sources/SWBCore/ProjectModel/WorkspaceHeaderIndex.swift +++ b/Sources/SWBCore/ProjectModel/WorkspaceHeaderIndex.swift @@ -24,9 +24,11 @@ public final class WorkspaceHeaderIndex: Sendable { /// Construct the header index for a workspace. @_spi(Testing) public init(core: Core, workspace: Workspace) async { - self.projectHeaderInfo = await Dictionary(uniqueKeysWithValues: workspace.projects.concurrentMap(maximumParallelism: 10) { project in - await (project, ProjectHeaderInfo(core, project, workspace)) - }) + self.projectHeaderInfo = await Dictionary( + uniqueKeysWithValues: workspace.projects.concurrentMap(maximumParallelism: 10) { project in + await (project, ProjectHeaderInfo(core, project, workspace)) + } + ) } } @@ -90,7 +92,6 @@ public struct ProjectHeaderInfo: Sendable { } } - // Construct the set of known header files. let knownHeaders = { var knownHeaders = OrderedSet() @@ -100,12 +101,17 @@ public struct ProjectHeaderInfo: Sendable { self.knownHeaders = knownHeaders // Collect the per target information. - self.targetHeaderInfo = await Dictionary(uniqueKeysWithValues: project.targets.concurrentMap(maximumParallelism: 100, { target -> (BuildPhaseTarget, TargetHeaderInfo)? in - if case let target as BuildPhaseTarget = target, let headerInfo = TargetHeaderInfo(target, knownHeaders, workspace) { - return (target, headerInfo) - } - return nil - }).compactMap { $0 }) + self.targetHeaderInfo = await Dictionary( + uniqueKeysWithValues: project.targets.concurrentMap( + maximumParallelism: 100, + { target -> (BuildPhaseTarget, TargetHeaderInfo)? 
in + if case let target as BuildPhaseTarget = target, let headerInfo = TargetHeaderInfo(target, knownHeaders, workspace) { + return (target, headerInfo) + } + return nil + } + ).compactMap { $0 } + ) } } @@ -137,8 +143,9 @@ public struct TargetHeaderInfo: Sendable { for buildFile in headersPhase.buildFiles { // Ignore non-file references. guard case let .reference(guid) = buildFile.buildableItem, - let reference = workspace.lookupReference(for: guid), - let fileRef = reference as? FileReference else { continue } + let reference = workspace.lookupReference(for: guid), + let fileRef = reference as? FileReference + else { continue } // If we don't have any entry for the target, ignore it. // @@ -160,11 +167,11 @@ public struct TargetHeaderInfo: Sendable { } public struct HeaderDestDirs { - public let publicPath : Path - public let privatePath : Path - public let basePath : Path + public let publicPath: Path + public let privatePath: Path + public let basePath: Path - public init(publicPath: Path, privatePath: Path, basePath: Path ) { + public init(publicPath: Path, privatePath: Path, basePath: Path) { self.publicPath = publicPath self.privatePath = privatePath self.basePath = basePath @@ -180,9 +187,11 @@ public struct TargetHeaderInfo: Sendable { let publicHeadersPath = scope.evaluate(BuiltinMacros.PUBLIC_HEADERS_FOLDER_PATH) let privateHeadersPath = scope.evaluate(BuiltinMacros.PRIVATE_HEADERS_FOLDER_PATH) - return HeaderDestDirs(publicPath: wrapperPath.join(publicHeadersPath.basename), - privatePath: wrapperPath.join(privateHeadersPath.basename), - basePath: wrapperPath) + return HeaderDestDirs( + publicPath: wrapperPath.join(publicHeadersPath.basename), + privatePath: wrapperPath.join(privateHeadersPath.basename), + basePath: wrapperPath + ) } /// Utility method that generates the destination dir path for a given visibility. Returns `nil` if the path does not exist for that visibility. diff --git a/Sources/SWBCore/Provisioning.swift b/Sources/SWBCore/Provisioning.swift index aba0c0c9..f89864f6 100644 --- a/Sources/SWBCore/Provisioning.swift +++ b/Sources/SWBCore/Provisioning.swift @@ -38,7 +38,6 @@ public func computeBundleIdentifier(from scope: MacroEvaluationScope, bundleIden return scope.evaluate(bundleIdentifierFromInfoPlist, lookup: nil) } - /// Compute the identifier of the signing certificate to use. /// /// - Parameters: @@ -47,12 +46,11 @@ public func computeBundleIdentifier(from scope: MacroEvaluationScope, bundleIden /// - Returns: The signing certificate identifier to use. public func computeSigningCertificateIdentifier(from scope: MacroEvaluationScope, platform: Platform?) -> String { if platform?.isSimulator == true { - return "-" // always Ad-hoc sign for simulator even if CODE_SIGN_IDENTITY has been overridden + return "-" // always Ad-hoc sign for simulator even if CODE_SIGN_IDENTITY has been overridden } return scope.evaluate(BuiltinMacros.CODE_SIGN_IDENTITY) } - /// Determine the path for entitlements to use for code signing. /// /// - Parameters: diff --git a/Sources/SWBCore/ProvisioningTaskInputs.swift b/Sources/SWBCore/ProvisioningTaskInputs.swift index 827ff7ce..42cbaf42 100644 --- a/Sources/SWBCore/ProvisioningTaskInputs.swift +++ b/Sources/SWBCore/ProvisioningTaskInputs.swift @@ -15,8 +15,7 @@ public import SWBUtil // FIXME: Should we add some general mechanism for representing NSErrors in Swift Build? If so, we should also add a general transport structure to send it over the message pipe. // /// An error from computation of provisioning task inputs. 
-public struct ProvisioningTaskInputError: Codable, Hashable, Sendable -{ +public struct ProvisioningTaskInputError: Codable, Hashable, Sendable { public let description: String public let recoverySuggestion: String? } @@ -29,8 +28,7 @@ public struct ProvisioningTaskInputError: Codable, Hashable, Sendable /// 2. Ad-hoc signed. /// 3. Signed with a profile. /// 4. Signed without a profile. -public struct ProvisioningTaskInputs: Codable, Hashable, Sendable -{ +public struct ProvisioningTaskInputs: Codable, Hashable, Sendable { /// The SHA1 hash of the signing certificate, suitable for passing to the `codesign` tool. /// /// If this is nil, then `identityName`, `profileName`, `profileUUID` and `designatedRequirements` will all also be nil. @@ -72,8 +70,7 @@ public struct ProvisioningTaskInputs: Codable, Hashable, Sendable /// Any warnings here should be presented to the user, but signing may still proceed. public let warnings: [String] - public init(identityHash: String? = nil, identitySerialNumber: String? = nil, identityName: String? = nil, profileName: String? = nil, profileUUID: String? = nil, profilePath: Path? = nil, designatedRequirements: String? = nil, signedEntitlements: PropertyListItem = .plDict([:]), simulatedEntitlements: PropertyListItem = .plDict([:]), appIdentifierPrefix: String? = nil, teamIdentifierPrefix: String? = nil, isEnterpriseTeam: Bool? = false, useSigningTool: Bool? = false, signingToolKeyPath: String? = nil, signingToolKeyID: String? = nil, signingToolKeyIssuerID: String? = nil, keychainPath: String? = nil, errors: [[String: String]] = [], warnings: [String] = []) - { + public init(identityHash: String? = nil, identitySerialNumber: String? = nil, identityName: String? = nil, profileName: String? = nil, profileUUID: String? = nil, profilePath: Path? = nil, designatedRequirements: String? = nil, signedEntitlements: PropertyListItem = .plDict([:]), simulatedEntitlements: PropertyListItem = .plDict([:]), appIdentifierPrefix: String? = nil, teamIdentifierPrefix: String? = nil, isEnterpriseTeam: Bool? = false, useSigningTool: Bool? = false, signingToolKeyPath: String? = nil, signingToolKeyID: String? = nil, signingToolKeyIssuerID: String? = nil, keychainPath: String? = nil, errors: [[String: String]] = [], warnings: [String] = []) { self.identityHash = identityHash self.identitySerialNumber = identitySerialNumber self.identityName = identityName diff --git a/Sources/SWBCore/SDKRegistry.swift b/Sources/SWBCore/SDKRegistry.swift index 29f1596f..312f0d42 100644 --- a/Sources/SWBCore/SDKRegistry.swift +++ b/Sources/SWBCore/SDKRegistry.swift @@ -51,8 +51,7 @@ public final class SDK: Sendable { baseAndVersion = sdkCanonicalName.withoutSuffix(".\(supportedSuffix)") suffix = supportedSuffix break - } - else if sdkCanonicalName.hasSuffix(supportedSuffix) { + } else if sdkCanonicalName.hasSuffix(supportedSuffix) { baseAndVersion = sdkCanonicalName.withoutSuffix(supportedSuffix) suffix = supportedSuffix break @@ -148,7 +147,7 @@ public final class SDK: Sendable { public let librarySearchPaths: [Path] /// Provides the platform version mapping when working with macCatalyst and macOS variants. - public let versionMap: [String:[Version:Version]] + public let versionMap: [String: [Version: Version]] /// The SDK-specific directory macros. 
let directoryMacros: [StringMacroDeclaration] @@ -167,7 +166,7 @@ public final class SDK: Sendable { /// Note that this is technically "broken" for macOS, as the third version component in practice is more like a minor version, and macOS does not have true patch versions, but we'll respect the value in the SDK as-is for now. @_spi(Testing) public let maximumDeploymentTarget: Version? - init(_ canonicalName: String, canonicalNameComponents: CanonicalNameComponents?, _ aliases: Set, _ cohortPlatforms: [String], _ displayName: String, _ path: Path, _ version: Version?, _ productBuildVersion: String?, _ defaultSettings: [String: PropertyListItem], _ overrideSettings: [String: PropertyListItem], _ variants: [String: SDKVariant], _ defaultDeploymentTarget: Version?, _ defaultVariant: SDKVariant?, _ searchPaths: (header: [Path], framework: [Path], library: [Path]), _ directoryMacros: [StringMacroDeclaration], _ isBaseSDK: Bool, _ fallbackSettingConditionValues: [String], _ toolchains: [String], _ versionMap: [String:[Version:Version]], _ maximumDeploymentTarget: Version?) { + init(_ canonicalName: String, canonicalNameComponents: CanonicalNameComponents?, _ aliases: Set, _ cohortPlatforms: [String], _ displayName: String, _ path: Path, _ version: Version?, _ productBuildVersion: String?, _ defaultSettings: [String: PropertyListItem], _ overrideSettings: [String: PropertyListItem], _ variants: [String: SDKVariant], _ defaultDeploymentTarget: Version?, _ defaultVariant: SDKVariant?, _ searchPaths: (header: [Path], framework: [Path], library: [Path]), _ directoryMacros: [StringMacroDeclaration], _ isBaseSDK: Bool, _ fallbackSettingConditionValues: [String], _ toolchains: [String], _ versionMap: [String: [Version: Version]], _ maximumDeploymentTarget: Version?) { self.canonicalName = canonicalName self.canonicalNameComponents = canonicalNameComponents self.aliases = aliases @@ -338,8 +337,7 @@ public final class SDKVariant: PlatformInfoProvider, Sendable { // Additional settings for the SDK variants. In general these should be moved into the variant settings in SDKSettings.plist when possible. if name == MacCatalystInfo.sdkVariantName { modifiedSettings["IS_MACCATALYST"] = .plString("YES") - } - else if name == "macos" { + } else if name == "macos" { // Also set IS_MACCATALYST explicitly for the 'macos' variant, to make build setting interpolation easier. modifiedSettings["IS_MACCATALYST"] = .plString("NO") } @@ -363,17 +361,14 @@ public final class SDKVariant: PlatformInfoProvider, Sendable { if let min = minimumDeploymentTarget, let max = maximumDeploymentTarget { do { deploymentTargetRange = try VersionRange(start: min, end: max) - } - catch { + } catch { // ignore error for now (same as in Platform.deploymentTargetsCache) deploymentTargetRange = VersionRange() } - } - else { + } else { deploymentTargetRange = VersionRange() } - var validDeploymentTargets = [Version]() for deploymentTarget in supportedTargetDict["ValidDeploymentTargets"]?.arrayValue ?? [] { if let value = try? Version(deploymentTarget.stringValue ?? "") { @@ -407,16 +402,18 @@ public final class SDKVariant: PlatformInfoProvider, Sendable { self.minimumOSForSwiftConcurrency = try (supportedTargetDict["SwiftConcurrencyMinimumDeploymentTarget"]?.stringValue ?? concurrency).map { try Version($0) } self.minimumOSForSwiftSpan = try (supportedTargetDict["SwiftSpanMinimumDeploymentTarget"]?.stringValue ?? span).map { try Version($0) } - self.systemPrefix = supportedTargetDict["SystemPrefix"]?.stringValue ?? 
{ - switch name { - case MacCatalystInfo.sdkVariantName: - return "/System/iOSSupport" - case "driverkit": - return "/System/DriverKit" - default: - return "" - } - }() + self.systemPrefix = + supportedTargetDict["SystemPrefix"]?.stringValue + ?? { + switch name { + case MacCatalystInfo.sdkVariantName: + return "/System/iOSSupport" + case "driverkit": + return "/System/DriverKit" + default: + return "" + } + }() } private static func fallbackDeviceFamiliesData(variantName name: String) throws -> PropertyListItem { @@ -439,7 +436,7 @@ public final class SDKVariant: PlatformInfoProvider, Sendable { "Identifier": .plInt(6), "Name": .plString("mac"), "DisplayName": .plString("Mac"), - ]) + ]), ]) default: // Other platforms don't have device families @@ -467,7 +464,7 @@ public final class SDKVariant: PlatformInfoProvider, Sendable { private static func fallbackRecommendedDeploymentTarget(variantName name: String) -> String? { switch name { - // Late Summer 2019 aligned, except iOS which got one final 12.x update in Winter 2020, making this version set the last minor update series of the Fall 2018 aligned releases. + // Late Summer 2019 aligned, except iOS which got one final 12.x update in Winter 2020, making this version set the last minor update series of the Fall 2018 aligned releases. case "macos", "macosx": return "10.14.6" case "iphoneos", "iphonesimulator": @@ -477,7 +474,7 @@ public final class SDKVariant: PlatformInfoProvider, Sendable { case "watchos", "watchsimulator": return "5.3" - // No Summer 2019 aligned versions since these were first introduced on or after Fall 2019, so simply use the minimum versions. + // No Summer 2019 aligned versions since these were first introduced on or after Fall 2019, so simply use the minimum versions. case "driverkit": return "19.0" case MacCatalystInfo.sdkVariantName: @@ -485,7 +482,7 @@ public final class SDKVariant: PlatformInfoProvider, Sendable { case "xros", "xrsimulator": return "1.0" - // Fall back to the default deployment target, which is equal to the SDK version. + // Fall back to the default deployment target, which is equal to the SDK version. default: return nil } @@ -631,14 +628,16 @@ public final class SDKRegistry: SDKRegistryLookup, CustomStringConvertible, Send private func registerSDKsInDirectory(_ path: Path, _ platform: Platform?) { guard let pathResolved = try? localFS.realpath(path) else { return } guard let contents = try? localFS.listdir(path) else { return } - guard let sdkPaths: [(sdkPath: Path, sdkPathResolved: Path)] = try? (contents.filter { $0.hasSuffix(".sdk") }.sorted(by: <).map { path.join($0) }.map{ ($0, try localFS.realpath($0)) }) else { return } + guard let sdkPaths: [(sdkPath: Path, sdkPathResolved: Path)] = try? (contents.filter { $0.hasSuffix(".sdk") }.sorted(by: <).map { path.join($0) }.map { ($0, try localFS.realpath($0)) }) else { return } // If you have SDKs A and L in the same directory, where L is a symlink to A, we'll ignore A and register L. - let sdkNamesTargetedByLinks = Set(sdkPaths.compactMap { (sdkPath, sdkPathResolved) -> String? in - guard localFS.isSymlink(sdkPath) else { return nil } - guard sdkPathResolved.dirname == pathResolved else { return nil } - return sdkPathResolved.basename - }) + let sdkNamesTargetedByLinks = Set( + sdkPaths.compactMap { (sdkPath, sdkPathResolved) -> String? 
in + guard localFS.isSymlink(sdkPath) else { return nil } + guard sdkPathResolved.dirname == pathResolved else { return nil } + return sdkPathResolved.basename + } + ) for (sdkPath, sdkPathResolved) in sdkPaths { guard !sdkNamesTargetedByLinks.contains(sdkPath.basename) else { continue } @@ -722,7 +721,8 @@ public final class SDKRegistry: SDKRegistryLookup, CustomStringConvertible, Send displayName = value } - let isBaseSDK = items["IsBaseSDK"]?.looselyTypedBoolValue + let isBaseSDK = + items["IsBaseSDK"]?.looselyTypedBoolValue ?? items["isBaseSDK"]?.looselyTypedBoolValue ?? false @@ -736,8 +736,7 @@ public final class SDKRegistry: SDKRegistryLookup, CustomStringConvertible, Send var keyIsAllowed = true if SDKRegistry.ignoredSparseSdkSettingKeys.contains(key) { keyIsAllowed = false - } - else { + } else { for suffix in SDKRegistry.ignoredSparseSdkSettingKeySuffixes { if key.hasSuffix(suffix) { keyIsAllowed = false @@ -770,14 +769,14 @@ public final class SDKRegistry: SDKRegistryLookup, CustomStringConvertible, Send var defaultSettings: [String: PropertyListItem] = [:] if case .plDict(let settingsItems)? = items["DefaultProperties"] { defaultSettings = filteredSettings(settingsItems) - .filter { $0.key != "TEST_FRAMEWORK_DEVELOPER_VARIANT_SUBPATH" } // rdar://107954685 (Remove watchOS special case for testing framework paths) + .filter { $0.key != "TEST_FRAMEWORK_DEVELOPER_VARIANT_SUBPATH" } // rdar://107954685 (Remove watchOS special case for testing framework paths) } // Parse the custom properties settings. var overrideSettings: [String: PropertyListItem] = [:] if case .plDict(let settingsItems)? = items["CustomProperties"] { overrideSettings = filteredSettings(settingsItems) - .filter { !$0.key.hasPrefix("SWIFT_MODULE_ONLY_") } // Rev-lock: don't set SWIFT_MODULE_ONLY_ in SDKs + .filter { !$0.key.hasPrefix("SWIFT_MODULE_ONLY_") } // Rev-lock: don't set SWIFT_MODULE_ONLY_ in SDKs } // Parse the Variants array and the SupportedTargets dictionary, then create the SDKVariant objects from them. Note that it is not guaranteed that any variant will have both sets of data, so we don't the presence of either one. @@ -835,8 +834,7 @@ public final class SDKRegistry: SDKRegistryLookup, CustomStringConvertible, Send let supportedTargetDict: [String: PropertyListItem] if case .plDict(let dict) = plist { supportedTargetDict = dict - } - else { + } else { supportedTargetDict = [:] } @@ -892,7 +890,7 @@ public final class SDKRegistry: SDKRegistryLookup, CustomStringConvertible, Send let searchPath = path.join(str) guard localFS.exists(searchPath), localFS.isDirectory(searchPath) else { // FIXME: Re-enable this when we want to warn about search paths an SDK declares which do not exist. -// delegate.warning(path, "header search path does not exist: \(searchPath.str)") + // delegate.warning(path, "header search path does not exist: \(searchPath.str)") continue } headerSearchPaths.append(searchPath) @@ -905,7 +903,7 @@ public final class SDKRegistry: SDKRegistryLookup, CustomStringConvertible, Send let searchPath = path.join(str) guard localFS.exists(searchPath), localFS.isDirectory(searchPath) else { // FIXME: Re-enable this when we want to warn about search paths an SDK declares which do not exist. 
-// delegate.warning(path, "framework search path does not exist: \(searchPath.str)") + // delegate.warning(path, "framework search path does not exist: \(searchPath.str)") continue } frameworkSearchPaths.append(searchPath) @@ -918,7 +916,7 @@ public final class SDKRegistry: SDKRegistryLookup, CustomStringConvertible, Send let searchPath = path.join(str) guard localFS.exists(searchPath), localFS.isDirectory(searchPath) else { // FIXME: Re-enable this when we want to warn about search paths an SDK declares which do not exist. -// delegate.warning(path, "library search path does not exist: \(searchPath.str)") + // delegate.warning(path, "library search path does not exist: \(searchPath.str)") continue } librarySearchPaths.append(searchPath) @@ -938,8 +936,7 @@ public final class SDKRegistry: SDKRegistryLookup, CustomStringConvertible, Send if case .plString(let versionString)? = items["Version"] { do { version = try Version(versionString) - } - catch { + } catch { delegate.error(path, "invalid 'Version' field: \(error)") return nil } @@ -966,15 +963,14 @@ public final class SDKRegistry: SDKRegistryLookup, CustomStringConvertible, Send } } - var versionMap: [String:[Version:Version]] = [:] + var versionMap: [String: [Version: Version]] = [:] if case .plDict(let container)? = items["VersionMap"] { for (key, dict) in container { - var mappings: [Version:Version] = [:] + var mappings: [Version: Version] = [:] for (from, to) in dict.dictValue ?? [:] { do { mappings[try Version(from)] = try Version(to.stringValue ?? "") - } - catch { + } catch { delegate.warning("Unable to create version map for '\(key)' mapping '\(from)' to '\(to)' for SDK '\(displayName)'.") } } @@ -998,8 +994,7 @@ public final class SDKRegistry: SDKRegistryLookup, CustomStringConvertible, Send if let sdkNameComponents = try? parseSDKName(canonicalName) { directoryMacros.append(try delegate.namespace.declareStringMacro("SDK_DIR_" + sdkNameComponents.basename.asLegalCIdentifier)) } - } - catch { + } catch { delegate.error("\(error)") return nil } @@ -1030,7 +1025,7 @@ public final class SDKRegistry: SDKRegistryLookup, CustomStringConvertible, Send "BUILD_VARIANTS", "CURRENT_ARCH", "PLATFORM_NAME", - "SDKROOT" + "SDKROOT", ]) private static let ignoredSparseSdkSettingKeySuffixes = Set([ @@ -1114,8 +1109,7 @@ public final class SDKRegistry: SDKRegistryLookup, CustomStringConvertible, Send if candidateComponents.version ?? Version(0) > prevSDK.components.version ?? Version(0) { matchedSDK = (candidateSDK, candidateComponents) } - } - else { + } else { matchedSDK = (candidateSDK, candidateComponents) } } @@ -1127,8 +1121,8 @@ public final class SDKRegistry: SDKRegistryLookup, CustomStringConvertible, Send public func lookup(_ path: Path) -> SDK? { #if !os(Windows) - // TODO: Turn this validation back on once our path handling is cleaned up a bit more - precondition(path.isAbsolute, "\(path.str) is not absolute") + // TODO: Turn this validation back on once our path handling is cleaned up a bit more + precondition(path.isAbsolute, "\(path.str) is not absolute") #endif // First see if we already have it in the cache. @@ -1164,8 +1158,8 @@ public final class SDKRegistry: SDKRegistryLookup, CustomStringConvertible, Send public func lookup(nameOrPath key: String, basePath: Path, activeRunDestination: RunDestinationInfo?) throws -> SDK? 
{ #if !os(Windows) - // TODO: Turn this validation back on once our path handling is cleaned up a bit more - precondition(basePath.isAbsolute, "\(basePath.str) is not absolute") + // TODO: Turn this validation back on once our path handling is cleaned up a bit more + precondition(basePath.isAbsolute, "\(basePath.str) is not absolute") #endif // Check if this is a request for the boot system SDK. @@ -1219,7 +1213,8 @@ public final class SDKRegistry: SDKRegistryLookup, CustomStringConvertible, Send // by the cohort platforms of the run destination's SDK's platform. This is necessary to resolve // driverkit when we have a DriverKit run destination but with a platform-specific SDK. if let runDestination = activeRunDestination, - let cohortPlatforms = try? lookup(runDestination.sdk, activeRunDestination: nil)?.cohortPlatforms { + let cohortPlatforms = try? lookup(runDestination.sdk, activeRunDestination: nil)?.cohortPlatforms + { for cohortPlatform in cohortPlatforms { if let list = sdksByCohortPlatform[cohortPlatform] { return list @@ -1257,7 +1252,6 @@ public final class SDKRegistry: SDKRegistryLookup, CustomStringConvertible, Send // MARK: CustomStringConvertible conformance - public var description: String { return "<\(Swift.type(of: self)):\(self.type)>" @@ -1271,9 +1265,14 @@ public struct AmbiguousSDKLookupError: Hashable, Error { var diagnostic: Diagnostic { let prefix = forRunDestination ? "unable to resolve run destination SDK:" : "unable to resolve SDK:" - return Diagnostic(behavior: .error, location: .unknown, data: DiagnosticData("\(prefix) multiple SDKs match alias '\(canonicalName)'"), childDiagnostics: candidateSDKs.sorted(by: \.canonicalName).map { sdk in - Diagnostic(behavior: .note, location: .unknown, data: DiagnosticData("Candidate '\(sdk.canonicalName)' at path '\(sdk.path.str)'")) - }) + return Diagnostic( + behavior: .error, + location: .unknown, + data: DiagnosticData("\(prefix) multiple SDKs match alias '\(canonicalName)'"), + childDiagnostics: candidateSDKs.sorted(by: \.canonicalName).map { sdk in + Diagnostic(behavior: .note, location: .unknown, data: DiagnosticData("Candidate '\(sdk.canonicalName)' at path '\(sdk.path.str)'")) + } + ) } public func hash(into hasher: inout Hasher) { diff --git a/Sources/SWBCore/Settings/BuiltinMacros.swift b/Sources/SWBCore/Settings/BuiltinMacros.swift index 5570cc92..34a0047b 100644 --- a/Sources/SWBCore/Settings/BuiltinMacros.swift +++ b/Sources/SWBCore/Settings/BuiltinMacros.swift @@ -952,7 +952,7 @@ public final class BuiltinMacros { public static let PRODUCT_BUNDLE_PACKAGE_TYPE = BuiltinMacros.declareStringMacro("PRODUCT_BUNDLE_PACKAGE_TYPE") public static let PRODUCT_DEFINITION_PLIST = BuiltinMacros.declareStringMacro("PRODUCT_DEFINITION_PLIST") public static let PRODUCT_MODULE_NAME = BuiltinMacros.declareStringMacro("PRODUCT_MODULE_NAME") - public static let PRODUCT_SPECIFIC_LDFLAGS = BuiltinMacros.declareStringListMacro("PRODUCT_SPECIFIC_LDFLAGS") // FIXME: We shouldn't need to declare this, but it is a workaround for an instance of: [Swift Build] Unable to find XCTest module + public static let PRODUCT_SPECIFIC_LDFLAGS = BuiltinMacros.declareStringListMacro("PRODUCT_SPECIFIC_LDFLAGS") // FIXME: We shouldn't need to declare this, but it is a workaround for an instance of: [Swift Build] Unable to find XCTest module public static let PRODUCT_TYPE_FRAMEWORK_SEARCH_PATHS = BuiltinMacros.declarePathListMacro("PRODUCT_TYPE_FRAMEWORK_SEARCH_PATHS") public static let PRODUCT_TYPE_HEADER_SEARCH_PATHS = 
BuiltinMacros.declarePathListMacro("PRODUCT_TYPE_HEADER_SEARCH_PATHS") public static let PRODUCT_TYPE_LIBRARY_SEARCH_PATHS = BuiltinMacros.declarePathListMacro("PRODUCT_TYPE_LIBRARY_SEARCH_PATHS") @@ -1073,7 +1073,7 @@ public final class BuiltinMacros { public static let SWIFT_SYSTEM_INCLUDE_PATHS = BuiltinMacros.declarePathListMacro("SWIFT_SYSTEM_INCLUDE_PATHS") public static let PACKAGE_RESOURCE_BUNDLE_NAME = BuiltinMacros.declareStringMacro("PACKAGE_RESOURCE_BUNDLE_NAME") public static let PACKAGE_RESOURCE_TARGET_KIND = BuiltinMacros.declareEnumMacro("PACKAGE_RESOURCE_TARGET_KIND") as EnumMacroDeclaration - public static let USE_SWIFT_RESPONSE_FILE = BuiltinMacros.declareBooleanMacro("USE_SWIFT_RESPONSE_FILE") // remove in rdar://53000820 + public static let USE_SWIFT_RESPONSE_FILE = BuiltinMacros.declareBooleanMacro("USE_SWIFT_RESPONSE_FILE") // remove in rdar://53000820 public static let SWIFT_INSTALL_MODULE = BuiltinMacros.declareBooleanMacro("SWIFT_INSTALL_MODULE") public static let SWIFT_INSTALL_MODULE_FOR_DEPLOYMENT = BuiltinMacros.declareBooleanMacro("SWIFT_INSTALL_MODULE_FOR_DEPLOYMENT") public static let SWIFT_INSTALL_MODULE_ABI_DESCRIPTOR = BuiltinMacros.declareBooleanMacro("SWIFT_INSTALL_MODULE_ABI_DESCRIPTOR") @@ -1395,7 +1395,6 @@ public final class BuiltinMacros { return try! builtinNamespace.declarePathListMacro(name) } - private static let namespaceInitializationMutex = SWBMutex(()) /// Public access to the namespace forces initialization. @@ -2297,7 +2296,7 @@ public final class BuiltinMacros { PACKAGE_RESOURCE_TARGET_KIND, __ORIGINAL_SDK_DEFINED_LLVM_TARGET_TRIPLE_SYS, SWIFT_PLATFORM_TARGET_PREFIX, - USE_SWIFT_RESPONSE_FILE, // remove in rdar://53000820 + USE_SWIFT_RESPONSE_FILE, // remove in rdar://53000820 SWIFT_INSTALL_MODULE, SWIFT_INSTALL_MODULE_FOR_DEPLOYMENT, SWIFT_INSTALL_MODULE_ABI_DESCRIPTOR, @@ -2482,7 +2481,7 @@ public final class BuiltinMacros { BUILD_ACTIVE_RESOURCES_ONLY, ENABLE_ONLY_ACTIVE_RESOURCES, ENABLE_PLAYGROUND_RESULTS, - __SKIP_BUILD + __SKIP_BUILD, ] /// Force initialization of entitlements macros. @@ -2684,7 +2683,7 @@ public extension BuiltinMacros { return value.compactMap(fn) } - static func ifSet(_ macro:PathMacroDeclaration, in scope: MacroEvaluationScope, fn: (String) -> [String]) -> [String] { + static func ifSet(_ macro: PathMacroDeclaration, in scope: MacroEvaluationScope, fn: (String) -> [String]) -> [String] { let value = scope.evaluate(macro).str if value.isEmpty { return [] } return fn(value) @@ -2947,7 +2946,8 @@ extension Diagnostic.Location { extension BuildVersion.Platform { public func deploymentTargetSettingName(infoLookup: (any PlatformInfoLookup)?) 
-> String { guard let infoProvider = infoLookup?.lookupPlatformInfo(platform: self), - let dtsn = infoProvider.deploymentTargetSettingName else { + let dtsn = infoProvider.deploymentTargetSettingName + else { fatalError("Mach-O based platforms must provide a deployment target setting name") } return dtsn diff --git a/Sources/SWBCore/Settings/RecursiveSearchPathResolver.swift b/Sources/SWBCore/Settings/RecursiveSearchPathResolver.swift index 69fa4523..64d227a4 100644 --- a/Sources/SWBCore/Settings/RecursiveSearchPathResolver.swift +++ b/Sources/SWBCore/Settings/RecursiveSearchPathResolver.swift @@ -39,7 +39,7 @@ public final class RecursiveSearchPathResolver: Sendable { self.excludedPatterns = excludedPatterns self.includedPatterns = includedPatterns } - public static func <(lhs: Request, rhs: Request) -> Bool { + public static func < (lhs: Request, rhs: Request) -> Bool { if lhs.path != rhs.path { return lhs.path.str < rhs.path.str } if lhs.sourcePath != rhs.sourcePath { return lhs.sourcePath.str < rhs.sourcePath.str } if lhs.excludedPatterns != rhs.excludedPatterns { return lhs.excludedPatterns < rhs.excludedPatterns } @@ -80,7 +80,7 @@ public final class RecursiveSearchPathResolver: Sendable { self.warnings = warnings } - public static func ==(lhs: Result, rhs: Result) -> Bool { + public static func == (lhs: Result, rhs: Result) -> Bool { return lhs.paths == rhs.paths && lhs.warnings == rhs.warnings } @@ -147,7 +147,7 @@ public final class RecursiveSearchPathResolver: Sendable { /// The canonical list of cached results. public var allResults: [CachedResult] { - return requestCache.keys.sorted().map{ CachedResult(request: $0, result: requestCache[$0]!) } + return requestCache.keys.sorted().map { CachedResult(request: $0, result: requestCache[$0]!) } } /// Expand a recursive search path. @@ -259,7 +259,7 @@ public final class RecursiveSearchPathResolver: Sendable { } } -private func <(lhs: [String]?, rhs: [String]?) -> Bool { +private func < (lhs: [String]?, rhs: [String]?) -> Bool { switch (lhs, rhs) { case (let lhs?, let rhs?): return lhs.lexicographicallyPrecedes(rhs) diff --git a/Sources/SWBCore/Settings/Settings.swift b/Sources/SWBCore/Settings/Settings.swift index 32ba3c39..a23be8c1 100644 --- a/Sources/SWBCore/Settings/Settings.swift +++ b/Sources/SWBCore/Settings/Settings.swift @@ -106,7 +106,7 @@ fileprivate struct PreOverridesSettings { } fileprivate var universalDefaults: MacroValueAssignmentTable { return universalDefaultsCache.getValue(self) } - private let universalDefaultsCache = LazyCache{ (settings: CoreSettings) -> MacroValueAssignmentTable in settings.computeUniversalDefaults() } + private let universalDefaultsCache = LazyCache { (settings: CoreSettings) -> MacroValueAssignmentTable in settings.computeUniversalDefaults() } private func computeUniversalDefaults() -> MacroValueAssignmentTable { var table = MacroValueAssignmentTable(namespace: BuiltinMacros.namespace) @@ -227,35 +227,35 @@ fileprivate struct PreOverridesSettings { // FIXME: This never actually pushed anything to the table, so I've disabled it for now. #if false - private var unionedCustomizedCompilerDefaultsCache = Registry() - fileprivate func unionedCustomizedCompilerDefaults(domain: String) -> MacroValueAssignmentTable { - // FIXME: This table is barely used (for Xcode proper it is almost empty). We should figure out if there might be a simpler solution to this problem. 
- // - // That said, it would likely be used a lot more often if we started to pull compiler specific defaults out of the unioned table, although if we had alternate support for them (for example, recognizing the macro type as compiler specific and error out on access from invalid contents), then we might still be able to push them as a single unioned table. - return unionedCustomizedCompilerDefaultsCache.getOrInsert(domain) { - // Add the defaults from all the registered tools in the given domain. + private var unionedCustomizedCompilerDefaultsCache = Registry() + fileprivate func unionedCustomizedCompilerDefaults(domain: String) -> MacroValueAssignmentTable { + // FIXME: This table is barely used (for Xcode proper it is almost empty). We should figure out if there might be a simpler solution to this problem. // - // FIXME: This is somewhat wasteful, as we end up duplicating values for super specifications. However, that lets us keep the condition set required to enable a particular compiler very simple. - let unionedDefaults = unionedToolDefaults(domain: domain).table - let customizedDefaults = MacroValueAssignmentTable(namespace: core.specRegistry.internalMacroNamespace) - for spec in core.specRegistry.findSpecs(CompilerSpec.self, domain: domain) { - // Add all the necessary defaults. - for option in core.specRegistry.effectiveFlattenedBuildOptions(spec).values { - if let defaultValue = option.defaultValue { - // Only push the default value if it diverges from the existing default. - // - // FIXME: This optimization could be subsumed by the macro assignment table itself, but for now we do it here as an important special case. - if let existingDefault = unionedDefaults.lookupMacro(option.macro)?.expression, existingDefault == defaultValue { - continue - } + // That said, it would likely be used a lot more often if we started to pull compiler specific defaults out of the unioned table, although if we had alternate support for them (for example, recognizing the macro type as compiler specific and error out on access from invalid contents), then we might still be able to push them as a single unioned table. + return unionedCustomizedCompilerDefaultsCache.getOrInsert(domain) { + // Add the defaults from all the registered tools in the given domain. + // + // FIXME: This is somewhat wasteful, as we end up duplicating values for super specifications. However, that lets us keep the condition set required to enable a particular compiler very simple. + let unionedDefaults = unionedToolDefaults(domain: domain).table + let customizedDefaults = MacroValueAssignmentTable(namespace: core.specRegistry.internalMacroNamespace) + for spec in core.specRegistry.findSpecs(CompilerSpec.self, domain: domain) { + // Add all the necessary defaults. + for option in core.specRegistry.effectiveFlattenedBuildOptions(spec).values { + if let defaultValue = option.defaultValue { + // Only push the default value if it diverges from the existing default. + // + // FIXME: This optimization could be subsumed by the macro assignment table itself, but for now we do it here as an important special case. + if let existingDefault = unionedDefaults.lookupMacro(option.macro)?.expression, existingDefault == defaultValue { + continue + } - // FIXME: Need ability to push conditional assignments. + // FIXME: Need ability to push conditional assignments. + } } } + return customizedDefaults } - return customizedDefaults } - } #endif /// The cache for system build rules. 
@@ -477,8 +477,8 @@ final class WorkspaceSettings: Sendable { // Add the compiler specific settings. // // This is necessary to support tools that have distinct values for the same settings (otherwise the tool defaults above would covert it), and is used in conjunction with a compiler condition asserted by the build phase processing. - #if false // This never did anything, see comments at `unionedCustomizedCompilerDefaults` - push(coreSettings.unionedCustomizedCompilerDefaults(domain: domain)) + #if false // This never did anything, see comments at `unionedCustomizedCompilerDefaults` + push(coreSettings.unionedCustomizedCompilerDefaults(domain: domain)) #endif return BuiltinSettingsInfo(table: builtinsTable, exportedMacros: exportedMacros, errors: errors) @@ -589,7 +589,6 @@ final class WorkspaceSettings: Sendable { } } - /// This class represents the computed settings of a project and (optionally) target. public final class Settings: PlatformBuildContext, Sendable { /// The build parameters which were used to construct these settings. @@ -756,9 +755,9 @@ public final class Settings: PlatformBuildContext, Sendable { supportsMacCatalystMacros.formUnion(sdkVariantInfoExtension.supportsMacCatalystMacroNames) } - return supportsMacCatalystMacros.contains { scope.evaluate(scope.namespace.parseString("$(\($0)")).boolValue } || + return supportsMacCatalystMacros.contains { scope.evaluate(scope.namespace.parseString("$(\($0)")).boolValue } // For index build ensure zippered frameworks can be configured separately for both macOS and macCatalyst. - (scope.evaluate(BuiltinMacros.IS_ZIPPERED) && scope.evaluate(BuiltinMacros.INDEX_ENABLE_BUILD_ARENA)) + || (scope.evaluate(BuiltinMacros.IS_ZIPPERED) && scope.evaluate(BuiltinMacros.INDEX_ENABLE_BUILD_ARENA)) } public var enableTargetPlatformSpecialization: Bool { @@ -766,8 +765,7 @@ public final class Settings: PlatformBuildContext, Sendable { } public static func targetPlatformSpecializationEnabled(scope: MacroEvaluationScope) -> Bool { - return scope.evaluate(BuiltinMacros.ALLOW_TARGET_PLATFORM_SPECIALIZATION) || - SWBFeatureFlag.allowTargetPlatformSpecialization.value + return scope.evaluate(BuiltinMacros.ALLOW_TARGET_PLATFORM_SPECIALIZATION) || SWBFeatureFlag.allowTargetPlatformSpecialization.value } public var enableBuildRequestOverrides: Bool { @@ -826,7 +824,7 @@ public final class Settings: PlatformBuildContext, Sendable { // Construct the settings table. let builder = SettingsBuilder(workspaceContext, buildRequestContext, parameters, settingsContext, provisioningTaskInputs, impartedBuildProperties, artifactBundleInfo, includeExports: includeExports, sdkRegistry) - let (boundProperties, boundDeploymentTarget) = MacroNamespace.withExpressionInterningEnabled{ builder.construct() } + let (boundProperties, boundDeploymentTarget) = MacroNamespace.withExpressionInterningEnabled { builder.construct() } // Extract the constructed data. self.targetConfiguration = builder.targetConfiguration @@ -990,9 +988,11 @@ extension WorkspaceContext { } // Add the search paths from each loaded plugin. - paths.append(contentsOf: core.pluginManager.extensions(of: SpecificationsExtensionPoint.self).flatMap { ext in - ext.specificationSearchPaths(resourceSearchPaths: core.resourceSearchPaths).compactMap { try? $0.filePath } - }.sorted()) + paths.append( + contentsOf: core.pluginManager.extensions(of: SpecificationsExtensionPoint.self).flatMap { ext in + ext.specificationSearchPaths(resourceSearchPaths: core.resourceSearchPaths).compactMap { try? 
$0.filePath } + }.sorted() + ) // Add the binary paths for each toolchain. for toolchain in toolchains { @@ -1150,11 +1150,12 @@ private class SettingsBuilder { let pathDeclarations: [PathMacroDeclaration: String] let pathListDeclarations: [PathListMacroDeclaration: Array] - init(_ scope: MacroEvaluationScope, - _ stringDeclarations: [StringMacroDeclaration], - _ stringListDeclarations: [StringListMacroDeclaration], - _ pathDeclarations: [PathMacroDeclaration], - _ pathListDeclarations: [PathListMacroDeclaration] + init( + _ scope: MacroEvaluationScope, + _ stringDeclarations: [StringMacroDeclaration], + _ stringListDeclarations: [StringListMacroDeclaration], + _ pathDeclarations: [PathMacroDeclaration], + _ pathListDeclarations: [PathListMacroDeclaration] ) { self.stringDeclarations = Dictionary(uniqueKeysWithValues: stringDeclarations.map { ($0, scope.evaluate($0)) }) self.stringListDeclarations = Dictionary(uniqueKeysWithValues: stringListDeclarations.map { ($0, scope.evaluate($0)) }) @@ -1255,13 +1256,11 @@ private class SettingsBuilder { /// The namespace for user macros. Its parent is the namespace of the `WorkspaceContext` for the settings. let userNamespace: MacroNamespace - // Properties derived from the properties the builder was initialized with. /// The bound build configuration for the target. var targetConfiguration: BuildConfiguration? = nil - // Bound properties of the builder. These will sometimes come from the BoundProperties struct and are included here for convenience. The builder should be careful not to access these methods before they are added. // FIXME: We should find some compile-time way to enforce that these properties can't be used before they are added. @@ -1278,7 +1277,6 @@ private class SettingsBuilder { var moduleDependencies: [ModuleDependency] = [] var headerDependencies: [HeaderDependency] = [] - // Mutable state of the builder as we're building up the settings table. /// The table we build up of macro assignments. @@ -1290,8 +1288,8 @@ private class SettingsBuilder { var exportedNativeIDs = Set() var errors = OrderedSet() - var warnings = OrderedSet() - var notes = OrderedSet() + var warnings = OrderedSet() + var notes = OrderedSet() var diagnostics = OrderedSet() /// Target-specific counterpart of `diagnostics`. @@ -1368,7 +1366,6 @@ private class SettingsBuilder { self.includeExports = includeExports && !forBindingProperties } - // MARK: Settings construction /// Construct the settings data. @@ -1411,7 +1408,7 @@ private class SettingsBuilder { // Add the toolchain settings. // // We push in reverse order to honor the precedence correctly. - for (idx,toolchain) in boundProperties.toolchains.enumerated().reversed() { + for (idx, toolchain) in boundProperties.toolchains.enumerated().reversed() { addToolchainSettings(toolchain, isPrimary: idx == 0) } @@ -1419,7 +1416,7 @@ private class SettingsBuilder { var table = MacroValueAssignmentTable(namespace: userNamespace) // The order should match the order of TOOLCHAINS. 
- table.push(BuiltinMacros.EFFECTIVE_TOOLCHAINS_DIRS, literal: boundProperties.toolchains.map{ $0.path.str }) + table.push(BuiltinMacros.EFFECTIVE_TOOLCHAINS_DIRS, literal: boundProperties.toolchains.map { $0.path.str }) push(table) } @@ -1560,8 +1557,7 @@ private class SettingsBuilder { if scope.evaluate(BuiltinMacros.ENABLE_ON_DEMAND_RESOURCES) { if let productType = self.productType, !scope.evaluate(BuiltinMacros.SUPPORTS_ON_DEMAND_RESOURCES) { self.errors.append("On-Demand Resources is enabled (ENABLE_ON_DEMAND_RESOURCES = YES), but is not supported for \(productType.name.lowercased()) targets") - } - else { + } else { if scope.evaluate(BuiltinMacros.WRAP_ASSET_PACKS_IN_SEPARATE_DIRECTORIES) { self.errors.append("WRAP_ASSET_PACKS_IN_SEPARATE_DIRECTORIES=YES is not supported") } @@ -1626,9 +1622,11 @@ private class SettingsBuilder { } } - if scope.evaluate(BuiltinMacros.ENABLE_PROJECT_OVERRIDE_SPECS), let projectOverrideSpec = core.specRegistry.findSpecs(ProjectOverridesSpec.self, domain: "").filter({ spec in - spec.projectName == (scope.evaluate(BuiltinMacros.RC_ProjectName).nilIfEmpty ?? scope.evaluate(BuiltinMacros.SRCROOT).basename) - }).only { + if scope.evaluate(BuiltinMacros.ENABLE_PROJECT_OVERRIDE_SPECS), + let projectOverrideSpec = core.specRegistry.findSpecs(ProjectOverridesSpec.self, domain: "").filter({ spec in + spec.projectName == (scope.evaluate(BuiltinMacros.RC_ProjectName).nilIfEmpty ?? scope.evaluate(BuiltinMacros.SRCROOT).basename) + }).only + { push(projectOverrideSpec.buildSettings) self.warnings.append("Applying Swift Build settings override to project for \(projectOverrideSpec.bugReport).") } @@ -1656,14 +1654,12 @@ private class SettingsBuilder { do { self.moduleDependencies = try createScope(sdkToUse: boundProperties.sdk).evaluate(BuiltinMacros.MODULE_DEPENDENCIES).map { try ModuleDependency(entry: $0) } - } - catch { + } catch { errors.append("Failed to parse \(BuiltinMacros.MODULE_DEPENDENCIES.name): \(error)") } do { self.headerDependencies = try createScope(sdkToUse: boundProperties.sdk).evaluate(BuiltinMacros.HEADER_DEPENDENCIES).map { try HeaderDependency(entry: $0) } - } - catch { + } catch { errors.append("Failed to parse \(BuiltinMacros.HEADER_DEPENDENCIES.name): \(error)") } @@ -1675,7 +1671,6 @@ private class SettingsBuilder { return (boundProperties, boundDeploymentTarget) } - // MARK: Computing the bound properties /// Compute the "bound" properties needed to build a complete settings table. This is called on a separate instance of `SettingsBuilder` used to bind the properties. @@ -1695,7 +1690,7 @@ private class SettingsBuilder { } // Add the target settings, if configured. - if let target = self.target, let config = effectiveTargetConfig { + if let target = self.target, let config = effectiveTargetConfig { addTargetSettings(target, SpecLookupCtxt(specRegistry: core.specRegistry, platform: nil), config, nil) } @@ -1735,7 +1730,7 @@ private class SettingsBuilder { // - Then, we let the run destination override the SDK if needed. // - If the SDK or Platform changed, we do a second pass to add the new SDK/Platform settings. This ensures that we can resolve TOOLCHAINS accurately later. var sdkroot: String! = nil - for i in 0 ..< 2 { + for i in 0..<2 { // Perform the initial SDK resolution (this may drive the platform). 
sdkroot = createScope(effectiveTargetConfig, sdkToUse: sdk).evaluate(BuiltinMacros.SDKROOT).str @@ -1750,8 +1745,7 @@ private class SettingsBuilder { let supportsMacCatalyst = Settings.supportsMacCatalyst(scope: scope, core: core) if destinationIsMacCatalyst && supportsMacCatalyst { usesReplaceableAutomaticSDKRoot = true - } - else { + } else { usesReplaceableAutomaticSDKRoot = runDestinationIsSupported } } else { @@ -1821,7 +1815,7 @@ private class SettingsBuilder { } // Add the target settings, if configured. - if let target = self.target, let config = effectiveTargetConfig { + if let target = self.target, let config = effectiveTargetConfig { addTargetSettings(target, specLookupContext, config, nil) } @@ -1865,8 +1859,7 @@ private class SettingsBuilder { if project != nil && foundAmbiguousErrors.isEmpty { if sdk == nil { errors.append("unable to find sdk '\(sdkroot!)'") - } - else if platform == nil { + } else if platform == nil { errors.append("unable to find platform for sdk '\(sdkroot!)'") } } @@ -1885,14 +1878,12 @@ private class SettingsBuilder { // Look up the SDK from the workspace context's SDK registry. Here we let the registry detect whether the string we're looking up is the name of the SDK, or the path to one. if let sdk = try? sdkRegistry.lookup(nameOrPath: sparseSDKLookupStr, basePath: project.sourceRoot, activeRunDestination: parameters.activeRunDestination) { return sdk - } - else { + } else { // If we didn't find an SDK, then we treat the lookup string as a path and look for it inside the project directory. let lookupPath = project.sourceRoot.join(sparseSDKLookupStr) if let sdk = sdkRegistry.lookup(lookupPath) { return sdk - } - else { + } else { self.warnings.append("can't find additional SDK '\(sparseSDKLookupStr)'") } } @@ -1919,7 +1910,8 @@ private class SettingsBuilder { // If the build system was initialized as part of a swift toolchain, push that toolchain ahead of the default toolchain, if they are not the same (e.g. when on macOS where an Xcode install exists). if case .swiftToolchain(let path, xcodeDeveloperPath: _) = core.developerPath { if let developerPathToolchain = core.toolchainRegistry.toolchains.first(where: { $0.path.normalize() == path.normalize() }), - developerPathToolchain != coreSettings.defaultToolchain { + developerPathToolchain != coreSettings.defaultToolchain + { toolchains.append(developerPathToolchain) } } @@ -1931,7 +1923,7 @@ private class SettingsBuilder { // Determine the deployment targets that would be used. For enabling macCatalyst, we need to know the pre-mapped versions so we can inject that as SDK overrides. let scope = createScope(effectiveTargetConfig, sdkToUse: sdk) - let settings = BoundProperties.BoundSettings(scope, [BuiltinMacros.IPHONEOS_DEPLOYMENT_TARGET, BuiltinMacros.DEVELOPMENT_TEAM], [BuiltinMacros.OTHER_LDFLAGS,BuiltinMacros.PRODUCT_SPECIFIC_LDFLAGS],[BuiltinMacros.SDKROOT], []) + let settings = BoundProperties.BoundSettings(scope, [BuiltinMacros.IPHONEOS_DEPLOYMENT_TARGET, BuiltinMacros.DEVELOPMENT_TEAM], [BuiltinMacros.OTHER_LDFLAGS, BuiltinMacros.PRODUCT_SPECIFIC_LDFLAGS], [BuiltinMacros.SDKROOT], []) return BoundProperties(sdk: sdk, sdkVariant: sdkVariant, platform: platform, toolchains: toolchains, sparseSDKs: sparseSDKs, preOverrides: preOverrides, settings: settings) } @@ -2010,7 +2002,6 @@ private class SettingsBuilder { push(table, exportType) } - // MARK: Methods for pushing groups of properties /// Adds the base layer of build settings, including the environment and builtin defaults. 
@@ -2020,7 +2011,7 @@ private class SettingsBuilder { // FIXME: Remove rev-lock hack for new CopyStringsFile build setting pushTable(.exported) { $0.push(BuiltinMacros.STRINGS_FILE_INFOPLIST_RENAME, literal: true) } exportedMacroNames.formUnion(info.exportedMacros) - errors.append(contentsOf:info.errors) + errors.append(contentsOf: info.errors) // Default to preserving the module cache directory. pushTable(.exported) { @@ -2117,8 +2108,7 @@ private class SettingsBuilder { table.push(BuiltinMacros.IS_UNOPTIMIZED_BUILD, literal: (scope.evaluate(BuiltinMacros.GCC_OPTIMIZATION_LEVEL) == "0" || scope.evaluate(BuiltinMacros.SWIFT_OPTIMIZATION_LEVEL) == "-Onone")) // If unset, infer the default SWIFT_LIBRARY_LEVEL from the INSTALL_PATH. - if scope.evaluateAsString(BuiltinMacros.SWIFT_LIBRARY_LEVEL).isEmpty && - scope.evaluate(BuiltinMacros.MACH_O_TYPE) == "mh_dylib" { + if scope.evaluateAsString(BuiltinMacros.SWIFT_LIBRARY_LEVEL).isEmpty && scope.evaluate(BuiltinMacros.MACH_O_TYPE) == "mh_dylib" { let privateInstallPaths = scope.evaluate(BuiltinMacros.__KNOWN_SPI_INSTALL_PATHS).map { Path($0) } let publicInstallPaths = [ Path("/System/Library/Frameworks"), @@ -2126,7 +2116,8 @@ private class SettingsBuilder { Path("/usr/lib"), Path("/System/iOSSupport/System/Library/Frameworks"), Path("/System/iOSSupport/System/Library/SubFrameworks"), - Path("/System/iOSSupport/usr/lib"),] + Path("/System/iOSSupport/usr/lib"), + ] let installPath = scope.evaluate(BuiltinMacros.INSTALL_PATH) if table.contains(BuiltinMacros.SKIP_INSTALL) { @@ -2212,8 +2203,7 @@ private class SettingsBuilder { if scope.evaluate(BuiltinMacros.MERGEABLE_LIBRARY) { if scope.evaluate(BuiltinMacros.IS_UNOPTIMIZED_BUILD) { table.push(BuiltinMacros.ADD_MERGEABLE_DEBUG_HOOK, literal: true) - } - else { + } else { table.push(BuiltinMacros.MAKE_MERGEABLE, literal: true) } } @@ -2307,8 +2297,7 @@ private class SettingsBuilder { if target.type == .external { // External targets are special - they only look to see if SKIP_INSTALL is enabled in the target itself, and ignore the value of $(INSTALL_PATH). skipInstall = MacroEvaluationScope(table: config.buildSettings).evaluate(BuiltinMacros.SKIP_INSTALL) - } - else { + } else { skipInstall = scope.evaluate(BuiltinMacros.SKIP_INSTALL) || scope.evaluate(BuiltinMacros.INSTALL_PATH).isEmpty } // If we're skipping the install (or don't have a path to install to), then TARGET_BUILD_DIR is set to an UninstalledProducts location. Note that the external @@ -2337,7 +2326,7 @@ private class SettingsBuilder { // Handle support for not using per-configuration build directories. 
if !usePerConfigurationBuildLocations { table.push(BuiltinMacros.CONFIGURATION_BUILD_DIR, Static { BuiltinMacros.namespace.parseString("$(BUILD_DIR") }) - table.push(BuiltinMacros.CONFIGURATION_TEMP_DIR, Static { BuiltinMacros.namespace.parseString("$(PROJECT_TEMP_DIR")}) + table.push(BuiltinMacros.CONFIGURATION_TEMP_DIR, Static { BuiltinMacros.namespace.parseString("$(PROJECT_TEMP_DIR") }) } } @@ -2362,7 +2351,7 @@ private class SettingsBuilder { "-Xfrontend", "LiveExecutionResultsLogger", "-F", - "\(platformFrameworksDir)" + "\(platformFrameworksDir)", ] table.push(BuiltinMacros.OTHER_SWIFT_FLAGS, BuiltinMacros.namespace.parseStringList(["$(inherited)"] + otherSwiftFlags)) @@ -2416,7 +2405,8 @@ private class SettingsBuilder { platformTable.push(BuiltinMacros.CORRESPONDING_DEVICE_PLATFORM_NAME, literal: correspondingPlatform.name) platformTable.push(BuiltinMacros.CORRESPONDING_DEVICE_PLATFORM_DIR, literal: correspondingPlatform.path.str) if let correspondingSDKName = correspondingPlatform.sdkCanonicalName, - let correspondingSDK = try? sdkRegistry.lookup(correspondingSDKName, activeRunDestination: parameters.activeRunDestination) { + let correspondingSDK = try? sdkRegistry.lookup(correspondingSDKName, activeRunDestination: parameters.activeRunDestination) + { platformTable.push(BuiltinMacros.CORRESPONDING_DEVICE_SDK_NAME, literal: correspondingSDK.canonicalName) platformTable.push(BuiltinMacros.CORRESPONDING_DEVICE_SDK_DIR, literal: correspondingSDK.path.str) } @@ -2425,17 +2415,20 @@ private class SettingsBuilder { platformTable.push(BuiltinMacros.CORRESPONDING_SIMULATOR_PLATFORM_NAME, literal: correspondingPlatform.name) platformTable.push(BuiltinMacros.CORRESPONDING_SIMULATOR_PLATFORM_DIR, literal: correspondingPlatform.path.str) if let correspondingSDKName = correspondingPlatform.sdkCanonicalName, - let correspondingSDK = try? sdkRegistry.lookup(correspondingSDKName, activeRunDestination: parameters.activeRunDestination) { + let correspondingSDK = try? sdkRegistry.lookup(correspondingSDKName, activeRunDestination: parameters.activeRunDestination) + { platformTable.push(BuiltinMacros.CORRESPONDING_SIMULATOR_SDK_NAME, literal: correspondingSDK.canonicalName) platformTable.push(BuiltinMacros.CORRESPONDING_SIMULATOR_SDK_DIR, literal: correspondingSDK.path.str) } } // Add the default for SUPPORTED_PLATFORMS. - platformTable.push(BuiltinMacros.SUPPORTED_PLATFORMS, literal: core.platformRegistry.platforms.compactMap { + platformTable.push( + BuiltinMacros.SUPPORTED_PLATFORMS, + literal: core.platformRegistry.platforms.compactMap { return $0.familyName == platform.familyName ? $0.name : nil - }) - + } + ) // Add the platform default settings. platformTable.pushContentsOf(platform.defaultSettingsTable) @@ -2841,8 +2834,7 @@ private class SettingsBuilder { let info = buildRequestContext.getCachedMacroConfigFile(path, project: project, context: .baseConfiguration) if let sdk = sdk, settingsContext.purpose.bindToSDK { bindConditionParameters(bindTargetCondition(info.table), sdk) - } - else { + } else { // No bound SDK, so push the project's build settings unmodified. push(bindTargetCondition(info.table), .exported) } @@ -2869,8 +2861,7 @@ private class SettingsBuilder { // Add the project's config settings. if let sdk, settingsContext.purpose.bindToSDK { bindConditionParameters(bindTargetCondition(config.buildSettings), sdk) - } - else { + } else { // No bound SDK, so push the project's build settings unmodified. 
push(bindTargetCondition(config.buildSettings), .exported) } @@ -2894,8 +2885,7 @@ private class SettingsBuilder { let message = "`\(macro)` is not supported. Remove the build setting and conditionalize `PRODUCT_BUNDLE_IDENTIFIER` instead." if scope.evaluate(BuiltinMacros.__DIAGNOSE_DERIVE_MACCATALYST_PRODUCT_BUNDLE_IDENTIFIER_ERROR) { self.errors.append(message) - } - else { + } else { self.warnings.append(message) } } @@ -2959,14 +2949,14 @@ private class SettingsBuilder { } #if !RC_PLAYGROUNDS - // Xcode version settings. - let xcodeMajorStr = (core.xcodeVersion[0] * 100).toString(format: "%04d") - let xcodeMinorStr = (core.xcodeVersion[0] * 100 + core.xcodeVersion[1] * 10).toString(format: "%04d") - let xcodeActualStr = (core.xcodeVersion[0] * 100 + core.xcodeVersion[1] * 10 + core.xcodeVersion[2]).toString(format: "%04d") - table.push(BuiltinMacros.XCODE_VERSION_MAJOR, literal: xcodeMajorStr) - table.push(BuiltinMacros.XCODE_VERSION_MINOR, literal: xcodeMinorStr) - table.push(BuiltinMacros.XCODE_VERSION_ACTUAL, literal: xcodeActualStr) - table.push(BuiltinMacros.XCODE_PRODUCT_BUILD_VERSION, literal: core.xcodeProductBuildVersionString) + // Xcode version settings. + let xcodeMajorStr = (core.xcodeVersion[0] * 100).toString(format: "%04d") + let xcodeMinorStr = (core.xcodeVersion[0] * 100 + core.xcodeVersion[1] * 10).toString(format: "%04d") + let xcodeActualStr = (core.xcodeVersion[0] * 100 + core.xcodeVersion[1] * 10 + core.xcodeVersion[2]).toString(format: "%04d") + table.push(BuiltinMacros.XCODE_VERSION_MAJOR, literal: xcodeMajorStr) + table.push(BuiltinMacros.XCODE_VERSION_MINOR, literal: xcodeMinorStr) + table.push(BuiltinMacros.XCODE_VERSION_ACTUAL, literal: xcodeActualStr) + table.push(BuiltinMacros.XCODE_PRODUCT_BUILD_VERSION, literal: core.xcodeProductBuildVersionString) #endif // Backward compatibility settings. @@ -3048,8 +3038,7 @@ private class SettingsBuilder { let info = buildRequestContext.getCachedMacroConfigFile(path, project: project, context: .baseConfiguration) if let sdk = sdk, settingsContext.purpose.bindToSDK { bindConditionParameters(bindTargetCondition(info.table), sdk) - } - else { + } else { // No bound SDK, so push the target xcconfig's build settings unmodified. push(bindTargetCondition(info.table), .exported) } @@ -3070,8 +3059,7 @@ private class SettingsBuilder { // FIXME: Cache this table, but we can only do that once we share the namespace. if let sdk, settingsContext.purpose.bindToSDK { bindConditionParameters(bindTargetCondition(config.buildSettings), sdk) - } - else { + } else { // No bound SDK, so push the target's build settings unmodified. 
push(bindTargetCondition(config.buildSettings), .exported) } @@ -3191,8 +3179,7 @@ private class SettingsBuilder { if let toolchain = core.toolchainRegistry.lookup(toolchainIdentifier), !toolchain.overrideSettings.isEmpty { do { push(try userNamespace.parseTable(toolchain.overrideSettings, allowUserDefined: true), .exported) - } - catch { + } catch { self.errors.append("unable to create override build settings table from toolchain `\(toolchainIdentifier)`") } } @@ -3412,8 +3399,7 @@ private class SettingsBuilder { BuiltinMacros.EXECUTABLE_DEBUG_DYLIB_MAPPED_PLATFORM, table.namespace.parseLiteralString("\(appleLDPreviousPlatform)") ) - } - else { + } else { table.push( BuiltinMacros.EXECUTABLE_DEBUG_DYLIB_INSTALL_NAME, table.namespace.parseString("@rpath/$(EXECUTABLE_NAME).debug.dylib") @@ -3428,9 +3414,9 @@ private class SettingsBuilder { } if isExecutableProduct, - scope.evaluate(BuiltinMacros.ENABLE_HARDENED_RUNTIME), - // Check for ad-hoc - scope.evaluate(BuiltinMacros.CODE_SIGN_IDENTITY) == "-" + scope.evaluate(BuiltinMacros.ENABLE_HARDENED_RUNTIME), + // Check for ad-hoc + scope.evaluate(BuiltinMacros.CODE_SIGN_IDENTITY) == "-" { // Hardened runtime was enabled with ad-hoc codesigning. This is not compatible // with debug dylib mode so we'll ignore it and emit a note. @@ -3446,7 +3432,7 @@ private class SettingsBuilder { } static func targetSupportedPlatforms(scope: MacroEvaluationScope, core: Core, runDestinationPlatform: Platform, emitWarning: (String) -> Void = { _ in }) -> [Platform] { - let targetSupportedPlatforms = scope.evaluate(BuiltinMacros.SUPPORTED_PLATFORMS).compactMap{ core.platformRegistry.lookup(name: $0) } + let targetSupportedPlatforms = scope.evaluate(BuiltinMacros.SUPPORTED_PLATFORMS).compactMap { core.platformRegistry.lookup(name: $0) } // Warn if we couldn't find a supported platform for the given list. if targetSupportedPlatforms.isEmpty { @@ -3467,7 +3453,7 @@ private class SettingsBuilder { if Settings.targetPlatformSpecializationEnabled(scope: scope) && sdk != nil { return } - } catch { /* fallthrough */ } + } catch { /* fallthrough */ } // Destination info: since runDestination.{platform,sdk} were set by the IDE, we expect them to resolve in Swift Build correctly guard let runDestination = self.parameters.activeRunDestination else { return } @@ -3544,8 +3530,7 @@ private class SettingsBuilder { } else { requiredSDKCanonicalName = destinationSDK.canonicalName } - } - else if destinationPlatform.isSimulator, targetPlatform == nil || (targetPlatform !== destinationPlatform && targetPlatform?.familyName == destinationPlatform.familyName) { + } else if destinationPlatform.isSimulator, targetPlatform == nil || (targetPlatform !== destinationPlatform && targetPlatform?.familyName == destinationPlatform.familyName) { // Simulator: If the target specifies an SDK for a platform in the destination platform family, use the equivalent SDK for the destination platform. For example, if the target specifies iphoneos4.2 as its SDK, use iphonesimulator4.2 if such an SDK exists. let targetSuffix = targetSDK.canonicalNameSuffix?.nilIfEmpty.map { ".\($0)" } ?? "" let candidates: [String] = [ @@ -3558,16 +3543,13 @@ private class SettingsBuilder { let resolvedCandidates = candidates.map { ($0, try? 
sdkRegistry.lookup($0, activeRunDestination: runDestination)?.canonicalName) } requiredSDKCanonicalName = resolvedCandidates.compactMap { $0.1 }.first - } - else if (destinationPlatformIsMacOS || destinationPlatformIsLinux || destinationPlatform.isSimulator) && targetPlatform === destinationPlatform { + } else if (destinationPlatformIsMacOS || destinationPlatformIsLinux || destinationPlatform.isSimulator) && targetPlatform === destinationPlatform { // If the target specifies an SDK for the destination platform, don't override its choice of SDK. - } - else { + } else { // The target specifies an SDK not for the destination platform, but claims to support the destination platform. requiredSDKCanonicalName = getLatestSDKCanonicalName(for: destinationPlatform) } - } - else if destinationPlatformIsDeviceOrSimulator { + } else if destinationPlatformIsDeviceOrSimulator { // For multiplatform builds, we want to ensure consistency between simulator- vs non-simulator environments, so if this target supports a platform that matches the simulator-ness of the destination platform, then use it. let isDeployment = destinationPlatform.isDeploymentPlatform @@ -3618,7 +3600,8 @@ private class SettingsBuilder { func pushHostTargetPlatformSettingsIfNeeded(for runDestination: RunDestinationInfo, to scope: MacroEvaluationScope) { if let hostTargetedPlatform = runDestination.hostTargetedPlatform, - scope.evaluate(BuiltinMacros.SUPPORTED_HOST_TARGETED_PLATFORMS).contains(hostTargetedPlatform) { + scope.evaluate(BuiltinMacros.SUPPORTED_HOST_TARGETED_PLATFORMS).contains(hostTargetedPlatform) + { pushTable(.exported) { $0.push(BuiltinMacros.HOST_TARGETED_PLATFORM_NAME, literal: hostTargetedPlatform) } } } @@ -3654,9 +3637,11 @@ private class SettingsBuilder { BuiltinMacros.PROVISIONING_PROFILE_SPECIFIER, ]) - targetDiagnostics.append(contentsOf: inputs.warnings.map { - Diagnostic(behavior: .warning, location: signingDiagnosticsLocation, data: DiagnosticData($0)) - }) + targetDiagnostics.append( + contentsOf: inputs.warnings.map { + Diagnostic(behavior: .warning, location: signingDiagnosticsLocation, data: DiagnosticData($0)) + } + ) let errors = inputs.errors.map { error -> String in var errorString = error.description @@ -3670,9 +3655,11 @@ private class SettingsBuilder { return errorString } - targetDiagnostics.append(contentsOf: errors.map { - Diagnostic(behavior: UserDefaults.disableSigningProvisioningErrors ? .warning : .error, location: signingDiagnosticsLocation, data: DiagnosticData($0)) - }) + targetDiagnostics.append( + contentsOf: errors.map { + Diagnostic(behavior: UserDefaults.disableSigningProvisioningErrors ? .warning : .error, location: signingDiagnosticsLocation, data: DiagnosticData($0)) + } + ) if !errors.isEmpty { return @@ -3713,8 +3700,7 @@ private class SettingsBuilder { identityName = "Ad Hoc" warnings.append("\(target.name) isn't code signed but requires entitlements. Falling back to ad hoc signing.") - } - else if !platformRequiresEntitlements { + } else if !platformRequiresEntitlements { // Warn the user when code signing is disabled, is not required, but they are specifying entitlements that cannot be used without code signing. warnings.append("\(target.name) isn't code signed but requires entitlements. 
It is not possible to add entitlements to a binary without signing it.") } @@ -3828,8 +3814,9 @@ private class SettingsBuilder { table.push(BuiltinMacros.INDEX_DIRECTORY_REMAP_VFS_FILE, Static { BuiltinMacros.namespace.parseString("$(OBJROOT)/index-overlay.yaml") }) if let arena = parameters.arena, - let productsPath = arena.indexRegularBuildProductsPath, - let intermediatesPath = arena.indexRegularBuildIntermediatesPath { + let productsPath = arena.indexRegularBuildProductsPath, + let intermediatesPath = arena.indexRegularBuildIntermediatesPath + { table.push(BuiltinMacros.INDEX_REGULAR_BUILD_PRODUCTS_DIR, BuiltinMacros.namespace.parseString(productsPath.str)) table.push(BuiltinMacros.INDEX_REGULAR_BUILD_INTERMEDIATES_DIR, BuiltinMacros.namespace.parseString(intermediatesPath.str)) } @@ -3886,10 +3873,12 @@ private class SettingsBuilder { table.push(BuiltinMacros.DERIVED_DATA_DIR, BuiltinMacros.namespace.parseString(derivedDataPath!.str)) if let arena = parameters.arena { - for (macro,path) in [(BuiltinMacros.SYMROOT, arena.buildProductsPath), - (BuiltinMacros.OBJROOT, arena.buildIntermediatesPath), - (BuiltinMacros.SHARED_PRECOMPS_DIR, arena.pchPath), - (BuiltinMacros.INDEX_PRECOMPS_DIR, arena.indexPCHPath)] { + for (macro, path) in [ + (BuiltinMacros.SYMROOT, arena.buildProductsPath), + (BuiltinMacros.OBJROOT, arena.buildIntermediatesPath), + (BuiltinMacros.SHARED_PRECOMPS_DIR, arena.pchPath), + (BuiltinMacros.INDEX_PRECOMPS_DIR, arena.indexPCHPath), + ] { if !path.isEmpty { table.push(macro, BuiltinMacros.namespace.parseString(path.str)) } @@ -4054,7 +4043,8 @@ private class SettingsBuilder { // contains only a single architecture, use that. This is the code path for generic run destinations, // where the supportedArchitectures should not be treated as priority-ordered. if activeRunDestination.disableOnlyActiveArch, - let supportedArch = Set(activeRunDestination.supportedArchitectures).intersection(archsSet).only { + let supportedArch = Set(activeRunDestination.supportedArchitectures).intersection(archsSet).only + { return supportedArch } @@ -4062,7 +4052,8 @@ private class SettingsBuilder { // which is also in the proposed archs list. This is the code path for concrete run destinations, // where the supportedArchitectures should be treated as priority-ordered. if !activeRunDestination.disableOnlyActiveArch, - let supportedArch = activeRunDestination.supportedArchitectures.first(where: archsSet.contains) { + let supportedArch = activeRunDestination.supportedArchitectures.first(where: archsSet.contains) + { return supportedArch } @@ -4071,7 +4062,8 @@ private class SettingsBuilder { // Have a matching compatible architecture, so use that (eg. arm64e when the run destination is // arm64). 
if let compatibleArchs = compatibilityArchMap[activeRunDestination.targetArchitecture], - let compatibleArch = Set(compatibleArchs).intersection(archsSet).only { + let compatibleArch = Set(compatibleArchs).intersection(archsSet).only + { return compatibleArch } @@ -4118,16 +4110,13 @@ private class SettingsBuilder { if spec.deprecatedError { errors.append(deprecatedArchMessage) return false - } - else if !supportedArchDeploymentTarget && spec.errorOutsideDeploymentTargetRange { + } else if !supportedArchDeploymentTarget && spec.errorOutsideDeploymentTargetRange { errors.append(deprecatedArchDeploymentTargetMessage) return false - } - else if spec.deprecated { + } else if spec.deprecated { warnings.append(deprecatedArchMessage) return true - } - else if !supportedArchDeploymentTarget { + } else if !supportedArchDeploymentTarget { warnings.append(deprecatedArchDeploymentTargetMessage) return true } @@ -4151,11 +4140,14 @@ private class SettingsBuilder { // Detect discouraged overrides of SWIFT_PLATFORM_TARGET_PREFIX and use this as a signal to suppress // module only architectures let tripleOverridesApplied = scope.evaluate(BuiltinMacros.SWIFT_PLATFORM_TARGET_PREFIX) != scope.evaluate(BuiltinMacros.__ORIGINAL_SDK_DEFINED_LLVM_TARGET_TRIPLE_SYS) - let moduleOnlyArchs = (onlyActiveArchApplied || tripleOverridesApplied) ? [] : originalModuleOnlyArchs - .filter { validArchs.contains($0) } - .filter { !excludedArchs.contains($0) } - .filter { !effectiveArchs.contains($0) } - .removingDuplicates() + let moduleOnlyArchs = + (onlyActiveArchApplied || tripleOverridesApplied) + ? [] + : originalModuleOnlyArchs + .filter { validArchs.contains($0) } + .filter { !excludedArchs.contains($0) } + .filter { !effectiveArchs.contains($0) } + .removingDuplicates() table.push(BuiltinMacros.__SWIFT_MODULE_ONLY_ARCHS__, literal: originalModuleOnlyArchs) table.push(BuiltinMacros.SWIFT_MODULE_ONLY_ARCHS, literal: moduleOnlyArchs) @@ -4176,13 +4168,12 @@ private class SettingsBuilder { BuiltinMacros.CCHROOT, BuiltinMacros.CONFIGURATION_BUILD_DIR, BuiltinMacros.SHARED_PRECOMPS_DIR, BuiltinMacros.CONFIGURATION_TEMP_DIR, BuiltinMacros.TARGET_TEMP_DIR, BuiltinMacros.TEMP_DIR, - BuiltinMacros.PROJECT_DIR, BuiltinMacros.BUILT_PRODUCTS_DIR + BuiltinMacros.PROJECT_DIR, BuiltinMacros.BUILT_PRODUCTS_DIR, ] for macro in macrosToNormalize { table.push(macro, literal: (project?.sourceRoot ?? workspaceContext.workspace.path.dirname).join(scope.evaluate(macro), normalize: true).str) } - // FIXME: Xcode also normalizes SDKROOT to an absolute path here, although native targets also do this (in a different place). // Compute the resolved value for GCC_VERSION, if not otherwise set. @@ -4235,9 +4226,12 @@ private class SettingsBuilder { let shellCodec: any CommandSequenceEncodable = UNIXShellCommandCodec(encodingStrategy: .backslashes, encodingBehavior: .argumentsOnly) - table.push(BuiltinMacros.ALL_SETTINGS, literal: macros.map({ macro in - "\(macro)=" + shellCodec.encode([scope.evaluate(scope.namespace.parseString("$\(macro)"))]) - })) + table.push( + BuiltinMacros.ALL_SETTINGS, + literal: macros.map({ macro in + "\(macro)=" + shellCodec.encode([scope.evaluate(scope.namespace.parseString("$\(macro)"))]) + }) + ) } // If testability is enabled, then that overrides certain other settings, and in a way that the user cannot override: They're either using testability, or they're not. 
@@ -4320,7 +4314,7 @@ private class SettingsBuilder { let toolchainPath = Path(scope.evaluateAsString(BuiltinMacros.TOOLCHAIN_DIR)) guard let toolchain = core.toolchainRegistry.toolchains.first(where: { $0.path == toolchainPath }), - let defaultToolchain = core.toolchainRegistry.defaultToolchain + let defaultToolchain = core.toolchainRegistry.defaultToolchain else { return [] } @@ -4363,7 +4357,8 @@ private class SettingsBuilder { // Ensure only a single variant is set if we're in an index build - either the first from `BUILD_VARIANTS` or `INDEX_BUILD_VARIANT` if it's set. var variants: [String] = scope.evaluate(BuiltinMacros.BUILD_VARIANTS) if parameters.action == .indexBuild, - let firstVariant = variants.first { + let firstVariant = variants.first + { let indexVariant = scope.evaluate(BuiltinMacros.INDEX_BUILD_VARIANT) if indexVariant.isEmpty || !variants.contains(indexVariant) { variants = [firstVariant] @@ -4428,8 +4423,7 @@ private class SettingsBuilder { let allSDKs: [SDK] if let baseSDK { allSDKs = [baseSDK].appending(contentsOf: sparseSDKs) - } - else { + } else { allSDKs = sparseSDKs } // Collect all the macros and SDKs into a map of [macro: [(macro, sdk)] entries, so we know if there are any collisions. @@ -4533,8 +4527,7 @@ private class SettingsBuilder { } // If any sanitizer is enabled, and this product type has a runpath to its Frameworks directory defined, then we want to add that path to the runpath search path if it's not already present. - if scope.evaluate(BuiltinMacros.ENABLE_ADDRESS_SANITIZER) || scope.evaluate(BuiltinMacros.ENABLE_THREAD_SANITIZER) || scope.evaluate(BuiltinMacros.ENABLE_UNDEFINED_BEHAVIOR_SANITIZER) - { + if scope.evaluate(BuiltinMacros.ENABLE_ADDRESS_SANITIZER) || scope.evaluate(BuiltinMacros.ENABLE_THREAD_SANITIZER) || scope.evaluate(BuiltinMacros.ENABLE_UNDEFINED_BEHAVIOR_SANITIZER) { if let frameworksRunpath = productType?.frameworksRunpathSearchPath(in: scope)?.str { if !scope.evaluate(BuiltinMacros.LD_RUNPATH_SEARCH_PATHS).contains(frameworksRunpath) { tableSet.push(.none, BuiltinMacros.LD_RUNPATH_SEARCH_PATHS, BuiltinMacros.namespace.parseStringList(["$(inherited)", frameworksRunpath])) @@ -4560,9 +4553,9 @@ private class SettingsBuilder { if scope.evaluateAsString(macro).isEmpty || scope.evaluate(macro) { switch searchPathMacro { case BuiltinMacros.HEADER_SEARCH_PATHS: - tableSet.push(.none, searchPathMacro, Static { BuiltinMacros.namespace.parseStringList(["$(BUILT_PRODUCTS_DIR)/include", "$(inherited)"]) } ) + tableSet.push(.none, searchPathMacro, Static { BuiltinMacros.namespace.parseStringList(["$(BUILT_PRODUCTS_DIR)/include", "$(inherited)"]) }) default: - tableSet.push(.none, searchPathMacro, Static { BuiltinMacros.namespace.parseStringList(["$(BUILT_PRODUCTS_DIR)", "$(inherited)"]) } ) + tableSet.push(.none, searchPathMacro, Static { BuiltinMacros.namespace.parseStringList(["$(BUILT_PRODUCTS_DIR)", "$(inherited)"]) }) } } } @@ -4603,7 +4596,7 @@ private class SettingsBuilder { /// Helper function to process the SDK remapping for a specific `variant` and `arch`. func processRemapping(_ scope: MacroEvaluationScope, _ originalValues: [Path], variant: String? = nil, arch: String? = nil) { // Get the evaluated paths for this search path build setting, and remap them into the SDKs as appropriate. - let values = originalValues.flatMap{ (path: Path) -> [Path] in + let values = originalValues.flatMap { (path: Path) -> [Path] in var results = [Path]() // Remap into the sparse SDKs. This effectively adds search paths into the sparse SDKs. 
@@ -4646,7 +4639,7 @@ private class SettingsBuilder { MacroCondition(parameter: BuiltinMacros.archCondition, valuePattern: arch), ]) } - table.push(macro, literal: values.map{ $0.str }, conditions: conditions) + table.push(macro, literal: values.map { $0.str }, conditions: conditions) } } @@ -4669,13 +4662,13 @@ private class SettingsBuilder { for arch in archs { let scope = scope.subscopeBindingArchAndTriple(arch: arch) let originalValues = scope.evaluate(macro) - processRemapping(scope, originalValues.map{ Path($0) }, variant: variant, arch: arch) + processRemapping(scope, originalValues.map { Path($0) }, variant: variant, arch: arch) } } } else { // If CURRENT_* macros were not found while evaluating the macro, then use the original values already parsed // to process the remapping. - processRemapping(scope, originalValues.map{ Path($0) }) + processRemapping(scope, originalValues.map { Path($0) }) } } @@ -4738,8 +4731,7 @@ private class SettingsBuilder { let assignedSDKVariantDeploymentTarget: Version if let deploymentTarget = try? Version(assignedSDKVariantDeploymentTargetString) { assignedSDKVariantDeploymentTarget = deploymentTarget - } - else { + } else { self.notes.append("The \(buildTarget) deployment target '\(sdkVariantDeploymentTargetMacro!.name)' is set to '\(assignedSDKVariantDeploymentTargetString)' - setting to default value '\(defaultSDKVariantDeploymentTarget.description)'.") assignedSDKVariantDeploymentTarget = defaultSDKVariantDeploymentTarget } @@ -4777,8 +4769,7 @@ private class SettingsBuilder { getMacOSDeploymentTargetFromIOSDeploymentTarget = true } // If macOS deployment target is already defined, we do not check it against a macCatalyst-specific minimum value. Building a macCatalyst target with a macOS deployment target earlier than the release in which macCatalyst first shipped (10.15) is allowed for zippered products. - } - else { + } else { getMacOSDeploymentTargetFromIOSDeploymentTarget = true } if getMacOSDeploymentTargetFromIOSDeploymentTarget { @@ -4787,8 +4778,7 @@ private class SettingsBuilder { platformDeploymentTarget = macOSDeploymentTarget table.push(BuiltinMacros.MACOSX_DEPLOYMENT_TARGET, literal: macOSDeploymentTarget.description) } - } - else { + } else { sdkVariantDeploymentTargetMacro = nil sdkVariantDeploymentTarget = nil @@ -4805,8 +4795,7 @@ private class SettingsBuilder { if let macOSDeploymentTarget = platformDeploymentTarget { // 13.0 is a backstop in case anything isn't defined here. candidateiOSDeploymentTarget = sdk?.versionMap["macOS_iOSMac"]?[macOSDeploymentTarget] ?? Version(13, 0) - } - else { + } else { candidateiOSDeploymentTarget = Version(13, 0) } } @@ -4865,7 +4854,7 @@ private class SettingsBuilder { for setting in [ // We should move the check for ONLY_ACTIVE_ARCH here from getCommonTargetTaskOverrides(). //BuiltinMacros.ONLY_ACTIVE_ARCH, - BuiltinMacros.VALID_ARCHS, + BuiltinMacros.VALID_ARCHS ] { let definedAtLevels = allProjectSettingsLevels.compactMap { settings in settings.table?.contains(setting) == true ? settings.level : nil @@ -4930,7 +4919,7 @@ private class SettingsBuilder { // Check settings which should either not be defined at all, or which we warn if defined to a specific value. for (setting, values, explanation): (MacroDeclaration, [String]?, String?) in [ // We no longer ship libstdc++ so we warn about it specifically. c.f. 
- (BuiltinMacros.CLANG_CXX_LIBRARY, ["libstdc++"], "The 'libstdc++' C++ Standard Library is no longer available, and this setting can be removed."), + (BuiltinMacros.CLANG_CXX_LIBRARY, ["libstdc++"], "The 'libstdc++' C++ Standard Library is no longer available, and this setting can be removed.") ] { if let values, !values.isEmpty { // If values is not empty, then we only emit warnings is we detect that the setting is defined to one of the given values. @@ -4942,8 +4931,7 @@ private class SettingsBuilder { break } } - } - else { + } else { // If values is empty then we emit a warning if the setting is defined at all. let assignedValue = scope.evaluateAsString(setting) if !assignedValue.isEmpty { @@ -4984,8 +4972,8 @@ private class SettingsBuilder { // .append because this setting is commonly used to work around rdar://73504582 (BuiltinMacros.PRODUCT_SPECIFIC_LDFLAGS, .append), - (BuiltinMacros.__108704016_DEVELOPER_TOOLCHAIN_DIR_MISUSE_IS_WARNING, .none), // don't allow this to be overridden at the project level - (BuiltinMacros.RESCHEDULE_INDEPENDENT_HEADERS_PHASES, .none), // don't allow this to be overridden at the project level + (BuiltinMacros.__108704016_DEVELOPER_TOOLCHAIN_DIR_MISUSE_IS_WARNING, .none), // don't allow this to be overridden at the project level + (BuiltinMacros.RESCHEDULE_INDEPENDENT_HEADERS_PHASES, .none), // don't allow this to be overridden at the project level ] as [(MacroDeclaration, SettingMutability)] { let definedAtLevels = allProjectSettingsLevels.compactMap { settings -> String? in guard let assignment = settings.table?.valueAssignments[setting] else { @@ -5125,8 +5113,9 @@ private class SettingsBuilder { let paths: [Path] = buildFiles.compactMap { buildFile in // Considering only file references. guard case let .reference(guid) = buildFile.buildableItem, - let reference = workspaceContext.workspace.lookupReference(for: guid), - let fileRef = reference as? FileReference else { + let reference = workspaceContext.workspace.lookupReference(for: guid), + let fileRef = reference as? FileReference + else { return nil } @@ -5198,9 +5187,11 @@ private class SettingsBuilder { // when they have certain localization file extensions but not others. // A pattern prefix here is a pattern up to but not including the file extension suffix. 
- var fileExtensionsToMatchingPatternPrefixes: [String: Set] = Dictionary(uniqueKeysWithValues: locFileExtensions.map({ locExtension in - return (locExtension, []) - })) + var fileExtensionsToMatchingPatternPrefixes: [String: Set] = Dictionary( + uniqueKeysWithValues: locFileExtensions.map({ locExtension in + return (locExtension, []) + }) + ) for pattern in patterns { guard let fileExtensionDotIndex = indexOfFileExtensionDot(inPattern: pattern) else { continue @@ -5298,17 +5289,25 @@ extension BuildConfiguration { } extension StandardTarget { - public func linksAnyFramework(names: [String], in scope: MacroEvaluationScope, workspaceContext: WorkspaceContext, specLookupContext: any SpecLookupContext, boundSettings: [StringListMacroDeclaration:[String]], filePathResolver: FilePathResolver) -> Bool { + public func linksAnyFramework(names: [String], in scope: MacroEvaluationScope, workspaceContext: WorkspaceContext, specLookupContext: any SpecLookupContext, boundSettings: [StringListMacroDeclaration: [String]], filePathResolver: FilePathResolver) -> Bool { // Look for an explicit reference to any of the frameworks in the target's linked frameworks let frameworkNames = names.map({ return "\($0).framework" }) if let frameworkFileType = specLookupContext.lookupFileType(identifier: "wrapper.framework"), let buildPhase = frameworksBuildPhase, - buildPhase.containsFiles(ofType: frameworkFileType, workspaceContext.workspace, specLookupContext, scope, filePathResolver, { fileRef in - if let basename = fileRef.path.asLiteralString.map(Path.init)?.basename { - return frameworkNames.contains(basename) + buildPhase.containsFiles( + ofType: frameworkFileType, + workspaceContext.workspace, + specLookupContext, + scope, + filePathResolver, + { fileRef in + if let basename = fileRef.path.asLiteralString.map(Path.init)?.basename { + return frameworkNames.contains(basename) + } + return false } - return false - }) { + ) + { return true } diff --git a/Sources/SWBCore/Settings/StackedSearchPaths.swift b/Sources/SWBCore/Settings/StackedSearchPaths.swift index ebde1977..3c5b5866 100644 --- a/Sources/SWBCore/Settings/StackedSearchPaths.swift +++ b/Sources/SWBCore/Settings/StackedSearchPaths.swift @@ -103,7 +103,7 @@ extension StackedSearchPathLookupError: CustomStringConvertible { public var description: String { switch self { case let .unableToFind(subject, operatingSystem, searchPaths): - let candidates = searchPaths.flatMap { $0.paths.map { $0.join(subject.fileName(operatingSystem: operatingSystem)).str }} + let candidates = searchPaths.flatMap { $0.paths.map { $0.join(subject.fileName(operatingSystem: operatingSystem)).str } } return "unable to find \(subject.fileName(operatingSystem: operatingSystem).str) among search paths: \(candidates.joined(separator: ", "))" } } diff --git a/Sources/SWBCore/ShellScript.swift b/Sources/SWBCore/ShellScript.swift index 2c42f82f..66cc8c49 100644 --- a/Sources/SWBCore/ShellScript.swift +++ b/Sources/SWBCore/ShellScript.swift @@ -136,7 +136,8 @@ public func computeScriptEnvironment(_ type: ScriptType, scope: MacroEvaluationS BuiltinMacros.INSTALL_OWNER, BuiltinMacros.INSTALL_GROUP, BuiltinMacros.INSTALL_MODE_FLAG, BuiltinMacros.DEVELOPMENT_LANGUAGE, - BuiltinMacros.FRAMEWORK_VERSION] + BuiltinMacros.FRAMEWORK_VERSION, + ] for macro in alwaysPresentStringMacros { result[macro.name] = scope.evaluate(macro) } @@ -149,7 +150,8 @@ public func computeScriptEnvironment(_ type: ScriptType, scope: MacroEvaluationS BuiltinMacros.TEMP_FILE_DIR, BuiltinMacros.DERIVED_FILES_DIR, 
BuiltinMacros.TARGET_BUILD_DIR, - BuiltinMacros.BUILT_PRODUCTS_DIR] + BuiltinMacros.BUILT_PRODUCTS_DIR, + ] for macro in alwaysPresentPathMacros { result[macro.name] = scope.evaluate(macro).str } diff --git a/Sources/SWBCore/SigningSupport.swift b/Sources/SWBCore/SigningSupport.swift index 68921059..4c97527e 100644 --- a/Sources/SWBCore/SigningSupport.swift +++ b/Sources/SWBCore/SigningSupport.swift @@ -21,8 +21,7 @@ public enum EntitlementsVariant: Int, Serializable, Sendable { } /// Provides contextual behavior for code signing based on the type of platform being targeted. -public protocol PlatformSigningContext: Sendable -{ +public protocol PlatformSigningContext: Sendable { func adHocSigningAllowed(_ scope: MacroEvaluationScope) -> Bool func useAdHocSigningIfSigningIsRequiredButNotSpecified(_ scope: MacroEvaluationScope) -> Bool @@ -34,30 +33,25 @@ public protocol PlatformSigningContext: Sendable func supportsAppSandboxAndHardenedRuntime() -> Bool } -extension PlatformSigningContext -{ +extension PlatformSigningContext { /// Returns `true` is ad hoc signing is allowed for the platform. /// /// This is only in the signing context because some contexts want to call it from `useAdHocSigningIfSigningIsRequiredButNotSpecified()`. - @_spi(Testing) public func adHocSigningAllowed(_ scope: MacroEvaluationScope) -> Bool - { + @_spi(Testing) public func adHocSigningAllowed(_ scope: MacroEvaluationScope) -> Bool { // Yup, amazingly AD_HOC_CODE_SIGNING_ALLOWED is considered to be true if either it is defined to be true *or* if it is empty. It has to be explicitly false to be false. I infer this is for compatibility reasons so the platforms or SDKs don't need to be mass-revved. return scope.evaluateAsString(BuiltinMacros.AD_HOC_CODE_SIGNING_ALLOWED).isEmpty || scope.evaluate(BuiltinMacros.AD_HOC_CODE_SIGNING_ALLOWED) } /// Returns `true` if ad hoc signing should be used if signing is required but no signing identity is provided. - @_spi(Testing) public func useAdHocSigningIfSigningIsRequiredButNotSpecified(_ scope: MacroEvaluationScope) -> Bool - { + @_spi(Testing) public func useAdHocSigningIfSigningIsRequiredButNotSpecified(_ scope: MacroEvaluationScope) -> Bool { return false } - @_spi(Testing) public func shouldPassEntitlementsFileContentToCodeSign() -> Bool - { + @_spi(Testing) public func shouldPassEntitlementsFileContentToCodeSign() -> Bool { return true } - @_spi(Testing) public func requiresEntitlements(_ scope: MacroEvaluationScope, hasProfile: Bool, productFileType: FileTypeSpec) -> Bool - { + @_spi(Testing) public func requiresEntitlements(_ scope: MacroEvaluationScope, hasProfile: Bool, productFileType: FileTypeSpec) -> Bool { return hasProfile || scope.evaluate(BuiltinMacros.ENTITLEMENTS_REQUIRED) } @@ -66,45 +60,34 @@ extension PlatformSigningContext } } - /// Provides behavior for code signing for the macOS platform. -@_spi(Testing) public struct MacSigningContext: PlatformSigningContext -{ +@_spi(Testing) public struct MacSigningContext: PlatformSigningContext { @_spi(Testing) public func supportsAppSandboxAndHardenedRuntime() -> Bool { return true } } - /// Provides behavior for code signing for device platforms. 
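As the comment above spells out, AD_HOC_CODE_SIGNING_ALLOWED only disables ad hoc signing when it is explicitly false; an empty (or unset) value counts as allowed. A tiny sketch of that rule in isolation, with a plain dictionary standing in for the macro evaluation scope and an assumed set of false spellings (the real boolean coercion in Swift Build may differ):

    /// Returns true unless the setting is explicitly set to a false-like value;
    /// an empty or missing value is treated as "ad hoc signing allowed".
    func adHocSigningAllowed(settings: [String: String]) -> Bool {
        let raw = settings["AD_HOC_CODE_SIGNING_ALLOWED"] ?? ""
        if raw.isEmpty { return true }
        return !["NO", "FALSE", "0"].contains(raw.uppercased())
    }

    assert(adHocSigningAllowed(settings: [:]))                                     // unset: allowed
    assert(adHocSigningAllowed(settings: ["AD_HOC_CODE_SIGNING_ALLOWED": "YES"]))  // explicit true: allowed
    assert(!adHocSigningAllowed(settings: ["AD_HOC_CODE_SIGNING_ALLOWED": "NO"]))  // only an explicit false disables it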
-@_spi(Testing) public struct DeviceSigningContext: PlatformSigningContext -{ - @_spi(Testing) public func useAdHocSigningIfSigningIsRequiredButNotSpecified(_ scope: MacroEvaluationScope) -> Bool - { +@_spi(Testing) public struct DeviceSigningContext: PlatformSigningContext { + @_spi(Testing) public func useAdHocSigningIfSigningIsRequiredButNotSpecified(_ scope: MacroEvaluationScope) -> Bool { return adHocSigningAllowed(scope) } - @_spi(Testing) public func requiresEntitlements(_ scope: MacroEvaluationScope, hasProfile: Bool, productFileType: FileTypeSpec) -> Bool - { + @_spi(Testing) public func requiresEntitlements(_ scope: MacroEvaluationScope, hasProfile: Bool, productFileType: FileTypeSpec) -> Bool { // Entitlements are only required if what we're signing is not a framework. return productFileType.isFramework ? false : (hasProfile || scope.evaluate(BuiltinMacros.ENTITLEMENTS_REQUIRED)) } } - /// Provides behavior for code signing for simulator platforms. -@_spi(Testing) public struct SimulatorSigningContext: PlatformSigningContext -{ - @_spi(Testing) public func shouldPassEntitlementsFileContentToCodeSign() -> Bool - { +@_spi(Testing) public struct SimulatorSigningContext: PlatformSigningContext { + @_spi(Testing) public func shouldPassEntitlementsFileContentToCodeSign() -> Bool { // We don't want to codesign with entitlements because we put them in the LD_ENTITLEMENTS_SECTION. return false } - @_spi(Testing) public func requiresEntitlements(_ scope: MacroEvaluationScope, hasProfile: Bool, productFileType: FileTypeSpec) -> Bool - { + @_spi(Testing) public func requiresEntitlements(_ scope: MacroEvaluationScope, hasProfile: Bool, productFileType: FileTypeSpec) -> Bool { // We don't need entitlements when building for the simulator. return false } } - diff --git a/Sources/SWBCore/SpecImplementations/CommandLineToolSpec.swift b/Sources/SWBCore/SpecImplementations/CommandLineToolSpec.swift index cee454e4..afb4c153 100644 --- a/Sources/SWBCore/SpecImplementations/CommandLineToolSpec.swift +++ b/Sources/SWBCore/SpecImplementations/CommandLineToolSpec.swift @@ -36,7 +36,7 @@ public protocol InputFileGroupable: RegionVariable { var regionVariantName: String? { get } } -extension FileToBuild: InputFileGroupable { } +extension FileToBuild: InputFileGroupable {} public protocol InputFileGroupingStrategyFactory: Sendable { func makeStrategy(specIdentifier: String) -> any InputFileGroupingStrategy @@ -62,7 +62,7 @@ public extension InputFileGroupingStrategy { } /// A grouping strategy that groups all files in a build phase which match a given build rule into the same group. For example, all files to be processed by the Swift compiler in a build phase will be passed to a single invocation of the compiler. -@_spi(Testing) public final class AllInputFilesGroupingStrategy : InputFileGroupingStrategy, Encodable { +@_spi(Testing) public final class AllInputFilesGroupingStrategy: InputFileGroupingStrategy, Encodable { /// Group identifier that’s returned for every path. let groupIdentifier: String @@ -78,7 +78,7 @@ public extension InputFileGroupingStrategy { } /// A grouping strategy that groups all files in a build phase with the same filename base into a single invocation of the tool, but files with different bases will be passed to different invocations. 
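The two grouping strategies documented above differ only in the key each file is grouped under: AllInputFilesGroupingStrategy collapses every matching file into a single tool invocation, while CommonFileBaseInputFileGroupingStrategy groups files that share a filename base. A rough standalone illustration of that difference (hypothetical free functions and a placeholder tool identifier, not the real InputFileGroupingStrategy API):

    import Foundation

    /// All inputs in one group, keyed by the tool's identifier.
    func groupAllTogether(_ inputs: [String], toolIdentifier: String) -> [String: [String]] {
        return [toolIdentifier: inputs]
    }

    /// Inputs sharing a filename base (name without extension) end up in the same group.
    func groupByCommonBase(_ inputs: [String], toolName: String) -> [String: [String]] {
        return Dictionary(grouping: inputs) { path in
            let base = (path as NSString).lastPathComponent
            return "\(toolName) \((base as NSString).deletingPathExtension)"
        }
    }

    let files = ["/src/Shape.swift", "/src/Shape.h", "/src/Circle.swift"]
    print(groupAllTogether(files, toolIdentifier: "swift-compiler"))  // one group containing all three files
    print(groupByCommonBase(files, toolName: "migrate"))              // "migrate Shape" has two files, "migrate Circle" has one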
-@_spi(Testing) public final class CommonFileBaseInputFileGroupingStrategy : InputFileGroupingStrategy { +@_spi(Testing) public final class CommonFileBaseInputFileGroupingStrategy: InputFileGroupingStrategy { /// Name of the tool to which the grouping strategy belongs (used as a part of the returned group identifier). let toolName: String @@ -99,7 +99,6 @@ protocol DependencyInfoEditableTaskPayload: TaskPayload { // MARK: - /// A class that adopts this protocol can be used to collect information for creating tasks for a given command line tool spec, e.g. information from elsewhere in the build phase or target which is not local to the input files of the task being created. public protocol BuildPhaseInfoForToolSpec: AnyObject, Sendable { // Certainly other parameters can be added here, or ways to collect broader information than just on individual files, but the initial implementation only covers what it was needed for. @@ -108,7 +107,6 @@ public protocol BuildPhaseInfoForToolSpec: AnyObject, Sendable { func addToContext(_ ftb: FileToBuild) } - /// Discovered info about a command line tool spec. public protocol DiscoveredCommandLineToolSpecInfo: BuildOptionGenerationContext, Sendable { } @@ -248,14 +246,14 @@ extension DiscoveredCommandLineToolSpecInfo { } return try await producer.discoveredCommandLineToolSpecInfo(delegate, toolPath.basename, ["/usr/bin/what", "-q", toolPath.str]) { executionResult in let outputString = String(decoding: executionResult.stdout, as: UTF8.self).trimmingCharacters(in: .whitespacesAndNewlines) - let lines = Set(outputString.split(separator: "\n").map(String.init)) // version info is printed once per architecture slice, but we never expect them to differ + let lines = Set(outputString.split(separator: "\n").map(String.init)) // version info is printed once per architecture slice, but we never expect them to differ return try construct(AppleGenericVersionInfo(string: lines.only ?? outputString)) } } } -open class CommandLineToolSpec : PropertyDomainSpec, SpecType, TaskTypeDescription, @unchecked Sendable { - package enum CommandLineTemplateArg : Sendable { +open class CommandLineToolSpec: PropertyDomainSpec, SpecType, TaskTypeDescription, @unchecked Sendable { + package enum CommandLineTemplateArg: Sendable { /// Placeholder for the dynamically computed executable path. // // FIXME: Note, this is only used by 'Ld.xcspec', there might be a simpler implementation. 
@@ -443,9 +441,11 @@ open class CommandLineToolSpec : PropertyDomainSpec, SpecType, TaskTypeDescripti parser.error("invalid 'CommandLine' template placeholder arg: '\(str)'") return .literal(value: parser.delegate.internalMacroNamespace.parseLiteralStringList([str])) default: - return .literal(value: parser.delegate.internalMacroNamespace.parseStringList(str) { diag in + return .literal( + value: parser.delegate.internalMacroNamespace.parseStringList(str) { diag in parser.handleMacroDiagnostic(diag, "macro parsing error in 'CommandLine' template") - }) + } + ) } } } @@ -468,9 +468,11 @@ open class CommandLineToolSpec : PropertyDomainSpec, SpecType, TaskTypeDescripti parser.error("invalid 'RuleName' template placeholder arg: '\(str)'") return .literal(value: parser.delegate.internalMacroNamespace.parseLiteralStringList([str])) default: - return .literal(value: parser.delegate.internalMacroNamespace.parseStringList(str) { diag in + return .literal( + value: parser.delegate.internalMacroNamespace.parseStringList(str) { diag in parser.handleMacroDiagnostic(diag, "macro parsing error in 'RuleName' template") - }) + } + ) } } } @@ -479,8 +481,7 @@ open class CommandLineToolSpec : PropertyDomainSpec, SpecType, TaskTypeDescripti // Parse the execution description, which is a macro-expandable display description of a single invocation of the tool. if let execDescString = parser.parseString("ExecDescription") { self.execDescription = parser.delegate.internalMacroNamespace.parseString(execDescString) - } - else { + } else { self.execDescription = nil } @@ -559,14 +560,19 @@ open class CommandLineToolSpec : PropertyDomainSpec, SpecType, TaskTypeDescripti if let envVariables = parser.parseObject("EnvironmentVariables", inherited: false) { if case .plDict(let items) = envVariables { var variables: [(String, MacroStringExpression)] = [] - for (key,valueData) in items.sorted(by: \.0) { + for (key, valueData) in items.sorted(by: \.0) { guard case .plString(let value) = valueData else { parser.error("invalid value for '\(key)' key in 'EnvironmentVariables' (expected string)") continue } - variables.append((key, parser.delegate.internalMacroNamespace.parseString(value) { diag in - parser.handleMacroDiagnostic(diag, "macro parsing error in 'EnvironmentVariables' for key '\(key)'") - })) + variables.append( + ( + key, + parser.delegate.internalMacroNamespace.parseString(value) { diag in + parser.handleMacroDiagnostic(diag, "macro parsing error in 'EnvironmentVariables' for key '\(key)'") + } + ) + ) } self.environmentVariables = variables } else { @@ -587,8 +593,7 @@ open class CommandLineToolSpec : PropertyDomainSpec, SpecType, TaskTypeDescripti if let additionalDirectoriesToCreate = parser.parseStringList("AdditionalDirectoriesToCreate") { self.additionalDirectoriesToCreate = additionalDirectoriesToCreate.map({ parser.delegate.internalMacroNamespace.parseString($0) }) - } - else { + } else { self.additionalDirectoriesToCreate = nil } @@ -618,7 +623,7 @@ open class CommandLineToolSpec : PropertyDomainSpec, SpecType, TaskTypeDescripti for grouping in groupings { // We should really have something more extensible here, but for now this will do. 
switch grouping { - case "tool": groupingStrategies.append(AllInputFilesGroupingStrategy(groupIdentifier: parser.proxy.data["Identifier"]!.description)) + case "tool": groupingStrategies.append(AllInputFilesGroupingStrategy(groupIdentifier: parser.proxy.data["Identifier"]!.description)) case "common-file-base": groupingStrategies.append(CommonFileBaseInputFileGroupingStrategy(toolName: parser.proxy.data["Identifier"]!.description)) default: if let strategy = parser.delegate.groupingStrategy(name: grouping, specIdentifier: parser.proxy.data["Identifier"]!.description) { @@ -629,8 +634,7 @@ open class CommandLineToolSpec : PropertyDomainSpec, SpecType, TaskTypeDescripti } } self.inputFileGroupingStrategies = groupingStrategies - } - else { + } else { self.inputFileGroupingStrategies = nil } @@ -649,7 +653,7 @@ open class CommandLineToolSpec : PropertyDomainSpec, SpecType, TaskTypeDescripti // // FIXME: Eliminate any of these fields which are unused. parser.parseStringList("AdditionalFilesToClean") - parser.parseString("AdditionalInputFiles") // FIXME: This should be a string list. + parser.parseString("AdditionalInputFiles") // FIXME: This should be a string list. parser.parseBool("CaresAboutInclusionDependencies") parser.parseString("CommandIdentifier") parser.parseObject("CommandOutputParser") @@ -815,7 +819,7 @@ open class CommandLineToolSpec : PropertyDomainSpec, SpecType, TaskTypeDescripti outputs.append(delegate.createNode(onlyPath)) return .dependencyInfo(onlyPath) } else { - assert(!only.value.isEmpty) // shouldn't be possible to get an empty array here + assert(!only.value.isEmpty) // shouldn't be possible to get an empty array here delegate.error("Multiple build options specified dependency info in ld64 format") } case .makefile: @@ -823,7 +827,7 @@ open class CommandLineToolSpec : PropertyDomainSpec, SpecType, TaskTypeDescripti outputs.append(delegate.createNode(onlyPath)) return .makefile(onlyPath) } else { - assert(!only.value.isEmpty) // shouldn't be possible to get an empty array here + assert(!only.value.isEmpty) // shouldn't be possible to get an empty array here outputs.append(contentsOf: only.value.map(delegate.createNode)) return .makefiles(only.value) } @@ -900,13 +904,11 @@ open class CommandLineToolSpec : PropertyDomainSpec, SpecType, TaskTypeDescripti if swapOutputsWithInputsForIndexing { if let input = cbc.inputs.only, outputs.count == 1 { indexingInputReplacement = input.absolutePath - } - else { + } else { delegate.warning("SwapOutputsWithInputsForIndexing is enabled, but there were \(cbc.inputs.count) inputs and \(outputs.count) outputs (expected 1 and 1)") indexingInputReplacement = nil } - } - else { + } else { indexingInputReplacement = nil } @@ -916,12 +918,14 @@ open class CommandLineToolSpec : PropertyDomainSpec, SpecType, TaskTypeDescripti // Add the additional outputs defined by the spec. These are not declared as outputs but should be processed by the tool separately. 
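Elsewhere in this file, the dependency-data handling above enforces that ld64-format dependency info must resolve to exactly one file, whereas Makefile-format discovered dependencies may be one file or several. A schematic version of that decision (a hypothetical enum; Swift Build's actual dependency-data representation is richer and also distinguishes a single-makefile case):

    /// Simplified mirror of the two dependency-data styles discussed above.
    enum DependencyDataSketch {
        case dependencyInfo(String)   // exactly one ld64-format file
        case makefiles([String])      // one or more Makefile-format files
    }

    /// Pick a style from the declared paths, reporting the unsupported case of
    /// multiple ld64-format files the same way the spec logic above does.
    func resolveDependencyData(
        ld64Paths: [String],
        makefilePaths: [String],
        error: (String) -> Void
    ) -> DependencyDataSketch? {
        if !ld64Paths.isEmpty {
            guard let only = ld64Paths.first, ld64Paths.count == 1 else {
                error("Multiple build options specified dependency info in ld64 format")
                return nil
            }
            return .dependencyInfo(only)
        }
        return makefilePaths.isEmpty ? nil : .makefiles(makefilePaths)
    }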
let additionalEvaluatedOutputsResult = await additionalEvaluatedOutputs(cbc, delegate) - outputs.append(contentsOf: additionalEvaluatedOutputsResult.outputs.map { output in - if let fileTypeIdentifier = output.fileType, let fileType = cbc.producer.lookupFileType(identifier: fileTypeIdentifier) { - delegate.declareOutput(FileToBuild(absolutePath: output.path, fileType: fileType)) + outputs.append( + contentsOf: additionalEvaluatedOutputsResult.outputs.map { output in + if let fileTypeIdentifier = output.fileType, let fileType = cbc.producer.lookupFileType(identifier: fileTypeIdentifier) { + delegate.declareOutput(FileToBuild(absolutePath: output.path, fileType: fileType)) + } + return delegate.createNode(output.path) } - return delegate.createNode(output.path) - }) + ) if let infoPlistContent = additionalEvaluatedOutputsResult.generatedInfoPlistContent { delegate.declareGeneratedInfoPlistContent(infoPlistContent) @@ -931,10 +935,12 @@ open class CommandLineToolSpec : PropertyDomainSpec, SpecType, TaskTypeDescripti let executionDescription = resolveExecutionDescription(cbc, delegate, lookup: lookup) // Create the inputs. - var inputs: [any PlannedNode] = cbc.inputs.flatMap{ input -> [any PlannedNode] in + var inputs: [any PlannedNode] = cbc.inputs.flatMap { input -> [any PlannedNode] in if areInputsDirectoryTrees { - return [delegate.createDirectoryTreeNode(input.absolutePath), - delegate.createNode(input.absolutePath)] as [any PlannedNode] + return [ + delegate.createDirectoryTreeNode(input.absolutePath), + delegate.createNode(input.absolutePath), + ] as [any PlannedNode] } else { return [delegate.createNode(input.absolutePath)] as [any PlannedNode] } @@ -951,11 +957,16 @@ open class CommandLineToolSpec : PropertyDomainSpec, SpecType, TaskTypeDescripti await inputs.append(contentsOf: additionalInputDependencies(cbc, delegate, optionContext: discoveredCommandLineToolSpecInfo(cbc.producer, cbc.scope, delegate), lookup: lookup).map(delegate.createNode)) delegate.createTask( - type: self, dependencyData: dependencyData, payload: payload, - ruleInfo: ruleInfo, commandLine: commandLine, + type: self, + dependencyData: dependencyData, + payload: payload, + ruleInfo: ruleInfo, + commandLine: commandLine, environment: EnvironmentBindings(environment), workingDirectory: cbc.producer.defaultWorkingDirectory, - inputs: inputs, outputs: outputs, mustPrecede: [], + inputs: inputs, + outputs: outputs, + mustPrecede: [], action: createTaskAction(cbc, delegate), execDescription: executionDescription, preparesForIndexing: cbc.preparesForIndexing, @@ -1007,18 +1018,20 @@ open class CommandLineToolSpec : PropertyDomainSpec, SpecType, TaskTypeDescripti let inputFileType = cbc.inputs.first?.fileType let lookup = { self.lookup($0, cbc, delegate) } let optionContext = await discoveredCommandLineToolSpecInfo(producer, scope, delegate) - result.outputs.append(contentsOf: cbc.producer.effectiveFlattenedOrderedBuildOptions(self, filter: .all).flatMap { buildOption -> [(Path, String?)] in - // Check if the effective arguments for this build option were non-empty as a proxy for whether it got filtered out by architecture mismatch, etc. 
- guard let outputDependencies = buildOption.outputDependencies, !buildOption.getArgumentsForCommand(producer, scope: scope, inputFileType: inputFileType, optionContext: optionContext, lookup: lookup).isEmpty else { - return [] - } - return outputDependencies.compactMap { outputDependency in - guard let path = Path(scope.evaluate(outputDependency.path, lookup: lookup)).nilIfEmpty else { - return nil + result.outputs.append( + contentsOf: cbc.producer.effectiveFlattenedOrderedBuildOptions(self, filter: .all).flatMap { buildOption -> [(Path, String?)] in + // Check if the effective arguments for this build option were non-empty as a proxy for whether it got filtered out by architecture mismatch, etc. + guard let outputDependencies = buildOption.outputDependencies, !buildOption.getArgumentsForCommand(producer, scope: scope, inputFileType: inputFileType, optionContext: optionContext, lookup: lookup).isEmpty else { + return [] + } + return outputDependencies.compactMap { outputDependency in + guard let path = Path(scope.evaluate(outputDependency.path, lookup: lookup)).nilIfEmpty else { + return nil + } + return (path.normalize(), outputDependency.fileType.map { scope.evaluate($0, lookup: lookup).nilIfEmpty } ?? nil) } - return (path.normalize(), outputDependency.fileType.map { scope.evaluate($0, lookup: lookup).nilIfEmpty } ?? nil) } - }) + ) return result } @@ -1065,7 +1078,7 @@ open class CommandLineToolSpec : PropertyDomainSpec, SpecType, TaskTypeDescripti switch macro { case BuiltinMacros.DerivedFilesDir: return Static { BuiltinMacros.namespace.parseString("$(DERIVED_SOURCES_DIR)") } - case BuiltinMacros.InputFile, BuiltinMacros.InputFilePath, BuiltinMacros.InputPath: + case BuiltinMacros.InputFile, BuiltinMacros.InputFilePath, BuiltinMacros.InputPath: return namespace.parseLiteralString(firstInput(\.absolutePath.str) ?? "") case BuiltinMacros.InputFileDir: return namespace.parseLiteralString(firstInput(\.absolutePath.dirname.str) ?? "") @@ -1155,9 +1168,9 @@ open class CommandLineToolSpec : PropertyDomainSpec, SpecType, TaskTypeDescripti return cbc.inputs.map { $0.absolutePath.str } case .output: // We always resolve the Output via a recursive macro evaluation. See constructTasks() for more information. - return [cbc.scope.evaluate(BuiltinMacros.OutputPath, lookup: { return self.lookup($0, cbc, delegate, lookup) } )] + return [cbc.scope.evaluate(BuiltinMacros.OutputPath, lookup: { return self.lookup($0, cbc, delegate, lookup) })] case .literal(let expr): - return cbc.scope.evaluate(expr, lookup: { return self.lookup($0, cbc, delegate, lookup) } ) + return cbc.scope.evaluate(expr, lookup: { return self.lookup($0, cbc, delegate, lookup) }) } } } @@ -1245,7 +1258,7 @@ open class CommandLineToolSpec : PropertyDomainSpec, SpecType, TaskTypeDescripti case .output: // We always resolve the Output via a recursive macro evaluation. See constructTasks() for more information. 
- return [.path(Path(cbc.scope.evaluate(BuiltinMacros.OutputPath, lookup: { return self.lookup($0, cbc, delegate, lookup) } )))] + return [.path(Path(cbc.scope.evaluate(BuiltinMacros.OutputPath, lookup: { return self.lookup($0, cbc, delegate, lookup) })))] case .specialArgs: return specialArgs.map { .literal(ByteString(encodingAsUTF8: $0)) } @@ -1436,14 +1449,14 @@ extension CommandLineToolSpec.RuleInfoTemplateArg: ExpressibleByStringLiteral { } } -open class GenericCommandLineToolSpec : CommandLineToolSpec, @unchecked Sendable { +open class GenericCommandLineToolSpec: CommandLineToolSpec, @unchecked Sendable { required public init(_ parser: SpecParser, _ basedOnSpec: Spec?) { super.init(parser, basedOnSpec, isGeneric: true) } } /// A general-purpose output parser for scraping traditional POSIX-style diagnostics. Output is passed through to the delegate as it is received, while diagnostic parsing is done line-by-line as each newline is encountered. -open class GenericOutputParser : TaskOutputParser { +open class GenericOutputParser: TaskOutputParser { /// The delegate that's informed about output and diagnostics. public let delegate: any TaskOutputParserDelegate @@ -1606,7 +1619,7 @@ open class GenericOutputParser : TaskOutputParser { } } -@_spi(Testing) public final class ShellScriptOutputParser : GenericOutputParser { +@_spi(Testing) public final class ShellScriptOutputParser: GenericOutputParser { override func parseLine(_ lineBytes: S) -> Bool where S.Element == UInt8 { if !super.parseLine(lineBytes) { // Use the non-failable constructor to recover from potentially invalid UTF-8 @@ -1658,8 +1671,8 @@ public final class SerializedDiagnosticsOutputParser: TaskOutputParser { fileprivate extension Diagnostic.FixIt { // For better performance, these are declared outside the initializer, so they are just created once, but are really just an implementation detail of the initializer. 
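GenericOutputParser, described above, passes tool output through untouched while scanning it line by line for POSIX-style diagnostics of the form `FILE:LINE:COL: severity: message` (the fix-it variants that follow add a range before the severity). A compact sketch of that per-line matching, using colon splitting instead of the regular expressions the real parser relies on (hypothetical types, not the TaskOutputParser protocol):

    import Foundation

    struct ParsedDiagnostic {
        let file: String
        let line: Int
        let column: Int
        let severity: String
        let message: String
    }

    /// Parse one line of the form "FILE:LINE:COL: SEVERITY: MESSAGE".
    func parseDiagnosticLine(_ line: String) -> ParsedDiagnostic? {
        // Split into at most five fields so colons inside the message survive.
        let parts = line.split(separator: ":", maxSplits: 4, omittingEmptySubsequences: false)
        guard parts.count == 5,
              let lineNumber = Int(parts[1]),
              let column = Int(parts[2])
        else { return nil }
        return ParsedDiagnostic(
            file: String(parts[0]),
            line: lineNumber,
            column: column,
            severity: parts[3].trimmingCharacters(in: .whitespaces),
            message: parts[4].trimmingCharacters(in: .whitespaces)
        )
    }

    // Prints "error: use of undeclared identifier 'x'"
    if let diag = parseDiagnosticLine("main.c:12:5: error: use of undeclared identifier 'x'") {
        print("\(diag.severity): \(diag.message)")
    }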
- static private let fixitRangeRegex = RegEx(patternLiteral: "^([^:]+):([0-9]+):([0-9]+)-([0-9]+):([0-9]+): +fixit: (.*)$") // filename + range - static private let fixitLineColumnRegex = RegEx(patternLiteral: "^([^:]+):([0-9]+):([0-9]+): +fixit: (.*)$") // filename + line + column + static private let fixitRangeRegex = RegEx(patternLiteral: "^([^:]+):([0-9]+):([0-9]+)-([0-9]+):([0-9]+): +fixit: (.*)$") // filename + range + static private let fixitLineColumnRegex = RegEx(patternLiteral: "^([^:]+):([0-9]+):([0-9]+): +fixit: (.*)$") // filename + line + column init?(_ string: String, ignorePaths: Set, workingDirectory: Path) { // reminder: fixit lines should look like this: FILE:LINE:COL-LINE:COL: fixit: REPLACEMENT\n diff --git a/Sources/SWBCore/SpecImplementations/CompilerSpec.swift b/Sources/SWBCore/SpecImplementations/CompilerSpec.swift index ca3d1696..3b854da0 100644 --- a/Sources/SWBCore/SpecImplementations/CompilerSpec.swift +++ b/Sources/SWBCore/SpecImplementations/CompilerSpec.swift @@ -14,7 +14,7 @@ import Foundation public import SWBUtil import SWBMacro -open class CompilerSpec : CommandLineToolSpec, @unchecked Sendable { +open class CompilerSpec: CommandLineToolSpec, @unchecked Sendable { class public override var typeName: String { return "Compiler" } @@ -28,15 +28,16 @@ open class CompilerSpec : CommandLineToolSpec, @unchecked Sendable { @_spi(Testing) public let supportedLanguageVersions: [Version] public override init(_ parser: SpecParser, _ basedOnSpec: Spec?, isGeneric: Bool) { - supportedLanguageVersions = parser.parseStringList("SupportedLanguageVersions")?.compactMap { - do { - return try Version($0) - } catch { - // FIXME: This should eventually become an error. - parser.warning("Could not parse `SupportedLanguageVersions`: \(error)") - return nil - } - } ?? [] + supportedLanguageVersions = + parser.parseStringList("SupportedLanguageVersions")?.compactMap { + do { + return try Version($0) + } catch { + // FIXME: This should eventually become an error. + parser.warning("Could not parse `SupportedLanguageVersions`: \(error)") + return nil + } + } ?? [] // Parse and ignore keys we have no use for. // @@ -123,7 +124,7 @@ extension ProjectFailuresBlockList { } } -open class GenericCompilerSpec : CompilerSpec, @unchecked Sendable { +open class GenericCompilerSpec: CompilerSpec, @unchecked Sendable { required public init(_ parser: SpecParser, _ basedOnSpec: Spec?) 
{ super.init(parser, basedOnSpec, isGeneric: true) } diff --git a/Sources/SWBCore/SpecImplementations/FileTypes.swift b/Sources/SWBCore/SpecImplementations/FileTypes.swift index e7ca3976..b2ae7b7d 100644 --- a/Sources/SWBCore/SpecImplementations/FileTypes.swift +++ b/Sources/SWBCore/SpecImplementations/FileTypes.swift @@ -10,46 +10,46 @@ // //===----------------------------------------------------------------------===// -class ApplicationWrapperFileTypeSpec : FileTypeSpec, SpecClassType, @unchecked Sendable { +class ApplicationWrapperFileTypeSpec: FileTypeSpec, SpecClassType, @unchecked Sendable { static let className = "PBXApplicationWrapperFileType" } -class CFBundleWrapperFileTypeSpec : FileTypeSpec, SpecClassType, @unchecked Sendable { +class CFBundleWrapperFileTypeSpec: FileTypeSpec, SpecClassType, @unchecked Sendable { static let className = "PBXCFBundleWrapperFileType" } -class FrameworkWrapperFileTypeSpec : FileTypeSpec, SpecClassType, @unchecked Sendable { +class FrameworkWrapperFileTypeSpec: FileTypeSpec, SpecClassType, @unchecked Sendable { static let className = "PBXFrameworkWrapperFileType" } -class HTMLFileTypeSpec : FileTypeSpec, SpecClassType, @unchecked Sendable { +class HTMLFileTypeSpec: FileTypeSpec, SpecClassType, @unchecked Sendable { static let className = "PBXHTMLFileType" } -class MachOFileTypeSpec : FileTypeSpec, SpecClassType, @unchecked Sendable { +class MachOFileTypeSpec: FileTypeSpec, SpecClassType, @unchecked Sendable { static let className = "PBXMachOFileType" } -class PlistFileTypeSpec : FileTypeSpec, SpecClassType, @unchecked Sendable { +class PlistFileTypeSpec: FileTypeSpec, SpecClassType, @unchecked Sendable { static let className = "PBXPlistFileType" } -class PlugInKitPluginWrapperFileTypeSpec : FileTypeSpec, SpecClassType, @unchecked Sendable { +class PlugInKitPluginWrapperFileTypeSpec: FileTypeSpec, SpecClassType, @unchecked Sendable { static let className = "PBXPlugInKitPluginWrapperFileType" } -class SpotlightImporternWrapperFileTypeSpec : FileTypeSpec, SpecClassType, @unchecked Sendable { +class SpotlightImporternWrapperFileTypeSpec: FileTypeSpec, SpecClassType, @unchecked Sendable { static let className = "PBXSpotlightImporternWrapperFileType" } -class StaticFrameworkWrapperFileTypeSpec : FileTypeSpec, SpecClassType, @unchecked Sendable { +class StaticFrameworkWrapperFileTypeSpec: FileTypeSpec, SpecClassType, @unchecked Sendable { static let className = "XCStaticFrameworkWrapperFileType" } -class XPCServiceWrapperFileTypeSpec : FileTypeSpec, SpecClassType, @unchecked Sendable { +class XPCServiceWrapperFileTypeSpec: FileTypeSpec, SpecClassType, @unchecked Sendable { static let className = "PBXXPCServiceWrapperFileType" } -class XCFrameworkWrapperFileTypeSpec : FileTypeSpec, SpecClassType, @unchecked Sendable { +class XCFrameworkWrapperFileTypeSpec: FileTypeSpec, SpecClassType, @unchecked Sendable { static let className = "PBXXCFrameworkWrapperFileType" } diff --git a/Sources/SWBCore/SpecImplementations/LinkerSpec.swift b/Sources/SWBCore/SpecImplementations/LinkerSpec.swift index 6149d4c4..fd1b880d 100644 --- a/Sources/SWBCore/SpecImplementations/LinkerSpec.swift +++ b/Sources/SWBCore/SpecImplementations/LinkerSpec.swift @@ -15,7 +15,7 @@ public import SWBMacro public let reexportedBinariesDirectoryName = "ReexportedBinaries" -open class LinkerSpec : CommandLineToolSpec, @unchecked Sendable { +open class LinkerSpec: CommandLineToolSpec, @unchecked Sendable { /// Specifier for an individual library to be linked. 
public struct LibrarySpecifier { public enum Kind: CaseIterable, CustomStringConvertible { @@ -28,11 +28,11 @@ open class LinkerSpec : CommandLineToolSpec, @unchecked Sendable { public var description: String { switch self { - case .static: return "static library" - case .dynamic: return "dynamic library" - case .textBased: return "text-based stub" - case .framework: return "framework" - case .object: return "object file" + case .static: return "static library" + case .dynamic: return "dynamic library" + case .textBased: return "text-based stub" + case .framework: return "framework" + case .object: return "object file" case .objectLibrary: return "object library" } } @@ -155,7 +155,7 @@ open class LinkerSpec : CommandLineToolSpec, @unchecked Sendable { } } -open class GenericLinkerSpec : LinkerSpec, @unchecked Sendable { +open class GenericLinkerSpec: LinkerSpec, @unchecked Sendable { required public init(_ parser: SpecParser, _ basedOnSpec: Spec?) { super.init(parser, basedOnSpec, isGeneric: true) } diff --git a/Sources/SWBCore/SpecImplementations/ProductTypes.swift b/Sources/SWBCore/SpecImplementations/ProductTypes.swift index 3492c1de..688e9be3 100644 --- a/Sources/SWBCore/SpecImplementations/ProductTypes.swift +++ b/Sources/SWBCore/SpecImplementations/ProductTypes.swift @@ -13,7 +13,7 @@ public import SWBUtil public import SWBMacro -public class ProductTypeSpec : Spec, SpecType, @unchecked Sendable { +public class ProductTypeSpec: Spec, SpecType, @unchecked Sendable { /// The level to elevate the deprecation message as. public enum DeprecationLevel { case warning @@ -141,8 +141,7 @@ public class ProductTypeSpec : Spec, SpecType, @unchecked Sendable { if let specIdItem = plist["ValidationToolSpec"] { if case .plString(let specId) = specIdItem { productValidationToolSpecIdentifier = specId - } - else { + } else { parser.error("Value for 'ValidationToolSpec' in the 'Validation' dictionary for product type '\(parser.proxy.identifier)' must be a string but is: \(specIdItem)") } } @@ -151,49 +150,50 @@ public class ProductTypeSpec : Spec, SpecType, @unchecked Sendable { } self.productValidationToolSpecIdentifier = productValidationToolSpecIdentifier - self.buildPhaseFileRefAdditions = { () -> [String: [BuildPhaseFileRefAddition]]? in - let keyPath = "BuildPhaseFileRefAdditions" - guard let item = parser.parseObject(keyPath) else { return nil } - guard case .plDict(let dict) = item else { parser.error("Expected dict in \(keyPath)"); return nil } - var result = [String: [BuildPhaseFileRefAddition]](minimumCapacity: dict.count) - - for (identifier, valuesPL) in dict { - let phaseKeyPath = "\(keyPath)['\(identifier)']" - guard case .plArray(let valuesArray) = valuesPL else { parser.error("Expected array in \(phaseKeyPath)"); return nil } - var resultValues = [BuildPhaseFileRefAddition]() - - for (index, valuePL) in valuesArray.enumerated() { - let fileKeyPath = "\(phaseKeyPath)['\(identifier)'][\(index)]" - guard case .plDict(let valueDict) = valuePL else { - parser.error("Expected dict in \(fileKeyPath), but got \(valuesPL)") - return nil - } - - // Path must be present and must be a string - guard case .plString(let path)? = valueDict["Path"] else { - parser.error("Expected string in \(fileKeyPath)['Path']") - return nil - } - - // RegionVariantName must be a non-empty string if it is present, if it is missing this indicates non-localized content - let regionVariantName: String? 
= { - guard let regionVariantNamePL = valueDict["RegionVariantName"] else { + self.buildPhaseFileRefAdditions = + { () -> [String: [BuildPhaseFileRefAddition]]? in + let keyPath = "BuildPhaseFileRefAdditions" + guard let item = parser.parseObject(keyPath) else { return nil } + guard case .plDict(let dict) = item else { parser.error("Expected dict in \(keyPath)"); return nil } + var result = [String: [BuildPhaseFileRefAddition]](minimumCapacity: dict.count) + + for (identifier, valuesPL) in dict { + let phaseKeyPath = "\(keyPath)['\(identifier)']" + guard case .plArray(let valuesArray) = valuesPL else { parser.error("Expected array in \(phaseKeyPath)"); return nil } + var resultValues = [BuildPhaseFileRefAddition]() + + for (index, valuePL) in valuesArray.enumerated() { + let fileKeyPath = "\(phaseKeyPath)['\(identifier)'][\(index)]" + guard case .plDict(let valueDict) = valuePL else { + parser.error("Expected dict in \(fileKeyPath), but got \(valuesPL)") return nil } - guard case .plString(let regionVariantName) = regionVariantNamePL, !regionVariantName.isEmpty else { - parser.warning("Expected non-empty string in \(fileKeyPath)['RegionVariantName']") + + // Path must be present and must be a string + guard case .plString(let path)? = valueDict["Path"] else { + parser.error("Expected string in \(fileKeyPath)['Path']") return nil } - return regionVariantName - }() - resultValues.append(BuildPhaseFileRefAddition(path: BuiltinMacros.namespace.parseString(path), regionVariantName: BuiltinMacros.namespace.parseString(regionVariantName ?? ""))) - } + // RegionVariantName must be a non-empty string if it is present, if it is missing this indicates non-localized content + let regionVariantName: String? = { + guard let regionVariantNamePL = valueDict["RegionVariantName"] else { + return nil + } + guard case .plString(let regionVariantName) = regionVariantNamePL, !regionVariantName.isEmpty else { + parser.warning("Expected non-empty string in \(fileKeyPath)['RegionVariantName']") + return nil + } + return regionVariantName + }() + + resultValues.append(BuildPhaseFileRefAddition(path: BuiltinMacros.namespace.parseString(path), regionVariantName: BuiltinMacros.namespace.parseString(regionVariantName ?? ""))) + } - result[identifier] = resultValues - } + result[identifier] = resultValues + } - return result + return result }() ?? [:] self.infoPlistAdditions = parser.parseObject("InfoPlistAdditions")?.withConcreteBooleans(forKeys: ProductTypeSpec.booleanizedInfoPlistKeys) @@ -207,8 +207,7 @@ public class ProductTypeSpec : Spec, SpecType, @unchecked Sendable { let parsedLevel = parser.parseString("DeprecationLevel") ?? 
"warning" if let level = DeprecationLevel(level: parsedLevel) { return DeprecationInfo(reason: reason, level: level) - } - else { + } else { parser.error("invalid 'DeprecationLevel' value of '\(parsedLevel)'") } } @@ -371,11 +370,9 @@ public class ProductTypeSpec : Spec, SpecType, @unchecked Sendable { } } - // MARK: Bundle product types - -public class BundleProductTypeSpec : ProductTypeSpec, SpecClassType, @unchecked Sendable { +public class BundleProductTypeSpec: ProductTypeSpec, SpecClassType, @unchecked Sendable { public class var className: String { return "PBXBundleProductType" } @@ -383,13 +380,12 @@ public class BundleProductTypeSpec : ProductTypeSpec, SpecClassType, @unchecked // autoConfigureAsMergeableLibrary() is not overridden here: Even if its MACH_O_TYPE has been changed to 'mh_dylib', automatically building a generic bundle as mergeable is outside what we want to handle automatically. (We might change our mind in the future.) // This was originally implemented in an extension in InfoPlistTaskProducer.swift for rdar://78512102, but it has been adopted in other places so has been moved here. It might be that this logic doesn't even belong on this class. - public static func validateBuildComponents(_ buildComponents: [String], scope: MacroEvaluationScope) -> Bool - { - return buildComponents.contains("build") || (buildComponents.contains("installLoc") && scope.evaluate(BuiltinMacros.INSTALLLOC_LANGUAGE).isEmpty) || buildComponents.contains("exportLoc") - } + public static func validateBuildComponents(_ buildComponents: [String], scope: MacroEvaluationScope) -> Bool { + return buildComponents.contains("build") || (buildComponents.contains("installLoc") && scope.evaluate(BuiltinMacros.INSTALLLOC_LANGUAGE).isEmpty) || buildComponents.contains("exportLoc") + } } -public final class ApplicationProductTypeSpec : BundleProductTypeSpec, @unchecked Sendable { +public final class ApplicationProductTypeSpec: BundleProductTypeSpec, @unchecked Sendable { class public override var className: String { return "PBXApplicationProductType" } @@ -419,7 +415,7 @@ public final class ApplicationExtensionProductTypeSpec: BundleProductTypeSpec, @ } } -public class FrameworkProductTypeSpec : BundleProductTypeSpec, @unchecked Sendable { +public class FrameworkProductTypeSpec: BundleProductTypeSpec, @unchecked Sendable { class public override var className: String { return "PBXFrameworkProductType" } @@ -448,39 +444,44 @@ public class FrameworkProductTypeSpec : BundleProductTypeSpec, @unchecked Sendab location: wrapperFolderPath.join(currentVersionFolderPath), toPath: Path(scope.evaluateAsString(BuiltinMacros.FRAMEWORK_VERSION)), effectiveToPath: nil - ) ) + ) + ) // Resources -> Versions/Current/Resources let unlocalizedResourcesFolderName = Path(scope.evaluateAsString(BuiltinMacros.UNLOCALIZED_RESOURCES_FOLDER_PATH)).basename descriptors.insert( SymlinkDescriptor( - location: wrapperFolderPath.join(unlocalizedResourcesFolderName), + location: wrapperFolderPath.join(unlocalizedResourcesFolderName), toPath: currentVersionFolderPath.join(unlocalizedResourcesFolderName), effectiveToPath: frameworkVersionFolderPath.join(unlocalizedResourcesFolderName) - ) ) + ) + ) // Headers -> Versions/Current/Headers let headersResourcesFolderName = Path(scope.evaluateAsString(BuiltinMacros.PUBLIC_HEADERS_FOLDER_PATH)).basename descriptors.insert( SymlinkDescriptor( - location: wrapperFolderPath.join(headersResourcesFolderName), + location: wrapperFolderPath.join(headersResourcesFolderName), toPath: 
currentVersionFolderPath.join(headersResourcesFolderName), effectiveToPath: frameworkVersionFolderPath.join(headersResourcesFolderName) - ) ) + ) + ) // PrivateHeaders -> Versions/Current/PrivateHeaders let privateHeadersFolderName = Path(scope.evaluateAsString(BuiltinMacros.PRIVATE_HEADERS_FOLDER_PATH)).basename descriptors.insert( SymlinkDescriptor( - location: wrapperFolderPath.join(privateHeadersFolderName), + location: wrapperFolderPath.join(privateHeadersFolderName), toPath: currentVersionFolderPath.join(privateHeadersFolderName), effectiveToPath: frameworkVersionFolderPath.join(privateHeadersFolderName) - ) ) + ) + ) // Modules -> Versions/Current/Modules let modulesFolderName = Path(scope.evaluateAsString(BuiltinMacros.MODULES_FOLDER_PATH)).basename descriptors.insert( SymlinkDescriptor( - location: wrapperFolderPath.join(modulesFolderName), + location: wrapperFolderPath.join(modulesFolderName), toPath: currentVersionFolderPath.join(modulesFolderName), effectiveToPath: frameworkVersionFolderPath.join(modulesFolderName) - ) ) + ) + ) // Frameworks -> Versions/Current/Frameworks let frameworksFolderName = Path(scope.evaluateAsString(BuiltinMacros.FRAMEWORKS_FOLDER_PATH)).basename descriptors.insert( @@ -488,15 +489,17 @@ public class FrameworkProductTypeSpec : BundleProductTypeSpec, @unchecked Sendab location: wrapperFolderPath.join(frameworksFolderName), toPath: currentVersionFolderPath.join(frameworksFolderName), effectiveToPath: frameworkVersionFolderPath.join(frameworksFolderName) - ) ) + ) + ) // PlugIns -> Versions/Current/PlugIns let pluginsFolderName = Path(scope.evaluateAsString(BuiltinMacros.PLUGINS_FOLDER_PATH)).basename descriptors.insert( SymlinkDescriptor( - location: wrapperFolderPath.join(pluginsFolderName), + location: wrapperFolderPath.join(pluginsFolderName), toPath: currentVersionFolderPath.join(pluginsFolderName), effectiveToPath: frameworkVersionFolderPath.join(pluginsFolderName) - ) ) + ) + ) // Extensions -> Versions/Current/Extensions let extensionsFolderName = Path(scope.evaluateAsString(BuiltinMacros.EXTENSIONS_FOLDER_PATH)).basename descriptors.insert( @@ -504,23 +507,26 @@ public class FrameworkProductTypeSpec : BundleProductTypeSpec, @unchecked Sendab location: wrapperFolderPath.join(extensionsFolderName), toPath: currentVersionFolderPath.join(extensionsFolderName), effectiveToPath: frameworkVersionFolderPath.join(extensionsFolderName) - ) ) + ) + ) // -> Versions/Current/ let executableName = scope.evaluateAsString(BuiltinMacros.EXECUTABLE_NAME) descriptors.insert( SymlinkDescriptor( - location: wrapperFolderPath.join(executableName), + location: wrapperFolderPath.join(executableName), toPath: currentVersionFolderPath.join(executableName), effectiveToPath: frameworkVersionFolderPath.join(executableName) - ) ) + ) + ) // XPCServices -> Versions/Current/XPCServices let xpcServicesFolderName = Path(scope.evaluateAsString(BuiltinMacros.XPCSERVICES_FOLDER_PATH)).basename descriptors.insert( SymlinkDescriptor( - location: wrapperFolderPath.join(xpcServicesFolderName), + location: wrapperFolderPath.join(xpcServicesFolderName), toPath: currentVersionFolderPath.join(xpcServicesFolderName), effectiveToPath: frameworkVersionFolderPath.join(xpcServicesFolderName) - ) ) + ) + ) // ExtraModules -> Versions/Current/ExtraModules if scope.evaluate(BuiltinMacros.BUILD_PACKAGE_FOR_DISTRIBUTION) { descriptors.insert( @@ -528,7 +534,8 @@ public class FrameworkProductTypeSpec : BundleProductTypeSpec, @unchecked Sendab location: wrapperFolderPath.join("ExtraModules"), toPath: 
currentVersionFolderPath.join("ExtraModules"), effectiveToPath: frameworkVersionFolderPath.join("ExtraModules") - ) ) + ) + ) } return descriptors @@ -539,47 +546,47 @@ public class FrameworkProductTypeSpec : BundleProductTypeSpec, @unchecked Sendab return ArtifactInfo(kind: .framework, path: path) } -/* - /// Build setting expressions to evaluate to determine how to create symbolic links for the product structure. - static let productStructureSymlinkBuildSettings = [SymlinkDescriptor]([ - // Versions/Current -> A - SymlinkDescriptor( - location: BuiltinMacros.namespace.parseString("$(TARGET_BUILT_DIR)/$(WRAPPER_NAME)/$(VERSIONS_PATH)/$(CURRENT_VERSION)"), - toPath: BuiltinMacros.namespace.parseString("$(FRAMEWORK_VERSION)")), - // Resources -> Versions/Current/Resources - SymlinkDescriptor( - location: BuiltinMacros.namespace.parseString("$(TARGET_BUILT_DIR)/$(WRAPPER_NAME)/$(UNLOCALIZED_RESOURCES_FOLDER_PATH:file)"), - toPath: BuiltinMacros.namespace.parseString("$(VERSIONS_PATH)/$(CURRENT_VERSION)/$(UNLOCALIZED_RESOURCES_FOLDER_PATH:file)")), - // Headers -> Versions/Current/Headers - SymlinkDescriptor( - location: BuiltinMacros.namespace.parseString("$(TARGET_BUILT_DIR)/$(WRAPPER_NAME)/$(PUBLIC_HEADERS_FOLDER_PATH:file)"), - toPath: BuiltinMacros.namespace.parseString("$(VERSIONS_PATH)/$(CURRENT_VERSION)/$(PUBLIC_HEADERS_FOLDER_PATH:file)")), - // PrivateHeaders -> Versions/Current/PrivateHeaders - SymlinkDescriptor( - location: BuiltinMacros.namespace.parseString("$(TARGET_BUILT_DIR)/$(WRAPPER_NAME)/$(PRIVATE_HEADERS_FOLDER_PATH:file)"), - toPath: BuiltinMacros.namespace.parseString("$(VERSIONS_PATH)/$(CURRENT_VERSION)/$(PRIVATE_HEADERS_FOLDER_PATH:file)")), - // Modules -> Versions/Current/Modules - SymlinkDescriptor( - location: BuiltinMacros.namespace.parseString("$(TARGET_BUILT_DIR)/$(WRAPPER_NAME)/$(MODULES_FOLDER_PATH:file)"), - toPath: BuiltinMacros.namespace.parseString("$(VERSIONS_PATH)/$(CURRENT_VERSION)/$(MODULES_FOLDER_PATH:file)")), - // PlugIns -> Versions/Current/PlugIns - SymlinkDescriptor( - location: BuiltinMacros.namespace.parseString("$(TARGET_BUILT_DIR)/$(WRAPPER_NAME)/$(PLUGINS_FOLDER_PATH:file)"), - toPath: BuiltinMacros.namespace.parseString("$(VERSIONS_PATH)/$(CURRENT_VERSION)/$(PLUGINS_FOLDER_PATH:file)")), - // Extensions -> Versions/Current/Extensions - SymlinkDescriptor( - location: BuiltinMacros.namespace.parseString("$(TARGET_BUILT_DIR)/$(WRAPPER_NAME)/$(EXTENSIONS_FOLDER_PATH:file)"), - toPath: BuiltinMacros.namespace.parseString("$(VERSIONS_PATH)/$(CURRENT_VERSION)/$(EXTENSIONS_FOLDER_PATH:file)")), - // -> Versions/Current/ - SymlinkDescriptor( - location: BuiltinMacros.namespace.parseString("$(TARGET_BUILT_DIR)/$(WRAPPER_NAME)/$(EXECUTABLE_NAME)"), - toPath: BuiltinMacros.namespace.parseString("$(VERSIONS_PATH)/$(CURRENT_VERSION)/$(EXECUTABLE_NAME)")), - // XPCServices -> Versions/Current/XPCServices - SymlinkDescriptor( - location: BuiltinMacros.namespace.parseString("$(TARGET_BUILT_DIR)/$(WRAPPER_NAME)/$(XPCSERVICES_FOLDER_PATH:file)"), - toPath: BuiltinMacros.namespace.parseString("$(VERSIONS_PATH)/$(CURRENT_VERSION)/$(XPCSERVICES_FOLDER_PATH:file)")), - ]) -*/ + /* + /// Build setting expressions to evaluate to determine how to create symbolic links for the product structure. 
+ static let productStructureSymlinkBuildSettings = [SymlinkDescriptor]([ + // Versions/Current -> A + SymlinkDescriptor( + location: BuiltinMacros.namespace.parseString("$(TARGET_BUILT_DIR)/$(WRAPPER_NAME)/$(VERSIONS_PATH)/$(CURRENT_VERSION)"), + toPath: BuiltinMacros.namespace.parseString("$(FRAMEWORK_VERSION)")), + // Resources -> Versions/Current/Resources + SymlinkDescriptor( + location: BuiltinMacros.namespace.parseString("$(TARGET_BUILT_DIR)/$(WRAPPER_NAME)/$(UNLOCALIZED_RESOURCES_FOLDER_PATH:file)"), + toPath: BuiltinMacros.namespace.parseString("$(VERSIONS_PATH)/$(CURRENT_VERSION)/$(UNLOCALIZED_RESOURCES_FOLDER_PATH:file)")), + // Headers -> Versions/Current/Headers + SymlinkDescriptor( + location: BuiltinMacros.namespace.parseString("$(TARGET_BUILT_DIR)/$(WRAPPER_NAME)/$(PUBLIC_HEADERS_FOLDER_PATH:file)"), + toPath: BuiltinMacros.namespace.parseString("$(VERSIONS_PATH)/$(CURRENT_VERSION)/$(PUBLIC_HEADERS_FOLDER_PATH:file)")), + // PrivateHeaders -> Versions/Current/PrivateHeaders + SymlinkDescriptor( + location: BuiltinMacros.namespace.parseString("$(TARGET_BUILT_DIR)/$(WRAPPER_NAME)/$(PRIVATE_HEADERS_FOLDER_PATH:file)"), + toPath: BuiltinMacros.namespace.parseString("$(VERSIONS_PATH)/$(CURRENT_VERSION)/$(PRIVATE_HEADERS_FOLDER_PATH:file)")), + // Modules -> Versions/Current/Modules + SymlinkDescriptor( + location: BuiltinMacros.namespace.parseString("$(TARGET_BUILT_DIR)/$(WRAPPER_NAME)/$(MODULES_FOLDER_PATH:file)"), + toPath: BuiltinMacros.namespace.parseString("$(VERSIONS_PATH)/$(CURRENT_VERSION)/$(MODULES_FOLDER_PATH:file)")), + // PlugIns -> Versions/Current/PlugIns + SymlinkDescriptor( + location: BuiltinMacros.namespace.parseString("$(TARGET_BUILT_DIR)/$(WRAPPER_NAME)/$(PLUGINS_FOLDER_PATH:file)"), + toPath: BuiltinMacros.namespace.parseString("$(VERSIONS_PATH)/$(CURRENT_VERSION)/$(PLUGINS_FOLDER_PATH:file)")), + // Extensions -> Versions/Current/Extensions + SymlinkDescriptor( + location: BuiltinMacros.namespace.parseString("$(TARGET_BUILT_DIR)/$(WRAPPER_NAME)/$(EXTENSIONS_FOLDER_PATH:file)"), + toPath: BuiltinMacros.namespace.parseString("$(VERSIONS_PATH)/$(CURRENT_VERSION)/$(EXTENSIONS_FOLDER_PATH:file)")), + // -> Versions/Current/ + SymlinkDescriptor( + location: BuiltinMacros.namespace.parseString("$(TARGET_BUILT_DIR)/$(WRAPPER_NAME)/$(EXECUTABLE_NAME)"), + toPath: BuiltinMacros.namespace.parseString("$(VERSIONS_PATH)/$(CURRENT_VERSION)/$(EXECUTABLE_NAME)")), + // XPCServices -> Versions/Current/XPCServices + SymlinkDescriptor( + location: BuiltinMacros.namespace.parseString("$(TARGET_BUILT_DIR)/$(WRAPPER_NAME)/$(XPCSERVICES_FOLDER_PATH:file)"), + toPath: BuiltinMacros.namespace.parseString("$(VERSIONS_PATH)/$(CURRENT_VERSION)/$(XPCSERVICES_FOLDER_PATH:file)")), + ]) + */ override func computeAdditionalLinkerArgs(_ producer: any CommandProducer, scope: MacroEvaluationScope, lookup: @escaping ((MacroDeclaration) -> MacroStringExpression?)) -> (args: [String], inputs: [Path]) { if scope.evaluate(BuiltinMacros.MACH_O_TYPE) != "staticlib" { @@ -598,20 +605,20 @@ public class FrameworkProductTypeSpec : BundleProductTypeSpec, @unchecked Sendab } } -public final class StaticFrameworkProductTypeSpec : FrameworkProductTypeSpec, @unchecked Sendable { +public final class StaticFrameworkProductTypeSpec: FrameworkProductTypeSpec, @unchecked Sendable { class public override var className: String { return "XCStaticFrameworkProductType" } } -public final class KernelExtensionProductTypeSpec : BundleProductTypeSpec, @unchecked Sendable { +public final class 
KernelExtensionProductTypeSpec: BundleProductTypeSpec, @unchecked Sendable { class public override var className: String { return "XCKernelExtensionProductType" } } /// The product type for XCTest unit and UI test bundles. -public final class XCTestBundleProductTypeSpec : BundleProductTypeSpec, @unchecked Sendable { +public final class XCTestBundleProductTypeSpec: BundleProductTypeSpec, @unchecked Sendable { class public override var className: String { return "PBXXCTestBundleProductType" } @@ -676,8 +683,7 @@ public final class XCTestBundleProductTypeSpec : BundleProductTypeSpec, @uncheck // Add to the macro definition table based on how tests are being run (XCTRunner, TEST_HOST, or neither). if type(of: self).usesXCTRunner(scope) { addXCTRunnerSettings(to: &table, scope, platform, &warnings, &errors) - } - else if type(of: self).usesTestHost(scope) { + } else if type(of: self).usesTestHost(scope) { addTestHostSettings(to: &table, Path(scope.evaluate(BuiltinMacros.TEST_HOST)), scope, platform, &warnings, &errors) } @@ -691,7 +697,7 @@ public final class XCTestBundleProductTypeSpec : BundleProductTypeSpec, @uncheck // Define TARGET_BUILD_SUBPATH so the target builds to $(TARGET_BUILD_DIR)/$(TARGET_BUILD_SUBPATH) (or slightly different for deployment location builds). // Should PBXXCTestBundleProductType override BUILT_PRODUCTS_DIR when it overrides TARGET_BUILD_DIR? table.push(BuiltinMacros.TARGET_BUILD_SUBPATH, table.namespace.parseString("/$(XCTRUNNER_PRODUCT_NAME)$(_WRAPPER_CONTENTS_DIR)/PlugIns")) - table.push(BuiltinMacros.DWARF_DSYM_FOLDER_PATH, table.namespace.parseString("$(TARGET_BUILD_DIR)")) // Do we really want dSYMs to go inside of the host app's PlugIns dir? + table.push(BuiltinMacros.DWARF_DSYM_FOLDER_PATH, table.namespace.parseString("$(TARGET_BUILD_DIR)")) // Do we really want dSYMs to go inside of the host app's PlugIns dir? // Entitlements are always required for a UI test target. table.push(BuiltinMacros.ENTITLEMENTS_REQUIRED, literal: true) @@ -708,12 +714,12 @@ public final class XCTestBundleProductTypeSpec : BundleProductTypeSpec, @uncheck let targetBuildDir = scope.evaluate(BuiltinMacros.TARGET_BUILD_DIR) let testHost = adjustedTestHost(originalTestHost: testHost, addingSettingsToTable: &table, scope) -// This check is disabled due to . See UnsupportedBehaviorTaskConstructionTests.testOverridingTargetBuildDirInApplicationUnitTestTarget(). -// guard targetBuildDir.isAncestor(of: testHost) else { -// // TEST_HOST must be inside of TARGET_BUILD_DIR. -// errors.append("$(TEST_HOST) is not a descendant of $(TARGET_BUILD_DIR) (\(testHost.str) !<= \(targetBuildDir.str))") -// return -// } + // This check is disabled due to . See UnsupportedBehaviorTaskConstructionTests.testOverridingTargetBuildDirInApplicationUnitTestTarget(). + // guard targetBuildDir.isAncestor(of: testHost) else { + // // TEST_HOST must be inside of TARGET_BUILD_DIR. 
+ // errors.append("$(TEST_HOST) is not a descendant of $(TARGET_BUILD_DIR) (\(testHost.str) !<= \(targetBuildDir.str))") + // return + // } // testHost is a path to the executable for the app, we need to strip off the executable to get the contents directory: // ./MacApp.app/Contents/MacOS/MacApp OR @@ -782,11 +788,9 @@ public final class XCTestBundleProductTypeSpec : BundleProductTypeSpec, @uncheck } } - // MARK: Standalone binary Product types - -public class StandaloneExecutableProductTypeSpec : ProductTypeSpec, SpecClassType, @unchecked Sendable { +public class StandaloneExecutableProductTypeSpec: ProductTypeSpec, SpecClassType, @unchecked Sendable { public class var className: String { return "XCStandaloneExecutableProductType" } @@ -806,7 +810,7 @@ public class LibraryProductTypeSpec: StandaloneExecutableProductTypeSpec, @unche } } -public final class DynamicLibraryProductTypeSpec : LibraryProductTypeSpec, @unchecked Sendable { +public final class DynamicLibraryProductTypeSpec: LibraryProductTypeSpec, @unchecked Sendable { class public override var className: String { return "PBXDynamicLibraryProductType" } @@ -833,7 +837,7 @@ public final class DynamicLibraryProductTypeSpec : LibraryProductTypeSpec, @unch } -public final class StaticLibraryProductTypeSpec : LibraryProductTypeSpec, @unchecked Sendable { +public final class StaticLibraryProductTypeSpec: LibraryProductTypeSpec, @unchecked Sendable { class public override var className: String { return "PBXStaticLibraryProductType" } @@ -850,7 +854,7 @@ public final class StaticLibraryProductTypeSpec : LibraryProductTypeSpec, @unche } } -public final class ToolProductTypeSpec : StandaloneExecutableProductTypeSpec, @unchecked Sendable { +public final class ToolProductTypeSpec: StandaloneExecutableProductTypeSpec, @unchecked Sendable { class public override var className: String { return "PBXToolProductType" } @@ -862,8 +866,7 @@ public final class ToolProductTypeSpec : StandaloneExecutableProductTypeSpec, @u } /// Describes a symbolic link to create. -public struct SymlinkDescriptor: Hashable -{ +public struct SymlinkDescriptor: Hashable { /// Where the symbolic link will be created. This should evaluate to an absolute path. public let location: Path /// The path the symbolic link points to. This may be a relative path. @@ -876,7 +879,7 @@ public struct SymlinkDescriptor: Hashable hasher.combine(toPath) } - public static func ==(lhs: SymlinkDescriptor, rhs: SymlinkDescriptor) -> Bool { + public static func == (lhs: SymlinkDescriptor, rhs: SymlinkDescriptor) -> Bool { return lhs.location == rhs.location && lhs.toPath == rhs.toPath } } diff --git a/Sources/SWBCore/SpecImplementations/PropertyDomainSpec.swift b/Sources/SWBCore/SpecImplementations/PropertyDomainSpec.swift index e237aecc..7c3b70fa 100644 --- a/Sources/SWBCore/SpecImplementations/PropertyDomainSpec.swift +++ b/Sources/SWBCore/SpecImplementations/PropertyDomainSpec.swift @@ -27,7 +27,7 @@ private protocol BuildOptionType: Sendable { /// Declare a macro suitable for the given option type. 
func declareMacro(_ namespace: MacroNamespace, _ name: String) throws -> MacroDeclaration } -private final class BoolBuildOptionType : BuildOptionType { +private final class BoolBuildOptionType: BuildOptionType { let typeName = "Boolean" let isListType = false let supportsValuesDefinitions = true @@ -36,7 +36,7 @@ private final class BoolBuildOptionType : BuildOptionType { return try namespace.declareBooleanMacro(name) } } -private final class CodeSignIdentityBuildOptionType : BuildOptionType { +private final class CodeSignIdentityBuildOptionType: BuildOptionType { let typeName = "CodeSignIdentity" let isListType = false let supportsValuesDefinitions = false @@ -45,7 +45,7 @@ private final class CodeSignIdentityBuildOptionType : BuildOptionType { return try namespace.declareStringMacro(name) } } -private final class CodeSignStyleBuildOptionType : BuildOptionType { +private final class CodeSignStyleBuildOptionType: BuildOptionType { let typeName = "CodeSignStyle" let isListType = false let supportsValuesDefinitions = true @@ -54,7 +54,7 @@ private final class CodeSignStyleBuildOptionType : BuildOptionType { return try namespace.declareStringMacro(name) } } -private final class CompilerVersionBuildOptionType : BuildOptionType { +private final class CompilerVersionBuildOptionType: BuildOptionType { let typeName = "CompilerVersion" let isListType = false let supportsValuesDefinitions = false @@ -63,7 +63,7 @@ private final class CompilerVersionBuildOptionType : BuildOptionType { return try namespace.declareStringMacro(name) } } -private final class DevelopmentTeamBuildOptionType : BuildOptionType { +private final class DevelopmentTeamBuildOptionType: BuildOptionType { let typeName = "DevelopmentTeam" let isListType = false let supportsValuesDefinitions = false @@ -72,7 +72,7 @@ private final class DevelopmentTeamBuildOptionType : BuildOptionType { return try namespace.declareStringMacro(name) } } -private final class EnumBuildOptionType : BuildOptionType { +private final class EnumBuildOptionType: BuildOptionType { let typeName = "Enumeration" let isListType = false let supportsValuesDefinitions = true @@ -129,7 +129,7 @@ private final class EnumBuildOptionType : BuildOptionType { } } } -private final class StringBuildOptionType : BuildOptionType { +private final class StringBuildOptionType: BuildOptionType { let typeName = "String" let isListType = false let supportsValuesDefinitions = false @@ -138,7 +138,7 @@ private final class StringBuildOptionType : BuildOptionType { return try namespace.declareStringMacro(name) } } -private final class StringListBuildOptionType : BuildOptionType { +private final class StringListBuildOptionType: BuildOptionType { let typeName = "StringList" let isListType = true let supportsValuesDefinitions = false @@ -147,7 +147,7 @@ private final class StringListBuildOptionType : BuildOptionType { return try namespace.declareStringListMacro(name) } } -private final class OpenCLArchitecturesBuildOptionType : BuildOptionType { +private final class OpenCLArchitecturesBuildOptionType: BuildOptionType { let typeName = "OpenCLArchitectures" let isListType = true let supportsValuesDefinitions = false @@ -156,7 +156,7 @@ private final class OpenCLArchitecturesBuildOptionType : BuildOptionType { return try namespace.declareStringListMacro(name) } } -private final class PathBuildOptionType : BuildOptionType { +private final class PathBuildOptionType: BuildOptionType { let typeName = "Path" let isListType = false let supportsValuesDefinitions = false @@ -165,7 +165,7 @@ private 
final class PathBuildOptionType : BuildOptionType { return try namespace.declarePathMacro(name) } } -private final class PathListBuildOptionType : BuildOptionType { +private final class PathListBuildOptionType: BuildOptionType { let typeName = "PathList" let isListType = true let supportsValuesDefinitions = false @@ -174,7 +174,7 @@ private final class PathListBuildOptionType : BuildOptionType { return try namespace.declarePathListMacro(name) } } -private final class ProvisioningProfileBuildOptionType : BuildOptionType { +private final class ProvisioningProfileBuildOptionType: BuildOptionType { let typeName = "ProvisioningProfile" let isListType = false let supportsValuesDefinitions = false @@ -183,7 +183,7 @@ private final class ProvisioningProfileBuildOptionType : BuildOptionType { return try namespace.declareStringMacro(name) } } -private final class ProvisioningProfileSpecifierBuildOptionType : BuildOptionType { +private final class ProvisioningProfileSpecifierBuildOptionType: BuildOptionType { let typeName = "ProvisioningProfileSpecifier" let isListType = false let supportsValuesDefinitions = false @@ -281,11 +281,11 @@ private let buildOptionTypes: [String: any BuildOptionType] = [ @_spi(Testing) public final class BuildOption: CustomStringConvertible, Sendable { /// Helper type for representing the type of command line argument specifier that was used for a build option. private enum CommandLineSpecifier { - case arrayArgs(value: [String]) - case dictArgs(value: [String: PropertyListItem]) - case stringArgs(value: String) - case flag(value: String) - case prefixFlag(value: String) + case arrayArgs(value: [String]) + case dictArgs(value: [String: PropertyListItem]) + case stringArgs(value: String) + case flag(value: String) + case prefixFlag(value: String) } /// The name (build setting) of the option. @@ -371,7 +371,7 @@ private let buildOptionTypes: [String: any BuildOptionType] = [ var valueNameOpt: String? = nil var commandLineTemplateOpt: BuildOptionValue.CommandLineTemplateSpecifier? = nil var commandLineKey: String? = nil - for (key,valueData) in data { + for (key, valueData) in data { switch key { case "Value": guard case .plString(let value) = valueData else { @@ -507,7 +507,7 @@ private let buildOptionTypes: [String: any BuildOptionType] = [ valueDefns[valueName] = BuildOptionValue(commandLineTemplate: nil) case .plDict(let items): // Parse the option from the definition dict. - guard let (valueName,option) = parseBuildOptionValue(parser, name, type, items) else { continue } + guard let (valueName, option) = parseBuildOptionValue(parser, name, type, items) else { continue } if valueDefns[valueName] != nil { error("duplicate value definition '\(valueName)'") continue @@ -562,7 +562,7 @@ private let buildOptionTypes: [String: any BuildOptionType] = [ return nil } } - for (key,valueData) in items.sorted(byKey: <) { + for (key, valueData) in items.sorted(byKey: <) { // Convert the valueData to the appropriate option. guard let template = getTemplateForData(key, valueData) else { continue } @@ -668,7 +668,7 @@ private let buildOptionTypes: [String: any BuildOptionType] = [ // For boolean types, validate and normalize the keys. 
if type is BoolBuildOptionType { if let valueDefns = valueDefnsOpt { - for (key,item) in valueDefns { + for (key, item) in valueDefns { if key == "NO" { emptyValueDefn = item } else if key == "YES" { @@ -870,7 +870,7 @@ private let buildOptionTypes: [String: any BuildOptionType] = [ switch key { case "CommandLineArgs": switch valueData { - // FIXME: The string form is very rarely used, although conceptually it makes sense. We should verify it is worth the complexity though. + // FIXME: The string form is very rarely used, although conceptually it makes sense. We should verify it is worth the complexity though. case .plString(let value): commandLineSpecifierOpt = .stringArgs(value: value) case .plArray(let value): @@ -1109,8 +1109,7 @@ private let buildOptionTypes: [String: any BuildOptionType] = [ return nil } return VersionRange(start: startVersion) - } - else if valueItems.count == 2 { + } else if valueItems.count == 2 { guard case .plString(let startValue) = valueItems[0], let startVersion = try? Version(startValue) else { error("could not parse first element of value '\(valueItems)' in array for '\(key)'") return nil @@ -1122,11 +1121,10 @@ private let buildOptionTypes: [String: any BuildOptionType] = [ do { return try VersionRange(start: startVersion, end: endVersion) } catch let e { - error("unexpected value '\(valueItems)' in array for '\(key)' - \(e)") // e == "version range start must be less than or equal to end, but \(start) greater than \(end)" + error("unexpected value '\(valueItems)' in array for '\(key)' - \(e)") // e == "version range start must be less than or equal to end, but \(start) greater than \(end)" return nil } - } - else { + } else { error("unexpected value '\(valueItems)' in array for '\(key)' - array contains more than 2 elements") return nil } @@ -1170,7 +1168,7 @@ private let buildOptionTypes: [String: any BuildOptionType] = [ } // If we have a name and no type was assigned, infer from the known macro type. - if let name = nameOpt, type == nil { + if let name = nameOpt, type == nil { if let macro = parser.delegate.internalMacroNamespace.lookupMacroDeclaration(name) { if macro is StringMacroDeclaration { // This is the default, no warning about type incompatibility. @@ -1402,14 +1400,15 @@ private let buildOptionTypes: [String: any BuildOptionType] = [ // Handle list typed options. guard !type.isListType else { - var values = switch self.macro { - case let macro as StringListMacroDeclaration: - scope.evaluate(macro, lookup: lookup) - case let macro as PathListMacroDeclaration: - scope.evaluate(macro, lookup: lookup) - default: - fatalError("invalid macro type for List option") - } + var values = + switch self.macro { + case let macro as StringListMacroDeclaration: + scope.evaluate(macro, lookup: lookup) + case let macro as PathListMacroDeclaration: + scope.evaluate(macro, lookup: lookup) + default: + fatalError("invalid macro type for List option") + } // Get the value definition to use. let valueDefnOpt = values.isEmpty ? emptyValueDefn : otherValueDefn @@ -1419,8 +1418,7 @@ private let buildOptionTypes: [String: any BuildOptionType] = [ return [] } - if self.name == "TAPI_HEADER_SEARCH_PATHS" && (scope.evaluate(BuiltinMacros.TAPI_ENABLE_PROJECT_HEADERS) || - scope.evaluate(BuiltinMacros.TAPI_USE_SRCROOT)) { + if self.name == "TAPI_HEADER_SEARCH_PATHS" && (scope.evaluate(BuiltinMacros.TAPI_ENABLE_PROJECT_HEADERS) || scope.evaluate(BuiltinMacros.TAPI_USE_SRCROOT)) { return values.map { valuesArePaths ? 
.path(Path($0)) : .literal(ByteString(encodingAsUTF8: $0)) } } @@ -1538,14 +1536,15 @@ private let buildOptionTypes: [String: any BuildOptionType] = [ } // Handle list typed options. guard !type.isListType else { - let values = switch self.macro { - case let macro as StringListMacroDeclaration: - cbc.scope.evaluate(macro, lookup: lookup) - case let macro as PathListMacroDeclaration: - cbc.scope.evaluate(macro, lookup: lookup) - default: - fatalError("invalid macro type for List option") - } + let values = + switch self.macro { + case let macro as StringListMacroDeclaration: + cbc.scope.evaluate(macro, lookup: lookup) + case let macro as PathListMacroDeclaration: + cbc.scope.evaluate(macro, lookup: lookup) + default: + fatalError("invalid macro type for List option") + } // FIXME: This is probably not right - we likely need to quote or escape the values here. return values.isEmpty ? nil : (key, values.joined(separator: " ")) } @@ -1601,14 +1600,15 @@ private let buildOptionTypes: [String: any BuildOptionType] = [ // Handle list typed options. guard !type.isListType else { - let values = switch self.macro { - case let macro as StringListMacroDeclaration: - scope.evaluate(macro, lookup: lookup) - case let macro as PathListMacroDeclaration: - scope.evaluate(macro, lookup: lookup) - default: - fatalError("invalid macro type for List option") - } + let values = + switch self.macro { + case let macro as StringListMacroDeclaration: + scope.evaluate(macro, lookup: lookup) + case let macro as PathListMacroDeclaration: + scope.evaluate(macro, lookup: lookup) + default: + fatalError("invalid macro type for List option") + } // Get the value definition to use. let valueDefnOpt = values.isEmpty ? emptyValueDefn : otherValueDefn @@ -1641,7 +1641,6 @@ private let buildOptionTypes: [String: any BuildOptionType] = [ } } - /// A `BuildOptionGenerationContext` provides information that a `BuildOption` need to generate command-line options for a tool. public protocol BuildOptionGenerationContext { /// The path to the tool in question. @@ -1660,7 +1659,7 @@ extension BuildOptionGenerationContext { } /// This is a shared base class, but cannot itself be a declared spec type. -open class PropertyDomainSpec : Spec, @unchecked Sendable { +open class PropertyDomainSpec: Spec, @unchecked Sendable { /// The ordered list of build options associated with this spec, not including any buildOptions from its BasedOn spec (see `flattenedBuildOptions` and `flattenedOrderedBuildOptions` instead). @_spi(Testing) public let buildOptions: [BuildOption] @@ -1693,7 +1692,7 @@ open class PropertyDomainSpec : Spec, @unchecked Sendable { return flattenedOrderedBuildOptionsCache.getValue(self) } private var flattenedOrderedBuildOptionsCache = LazyCache { (spec: PropertyDomainSpec) -> [BuildOption] in - // We start with a list of ordered option names defined by our basedOnSpec. + // We start with a list of ordered option names defined by our basedOnSpec. var orderedOptionNames = OrderedSet((spec.basedOnSpec != nil) ? (spec.basedOnSpec! as! PropertyDomainSpec).flattenedOrderedBuildOptions.map({ $0.name }) : []) // Now we go through our own options, and decide where to add each one. An invariant is that no option should appear in the list more than once. 
for option in spec.buildOptions { @@ -1709,8 +1708,7 @@ open class PropertyDomainSpec : Spec, @unchecked Sendable { if let index = orderedOptionNames.firstIndex(of: option.name) { orderedOptionNames.remove(at: index) orderedOptionNames.insert(option.name, at: index) - } - else { + } else { orderedOptionNames.append(option.name) } } @@ -1754,7 +1752,6 @@ open class PropertyDomainSpec : Spec, @unchecked Sendable { } } - /// Extensions to PropertyDomainSpec for performance testing. extension PropertyDomainSpec { diff --git a/Sources/SWBCore/SpecImplementations/SpecParser.swift b/Sources/SWBCore/SpecImplementations/SpecParser.swift index 535b1019..d1918dde 100644 --- a/Sources/SWBCore/SpecImplementations/SpecParser.swift +++ b/Sources/SWBCore/SpecImplementations/SpecParser.swift @@ -50,7 +50,7 @@ public class SpecParser { } /// The set of keys which are parsed by the proxy machinery, and shouldn't count as unused. - private static let keysParsedByProxy = Set([ "Class", "Domain", "_Domain", "Identifier", "Type", "BasedOn" ]) + private static let keysParsedByProxy = Set(["Class", "Domain", "_Domain", "Identifier", "Type", "BasedOn"]) @_spi(Testing) public func complete() { for key in proxy.data.keys { @@ -111,7 +111,7 @@ public class SpecParser { return parseItemAsStringList(key, value) } - func parseItemAsStringList(_ key:String, _ value: PropertyListItem) -> [String]? { + func parseItemAsStringList(_ key: String, _ value: PropertyListItem) -> [String]? { // Extract the value. guard case .plArray(let arrayValue) = value else { error("unexpected item: \(value) while parsing key \(key) (expected array of strings)") @@ -174,9 +174,9 @@ public class SpecParser { return parseItemAsBuildSettings(key, value, baseSettings: baseSettings) } - func parseItemAsBuildSettings(_ key:String, _ value: PropertyListItem, baseSettings: MacroValueAssignmentTable? = nil) -> MacroValueAssignmentTable? { + func parseItemAsBuildSettings(_ key: String, _ value: PropertyListItem, baseSettings: MacroValueAssignmentTable? = nil) -> MacroValueAssignmentTable? { // This holds our "last-in-wins" values for our macro assignments. - var values: [String:(macro:MacroDeclaration, conditions:MacroConditionSet?, expression:MacroExpression)] = [:] + var values: [String: (macro: MacroDeclaration, conditions: MacroConditionSet?, expression: MacroExpression)] = [:] // A helper function to populate the `values` table correctly based on the macro/conditions set. func setBuildSetting(macro: MacroDeclaration, conditions: MacroConditionSet?, expr: MacroExpression) { @@ -210,9 +210,8 @@ public class SpecParser { let conditionSet: MacroConditionSet? 
if let conditions { - conditionSet = MacroConditionSet(conditions: conditions.map{ MacroCondition(parameter: namespace.declareConditionParameter($0.0), valuePattern: $0.1) }) - } - else { + conditionSet = MacroConditionSet(conditions: conditions.map { MacroCondition(parameter: namespace.declareConditionParameter($0.0), valuePattern: $0.1) }) + } else { conditionSet = nil } @@ -226,22 +225,22 @@ public class SpecParser { case (.path, .plString(_)): return try namespace.declarePathMacro(macroName) case (.pathList, .plString(_)), - (.pathList, .plArray(_)): + (.pathList, .plArray(_)): return try namespace.declarePathListMacro(macroName) - case (.boolean, .plString(_)), // builtin boolean macro + OpenStep plist string => boolean - (.boolean, .plBool(_)), - (nil, .plBool(_)): + case (.boolean, .plString(_)), // builtin boolean macro + OpenStep plist string => boolean + (.boolean, .plBool(_)), + (nil, .plBool(_)): return try namespace.declareBooleanMacro(macroName) case (.string, .plString(_)), - (nil, .plString(_)): + (nil, .plString(_)): // Both StringMacroDeclaration and EnumMacroDeclaration use the string macro type, but are represented by different classes. If the existing declaration is an enum, it was defined in BuiltinMacros, so prefer it. if let existingDeclaration, existingDeclaration is AnyEnumMacroDeclaration { return existingDeclaration } return try namespace.declareStringMacro(macroName) - case (.stringList, .plString(_)), // some string lists are declared as strings in xcspecs - (.stringList, .plArray(_)), - (nil, .plArray(_)): + case (.stringList, .plString(_)), // some string lists are declared as strings in xcspecs + (.stringList, .plArray(_)), + (nil, .plArray(_)): return try namespace.declareStringListMacro(macroName) case let (macroType, _): // Using .userDefined here in the nil case is not strictly correct since we're not actually attempting to register a user defined macro, but close enough (we just need _some_ value for the error) - if a plist value was a dictionary for example we'd get an error about the 'dictionary' type being inconsistent with the user defined macro type. @@ -344,11 +343,11 @@ public class SpecParser { } // If we have an array we traverse its entries. if case .plArray(let entries) = value { - return try entries.compactMap{ try callBlock($0) } + return try entries.compactMap { try callBlock($0) } } // Otherwise, if we are asked to allow a single unarrayed element, we just invoke the block once. else if allowUnarrayedElement { - return try [value].compactMap{ try callBlock($0) } + return try [value].compactMap { try callBlock($0) } } // Otherwise, we have an unexpected item. 
else { diff --git a/Sources/SWBCore/SpecImplementations/SpecRegistry.swift b/Sources/SWBCore/SpecImplementations/SpecRegistry.swift index b6e0164b..b8f00e3f 100644 --- a/Sources/SWBCore/SpecImplementations/SpecRegistry.swift +++ b/Sources/SWBCore/SpecImplementations/SpecRegistry.swift @@ -217,20 +217,25 @@ public final class SpecProxy { if let basedOnProxy = self.basedOnProxy { basedOnSpec = basedOnProxy.load(registry) if basedOnSpec == nil { - let loadingErrorDiagnostic = Diagnostic(behavior: .error, location: .path(self.path), data: DiagnosticData("unable to load \(specifierString) due to errors loading base spec"), childDiagnostics: { - if case let .error(diagnostics) = basedOnProxy.loadedSpec { - return diagnostics - } else { - return [] - } - }()) + let loadingErrorDiagnostic = Diagnostic( + behavior: .error, + location: .path(self.path), + data: DiagnosticData("unable to load \(specifierString) due to errors loading base spec"), + childDiagnostics: { + if case let .error(diagnostics) = basedOnProxy.loadedSpec { + return diagnostics + } else { + return [] + } + }() + ) loadedSpec = .error(diagnostics: [loadingErrorDiagnostic]) registry.emit(loadingErrorDiagnostic) return nil } } - final class Delegate : SpecParserDelegate { + final class Delegate: SpecParserDelegate { let specRegistry: SpecRegistry let proxy: SpecProxy var internalMacroNamespace: MacroNamespace { @@ -310,7 +315,7 @@ extension SpecProxy: CustomStringConvertible { } // Private ordering of SpecProxy objects. -private func <(lhs: SpecProxy, rhs: SpecProxy) -> Bool { +private func < (lhs: SpecProxy, rhs: SpecProxy) -> Bool { // Every (identifier, domain) pair should be unique, so this should define a total ordering. return (lhs.identifier < rhs.identifier) || (lhs.identifier == rhs.identifier && lhs.domain < rhs.domain) } @@ -417,9 +422,17 @@ public final class SpecRegistry: Sendable { } } - self.inputFileGroupingStrategyFactories = await pluginManager.extensions(of: InputFileGroupingStrategyExtensionPoint.self).reduce([:], { $0.merging($1.groupingStrategies(), uniquingKeysWith: { _, _ in - preconditionFailure("attempt to register duplicate input file grouping strategy") - }) }) + self.inputFileGroupingStrategyFactories = await pluginManager.extensions(of: InputFileGroupingStrategyExtensionPoint.self).reduce( + [:], + { + $0.merging( + $1.groupingStrategies(), + uniquingKeysWith: { _, _ in + preconditionFailure("attempt to register duplicate input file grouping strategy") + } + ) + } + ) // Register all the specs concurrently. await withTaskGroup(of: [SpecProxy].self, returning: Void.self) { group in @@ -575,13 +588,13 @@ public final class SpecRegistry: Sendable { guard case .plString(let className) = classItem else { error(path, "invalid 'Class' field") - return (success:false, nil) + return (success: false, nil) } // Look up the spec class. guard let classType = specClassesByClassName[className] else { error(path, "unknown spec 'Class': '\(className)'") - return (success:false, nil) + return (success: false, nil) } return (success: true, classType) @@ -867,7 +880,7 @@ public final class SpecRegistry: Sendable { private let proxyCache = Cache() /// Get all specs in the registry of the given spec type `T` in the given `domain`. 
- public func findSpecs(_ type: T.Type, domain: String = "", includeInherited: Bool = true) -> [T] where T : SpecType { + public func findSpecs(_ type: T.Type, domain: String = "", includeInherited: Bool = true) -> [T] where T: SpecType { var result = Array() for proxy in findProxiesInSubregistry(T.self, domain: domain, includeInherited: includeInherited) { if let spec = proxy.load(self) { @@ -907,9 +920,10 @@ public final class SpecRegistry: Sendable { @discardableResult @_spi(Testing) public func validateSpecDomainInversion(reportError: ((String) -> (Void))? = nil) -> Bool { // See the hacks in registerSpec(), and rdar://problem/22361888. - let reportError = reportError ?? { error in - print(error) - } + let reportError = + reportError ?? { error in + print(error) + } // Find places where a proxy illegally depends on a spec overridden in a subdomain. // @@ -936,7 +950,7 @@ public final class SpecRegistry: Sendable { guard let basedOn = proxy.basedOn else { continue } // If the proxy's basedOn reference has a domain specifier, we require it to resolve to a fixed proxy. - let (_,rhs) = basedOn.split(":") + let (_, rhs) = basedOn.split(":") if !rhs.isEmpty { continue } @@ -1041,9 +1055,10 @@ extension SpecRegistry { case let productRef as ProductReference: // For a product reference, we look up its producing target's PackageTypeSpec, and return the file type the package type defines. if let standardTargetRef = productRef.target as? StandardTarget, - let productType = getSpec(standardTargetRef.productTypeIdentifier, domain: domain) as? ProductTypeSpec, - let packageType = getSpec(productType.defaultPackageTypeIdentifier, domain: domain) as? PackageTypeSpec, - let fileTypeIdent = packageType.productReferenceFileTypeIdentifier { + let productType = getSpec(standardTargetRef.productTypeIdentifier, domain: domain) as? ProductTypeSpec, + let packageType = getSpec(productType.defaultPackageTypeIdentifier, domain: domain) as? PackageTypeSpec, + let fileTypeIdent = packageType.productReferenceFileTypeIdentifier + { return lookupFileType(identifier: fileTypeIdent, domain: domain) } return nil @@ -1089,7 +1104,7 @@ extension SpecLookupContext { } /// Get all specs in the registry of the given spec type `T` in the given `domain`. 
- func findSpecs(_ type: T.Type, includeInherited: Bool = true) -> [T] where T : SpecType { + func findSpecs(_ type: T.Type, includeInherited: Bool = true) -> [T] where T: SpecType { return specRegistry.findSpecs(type, domain: domain, includeInherited: includeInherited) } diff --git a/Sources/SWBCore/SpecImplementations/Specs.swift b/Sources/SWBCore/SpecImplementations/Specs.swift index cb8cf844..d44b04fe 100644 --- a/Sources/SWBCore/SpecImplementations/Specs.swift +++ b/Sources/SWBCore/SpecImplementations/Specs.swift @@ -121,7 +121,7 @@ extension Spec: Hashable { hasher.combine(ObjectIdentifier(self)) } - public static func ==(lhs: Spec, rhs: Spec) -> Bool { + public static func == (lhs: Spec, rhs: Spec) -> Bool { return lhs === rhs } } @@ -132,7 +132,7 @@ extension Spec: CustomStringConvertible { } } -public final class ArchitectureSpec : Spec, SpecType, @unchecked Sendable { +public final class ArchitectureSpec: Spec, SpecType, @unchecked Sendable { class public override var typeName: String { return "Architecture" } @@ -225,8 +225,7 @@ public final class ArchitectureSpec : Spec, SpecType, @unchecked Sendable { guard lower < upper else { throw StubError.error("expected that \(lower) < \(upper)") } return Range(uncheckedBounds: (lower, upper)) - } - catch { + } catch { parser.error("\(key): \(error)") return nil } @@ -247,7 +246,7 @@ public final class ArchitectureSpec : Spec, SpecType, @unchecked Sendable { } } -public final class ProjectOverridesSpec : Spec, SpecType, @unchecked Sendable { +public final class ProjectOverridesSpec: Spec, SpecType, @unchecked Sendable { class public override var typeName: String { return "ProjectOverrides" } @@ -269,7 +268,7 @@ public final class ProjectOverridesSpec : Spec, SpecType, @unchecked Sendable { } } -public class FileTypeSpec : Spec, SpecType, @unchecked Sendable { +public class FileTypeSpec: Spec, SpecType, @unchecked Sendable { class public override var typeName: String { return "FileType" } @@ -368,7 +367,7 @@ public class FileTypeSpec : Spec, SpecType, @unchecked Sendable { } } -public final class PackageTypeSpec : Spec, SpecType, @unchecked Sendable { +public final class PackageTypeSpec: Spec, SpecType, @unchecked Sendable { class public override var typeName: String { return "PackageType" } @@ -442,7 +441,7 @@ public final class PackageTypeSpec : Spec, SpecType, @unchecked Sendable { ] } -public final class PlatformSpec : Spec, SpecType, @unchecked Sendable { +public final class PlatformSpec: Spec, SpecType, @unchecked Sendable { class public override var typeName: String { return "Platform" } @@ -451,7 +450,7 @@ public final class PlatformSpec : Spec, SpecType, @unchecked Sendable { } } -public final class BuildSettingsSpec : PropertyDomainSpec, SpecType, @unchecked Sendable { +public final class BuildSettingsSpec: PropertyDomainSpec, SpecType, @unchecked Sendable { class public override var typeName: String { return "BuildSettings" } @@ -471,7 +470,7 @@ public final class BuildSettingsExtensionSpec: PropertyDomainSpec, SpecType, @un } } -public final class BuildSystemSpec : PropertyDomainSpec, SpecType, @unchecked Sendable { +public final class BuildSystemSpec: PropertyDomainSpec, SpecType, @unchecked Sendable { class public override var typeName: String { return "BuildSystem" } @@ -480,7 +479,7 @@ public final class BuildSystemSpec : PropertyDomainSpec, SpecType, @unchecked Se } } -public final class BuildPhaseSpec : Spec, SpecType, @unchecked Sendable { +public final class BuildPhaseSpec: Spec, SpecType, @unchecked Sendable { class 
public override var typeName: String { return "BuildPhase" } diff --git a/Sources/SWBCore/SpecImplementations/Tools/AppShortcutStringsMetadataCompiler.swift b/Sources/SWBCore/SpecImplementations/Tools/AppShortcutStringsMetadataCompiler.swift index a711c9ff..314ef56f 100644 --- a/Sources/SWBCore/SpecImplementations/Tools/AppShortcutStringsMetadataCompiler.swift +++ b/Sources/SWBCore/SpecImplementations/Tools/AppShortcutStringsMetadataCompiler.swift @@ -91,16 +91,18 @@ final public class AppShortcutStringsMetadataCompilerSpec: GenericCommandLineToo } let commandLine = await commandLineFromTemplate(cbc, delegate, optionContext: discoveredCommandLineToolSpecInfo(cbc.producer, cbc.scope, delegate), lookup: lookup).map(\.asString) - delegate.createTask(type: self, - ruleInfo: defaultRuleInfo(cbc, delegate), - commandLine: commandLine, - environment: environmentFromSpec(cbc, delegate), - workingDirectory: cbc.producer.defaultWorkingDirectory, - inputs: inputs, - outputs: [outputOrderingNode], - action: nil, - execDescription: resolveExecutionDescription(cbc, delegate), - enableSandboxing: enableSandboxing) + delegate.createTask( + type: self, + ruleInfo: defaultRuleInfo(cbc, delegate), + commandLine: commandLine, + environment: environmentFromSpec(cbc, delegate), + workingDirectory: cbc.producer.defaultWorkingDirectory, + inputs: inputs, + outputs: [outputOrderingNode], + action: nil, + execDescription: resolveExecutionDescription(cbc, delegate), + enableSandboxing: enableSandboxing + ) } public override func customOutputParserType(for task: any ExecutableTask) -> (any TaskOutputParser.Type)? { @@ -133,7 +135,8 @@ public final class AppShortcutStringsValidationOutputParser: TaskOutputParser { var diagnosticLocation: Diagnostic.Location { guard let path else { return .unknown } if let languageCode, - let key { + let key + { return .path(Path(path), fileLocation: .object(identifier: "\(languageCode):\(key)")) } return .path(Path(path), line: line) diff --git a/Sources/SWBCore/SpecImplementations/Tools/CCompiler.swift b/Sources/SWBCore/SpecImplementations/Tools/CCompiler.swift index ead99dcf..7b0428a4 100644 --- a/Sources/SWBCore/SpecImplementations/Tools/CCompiler.swift +++ b/Sources/SWBCore/SpecImplementations/Tools/CCompiler.swift @@ -14,9 +14,8 @@ public import SWBUtil public import SWBMacro import Foundation - /// Abstract C Compiler. This is not a concrete implementation, but rather it uses various information in the command build context to choose a specific compiler and to call `constructTasks()` on that compiler. This provides a level of indirection for projects that just want their source files compiled using the default C compiler. Depending on the context, the default C compiler for any particular combination of platform, architecture, and other factors may be Clang, ICC, GCC, or some other compiler. -class AbstractCCompilerSpec : CompilerSpec, SpecIdentifierType, GCCCompatibleCompilerCommandLineBuilder, @unchecked Sendable { +class AbstractCCompilerSpec: CompilerSpec, SpecIdentifierType, GCCCompatibleCompilerCommandLineBuilder, @unchecked Sendable { static let identifier = "com.apple.compilers.gcc" override func resolveConcreteSpec(_ cbc: CommandBuildContext) -> CommandLineToolSpec { @@ -42,7 +41,7 @@ public struct ClangPrefixInfo: Serializable, Hashable, Encodable, Sendable { struct PCHInfo: Serializable, Hashable, Encodable { let output: Path - let hashCriteria: Path? 
// Should be non-optional, but blocked on: [Swift Build] Complete handling of PCH precompiling + let hashCriteria: Path? // Should be non-optional, but blocked on: [Swift Build] Complete handling of PCH precompiling let commandLine: [ByteString] private enum CodingKeys: CodingKey { @@ -105,16 +104,18 @@ public struct ClangIndexingPayload: Serializable, Encodable, Sendable { let responseFileAttachmentPaths: [Path: Path] let responseFileFormat: ResponseFileFormat - init(sourceFileIndex: Int, - outputFileIndex: Int, - sourceLanguageIndex: Int, - builtProductsDir: Path, - assetSymbolIndexPath: Path, - workingDir: Path, - prefixInfo: ClangPrefixInfo?, - toolchains: [String], - responseFileAttachmentPaths: [Path: Path], - responseFileFormat: ResponseFileFormat) { + init( + sourceFileIndex: Int, + outputFileIndex: Int, + sourceLanguageIndex: Int, + builtProductsDir: Path, + assetSymbolIndexPath: Path, + workingDir: Path, + prefixInfo: ClangPrefixInfo?, + toolchains: [String], + responseFileAttachmentPaths: [Path: Path], + responseFileFormat: ResponseFileFormat + ) { self.sourceFileIndex = sourceFileIndex self.outputFileIndex = outputFileIndex self.sourceLanguageIndex = sourceLanguageIndex @@ -210,12 +211,12 @@ public struct ClangSourceFileIndexingInfo: SourceFileIndexingInfo { public static func indexingCommandLine(from commandLine: [ByteString], workingDir: Path, prefixInfo: ClangPrefixInfo? = nil, addSupplementary: Bool = true, replaceCompile: Bool = true, responseFileMapping: [Path: Path], responseFileFormat: ResponseFileFormat?) -> [ByteString] { var result = [ByteString]() var iterator = commandLine.makeIterator() - let _ = iterator.next() // Skip compiler path + let _ = iterator.next() // Skip compiler path while let arg = iterator.next() { if skippedArgsWithValues.contains(arg) { // Skip arg and value - _ = iterator.next() // Ignore failure... + _ = iterator.next() // Ignore failure... } else if skippedArgsWithoutValues.contains(arg) { // Skip } else if arg == "-c" && replaceCompile { @@ -231,7 +232,8 @@ public struct ClangSourceFileIndexingInfo: SourceFileIndexingInfo { } if let includePath = includePathBytes.stringValue, - pchInfo.output.str.hasPrefix(includePath) { + pchInfo.output.str.hasPrefix(includePath) + { result.append(ByteString(encodingAsUTF8: prefixInfo.input.str)) } else { result.append(includePathBytes) @@ -239,9 +241,10 @@ public struct ClangSourceFileIndexingInfo: SourceFileIndexingInfo { } else if arg.bytes.starts(with: ByteString(stringLiteral: "-fbuild-session-file=").bytes) { // Skip } else if arg.starts(with: ByteString(unicodeScalarLiteral: "@")), - let attachmentPath = responseFileMapping[Path(arg.asString.dropFirst())], - let responseFileFormat, - let responseFileArgs = try? ResponseFiles.expandResponseFiles(["@\(attachmentPath.str)"], fileSystem: localFS, relativeTo: workingDir, format: responseFileFormat) { + let attachmentPath = responseFileMapping[Path(arg.asString.dropFirst())], + let responseFileFormat, + let responseFileArgs = try? ResponseFiles.expandResponseFiles(["@\(attachmentPath.str)"], fileSystem: localFS, relativeTo: workingDir, format: responseFileFormat) + { result.append(contentsOf: responseFileArgs.map { ByteString(encodingAsUTF8: $0) }) } else { result.append(arg) @@ -271,7 +274,7 @@ public struct ClangSourceFileIndexingInfo: SourceFileIndexingInfo { // FIXME: Convert to bytes. dict["LanguageDialect"] = PropertyListItem(sourceLanguage.asString) // FIXME: Convert to bytes. 
- dict["clangASTCommandArguments"] = PropertyListItem(commandLine.map{ $0.asString }) + dict["clangASTCommandArguments"] = PropertyListItem(commandLine.map { $0.asString }) dict["clangASTBuiltProductsDir"] = PropertyListItem(builtProductsDir.str) dict["assetSymbolIndexPath"] = PropertyListItem(assetSymbolIndexPath.str) @@ -286,7 +289,7 @@ public struct ClangSourceFileIndexingInfo: SourceFileIndexingInfo { } // FIXME: Convert to bytes. - dict["clangPCHCommandArguments"] = PropertyListItem(pch.commandLine.map{ $0.asString }) + dict["clangPCHCommandArguments"] = PropertyListItem(pch.commandLine.map { $0.asString }) } } @@ -430,7 +433,7 @@ public protocol ClangModuleVerifierPayloadType: TaskPayload { struct ClangModuleVerifierPayload: ClangModuleVerifierPayloadType { var fileNameMapPath: Path? - func serialize(to serializer: T) where T : SWBUtil.Serializer { + func serialize(to serializer: T) where T: SWBUtil.Serializer { serializer.serialize(fileNameMapPath) } @@ -555,8 +558,7 @@ public enum FlagPattern: Sendable { } } - -public class ClangCompilerSpec : CompilerSpec, SpecIdentifierType, GCCCompatibleCompilerCommandLineBuilder, @unchecked Sendable { +public class ClangCompilerSpec: CompilerSpec, SpecIdentifierType, GCCCompatibleCompilerCommandLineBuilder, @unchecked Sendable { /// Clang compiler data cache, used to cache constant flags. fileprivate final class DataCache: SpecDataCache { fileprivate struct ConstantFlagsKey: Hashable, Sendable { @@ -589,7 +591,7 @@ public class ClangCompilerSpec : CompilerSpec, SpecIdentifierType, GCCCompatible /// Cache of constant flags, keyed by the scope and input file type. private let constantFlagsCache = Registry() - required init() { } + required init() {} func getStandardFlags(_ spec: ClangCompilerSpec, producer: any CommandProducer, scope: MacroEvaluationScope, optionContext: (any BuildOptionGenerationContext)?, delegate: any TaskGenerationDelegate, inputFileType: FileTypeSpec) -> ConstantFlags { // This cache is per-producer, so it is guaranteed to be invariant based on that. @@ -650,15 +652,22 @@ public class ClangCompilerSpec : CompilerSpec, SpecIdentifierType, GCCCompatible var commandLine = Array() // Add the arguments from the specification. - commandLine += self.commandLineFromOptions(producer, scope: scope, inputFileType: inputFileType, optionContext: optionContext, buildOptionsFilter: .specOnly, lookup: { declaration in - if declaration.name == "CLANG_INDEX_STORE_ENABLE" && optionContext is DiscoveredClangToolSpecInfo { - let clangToolInfo = optionContext as! DiscoveredClangToolSpecInfo - if !clangToolInfo.isAppleClang { - return BuiltinMacros.namespace.parseString("NO") + commandLine += self.commandLineFromOptions( + producer, + scope: scope, + inputFileType: inputFileType, + optionContext: optionContext, + buildOptionsFilter: .specOnly, + lookup: { declaration in + if declaration.name == "CLANG_INDEX_STORE_ENABLE" && optionContext is DiscoveredClangToolSpecInfo { + let clangToolInfo = optionContext as! DiscoveredClangToolSpecInfo + if !clangToolInfo.isAppleClang { + return BuiltinMacros.namespace.parseString("NO") + } } + return nil } - return nil - }).map(\.asString) + ).map(\.asString) // Add the common header search paths. 
let headerSearchPaths = GCCCompatibleCompilerSpecSupport.headerSearchPathArguments(producer, scope, usesModules: scope.evaluate(BuiltinMacros.CLANG_ENABLE_MODULES)) @@ -715,16 +724,14 @@ public class ClangCompilerSpec : CompilerSpec, SpecIdentifierType, GCCCompatible if let nextArg = iterator.next() { regularCommandLine.append(nextArg) } - } - else if arg == "-isysroot" || arg == "--sysroot" { + } else if arg == "-isysroot" || arg == "--sysroot" { // Exclude the options to pass the path to the base SDK from the response file to make reading the build log easier. // Sparse SDK options do not get similar treatment since they are passed as normal search paths. regularCommandLine.append(arg) if let nextArg = iterator.next() { regularCommandLine.append(nextArg) } - } - else if ClangSourceFileIndexingInfo.skippedArgsWithValues.contains(argAsByteString) || arg == "-include" { + } else if ClangSourceFileIndexingInfo.skippedArgsWithValues.contains(argAsByteString) || arg == "-include" { // Relevant to indexing, so exclude arg and value from response file. regularCommandLine.append(arg) if let nextArg = iterator.next() { @@ -843,8 +850,7 @@ public class ClangCompilerSpec : CompilerSpec, SpecIdentifierType, GCCCompatible ]) func isOutputAgnosticCommandLineArgument(_ argument: ByteString, prevArgument: ByteString?) -> Bool { - if ClangCompilerSpec.outputAgnosticCompilerArguments.contains(argument) || - ClangCompilerSpec.outputAgnosticCompilerArgumentsWithValues.contains(argument) { + if ClangCompilerSpec.outputAgnosticCompilerArguments.contains(argument) || ClangCompilerSpec.outputAgnosticCompilerArgumentsWithValues.contains(argument) { return true } @@ -853,7 +859,7 @@ public class ClangCompilerSpec : CompilerSpec, SpecIdentifierType, GCCCompatible } if let prevArgument, ClangCompilerSpec.outputAgnosticCompilerArgumentsWithValues.contains(prevArgument) { - return true + return true } return false @@ -919,11 +925,12 @@ public class ClangCompilerSpec : CompilerSpec, SpecIdentifierType, GCCCompatible } let cachedBuild = cachingBuildEnabled(cbc, language: language, clangInfo: clangInfo) - let explicitModules = cbc.scope.evaluate(BuiltinMacros.CLANG_ENABLE_MODULES) - && (cbc.scope.evaluate(BuiltinMacros.CLANG_ENABLE_EXPLICIT_MODULES) || cbc.scope.evaluate(BuiltinMacros._EXPERIMENTAL_CLANG_EXPLICIT_MODULES)) + let explicitModules = + cbc.scope.evaluate(BuiltinMacros.CLANG_ENABLE_MODULES) + && (cbc.scope.evaluate(BuiltinMacros.CLANG_ENABLE_EXPLICIT_MODULES) || cbc.scope.evaluate(BuiltinMacros._EXPERIMENTAL_CLANG_EXPLICIT_MODULES)) let explicitModulesLanguages: Set = [ - .c, .objectiveC + .c, .objectiveC, ] let supportedLanguages = cachedBuild ? GCCCompatibleLanguageDialect.allCLanguages : explicitModulesLanguages @@ -948,16 +955,16 @@ public class ClangCompilerSpec : CompilerSpec, SpecIdentifierType, GCCCompatible // Check that we are using a recognized clang. switch compiler.basenameWithoutSuffix { - case "clang", "clang++": - break - default: - // Not recognized as clang; assume the worst. - delegate.remark("Explicit modules is enabled but the compiler was not recognized; disable explicit modules with CLANG_ENABLE_EXPLICIT_MODULES=NO, or use C_COMPILER_LAUNCHER with CLANG_ENABLE_EXPLICIT_MODULES_WITH_COMPILER_LAUNCHER=YES if using a compatible launcher") - break EXPLICIT_MODULES + case "clang", "clang++": + break + default: + // Not recognized as clang; assume the worst. 
+ delegate.remark("Explicit modules is enabled but the compiler was not recognized; disable explicit modules with CLANG_ENABLE_EXPLICIT_MODULES=NO, or use C_COMPILER_LAUNCHER with CLANG_ENABLE_EXPLICIT_MODULES_WITH_COMPILER_LAUNCHER=YES if using a compatible launcher") + break EXPLICIT_MODULES } // Verify we have a clang version with the latest explicit modules bugfixes. - if let clangVersion = clangInfo?.clangVersion, clangVersion < Version(1403, 0, 300, 5) { + if let clangVersion = clangInfo?.clangVersion, clangVersion < Version(1403, 0, 300, 5) { delegate.warning("Explicit modules is not supported with Clang version \(clangVersion), continuing with explicit modules disabled.") break EXPLICIT_MODULES } @@ -1138,7 +1145,7 @@ public class ClangCompilerSpec : CompilerSpec, SpecIdentifierType, GCCCompatible let dataCache = cbc.producer.getSpecDataCache(self, cacheType: DataCache.self) let constantFlags = dataCache.getStandardFlags(self, producer: cbc.producer, scope: cbc.scope, optionContext: clangInfo, delegate: delegate, inputFileType: resolvedInputFileType) commandLine += constantFlags.flags - let responseFileAdditionalOutput = constantFlags.responseFileMapping.keys.sorted().map({"Using response file: \($0.str)"}) + let responseFileAdditionalOutput = constantFlags.responseFileMapping.keys.sorted().map({ "Using response file: \($0.str)" }) additionalOutput.append(contentsOf: responseFileAdditionalOutput) inputDeps.append(contentsOf: constantFlags.inputs) @@ -1152,15 +1159,22 @@ public class ClangCompilerSpec : CompilerSpec, SpecIdentifierType, GCCCompatible // In practice, changes which actually are likely to impact the project will usually result in other source changes which will cause the necessary rebuild, but we should ultimately close this loophole: Move to stronger dependencies on headermaps and VFS // // We currently don't need to worry about any ordering constraint, because all of the headermap production tasks are forced into an early phase. -#if false - // Mark as depending on search paths which should be treated as inputs. - inputDeps.append(contentsOf: constantFlags.headerSearchPaths.inputPaths) -#endif + #if false + // Mark as depending on search paths which should be treated as inputs. + inputDeps.append(contentsOf: constantFlags.headerSearchPaths.inputPaths) + #endif let cbcWithOutput = cbc.outputs.isEmpty ? cbc.appendingOutputs([outputNode.path]) : cbc - commandLine += self.commandLineFromOptions(cbc.producer, scope: cbc.scope, inputFileType: resolvedInputFileType, optionContext: clangInfo, buildOptionsFilter: .extendedOnly, lookup: { - self.lookup($0, cbcWithOutput, delegate) - }).map(\.asString) + commandLine += self.commandLineFromOptions( + cbc.producer, + scope: cbc.scope, + inputFileType: resolvedInputFileType, + optionContext: clangInfo, + buildOptionsFilter: .extendedOnly, + lookup: { + self.lookup($0, cbcWithOutput, delegate) + } + ).map(\.asString) let additionalOutputs = await self.additionalEvaluatedOutputs(cbcWithOutput, delegate) for output in additionalOutputs.outputs { @@ -1192,7 +1206,7 @@ public class ClangCompilerSpec : CompilerSpec, SpecIdentifierType, GCCCompatible } // Add -D arguments for any preprocessor definitions in GCC_PREPROCESSOR_DEFINITIONS_NOT_USED_IN_PRECOMPS. 
- commandLine += cbc.scope.evaluate(BuiltinMacros.GCC_PREPROCESSOR_DEFINITIONS_NOT_USED_IN_PRECOMPS).map{ "-D\($0)" } + commandLine += cbc.scope.evaluate(BuiltinMacros.GCC_PREPROCESSOR_DEFINITIONS_NOT_USED_IN_PRECOMPS).map { "-D\($0)" } // Add -D arguments for any preprocessor definitions in the SDK. // FIXME: We don’t yet have the SDK API that we’d need, but perhaps we should instead have the Settings object pass such things down through more general-purpose settings. @@ -1225,8 +1239,8 @@ public class ClangCompilerSpec : CompilerSpec, SpecIdentifierType, GCCCompatible let dependencyValidationOutputPath: Path? let traceFilePath: Path? if clangInfo?.hasFeature("print-headers-direct-per-file") ?? false, - ((moduleDependenciesContext?.validate ?? .defaultValue) != .no || - (headerDependenciesContext?.validate ?? .defaultValue) != .no) { + ((moduleDependenciesContext?.validate ?? .defaultValue) != .no || (headerDependenciesContext?.validate ?? .defaultValue) != .no) + { dependencyValidationOutputPath = Path(outputNode.path.str + ".dependencies") let file = Path(outputNode.path.str + ".trace.json") @@ -1234,7 +1248,7 @@ public class ClangCompilerSpec : CompilerSpec, SpecIdentifierType, GCCCompatible "-Xclang", "-header-include-file", "-Xclang", file.str, "-Xclang", "-header-include-filtering=direct-per-file", - "-Xclang", "-header-include-format=json" + "-Xclang", "-header-include-format=json", ] traceFilePath = file @@ -1257,7 +1271,7 @@ public class ClangCompilerSpec : CompilerSpec, SpecIdentifierType, GCCCompatible let LTO = cbc.scope.evaluate(BuiltinMacros.LLVM_LTO) // Pass the flags to emit remarks to the compiler invocation when LTO is disabled. When LTO is enabled, the flags are passed to the linker invocation. let shouldGenerateRemarks = cbc.scope.evaluate(BuiltinMacros.CLANG_GENERATE_OPTIMIZATION_REMARKS) && (LTO.isEmpty || LTO == "NO") - if shouldGenerateRemarks { + if shouldGenerateRemarks { let remarkFilePath = Path(outputNode.path.withoutSuffix + ".opt.bitstream") commandLine += ["-fsave-optimization-record=bitstream", "-foptimization-record-file=" + remarkFilePath.str] let filter = cbc.scope.evaluate(BuiltinMacros.CLANG_GENERATE_OPTIMIZATION_REMARKS_FILTER) @@ -1281,8 +1295,7 @@ public class ClangCompilerSpec : CompilerSpec, SpecIdentifierType, GCCCompatible commandLine += Self.supplementalIndexingArgs(allowCompilerErrors: clangInfo.toolFeatures.has(.allowPcmWithCompilerErrors)) } - if clangInfo.toolFeatures.has(.indexUnitOutputPath) && - (commandLine.contains("-index-store-path") || hasEnabledIndexBuildArena) { + if clangInfo.toolFeatures.has(.indexUnitOutputPath) && (commandLine.contains("-index-store-path") || hasEnabledIndexBuildArena) { // Remap the index output file path if either the index store is enabled (checked through the argument since it is conditional on more than just SWIFT_INDEX_STORE_ENABLE) or the build arena is enabled. 
let basePath = cbc.scope.evaluate(BuiltinMacros.OBJROOT) if let newPath = generateIndexOutputPath(from: outputNode.path, basePath: basePath) { @@ -1319,12 +1332,12 @@ public class ClangCompilerSpec : CompilerSpec, SpecIdentifierType, GCCCompatible ), workingDir: cbc.scope.evaluate(BuiltinMacros.PROJECT_DIR), prefixInfo: prefixInfo, - toolchains: cbc.producer.toolchains.map{ $0.identifier }, + toolchains: cbc.producer.toolchains.map { $0.identifier }, responseFileAttachmentPaths: constantFlags.responseFileMapping, responseFileFormat: Self.responseFileFormat(hostOS: cbc.producer.hostOperatingSystem) ) } else { - indexingPayload = nil + indexingPayload = nil } // If we're generating module map files, then make the compile task depend on them. @@ -1467,10 +1480,11 @@ public class ClangCompilerSpec : CompilerSpec, SpecIdentifierType, GCCCompatible } if cbc.isPreferredArch, - self.identifier == "com.apple.compilers.llvm.clang.1_0.compiler", - let sourcecodeCFileType = cbc.producer.lookupFileType(identifier: "sourcecode.c"), - resolvedInputFileType.conformsTo(sourcecodeCFileType), - !hasEnabledIndexBuildArena { + self.identifier == "com.apple.compilers.llvm.clang.1_0.compiler", + let sourcecodeCFileType = cbc.producer.lookupFileType(identifier: "sourcecode.c"), + resolvedInputFileType.conformsTo(sourcecodeCFileType), + !hasEnabledIndexBuildArena + { // If the static analyzer is enabled, also construct tasks for it. let skipAnalyzer = cbc.scope.evaluate(BuiltinMacros.SKIP_CLANG_STATIC_ANALYZER) if cbc.scope.evaluate(BuiltinMacros.RUN_CLANG_STATIC_ANALYZER) && !skipAnalyzer { @@ -1682,7 +1696,7 @@ public class ClangCompilerSpec : CompilerSpec, SpecIdentifierType, GCCCompatible // Add “standard flags”, which are ones that depend only on the variant, architecture, and language (in addition to the identifier, of course). let dataCache = cbc.producer.getSpecDataCache(self, cacheType: DataCache.self) let constantFlags = dataCache.getStandardFlags(self, producer: cbc.producer, scope: cbc.scope, optionContext: clangInfo, delegate: delegate, inputFileType: inputFileType) - let responseFileAdditionalOutput = constantFlags.responseFileMapping.keys.sorted().map({"Using response file: \($0.str)"}) + let responseFileAdditionalOutput = constantFlags.responseFileMapping.keys.sorted().map({ "Using response file: \($0.str)" }) commandLine += constantFlags.flags // Add the source file argument. @@ -1717,8 +1731,8 @@ public class ClangCompilerSpec : CompilerSpec, SpecIdentifierType, GCCCompatible } // Compute the final input and output path lists. - var inputPaths = [ headerPath ] + constantFlags.inputs - let outputPaths = [ precompPath ] + var inputPaths = [headerPath] + constantFlags.inputs + let outputPaths = [precompPath] // If we're generating module map files, then make PCH generation depend on them. // We might not need to include this dependency if the module is 'Swift only', but it shouldn't hurt. @@ -1729,7 +1743,7 @@ public class ClangCompilerSpec : CompilerSpec, SpecIdentifierType, GCCCompatible } } - let byteStringCommandLine = commandLine.map{ ByteString(encodingAsUTF8: $0) } + let byteStringCommandLine = commandLine.map { ByteString(encodingAsUTF8: $0) } // Handle explicit modules build. 
let scanningOutput = precompPath.appendingFileNameSuffix("scan") @@ -1802,12 +1816,12 @@ public class ClangCompilerSpec : CompilerSpec, SpecIdentifierType, GCCCompatible let payload = ClangTaskPayload(serializedDiagnosticsPath: Path(diagnosticsFile), indexingPayload: nil) - delegate.createTask(type: self, dependencyData: dependencyData, payload: payload, ruleInfo: ruleInfo + ["(implicit-copy)"], additionalSignatureData: additionalSignatureData, commandLine: updatedCommandLine.map{ ByteString(encodingAsUTF8: $0) }, additionalOutput: responseFileAdditionalOutput, environment: EnvironmentBindings(), workingDirectory: compilerWorkingDirectory(cbc), inputs: inputPaths + extraInputs, outputs: [ implicitModulesOutputNode.path ], action: delegate.taskActionCreationDelegate.createDeferredExecutionTaskActionIfRequested(userPreferences: cbc.producer.userPreferences), execDescription: resolveExecutionDescription(cbc, delegate), enableSandboxing: enableSandboxing, additionalTaskOrderingOptions: [.compilationForIndexableSourceFile], usesExecutionInputs: false) + delegate.createTask(type: self, dependencyData: dependencyData, payload: payload, ruleInfo: ruleInfo + ["(implicit-copy)"], additionalSignatureData: additionalSignatureData, commandLine: updatedCommandLine.map { ByteString(encodingAsUTF8: $0) }, additionalOutput: responseFileAdditionalOutput, environment: EnvironmentBindings(), workingDirectory: compilerWorkingDirectory(cbc), inputs: inputPaths + extraInputs, outputs: [implicitModulesOutputNode.path], action: delegate.taskActionCreationDelegate.createDeferredExecutionTaskActionIfRequested(userPreferences: cbc.producer.userPreferences), execDescription: resolveExecutionDescription(cbc, delegate), enableSandboxing: enableSandboxing, additionalTaskOrderingOptions: [.compilationForIndexableSourceFile], usesExecutionInputs: false) } return ClangPrefixInfo.PCHInfo( output: precompPath, - hashCriteria: nil, // rdar://problem/24469921 + hashCriteria: nil, // rdar://problem/24469921 commandLine: ClangSourceFileIndexingInfo.indexingCommandLine(from: byteStringCommandLine, workingDir: cbc.scope.evaluate(BuiltinMacros.PROJECT_DIR), addSupplementary: !hasEnabledIndexBuildArena, replaceCompile: false, responseFileMapping: constantFlags.responseFileMapping, responseFileFormat: Self.responseFileFormat(hostOS: cbc.producer.hostOperatingSystem)) ) } @@ -1843,7 +1857,6 @@ public class ClangCompilerSpec : CompilerSpec, SpecIdentifierType, GCCCompatible public override var payloadType: (any TaskPayload.Type)? { return ClangTaskPayload.self } - // MARK: Discovering info by invoking the tool /// Creates and returns a discovered info object for the clang compiler for the given command producer, scope, and language. 
@@ -1981,7 +1994,7 @@ extension ClangCompilerSpec { } } -public final class ClangStaticAnalyzerSpec : ClangCompilerSpec, @unchecked Sendable { +public final class ClangStaticAnalyzerSpec: ClangCompilerSpec, @unchecked Sendable { public class override var identifier: String { "com.apple.compilers.llvm.clang.1_0.analyzer" } @@ -2051,7 +2064,7 @@ func createSpecParser(for proxy: SpecProxy, registry: SpecRegistry) -> SpecParse return SpecParser(delegate, proxy) } -public final class ClangPreprocessorSpec : ClangCompilerSpec, SpecImplementationType, @unchecked Sendable { +public final class ClangPreprocessorSpec: ClangCompilerSpec, SpecImplementationType, @unchecked Sendable { public class override var identifier: String { "com.apple.compilers.llvm.clang.1_0.preprocessor" } @@ -2087,7 +2100,7 @@ public final class ClangPreprocessorSpec : ClangCompilerSpec, SpecImplementation } } -public final class ClangAssemblerSpec : ClangCompilerSpec, SpecImplementationType, @unchecked Sendable { +public final class ClangAssemblerSpec: ClangCompilerSpec, SpecImplementationType, @unchecked Sendable { public class override var identifier: String { "com.apple.compilers.llvm.clang.1_0.assembler" } @@ -2170,7 +2183,7 @@ public final class ClangModuleVerifierSpec: ClangCompilerSpec, SpecImplementatio } } -private func ==(lhs: ClangCompilerSpec.DataCache.ConstantFlagsKey, rhs: ClangCompilerSpec.DataCache.ConstantFlagsKey) -> Bool { +private func == (lhs: ClangCompilerSpec.DataCache.ConstantFlagsKey, rhs: ClangCompilerSpec.DataCache.ConstantFlagsKey) -> Bool { return ObjectIdentifier(lhs.scope) == ObjectIdentifier(rhs.scope) && lhs.inputFileType == rhs.inputFileType } diff --git a/Sources/SWBCore/SpecImplementations/Tools/ClangModuleVerifierInputGenerator.swift b/Sources/SWBCore/SpecImplementations/Tools/ClangModuleVerifierInputGenerator.swift index 99b75e8f..1af449b1 100644 --- a/Sources/SWBCore/SpecImplementations/Tools/ClangModuleVerifierInputGenerator.swift +++ b/Sources/SWBCore/SpecImplementations/Tools/ClangModuleVerifierInputGenerator.swift @@ -12,7 +12,7 @@ public import SWBUtil -public final class ClangModuleVerifierInputGeneratorSpec : GenericCommandLineToolSpec, SpecIdentifierType, @unchecked Sendable { +public final class ClangModuleVerifierInputGeneratorSpec: GenericCommandLineToolSpec, SpecIdentifierType, @unchecked Sendable { public static let identifier = "com.apple.build-tools.module-verifier-input-generator" override public func constructTasks(_ cbc: CommandBuildContext, _ delegate: any TaskGenerationDelegate) async { @@ -31,22 +31,26 @@ public final class ClangModuleVerifierInputGeneratorSpec : GenericCommandLineToo ] let commandLine = await commandLineFromTemplate(cbc, delegate, optionContext: discoveredCommandLineToolSpecInfo(cbc.producer, cbc.scope, delegate), specialArgs: specialArguments).map(\.asString) - let inputs = cbc.inputs.map{ delegate.createNode($0.absolutePath) } + cbc.commandOrderingInputs - let outputs = cbc.outputs.map { delegate.createNode($0) } + cbc.commandOrderingOutputs + [ - delegate.createNode(mainOutput), - delegate.createNode(headerOutput), - delegate.createNode(moduleMapOutput), - ] + let inputs = cbc.inputs.map { delegate.createNode($0.absolutePath) } + cbc.commandOrderingInputs + let outputs = + cbc.outputs.map { delegate.createNode($0) } + cbc.commandOrderingOutputs + [ + delegate.createNode(mainOutput), + delegate.createNode(headerOutput), + delegate.createNode(moduleMapOutput), + ] - delegate.createTask(type: self, - ruleInfo: ruleInfo, - commandLine: commandLine, 
- environment: environmentFromSpec(cbc, delegate), - workingDirectory: cbc.producer.defaultWorkingDirectory, - inputs: inputs, outputs: outputs, - action: delegate.taskActionCreationDelegate.createClangModuleVerifierInputGeneratorTaskAction(), - execDescription: resolveExecutionDescription(cbc, delegate), - enableSandboxing: enableSandboxing, - alwaysExecuteTask: alwaysExecuteTask) + delegate.createTask( + type: self, + ruleInfo: ruleInfo, + commandLine: commandLine, + environment: environmentFromSpec(cbc, delegate), + workingDirectory: cbc.producer.defaultWorkingDirectory, + inputs: inputs, + outputs: outputs, + action: delegate.taskActionCreationDelegate.createClangModuleVerifierInputGeneratorTaskAction(), + execDescription: resolveExecutionDescription(cbc, delegate), + enableSandboxing: enableSandboxing, + alwaysExecuteTask: alwaysExecuteTask + ) } } diff --git a/Sources/SWBCore/SpecImplementations/Tools/ClangStatCache.swift b/Sources/SWBCore/SpecImplementations/Tools/ClangStatCache.swift index 76c69149..250e52cf 100644 --- a/Sources/SWBCore/SpecImplementations/Tools/ClangStatCache.swift +++ b/Sources/SWBCore/SpecImplementations/Tools/ClangStatCache.swift @@ -20,18 +20,20 @@ final public class ClangStatCacheSpec: GenericCommandLineToolSpec, SpecIdentifie } override public func constructTasks(_ cbc: CommandBuildContext, _ delegate: any TaskGenerationDelegate) async { - await delegate.createTask(type: self, - ruleInfo: defaultRuleInfo(cbc, delegate), - commandLine: commandLineFromTemplate(cbc, delegate, optionContext: discoveredCommandLineToolSpecInfo(cbc.producer, cbc.scope, delegate)).map(\.asString), - environment: environmentFromSpec(cbc, delegate), - workingDirectory: cbc.producer.defaultWorkingDirectory, - // We intentionally specify no inputs because clang-stat-cache always runs uses FSEvents to optimize invalidation so llbuild doesn't need to stat the entire SDK on every build. - inputs: [], - outputs: [delegate.createNode(cbc.output), delegate.createVirtualNode("ClangStatCache \(cbc.output.str)")], - action: nil, - execDescription: resolveExecutionDescription(cbc, delegate), - preparesForIndexing: true, - enableSandboxing: enableSandboxing, - alwaysExecuteTask: true) + await delegate.createTask( + type: self, + ruleInfo: defaultRuleInfo(cbc, delegate), + commandLine: commandLineFromTemplate(cbc, delegate, optionContext: discoveredCommandLineToolSpecInfo(cbc.producer, cbc.scope, delegate)).map(\.asString), + environment: environmentFromSpec(cbc, delegate), + workingDirectory: cbc.producer.defaultWorkingDirectory, + // We intentionally specify no inputs because clang-stat-cache always runs uses FSEvents to optimize invalidation so llbuild doesn't need to stat the entire SDK on every build. 
+ inputs: [], + outputs: [delegate.createNode(cbc.output), delegate.createVirtualNode("ClangStatCache \(cbc.output.str)")], + action: nil, + execDescription: resolveExecutionDescription(cbc, delegate), + preparesForIndexing: true, + enableSandboxing: enableSandboxing, + alwaysExecuteTask: true + ) } } diff --git a/Sources/SWBCore/SpecImplementations/Tools/CodeSign.swift b/Sources/SWBCore/SpecImplementations/Tools/CodeSign.swift index 8f0dd409..f66ea032 100644 --- a/Sources/SWBCore/SpecImplementations/Tools/CodeSign.swift +++ b/Sources/SWBCore/SpecImplementations/Tools/CodeSign.swift @@ -14,7 +14,7 @@ public import SWBUtil public import SWBMacro import Foundation -public final class CodesignToolSpec : CommandLineToolSpec, SpecIdentifierType, @unchecked Sendable { +public final class CodesignToolSpec: CommandLineToolSpec, SpecIdentifierType, @unchecked Sendable { public static let identifier = "com.apple.build-tools.codesign" public override func computeExecutablePath(_ cbc: CommandBuildContext) -> String { @@ -46,7 +46,7 @@ public final class CodesignToolSpec : CommandLineToolSpec, SpecIdentifierType, @ // Don't code sign unless we have a valid expanded identity. let expandedCodeSignIdentity = cbc.scope.evaluate(BuiltinMacros.EXPANDED_CODE_SIGN_IDENTITY) if expandedCodeSignIdentity.isEmpty { - if cbc.scope.evaluate(BuiltinMacros.CODE_SIGNING_ALLOWED) && cbc.scope.evaluate(BuiltinMacros.CODE_SIGNING_REQUIRED) { + if cbc.scope.evaluate(BuiltinMacros.CODE_SIGNING_ALLOWED) && cbc.scope.evaluate(BuiltinMacros.CODE_SIGNING_REQUIRED) { let productTypeString = { () -> String in guard let productType = cbc.producer.productType else { return "" } return " for product type '\(productType.name)" @@ -86,14 +86,13 @@ public final class CodesignToolSpec : CommandLineToolSpec, SpecIdentifierType, @ var addTimestampNoneFlag = !cbc.scope.evaluate(BuiltinMacros.DEPLOYMENT_POSTPROCESSING) var illegalFlags = [String]() for flag in cbc.scope.evaluate(BuiltinMacros.OTHER_CODE_SIGN_FLAGS) { - if flag.hasPrefix("-d") || flag == "--display" || flag == "-h" { + if flag.hasPrefix("-d") || flag == "--display" || flag == "-h" { illegalFlags.append(flag) - } - else { + } else { commandLine.append(flag) } - if flag.hasPrefix("-r") || flag.hasPrefix("--requirements") { + if flag.hasPrefix("-r") || flag.hasPrefix("--requirements") { generateDesignatedRequirements = false } @@ -202,7 +201,8 @@ public final class CodesignToolSpec : CommandLineToolSpec, SpecIdentifierType, @ var environment: [String: String] = environmentFromSpec(cbc, delegate).bindingsDictionary environment.merge( CodesignToolSpec.computeCodeSigningEnvironment(cbc, codesignAllocate: environment[BuiltinMacros.CODESIGN_ALLOCATE.name]), - uniquingKeysWith: { (_, second) in second }) + uniquingKeysWith: { (_, second) in second } + ) // Normally, the additional inputs shouldn't be applied on a resign-task for the target as doing so can create a cycle between the Copy Files Phase and Run Script Phase. However, due to the way that app hosted tests work (e.g. misuse the copy phase to inject content into the app bundle), we need to provide a provision to track that in order to properly-resign the app bundle (cf: testIncrementalCodesignForCopyFileChangesWithAppHostedTests). // NOTE: This does mean that the users can actually introduce a cycle if they have a script phase that also injects content into the app bundle related to the test bundle that is getting copied in. However, this should be an obscure usage. 
@@ -213,7 +213,7 @@ public final class CodesignToolSpec : CommandLineToolSpec, SpecIdentifierType, @ // FIXME: We currently track the product to sign as an input, but this doesn't seem right given that the tool won't actually look at that? // NOTE: The use of `createDirectoryTreeNode()` is done as it may very well be the case that the input is a bundle (e.g. .xctest, .framework, .appex, etc...). This is safe to use on files as well. - var inputs: [any PlannedNode] = [delegate.createNode(productToSign), delegate.createNode(outputPath)] + extraInputs.map{ delegate.createDirectoryTreeNode($0, excluding: []) } + cbc.commandOrderingInputs + var inputs: [any PlannedNode] = [delegate.createNode(productToSign), delegate.createNode(outputPath)] + extraInputs.map { delegate.createDirectoryTreeNode($0, excluding: []) } + cbc.commandOrderingInputs // Detect whether or not we are signing a bundle, so that we can properly report the inputs and outputs. This is important for our mutable node handling being able to connect the tasks properly. var outputs: [any PlannedNode] @@ -242,8 +242,7 @@ public final class CodesignToolSpec : CommandLineToolSpec, SpecIdentifierType, @ // For non-framework bundles we need to include Contents/MacOS when not using shallow bundles. if !shallow { binaryPath = outputPath.join("Contents/MacOS").join(bundleName) - } - else { + } else { binaryPath = outputPath.join(bundleName) } } @@ -284,16 +283,16 @@ public final class CodesignToolSpec : CommandLineToolSpec, SpecIdentifierType, @ // First, look for a -i or --identifier codesign parameter. if let identifierIdx = commandLine.firstIndex(of: "-i") ?? commandLine.firstIndex(of: "--identifier") { // If we found find a -i or --identifier parameter, use it as the identifier. - if identifierIdx+1 < commandLine.count { - identifier = commandLine[identifierIdx+1] + if identifierIdx + 1 < commandLine.count { + identifier = commandLine[identifierIdx + 1] } } // If there are no periods in the identifier, look for a --prefix codesign parameter and if found, prepend it to the identifier. if identifier.contains(".") { if let prefixIdx = commandLine.firstIndex(of: "--prefix") { - if prefixIdx+1 < commandLine.count { - let prefix = commandLine[prefixIdx+1] + if prefixIdx + 1 < commandLine.count { + let prefix = commandLine[prefixIdx + 1] identifier = prefix + identifier } } @@ -317,7 +316,6 @@ public final class CodesignToolSpec : CommandLineToolSpec, SpecIdentifierType, @ return cbc.scope.evaluate(cbc.scope.namespace.parseString(unevaluatedDesignatedRequirements), lookup: lookup) } - /// Computes the environment for invoking the code signing tool. 
/// /// - Parameters: diff --git a/Sources/SWBCore/SpecImplementations/Tools/ConcatenateTool.swift b/Sources/SWBCore/SpecImplementations/Tools/ConcatenateTool.swift index 024d6db6..34e61bb0 100644 --- a/Sources/SWBCore/SpecImplementations/Tools/ConcatenateTool.swift +++ b/Sources/SWBCore/SpecImplementations/Tools/ConcatenateTool.swift @@ -13,14 +13,17 @@ public import SWBUtil import SWBMacro -public final class ConcatenateToolSpec : CommandLineToolSpec, SpecImplementationType, @unchecked Sendable { +public final class ConcatenateToolSpec: CommandLineToolSpec, SpecImplementationType, @unchecked Sendable { public static let identifier = "com.apple.build-tools.concatenate" public static func construct(registry: SpecRegistry, proxy: SpecProxy) -> Spec { - return self.init(registry, proxy, - execDescription: registry.internalMacroNamespace.parseString("Concatenating to $(OutputRelativePath)"), - ruleInfoTemplate: ["Concatenate", .output, .inputs], - commandLineTemplate: [.execPath, .output, .inputs]) + return self.init( + registry, + proxy, + execDescription: registry.internalMacroNamespace.parseString("Concatenating to $(OutputRelativePath)"), + ruleInfoTemplate: ["Concatenate", .output, .inputs], + commandLineTemplate: [.execPath, .output, .inputs] + ) } public override func computeExecutablePath(_ cbc: CommandBuildContext) -> String { diff --git a/Sources/SWBCore/SpecImplementations/Tools/ConstructStubExecutorFileListTool.swift b/Sources/SWBCore/SpecImplementations/Tools/ConstructStubExecutorFileListTool.swift index 1609be7b..7f0620cd 100644 --- a/Sources/SWBCore/SpecImplementations/Tools/ConstructStubExecutorFileListTool.swift +++ b/Sources/SWBCore/SpecImplementations/Tools/ConstructStubExecutorFileListTool.swift @@ -57,7 +57,7 @@ public final class ConstructStubExecutorFileListToolSpec: CommandLineToolSpec, S debugDylibPath.str, stubExecutorLibraryPath.str, stubExecutorLibraryWithSwiftEntryPointPath.str, - "--output", cbc.output.str + "--output", cbc.output.str, ] delegate.createTask( diff --git a/Sources/SWBCore/SpecImplementations/Tools/CopyTool.swift b/Sources/SWBCore/SpecImplementations/Tools/CopyTool.swift index e7053e61..f9228aef 100644 --- a/Sources/SWBCore/SpecImplementations/Tools/CopyTool.swift +++ b/Sources/SWBCore/SpecImplementations/Tools/CopyTool.swift @@ -13,7 +13,7 @@ public import SWBUtil import SWBMacro -public final class CopyToolSpec : CompilerSpec, SpecIdentifierType, @unchecked Sendable { +public final class CopyToolSpec: CompilerSpec, SpecIdentifierType, @unchecked Sendable { public static let identifier = "com.apple.compilers.pbxcp" /// Construct a `Copy` task to copy a source file to a destination with default behaviors. @@ -25,12 +25,18 @@ public final class CopyToolSpec : CompilerSpec, SpecIdentifierType, @unchecked S /// Construct a `Copy` task to copy a source file to a destination. public func constructCopyTasks( - _ cbc: CommandBuildContext, _ delegate: any TaskGenerationDelegate, - ruleName: String? = nil, executionDescription: String? = nil, - removeHeaderDirectories: Bool = false, removeStaticExecutables: Bool = false, - excludeSubpaths: [String] = [], includeOnlySubpaths: [String] = [], - stripUnsignedBinaries: Bool? = nil, stripSubpaths: [String] = [], - stripBitcode: Bool = false, skipCopyIfContentsEqual: Bool = false, + _ cbc: CommandBuildContext, + _ delegate: any TaskGenerationDelegate, + ruleName: String? = nil, + executionDescription: String? 
= nil, + removeHeaderDirectories: Bool = false, + removeStaticExecutables: Bool = false, + excludeSubpaths: [String] = [], + includeOnlySubpaths: [String] = [], + stripUnsignedBinaries: Bool? = nil, + stripSubpaths: [String] = [], + stripBitcode: Bool = false, + skipCopyIfContentsEqual: Bool = false, additionalFilesToRemove: [String]? = nil, additionalPresumedOutputs: [any PlannedNode] = [], ignoreMissingInputs: Bool = false, @@ -154,15 +160,21 @@ public final class CopyToolSpec : CompilerSpec, SpecIdentifierType, @unchecked S // Note that the order of rule info here is against the usual conventions. let action = delegate.taskActionCreationDelegate.createFileCopyTaskAction(FileCopyTaskActionContext(cbc)) - let inputs: [any PlannedNode] = cbc.inputs.map{ delegate.createDirectoryTreeNode($0.absolutePath) } + cbc.commandOrderingInputs + let inputs: [any PlannedNode] = cbc.inputs.map { delegate.createDirectoryTreeNode($0.absolutePath) } + cbc.commandOrderingInputs let outputs: [any PlannedNode] = [delegate.createNode(outputPath)] + additionalPresumedOutputs + cbc.commandOrderingOutputs delegate.createTask( - type: self, payload: payload, ruleInfo: ruleInfo, - commandLine: commandLine, environment: environmentFromSpec(cbc, delegate), + type: self, + payload: payload, + ruleInfo: ruleInfo, + commandLine: commandLine, + environment: environmentFromSpec(cbc, delegate), workingDirectory: cbc.producer.defaultWorkingDirectory, - inputs: inputs, outputs: outputs, action: action, + inputs: inputs, + outputs: outputs, + action: action, execDescription: executionDescription ?? resolveExecutionDescription(cbc, delegate), - preparesForIndexing: cbc.preparesForIndexing, enableSandboxing: enableSandboxing, + preparesForIndexing: cbc.preparesForIndexing, + enableSandboxing: enableSandboxing, additionalTaskOrderingOptions: additionalTaskOrderingOptions, repairViaOwnershipAnalysis: repairViaOwnershipAnalysis ) diff --git a/Sources/SWBCore/SpecImplementations/Tools/CreateAssetPackManifestTool.swift b/Sources/SWBCore/SpecImplementations/Tools/CreateAssetPackManifestTool.swift index 8964b9df..d6f1a23d 100644 --- a/Sources/SWBCore/SpecImplementations/Tools/CreateAssetPackManifestTool.swift +++ b/Sources/SWBCore/SpecImplementations/Tools/CreateAssetPackManifestTool.swift @@ -13,7 +13,7 @@ import SWBUtil public import SWBMacro -public final class CreateAssetPackManifestToolSpec : CommandLineToolSpec, SpecImplementationType, @unchecked Sendable { +public final class CreateAssetPackManifestToolSpec: CommandLineToolSpec, SpecImplementationType, @unchecked Sendable { public static let identifier = "com.apple.build-tools.odr.create-asset-pack-manifest" public class func construct(registry: SpecRegistry, proxy: SpecProxy) -> Spec { @@ -41,10 +41,13 @@ public final class CreateAssetPackManifestToolSpec : CommandLineToolSpec, SpecIm BuiltinMacros.OutputPath.name: assetPackManifestPath.str, ] - let inputs: [any PlannedNode] = assetPacks.flatMap { assetPack -> [any PlannedNode] in - [delegate.createDirectoryTreeNode(assetPack.path), - delegate.createNode(assetPack.path.join("Info.plist"))] - } + orderingInputs + let inputs: [any PlannedNode] = + assetPacks.flatMap { assetPack -> [any PlannedNode] in + [ + delegate.createDirectoryTreeNode(assetPack.path), + delegate.createNode(assetPack.path.join("Info.plist")), + ] + } + orderingInputs let outputPath = assetPackManifestPath let outputs = [delegate.createNode(outputPath)] let commandLine = assetPacks.map { $0.path.str }.sorted() diff --git 
a/Sources/SWBCore/SpecImplementations/Tools/DocumentationCompiler.swift b/Sources/SWBCore/SpecImplementations/Tools/DocumentationCompiler.swift index 705dee00..cad47e36 100644 --- a/Sources/SWBCore/SpecImplementations/Tools/DocumentationCompiler.swift +++ b/Sources/SWBCore/SpecImplementations/Tools/DocumentationCompiler.swift @@ -142,7 +142,7 @@ final public class DocumentationCompilerSpec: GenericCompilerSpec, SpecIdentifie // When building executable types (like applications and command-line tools), include all levels of headers in the generated symbol graph // since executable documentation is meant for the team developing that executable (compared to framework documentation for the consumers // of that framework). - return [.public, .private, nil] // nil for project visibility + return [.public, .private, nil] // nil for project visibility case .framework, .none: // For frameworks (and non-documentable types), only include public API in the generated symbol graph. return [.public] @@ -155,9 +155,16 @@ final public class DocumentationCompilerSpec: GenericCompilerSpec, SpecIdentifie } if cbc.inputs.count > 1 { - delegate.emit(Diagnostic(behavior: .error, location: .unknown, data: DiagnosticData("Each target may contain only a single documentation catalog."), childDiagnostics: cbc.inputs.map { input in - Diagnostic(behavior: .note, location: .path(input.absolutePath), data: DiagnosticData("Documentation catalog named \(input.absolutePath.basename)")) - })) + delegate.emit( + Diagnostic( + behavior: .error, + location: .unknown, + data: DiagnosticData("Each target may contain only a single documentation catalog."), + childDiagnostics: cbc.inputs.map { input in + Diagnostic(behavior: .note, location: .path(input.absolutePath), data: DiagnosticData("Documentation catalog named \(input.absolutePath.basename)")) + } + ) + ) return } @@ -188,7 +195,8 @@ final public class DocumentationCompilerSpec: GenericCompilerSpec, SpecIdentifie let environmentBindings = templatePath.map { [("DOCC_HTML_DIR", $0.str)] } ?? [] // The inputs (files that this task depend on) are the '.docs' bundles, specified in the '.xcspec' file, and the symbol graph task - let inputs: [any PlannedNode] = cbc.inputs.map({ delegate.createDirectoryTreeNode($0.absolutePath) }) + let inputs: [any PlannedNode] = + cbc.inputs.map({ delegate.createDirectoryTreeNode($0.absolutePath) }) + mainSymbolGraphFiles.map({ delegate.createNode($0) }) // The outputs (files that other tasks can depend on) are specified in the '.xcspec' file. @@ -257,12 +265,15 @@ final public class DocumentationCompilerSpec: GenericCompilerSpec, SpecIdentifie // Attach a payload with information about what built documentation this task will output. let outputDir = cbc.scope.evaluate(BuiltinMacros.DOCC_ARCHIVE_PATH) let diagnosticsFilePath = cbc.scope.evaluate(BuiltinMacros.DOCC_DIAGNOSTICS_FILE, lookup: lookup).nilIfEmpty - let payload = outputDir.isEmpty ? nil : DocumentationTaskPayload( - bundleIdentifier: cbc.scope.evaluate(BuiltinMacros.DOCC_CATALOG_IDENTIFIER), - outputPath: Path(outputDir), - targetIdentifier: cbc.producer.configuredTarget?.target.guid, - documentationDiagnosticsPath: diagnosticsFilePath - ) + let payload = + outputDir.isEmpty + ? 
nil + : DocumentationTaskPayload( + bundleIdentifier: cbc.scope.evaluate(BuiltinMacros.DOCC_CATALOG_IDENTIFIER), + outputPath: Path(outputDir), + targetIdentifier: cbc.producer.configuredTarget?.target.guid, + documentationDiagnosticsPath: diagnosticsFilePath + ) if let diagnosticsFilePath { outputs.append(delegate.createNode(diagnosticsFilePath)) } @@ -340,27 +351,32 @@ public func discoveredDocumentationCompilerInfo(_ producer: any CommandProducer, if !producer.executableSearchPaths.fs.exists(featuresPath) && producer.hostOperatingSystem == .windows { return DocumentationCompilerToolSpecInfo(toolPath: toolPath, toolFeatures: .init([.diagnosticsFile])) } - return try await producer.discoveredCommandLineToolSpecInfo(delegate, "docc", featuresPath, { contents in - func getFeatures(at toolPath: Path) throws -> ToolFeatures { - do { - let fs = PseudoFS() - try fs.createDirectory(featuresPath.dirname, recursive: true) - try fs.write(featuresPath, contents: ByteString(contents)) - return try .init(path: featuresPath, fs: fs) - } catch { - // If this is a custom tool path (via DOCC_EXEC) check the default features. - if let defaultToolPath = producer.executableSearchPaths.findExecutable(operatingSystem: producer.hostOperatingSystem, basename: "docc"), defaultToolPath != toolPath { - let defaultFeaturesPath = defaultToolPath.dirname.dirname.join("share").join("docc").join("features.json") - if localFS.exists(defaultFeaturesPath) { - return try .init(path: defaultFeaturesPath, fs: localFS) + return try await producer.discoveredCommandLineToolSpecInfo( + delegate, + "docc", + featuresPath, + { contents in + func getFeatures(at toolPath: Path) throws -> ToolFeatures { + do { + let fs = PseudoFS() + try fs.createDirectory(featuresPath.dirname, recursive: true) + try fs.write(featuresPath, contents: ByteString(contents)) + return try .init(path: featuresPath, fs: fs) + } catch { + // If this is a custom tool path (via DOCC_EXEC) check the default features. + if let defaultToolPath = producer.executableSearchPaths.findExecutable(operatingSystem: producer.hostOperatingSystem, basename: "docc"), defaultToolPath != toolPath { + let defaultFeaturesPath = defaultToolPath.dirname.dirname.join("share").join("docc").join("features.json") + if localFS.exists(defaultFeaturesPath) { + return try .init(path: defaultFeaturesPath, fs: localFS) + } } + // Didn't find any default features. + throw error } - // Didn't find any default features. - throw error } + return try DocumentationCompilerToolSpecInfo(toolPath: toolPath, toolFeatures: getFeatures(at: toolPath)) } - return try DocumentationCompilerToolSpecInfo(toolPath: toolPath, toolFeatures: getFeatures(at: toolPath)) - }) + ) } public struct DocumentationCompilerToolSpecInfo: DiscoveredCommandLineToolSpecInfo { @@ -393,9 +409,9 @@ extension DocumentationCompilerSpec { /// The Mach-O types considered by DocC to be frameworks. private static let frameworkMachOTypes: Set = [ - "mh_dylib", // dylibs and dynamic frameworks - "staticlib", // static libraries and static frameworks - "mh_object", // relocatable objects, used by SwiftPM + "mh_dylib", // dylibs and dynamic frameworks + "staticlib", // static libraries and static frameworks + "mh_object", // relocatable objects, used by SwiftPM ] /// The Mach-O type that DocC considers to be executable. 
@@ -709,7 +725,7 @@ private extension Diagnostic { location: mainLocation, sourceRanges: mainSourceRanges, data: DiagnosticData(diagnostic.summary), - fixIts: [], // DocC Solutions are created as child diagnostics to customize the fix-it messages + fixIts: [], // DocC Solutions are created as child diagnostics to customize the fix-it messages childDiagnostics: childDiagnostics ) } @@ -744,9 +760,10 @@ private extension Diagnostic.SourceRange { init(path: Path, range: DiagnosticFile.Diagnostic.Range) { self.init( path: path, - startLine: range.start.line, startColumn: range.start.column, - endLine: range.end.line, endColumn: range.end.column + startLine: range.start.line, + startColumn: range.start.column, + endLine: range.end.line, + endColumn: range.end.column ) } } - diff --git a/Sources/SWBCore/SpecImplementations/Tools/DsymutilTool.swift b/Sources/SWBCore/SpecImplementations/Tools/DsymutilTool.swift index 3fccd547..799d5b47 100644 --- a/Sources/SWBCore/SpecImplementations/Tools/DsymutilTool.swift +++ b/Sources/SWBCore/SpecImplementations/Tools/DsymutilTool.swift @@ -13,7 +13,7 @@ public import SWBUtil public import SWBMacro -public final class DsymutilToolSpec : GenericCommandLineToolSpec, SpecIdentifierType, @unchecked Sendable { +public final class DsymutilToolSpec: GenericCommandLineToolSpec, SpecIdentifierType, @unchecked Sendable { public static let identifier = "com.apple.tools.dsymutil" public override func constructTasks(_ cbc: CommandBuildContext, _ delegate: any TaskGenerationDelegate) async { diff --git a/Sources/SWBCore/SpecImplementations/Tools/GCCCompatibleCompilerSupport.swift b/Sources/SWBCore/SpecImplementations/Tools/GCCCompatibleCompilerSupport.swift index 446bed95..3743bb8c 100644 --- a/Sources/SWBCore/SpecImplementations/Tools/GCCCompatibleCompilerSupport.swift +++ b/Sources/SWBCore/SpecImplementations/Tools/GCCCompatibleCompilerSupport.swift @@ -13,10 +13,8 @@ package import SWBUtil package import SWBMacro - /// An abstract representation of a search path entry, with any auxiliary information as appropriate depending on the type. -package enum SearchPathEntry -{ +package enum SearchPathEntry { /// A "user" header search path, in the manner of the `-iquote` option. This kind of path is searched for quote-style includes, but is ignored for bracket-style includes. case userHeaderSearchPath(path: Path) @@ -40,8 +38,7 @@ package enum SearchPathEntry } /// An abstract representation of a list of search paths. -package final class SearchPathBuilder -{ +package final class SearchPathBuilder { /// The list of search path entries. private var searchPathEntries = [SearchPathEntry]() @@ -97,22 +94,19 @@ package final class SearchPathBuilder } /// Add a user header search path to the list of search paths. - func addUserHeaderSearchPath(_ path: Path, isInputPath: Bool = false) - { + func addUserHeaderSearchPath(_ path: Path, isInputPath: Bool = false) { let normalizedPath = path.normalize() addSearchPathEntry(.userHeaderSearchPath(path: normalizedPath), normalizedPath, isInputPath) } /// Add an ordinary header search path to the list of search paths. 
- func addHeaderSearchPath(_ path: Path, asSeparateArguments: Bool = false, isInputPath: Bool = false) - { + func addHeaderSearchPath(_ path: Path, asSeparateArguments: Bool = false, isInputPath: Bool = false) { let normalizedPath = path.normalize() addSearchPathEntry(.headerSearchPath(path: normalizedPath, asSeparateArguments: asSeparateArguments), normalizedPath, isInputPath) } /// Add a system header search path to the list of search paths. The path will not be added if an ordinary or system header search path for the path has previously been added. - func addSystemHeaderSearchPath(_ path: Path, isInputPath: Bool = false) - { + func addSystemHeaderSearchPath(_ path: Path, isInputPath: Bool = false) { let normalizedPath = path.normalize() guard !headerSearchPaths.contains(normalizedPath) && !systemHeaderSearchPaths.contains(normalizedPath) else { return @@ -121,21 +115,18 @@ package final class SearchPathBuilder } /// Add a system header search path to the list of search paths. - func addHeaderSearchPathSplitter() - { + func addHeaderSearchPathSplitter() { searchPathEntries.append(.headerSearchPathSplitter) } /// Add an ordinary framework search path to the list of search paths. - func addFrameworkSearchPath(_ path: Path, asSeparateArguments: Bool = false, isInputPath: Bool = false) - { + func addFrameworkSearchPath(_ path: Path, asSeparateArguments: Bool = false, isInputPath: Bool = false) { let normalizedPath = path.normalize() addSearchPathEntry(.frameworkSearchPath(path: normalizedPath, asSeparateArguments: asSeparateArguments), normalizedPath, isInputPath) } /// Add a system framework search path to the list of search paths. The path will not be added if an ordinary or system framework search path for the path has previously been added. - func addSystemFrameworkSearchPath(_ path: Path, isInputPath: Bool = false) - { + func addSystemFrameworkSearchPath(_ path: Path, isInputPath: Bool = false) { let normalizedPath = path.normalize() guard !frameworkSearchPaths.contains(normalizedPath) && !systemFrameworkSearchPaths.contains(normalizedPath) else { return @@ -144,8 +135,7 @@ package final class SearchPathBuilder } /// Adds a list of literal arguments to the command line. - func addLiteralArguments(_ args: [String], inputPaths: [Path] = []) - { + func addLiteralArguments(_ args: [String], inputPaths: [Path] = []) { searchPathEntries.append(.literalArguments(args)) self.inputPaths.append(contentsOf: inputPaths.map({ $0.normalize() }).filter({ !$0.isEmpty })) } @@ -190,36 +180,29 @@ package struct SearchPaths: Sendable { } /// Adopting this protocol indicates that the adopter can generate command line arguments for search path entries. -package protocol SearchPathCommandLineBuilder -{ +package protocol SearchPathCommandLineBuilder { func searchPathArguments(_ entry: SearchPathEntry, _ scope: MacroEvaluationScope) -> [String] func searchPathArguments(_ entries: [SearchPathEntry], _ scope: MacroEvaluationScope) -> [String] } -extension SearchPathCommandLineBuilder -{ - package func searchPathArguments(_ entries: [SearchPathEntry], _ scope: MacroEvaluationScope) -> [String] - { +extension SearchPathCommandLineBuilder { + package func searchPathArguments(_ entries: [SearchPathEntry], _ scope: MacroEvaluationScope) -> [String] { return entries.flatMap({ return self.searchPathArguments($0, scope) }) } } - /// Utility methods for GCC-compatible compiler specifications. 
-package struct GCCCompatibleCompilerSpecSupport -{ +package struct GCCCompatibleCompilerSpecSupport { /// Constructs and returns common header search path entries for LLVM-based compiler specs. Also returns any input paths (to headermap or VFS files) which users of these search paths should depend on. - package static func headerSearchPathArguments(_ producer: any CommandProducer, _ scope: MacroEvaluationScope, usesModules: Bool) -> SearchPaths - { + package static func headerSearchPathArguments(_ producer: any CommandProducer, _ scope: MacroEvaluationScope, usesModules: Bool) -> SearchPaths { let searchPathBuilder = SearchPathBuilder() // Evaluate some settings we need to look at several times. let alwaysSearchUserPaths = scope.evaluate(BuiltinMacros.ALWAYS_SEARCH_USER_PATHS) // Add the arguments for the headermap files, if any. - if scope.evaluate(BuiltinMacros.USE_HEADERMAP) - { + if scope.evaluate(BuiltinMacros.USE_HEADERMAP) { // Swift Build does not support "traditional" (single-file) headermaps. Use of separate headermaps has been the default for new projects for years, and use of the VFS (when clang modules are enabled) requires separate headermaps. // If the target is configured to use a traditional headermap, then we the HeadermapTaskProducer emits a warning about that, and we use separate headermaps anyway. @@ -233,8 +216,7 @@ package struct GCCCompatibleCompilerSpecSupport searchPathBuilder.addHeaderSearchPath(hmapFileForOwnTargets, isInputPath: true) // If we are using the VFS, replace the all targets headermap with it. - if usesVFS - { + if usesVFS { // Mark the creation context as needing VFS construction. // If we are using the VFS, we still need an equivalent of the all targets headermap for non-framework headers. let hmapFileForAllNonFrameworkTargetHeaders = scope.evaluate(BuiltinMacros.CPP_HEADERMAP_FILE_FOR_ALL_NON_FRAMEWORK_TARGET_HEADERS) @@ -244,9 +226,7 @@ package struct GCCCompatibleCompilerSpecSupport let productHeadersVFSFile = scope.evaluate(BuiltinMacros.CPP_HEADERMAP_PRODUCT_HEADERS_VFS_FILE) // FIXME: We should make Path be able to be used here, potentially using a "path string provider" protocol or somesuch. searchPathBuilder.addLiteralArguments(["-ivfsoverlay", productHeadersVFSFile.str], inputPaths: [productHeadersVFSFile]) - } - else - { + } else { // Not using a VFS, so we just add the headermap for all headers that are a member of any target. let hmapFileForAllTargetHeaders = scope.evaluate(BuiltinMacros.CPP_HEADERMAP_FILE_FOR_ALL_TARGET_HEADERS) searchPathBuilder.addHeaderSearchPath(hmapFileForAllTargetHeaders, isInputPath: true) @@ -258,46 +238,37 @@ package struct GCCCompatibleCompilerSpecSupport } // If we should use header symlinks, we add the path to that directory to the search path. We do this early, so that it comes near the beginning of the bracket search paths and overrides them all. - if scope.evaluate(BuiltinMacros.USE_HEADER_SYMLINKS) - { + if scope.evaluate(BuiltinMacros.USE_HEADER_SYMLINKS) { let headerSymlinksDir = scope.evaluate(BuiltinMacros.CPP_HEADER_SYMLINKS_DIR) searchPathBuilder.addUserHeaderSearchPath(headerSymlinksDir) } // Add search paths for USER_HEADER_SEARCH_PATHS before other header search paths. let userHeaderSearchPaths = producer.expandedSearchPaths(for: BuiltinMacros.USER_HEADER_SEARCH_PATHS, scope: scope) - if alwaysSearchUserPaths - { + if alwaysSearchUserPaths { // If we should always search user paths, then we pass the USER_HEADER_SEARCH_PATHS using an ordinary header search option. 
- for searchPath in userHeaderSearchPaths - { + for searchPath in userHeaderSearchPaths { searchPathBuilder.addHeaderSearchPath(Path(searchPath)) } - } - else - { + } else { // If we should *not* always search user paths, then we pass the USER_HEADER_SEARCH_PATHS using a user header search option. - for searchPath in userHeaderSearchPaths - { + for searchPath in userHeaderSearchPaths { searchPathBuilder.addUserHeaderSearchPath(Path(searchPath)) } } // Add ordinary header search paths for HEADER_SEARCH_PATHS. - for searchPath in producer.expandedSearchPaths(for: BuiltinMacros.HEADER_SEARCH_PATHS, scope: scope) - { + for searchPath in producer.expandedSearchPaths(for: BuiltinMacros.HEADER_SEARCH_PATHS, scope: scope) { searchPathBuilder.addHeaderSearchPath(Path(searchPath)) } // Add system header search paths for SYSTEM_HEADER_SEARCH_PATHS. The builder will filter out any which have already been added as ordinary header search paths. - for searchPath in producer.expandedSearchPaths(for: BuiltinMacros.SYSTEM_HEADER_SEARCH_PATHS, scope: scope) - { + for searchPath in producer.expandedSearchPaths(for: BuiltinMacros.SYSTEM_HEADER_SEARCH_PATHS, scope: scope) { searchPathBuilder.addSystemHeaderSearchPath(Path(searchPath)) } // Add ordinary header search paths for PRODUCT_TYPE_HEADER_SEARCH_PATHS. - for searchPath in producer.expandedSearchPaths(for: BuiltinMacros.PRODUCT_TYPE_HEADER_SEARCH_PATHS, scope: scope) - { + for searchPath in producer.expandedSearchPaths(for: BuiltinMacros.PRODUCT_TYPE_HEADER_SEARCH_PATHS, scope: scope) { searchPathBuilder.addHeaderSearchPath(Path(searchPath)) } @@ -321,7 +292,8 @@ package struct GCCCompatibleCompilerSpecSupport // If there are Iig generated files in this target, add them to the search path. if let target = producer.configuredTarget?.target as? BuildPhaseTarget { if let iigType = producer.lookupFileType(identifier: "sourcecode.iig"), - target.sourcesBuildPhase?.containsFiles(ofType: iigType, producer, producer, scope, producer.filePathResolver) ?? false { + target.sourcesBuildPhase?.containsFiles(ofType: iigType, producer, producer, scope, producer.filePathResolver) ?? false + { searchPathBuilder.addHeaderSearchPath(Path(scope.evaluate(BuiltinMacros.IIG_HEADERS_DIR))) } } @@ -330,8 +302,7 @@ package struct GCCCompatibleCompilerSpecSupport } /// Constructs and returns common framework search path arguments for LLVM-based compiler specs. - package static func frameworkSearchPathArguments(_ producer: any CommandProducer, _ scope: MacroEvaluationScope, asSeparateArguments: Bool = false) -> SearchPaths - { + package static func frameworkSearchPathArguments(_ producer: any CommandProducer, _ scope: MacroEvaluationScope, asSeparateArguments: Bool = false) -> SearchPaths { let searchPathBuilder = SearchPathBuilder() guard producer.isApplePlatform else { @@ -339,42 +310,35 @@ package struct GCCCompatibleCompilerSpecSupport } // Add ordinary framework search paths for FRAMEWORK_SEARCH_PATHS. - for searchPath in producer.expandedSearchPaths(for: BuiltinMacros.FRAMEWORK_SEARCH_PATHS, scope: scope) - { + for searchPath in producer.expandedSearchPaths(for: BuiltinMacros.FRAMEWORK_SEARCH_PATHS, scope: scope) { searchPathBuilder.addFrameworkSearchPath(Path(searchPath), asSeparateArguments: asSeparateArguments) } // Add system framework search paths for PRODUCT_TYPE_HEADER_SEARCH_PATHS. 
- for searchPath in producer.expandedSearchPaths(for: BuiltinMacros.PRODUCT_TYPE_FRAMEWORK_SEARCH_PATHS, scope: scope) - { + for searchPath in producer.expandedSearchPaths(for: BuiltinMacros.PRODUCT_TYPE_FRAMEWORK_SEARCH_PATHS, scope: scope) { searchPathBuilder.addSystemFrameworkSearchPath(Path(searchPath)) } // Add system framework search paths for SYSTEM_FRAMEWORK_SEARCH_PATHS. The builder will filter out any which have already been added as ordinary framework search paths. - for searchPath in producer.expandedSearchPaths(for: BuiltinMacros.SYSTEM_FRAMEWORK_SEARCH_PATHS, scope: scope) - { + for searchPath in producer.expandedSearchPaths(for: BuiltinMacros.SYSTEM_FRAMEWORK_SEARCH_PATHS, scope: scope) { searchPathBuilder.addSystemFrameworkSearchPath(Path(searchPath)) } return searchPathBuilder.searchPaths } - private static func sparseSDKSearchPathArguments(_ sparseSDKs: [SDK], _ existingHeaderSearchPaths: Set<Path>, _ existingFrameworkSearchPaths: Set<Path>, asSeparateArguments: Bool = false, skipHeaderSearchPaths: Bool = false) -> SearchPaths - { + private static func sparseSDKSearchPathArguments(_ sparseSDKs: [SDK], _ existingHeaderSearchPaths: Set<Path>, _ existingFrameworkSearchPaths: Set<Path>, asSeparateArguments: Bool = false, skipHeaderSearchPaths: Bool = false) -> SearchPaths { // Create a search path build with the search paths which are already in the arguments as -I and -F options, so we don't add SDK search paths to the same paths. let searchPathBuilder = SearchPathBuilder(headerSearchPaths: existingHeaderSearchPaths, frameworkSearchPaths: existingFrameworkSearchPaths) - for sdk in sparseSDKs - { + for sdk in sparseSDKs { if !skipHeaderSearchPaths { // Figure out which, if any, additional header search path options to add. We start with the header search paths defined by the sparse SDK, if any. let headerSearchPaths = sdk.headerSearchPaths // Add additional header paths as system header search paths. The builder will filter out any which have already been added as ordinary header search paths. - if !headerSearchPaths.isEmpty - { - for path in headerSearchPaths - { + if !headerSearchPaths.isEmpty { + for path in headerSearchPaths { searchPathBuilder.addSystemHeaderSearchPath(path) } } @@ -384,10 +348,8 @@ package struct GCCCompatibleCompilerSpecSupport let frameworkSearchPaths = sdk.frameworkSearchPaths // Add additional framework paths as system framework search paths. The builder will filter out any which have already been added as ordinary framework search paths. - if !frameworkSearchPaths.isEmpty - { - for path in frameworkSearchPaths - { + if !frameworkSearchPaths.isEmpty { + for path in frameworkSearchPaths { searchPathBuilder.addSystemFrameworkSearchPath(path) } } @@ -410,39 +372,34 @@ package struct GCCCompatibleCompilerSpecSupport } } - /// Adopting this protocol indicates that the adopter can generate LLVM-based compiler command line arguments for search path entries. 
-package protocol GCCCompatibleCompilerCommandLineBuilder: SearchPathCommandLineBuilder -{ +package protocol GCCCompatibleCompilerCommandLineBuilder: SearchPathCommandLineBuilder { } - -extension GCCCompatibleCompilerCommandLineBuilder -{ - package func searchPathArguments(_ entry: SearchPathEntry, _ scope: MacroEvaluationScope) -> [String] - { +extension GCCCompatibleCompilerCommandLineBuilder { + package func searchPathArguments(_ entry: SearchPathEntry, _ scope: MacroEvaluationScope) -> [String] { var args = [String]() switch entry { - case .userHeaderSearchPath(let path): + case .userHeaderSearchPath(let path): args.append(contentsOf: ["-iquote", path.str]) - case .headerSearchPath(let path, let separateArgs): + case .headerSearchPath(let path, let separateArgs): args.append(contentsOf: separateArgs ? ["-I", path.str] : ["-I" + path.str]) - case .systemHeaderSearchPath(let path): + case .systemHeaderSearchPath(let path): args.append(contentsOf: ["-isystem", path.str]) - case .headerSearchPathSplitter: - args.append("-I-") // states that clang has never supported this option. + case .headerSearchPathSplitter: + args.append("-I-") // states that clang has never supported this option. - case .frameworkSearchPath(let path, let separateArgs): + case .frameworkSearchPath(let path, let separateArgs): args.append(contentsOf: separateArgs ? ["-F", path.str] : ["-F" + path.str]) - case .systemFrameworkSearchPath(let path): + case .systemFrameworkSearchPath(let path): args.append(contentsOf: ["-iframework", path.str]) - case .literalArguments(let literalArgs): + case .literalArguments(let literalArgs): args.append(contentsOf: literalArgs) } return args diff --git a/Sources/SWBCore/SpecImplementations/Tools/InfoPlistTool.swift b/Sources/SWBCore/SpecImplementations/Tools/InfoPlistTool.swift index d0683f6f..67c7c512 100644 --- a/Sources/SWBCore/SpecImplementations/Tools/InfoPlistTool.swift +++ b/Sources/SWBCore/SpecImplementations/Tools/InfoPlistTool.swift @@ -14,7 +14,7 @@ public import SWBUtil import SWBMacro import Foundation -public final class InfoPlistToolSpec : GenericCommandLineToolSpec, SpecIdentifierType, @unchecked Sendable { +public final class InfoPlistToolSpec: GenericCommandLineToolSpec, SpecIdentifierType, @unchecked Sendable { public static let identifier = "com.apple.tools.info-plist-utility" public override func constructTasks(_ cbc: CommandBuildContext, _ delegate: any TaskGenerationDelegate) async { @@ -35,8 +35,7 @@ public final class InfoPlistToolSpec : GenericCommandLineToolSpec, SpecIdentifie let contents: SWBUtil.ByteString do { contents = try ByteString(additions.asBytes(.binary)) - } - catch { + } catch { delegate.error("failed to serialize product type infoPlistAdditions: \(error)") return nil } @@ -130,7 +129,7 @@ public final class InfoPlistToolSpec : GenericCommandLineToolSpec, SpecIdentifie that tracks the usage of these potential items. */ var privacyFiles: [Path] = [] - var privacyFilesMap: [String:[Path]] = [:] + var privacyFilesMap: [String: [Path]] = [:] for path in files { let basename = path.basename privacyFilesMap[basename, default: []].append(path) @@ -144,13 +143,11 @@ public final class InfoPlistToolSpec : GenericCommandLineToolSpec, SpecIdentifie if let pathToUse = paths.filter({ $0.str.hasPrefix(targetPath.str) }).first { privacyFiles.append(pathToUse) - } - else { + } else { // Unclear which one to use so pass them all. 
privacyFiles.append(contentsOf: paths) } - } - else { + } else { privacyFiles.append(contentsOf: paths) } } diff --git a/Sources/SWBCore/SpecImplementations/Tools/LaunchServicesRegisterTool.swift b/Sources/SWBCore/SpecImplementations/Tools/LaunchServicesRegisterTool.swift index 216ea105..80fd2822 100644 --- a/Sources/SWBCore/SpecImplementations/Tools/LaunchServicesRegisterTool.swift +++ b/Sources/SWBCore/SpecImplementations/Tools/LaunchServicesRegisterTool.swift @@ -10,7 +10,7 @@ // //===----------------------------------------------------------------------===// -final class LaunchServicesRegisterToolSpec : GenericCommandLineToolSpec, SpecIdentifierType, @unchecked Sendable { +final class LaunchServicesRegisterToolSpec: GenericCommandLineToolSpec, SpecIdentifierType, @unchecked Sendable { static let identifier = "com.apple.build-tasks.ls-register-url" override func createTaskAction(_ cbc: CommandBuildContext, _ delegate: any TaskGenerationDelegate) -> (any PlannedTaskAction)? { diff --git a/Sources/SWBCore/SpecImplementations/Tools/LinkerTools.swift b/Sources/SWBCore/SpecImplementations/Tools/LinkerTools.swift index 414ec486..7b0e216b 100644 --- a/Sources/SWBCore/SpecImplementations/Tools/LinkerTools.swift +++ b/Sources/SWBCore/SpecImplementations/Tools/LinkerTools.swift @@ -150,7 +150,7 @@ public struct DiscoveredLdLinkerToolSpecInfo: DiscoveredCommandLineToolSpecInfo } /// Parses stderr output as generated by ld(1). -@_spi(Testing) public final class LdLinkerOutputParser : GenericOutputParser { +@_spi(Testing) public final class LdLinkerOutputParser: GenericOutputParser { /// Regular expression that matches a line indicating the start of a list of undefined symbols. The section ends with a "symbol(s) not found" error message. static let undefSymbolsSectionStartRegEx = RegEx(patternLiteral: "^[ ]*Undefined symbols.*:$") @@ -202,7 +202,8 @@ public struct DiscoveredLdLinkerToolSpecInfo: DiscoveredCommandLineToolSpecInfo let processedLines: [ByteString] = linesToParse.map { lineBytes in let lineString = String(decoding: lineBytes, as: Unicode.UTF8.self) if lineString.contains(": error:") - || lineString.contains(": warning:") { + || lineString.contains(": warning:") + { let issueString = "\(projectPath.str): \(targetName): \(lineString)" return ByteString(encodingAsUTF8: issueString) @@ -215,8 +216,7 @@ public struct DiscoveredLdLinkerToolSpecInfo: DiscoveredCommandLineToolSpecInfo delegate.emitOutput($0) delegate.emitOutput("\n") } - } - else { + } else { // Forward the bytes linesToParse.forEach { delegate.emitOutput(ByteString($0)) @@ -245,11 +245,12 @@ public struct DiscoveredLdLinkerToolSpecInfo: DiscoveredCommandLineToolSpecInfo if let match = LdLinkerOutputParser.undefSymbolNameRegEx.firstMatch(in: lineString) { // We've found a symbol, so we add it to the list. undefinedSymbols.append(match[0]) - } - else if let match = LdLinkerOutputParser.problemMessageRegEx.firstMatch(in: lineString), match[3].hasPrefix("symbol(s) not found") { + } else if let match = LdLinkerOutputParser.problemMessageRegEx.firstMatch(in: lineString), match[3].hasPrefix("symbol(s) not found") { // It's time to emit all the symbols. We emit each as a separate message. 
- let projectLocation = Workspace.projectLocation(for: self.task.forTarget?.target, - workspace: self.workspaceContext.workspace) + let projectLocation = Workspace.projectLocation( + for: self.task.forTarget?.target, + workspace: self.workspaceContext.workspace + ) for symbol in undefinedSymbols.prefix(undefinedSymbolCountLimit) { delegate.diagnosticsEngine.emit(Diagnostic(behavior: .error, location: projectLocation, data: DiagnosticData("Undefined symbol: \(symbol)"), appendToOutputStream: false)) @@ -259,23 +260,22 @@ public struct DiscoveredLdLinkerToolSpecInfo: DiscoveredCommandLineToolSpecInfo } collectingUndefinedSymbols = false undefinedSymbols = [] - } - else { + } else { // Ignore references for now. } - } - else if LdLinkerOutputParser.undefSymbolsSectionStartRegEx.firstMatch(in: lineString) != nil { + } else if LdLinkerOutputParser.undefSymbolsSectionStartRegEx.firstMatch(in: lineString) != nil { // We've found the start of a list of undefined symbols; we'll collect them and emit them at the end of the list. collectingUndefinedSymbols = true - } - else if let match = LdLinkerOutputParser.problemMessageRegEx.firstMatch(in: lineString) { + } else if let match = LdLinkerOutputParser.problemMessageRegEx.firstMatch(in: lineString) { // We've found an error outside of the undefined-symbols list. We emit it in accordance with its type. // match[0] is severity prefix (if any), match[1] is severity name (if any), match[2] is the message let severity = match[2].isEmpty ? "error" : match[2] let behavior = Diagnostic.Behavior(name: severity) ?? .note let message = match[3].prefix(1).localizedCapitalized + match[3].dropFirst() - let projectLocation = Workspace.projectLocation(for: self.task.forTarget?.target, - workspace: self.workspaceContext.workspace) + let projectLocation = Workspace.projectLocation( + for: self.task.forTarget?.target, + workspace: self.workspaceContext.workspace + ) let diagnostic = Diagnostic(behavior: behavior, location: projectLocation, data: DiagnosticData(message), appendToOutputStream: false) delegate.diagnosticsEngine.emit(diagnostic) } @@ -293,7 +293,7 @@ public struct DiscoveredLdLinkerToolSpecInfo: DiscoveredCommandLineToolSpecInfo } } -public final class LdLinkerSpec : GenericLinkerSpec, SpecIdentifierType, @unchecked Sendable { +public final class LdLinkerSpec: GenericLinkerSpec, SpecIdentifierType, @unchecked Sendable { public static let identifier = "com.apple.pbx.linkers.ld" override public var toolBasenameAliases: [String] { @@ -345,7 +345,7 @@ public final class LdLinkerSpec : GenericLinkerSpec, SpecIdentifierType, @unchec let suppressDriverStdlibPaths: Bool } - static public func computeRPaths(_ cbc: CommandBuildContext, _ delegate: any TaskGenerationDelegate, inputRunpathSearchPaths: [String], isUsingSwift: Bool ) async -> RuntimeSearchPaths { + static public func computeRPaths(_ cbc: CommandBuildContext, _ delegate: any TaskGenerationDelegate, inputRunpathSearchPaths: [String], isUsingSwift: Bool) async -> RuntimeSearchPaths { // Product types can provide their own set of rpath values, we need to ensure that our rpath flags for Swift in the OS appear before those. Also, due to the fact that we are staging this rollout, we need to specifically override any Swift libraries that may be in the bundle _when_ the Swift ABI version matches on the system with that in which the tool was built with. 
var runpathSearchPaths = inputRunpathSearchPaths @@ -358,17 +358,13 @@ public final class LdLinkerSpec : GenericLinkerSpec, SpecIdentifierType, @unchec let supportsSwiftSpanNatively = cbc.producer.platform?.supportsSwiftSpanNatively(cbc.scope, forceNextMajorVersion: false, considerTargetDeviceOSVersion: false) ?? true let shouldEmitRPathForSwiftConcurrency = UserDefaults.allowRuntimeSearchPathAdditionForSwiftConcurrency && !supportsSwiftConcurrencyNatively let shouldEmitRPathForSwiftSpan = !cbc.scope.evaluate(BuiltinMacros.DISABLE_SWIFT_SPAN_COMPATIBILITY_RPATH) && !supportsSwiftSpanNatively - if ( - cbc.producer.platform?.supportsSwiftInTheOS(cbc.scope, forceNextMajorVersion: true, considerTargetDeviceOSVersion: false) != true || - cbc.producer.toolchains.usesSwiftOpenSourceToolchain || - shouldEmitRPathForSwiftConcurrency || - shouldEmitRPathForSwiftSpan - ) + if (cbc.producer.platform?.supportsSwiftInTheOS(cbc.scope, forceNextMajorVersion: true, considerTargetDeviceOSVersion: false) != true || cbc.producer.toolchains.usesSwiftOpenSourceToolchain || shouldEmitRPathForSwiftConcurrency || shouldEmitRPathForSwiftSpan) && isUsingSwift - && cbc.producer.platform?.minimumOSForSwiftInTheOS != nil { + && cbc.producer.platform?.minimumOSForSwiftInTheOS != nil + { // NOTE: For swift.org toolchains, this is fine as `DYLD_LIBRARY_PATH` is used to override these settings. - let swiftABIVersion = await (cbc.producer.swiftCompilerSpec.discoveredCommandLineToolSpecInfo(cbc.producer, cbc.scope, delegate) as? DiscoveredSwiftCompilerToolSpecInfo)?.swiftABIVersion - runpathSearchPaths.insert( swiftABIVersion.flatMap { "/usr/lib/swift-\($0)" } ?? "/usr/lib/swift", at: 0) + let swiftABIVersion = await (cbc.producer.swiftCompilerSpec.discoveredCommandLineToolSpecInfo(cbc.producer, cbc.scope, delegate) as? DiscoveredSwiftCompilerToolSpecInfo)?.swiftABIVersion + runpathSearchPaths.insert(swiftABIVersion.flatMap { "/usr/lib/swift-\($0)" } ?? "/usr/lib/swift", at: 0) // Ensure the linker driver does not insert a duplicate rpath (if linking using swiftc) suppressDriverStdlibPaths = true } @@ -431,16 +427,19 @@ public final class LdLinkerSpec : GenericLinkerSpec, SpecIdentifierType, @unchec let previewDylib = cbc.scope.evaluate(BuiltinMacros.EXECUTABLE_DEBUG_DYLIB_PATH) let isPreviewDylib = !previewDylib.isEmpty - let machOTypeString = isPreviewDylib + let machOTypeString = + isPreviewDylib ? "mh_dylib" : cbc.scope.evaluate(BuiltinMacros.MACH_O_TYPE) let machOType = cbc.scope.namespace.parseLiteralString(machOTypeString) - let entitlements = isPreviewDylib + let entitlements = + isPreviewDylib ? cbc.scope.namespace.parseLiteralString("") : cbc.scope.namespace.parseLiteralString(cbc.scope.evaluate(BuiltinMacros.LD_ENTITLEMENTS_SECTION)) - let entitlementsDer = isPreviewDylib + let entitlementsDer = + isPreviewDylib ? cbc.scope.namespace.parseLiteralString("") : cbc.scope.namespace.parseLiteralString(cbc.scope.evaluate(BuiltinMacros.LD_ENTITLEMENTS_SECTION_DER)) @@ -492,12 +491,14 @@ public final class LdLinkerSpec : GenericLinkerSpec, SpecIdentifierType, @unchec // Add the additional outputs defined by the spec. These are not declared as outputs but should be processed by the tool separately. 
let additionalEvaluatedOutputsResult = await additionalEvaluatedOutputs(cbc, delegate) - outputs.append(contentsOf: additionalEvaluatedOutputsResult.outputs.map { output in - if let fileTypeIdentifier = output.fileType, let fileType = cbc.producer.lookupFileType(identifier: fileTypeIdentifier) { - delegate.declareOutput(FileToBuild(absolutePath: output.path, fileType: fileType)) + outputs.append( + contentsOf: additionalEvaluatedOutputsResult.outputs.map { output in + if let fileTypeIdentifier = output.fileType, let fileType = cbc.producer.lookupFileType(identifier: fileTypeIdentifier) { + delegate.declareOutput(FileToBuild(absolutePath: output.path, fileType: fileType)) + } + return delegate.createNode(output.path) } - return delegate.createNode(output.path) - }) + ) if let infoPlistContent = additionalEvaluatedOutputsResult.generatedInfoPlistContent { delegate.declareGeneratedInfoPlistContent(infoPlistContent) @@ -541,7 +542,7 @@ public final class LdLinkerSpec : GenericLinkerSpec, SpecIdentifierType, @unchec // this target could be using the exposed Swift APIs in the C++ section of the generated // header. if let target = cbc.producer.configuredTarget { - let depScopes = cbc.producer.targetSwiftDependencyScopes(for: target, arch: cbc.scope.evaluate(BuiltinMacros.CURRENT_ARCH), variant: cbc.scope.evaluate(BuiltinMacros.CURRENT_VARIANT)) + let depScopes = cbc.producer.targetSwiftDependencyScopes(for: target, arch: cbc.scope.evaluate(BuiltinMacros.CURRENT_ARCH), variant: cbc.scope.evaluate(BuiltinMacros.CURRENT_VARIANT)) for scope in depScopes { if scope.evaluate(BuiltinMacros.SWIFT_OBJC_INTEROP_MODE) == "objcxx" { let optionContext = await cbc.producer.swiftCompilerSpec.discoveredCommandLineToolSpecInfo(cbc.producer, scope, delegate) @@ -572,7 +573,7 @@ public final class LdLinkerSpec : GenericLinkerSpec, SpecIdentifierType, @unchec // FIXME: These rpaths should be evaluated in `computeRPaths`. if cbc.scope.evaluate(BuiltinMacros.MERGE_LINKED_LIBRARIES), !cbc.scope.evaluate(BuiltinMacros.DONT_EMBED_REEXPORTED_MERGEABLE_LIBRARIES) { if libraries.first(where: { $0.mode == .reexport_merge }) != nil { - runpathSearchPaths.insert( "@loader_path/\(reexportedBinariesDirectoryName)", at: 0) + runpathSearchPaths.insert("@loader_path/\(reexportedBinariesDirectoryName)", at: 0) } } } @@ -627,8 +628,7 @@ public final class LdLinkerSpec : GenericLinkerSpec, SpecIdentifierType, @unchec let ldFlagsToEvaluate: [String] if dyldEnvDiagnosticBehavior == .warning { ldFlagsToEvaluate = filterLinkerFlagsWhenUnderPreviewsDylib(originalLdFlags) - } - else { + } else { ldFlagsToEvaluate = originalLdFlags } @@ -691,7 +691,7 @@ public final class LdLinkerSpec : GenericLinkerSpec, SpecIdentifierType, @unchec } } if !usesLDClassic, supportsSDKImportsFeature, !sdkImportsInfoFile.isEmpty, cbc.scope.evaluate(BuiltinMacros.ENABLE_SDK_IMPORTS), cbc.producer.isApplePlatform { - commandLine.insert(contentsOf: ["-Xlinker", "-sdk_imports", "-Xlinker", sdkImportsInfoFile.str, "-Xlinker", "-sdk_imports_each_object"], at: commandLine.count - 2) // This preserves the assumption that the last argument is the linker output which a few tests make. + commandLine.insert(contentsOf: ["-Xlinker", "-sdk_imports", "-Xlinker", sdkImportsInfoFile.str, "-Xlinker", "-sdk_imports_each_object"], at: commandLine.count - 2) // This preserves the assumption that the last argument is the linker output which a few tests make. 
outputs.append(delegate.createNode(sdkImportsInfoFile)) await cbc.producer.processSDKImportsSpec.createTasks(CommandBuildContext(producer: cbc.producer, scope: cbc.scope, inputs: []), delegate, ldSDKImportsPath: sdkImportsInfoFile) @@ -745,7 +745,7 @@ public final class LdLinkerSpec : GenericLinkerSpec, SpecIdentifierType, @unchec } // Compute the inputs and outputs. - var inputs: [any PlannedNode] = inputPaths.map{ delegate.createNode($0) } + var inputs: [any PlannedNode] = inputPaths.map { delegate.createNode($0) } await inputs.append(contentsOf: additionalInputDependencies(cbc, delegate, optionContext: discoveredCommandLineToolSpecInfo(cbc.producer, cbc.scope, delegate), lookup: lookup).map(delegate.createNode)) @@ -773,8 +773,9 @@ public final class LdLinkerSpec : GenericLinkerSpec, SpecIdentifierType, @unchec cbc.scope.evaluate(BuiltinMacros.TARGET_BUILD_DIR).join(cbc.scope.evaluate(BuiltinMacros.EXECUTABLE_PATH)), cbc.scope.evaluate(BuiltinMacros.TARGET_BUILD_DIR).join(cbc.scope.evaluate(BuiltinMacros.WRAPPER_NAME)).join(cbc.scope.evaluate(BuiltinMacros.EXECUTABLE_NAME)), Path(cbc.scope.evaluate(BuiltinMacros.TAPI_OUTPUT_PATH)), - Path(cbc.scope.evaluate(BuiltinMacros.EAGER_LINKING_INTERMEDIATE_TBD_PATH)) - ], removableBasenames: [ + Path(cbc.scope.evaluate(BuiltinMacros.EAGER_LINKING_INTERMEDIATE_TBD_PATH)), + ], + removableBasenames: [ cbc.scope.evaluate(BuiltinMacros.EXECUTABLE_NAME), Path(cbc.scope.evaluate(BuiltinMacros.EXECUTABLE_NAME)).basenameWithoutSuffix + ".tbd", ], @@ -971,7 +972,7 @@ public final class LdLinkerSpec : GenericLinkerSpec, SpecIdentifierType, @unchec } // Compute the inputs and outputs. - let inputs = inputPaths.map{ delegate.createNode($0) } + cbc.commandOrderingInputs + let inputs = inputPaths.map { delegate.createNode($0) } + cbc.commandOrderingInputs let outputs: [any PlannedNode] = [delegate.createNode(cbc.output)] + cbc.commandOrderingOutputs // Silently fix up the dependency info in order to avoid generating incorrect dependencies in certain cases: @@ -986,7 +987,7 @@ public final class LdLinkerSpec : GenericLinkerSpec, SpecIdentifierType, @unchec cbc.output, cbc.scope.evaluate(BuiltinMacros.TARGET_BUILD_DIR).join(cbc.scope.evaluate(BuiltinMacros.EXECUTABLE_DEBUG_DYLIB_PATH)), Path(cbc.scope.evaluate(BuiltinMacros.TAPI_OUTPUT_PATH)), - Path(cbc.scope.evaluate(BuiltinMacros.EAGER_LINKING_INTERMEDIATE_TBD_PATH)) + Path(cbc.scope.evaluate(BuiltinMacros.EAGER_LINKING_INTERMEDIATE_TBD_PATH)), ], removableBasenames: [ cbc.scope.evaluate(BuiltinMacros.EXECUTABLE_NAME), @@ -1051,7 +1052,7 @@ public final class LdLinkerSpec : GenericLinkerSpec, SpecIdentifierType, @unchec outputPath: cbc.output, dependencyInfoEditPayload: DependencyInfoEditPayload( removablePaths: [ - cbc.output, + cbc.output ], removableBasenames: [], developerPath: cbc.scope.evaluate(BuiltinMacros.DEVELOPER_DIR) @@ -1107,7 +1108,7 @@ public final class LdLinkerSpec : GenericLinkerSpec, SpecIdentifierType, @unchec for arg in ["-export_dynamic", "-sdk_imports_each_object", "-dead_strip"] { while let index = commandLine.firstIndex(of: arg) { guard index > 0, commandLine[index - 1] == argPrefix else { break } - commandLine.removeSubrange(index - 1 ... index) + commandLine.removeSubrange(index - 1...index) } } @@ -1136,11 +1137,11 @@ public final class LdLinkerSpec : GenericLinkerSpec, SpecIdentifierType, @unchec index + 2 < commandLine.count, commandLine[index - 1] == argPrefix, commandLine[index + 1] == argPrefix - else { - break + else { + break } - commandLine.removeSubrange(index - 1 ... 
index + 2) + commandLine.removeSubrange(index - 1...index + 2) } } @@ -1149,14 +1150,14 @@ public final class LdLinkerSpec : GenericLinkerSpec, SpecIdentifierType, @unchec commandLine.append(contentsOf: [ "-dynamiclib", payload.outputPath.str, - ]) + ]) case .bundleLoader: commandLine.append(contentsOf: [ "-bundle", "-bundle_loader", payload.outputPath.str, - ]) + ]) case .staticLib: break } @@ -1165,7 +1166,7 @@ public final class LdLinkerSpec : GenericLinkerSpec, SpecIdentifierType, @unchec commandLine.append(contentsOf: [ "-o", outputPath.str, - ]) + ]) let output = TaskGeneratePreviewInfoOutput( architecture: previewPayload.architecture, @@ -1194,7 +1195,7 @@ public final class LdLinkerSpec : GenericLinkerSpec, SpecIdentifierType, @unchec ] { while let index = commandLine.firstIndex(of: arg) { guard index > 0, commandLine[index - 1] == argPrefix else { break } - commandLine.removeSubrange(index - 1 ... index) + commandLine.removeSubrange(index - 1...index) } } @@ -1207,11 +1208,11 @@ public final class LdLinkerSpec : GenericLinkerSpec, SpecIdentifierType, @unchec index + 2 < commandLine.count, commandLine[index - 1] == argPrefix, commandLine[index + 1] == argPrefix - else { - break + else { + break } - commandLine.removeSubrange(index - 1 ... index + 2) + commandLine.removeSubrange(index - 1...index + 2) } } @@ -1319,8 +1320,7 @@ public final class LdLinkerSpec : GenericLinkerSpec, SpecIdentifierType, @unchec let returnPath: Path if let pathArg = absPathArgs.last, Path(pathArg).basename == frameworkName { returnPath = Path(pathArg) - } - else { + } else { returnPath = specifier.path } return (absPathArgs, [returnPath]) @@ -1381,11 +1381,9 @@ public final class LdLinkerSpec : GenericLinkerSpec, SpecIdentifierType, @unchec let components = arg.split(separator: ",") if components.count == 1 { return (nil, nil) - } - else if components.count == 2 { + } else if components.count == 2 { return (String(components[1]), nil) - } - else { + } else { // Not sure what it means if we end up with more components than the option supports, so we just ignore that. return (String(components[1]), String(components[2])) } @@ -1397,8 +1395,7 @@ public final class LdLinkerSpec : GenericLinkerSpec, SpecIdentifierType, @unchec if StaticVars.frameworkArgs.contains(option) { if let argument { await addFramework(macro, option, argument) - } - else { + } else { // See if the next option is also a -Wl, guard let next = argsIterator.next() else { await addError("\(macro.name) in target '\(settings.target?.name ?? "")': Expected a framework name quoted with -Wl, after \(option)") @@ -1409,8 +1406,7 @@ public final class LdLinkerSpec : GenericLinkerSpec, SpecIdentifierType, @unchec let components = next.split(separator: ",") if components.count == 1 { return nil - } - else { + } else { return String(components[1]) } }() @@ -1419,14 +1415,12 @@ public final class LdLinkerSpec : GenericLinkerSpec, SpecIdentifierType, @unchec continue } await addFramework(macro, option, argument) - } - else { + } else { await addError("\(macro.name) in target '\(settings.target?.name ?? 
"")': Expected a framework name quoted with -Wl, after \(option) but found '\(next)'") break } } - } - else if let prefix = (StaticVars.libPrefixArgs.first { option.hasPrefix($0) }) { + } else if let prefix = (StaticVars.libPrefixArgs.first { option.hasPrefix($0) }) { let stem = String(option.dropFirst(prefix.count)) await addLibrary(macro, prefix, stem) } @@ -1449,14 +1443,12 @@ public final class LdLinkerSpec : GenericLinkerSpec, SpecIdentifierType, @unchec await addError("\(macro.name) in target '\(settings.target?.name ?? "")': Expected a framework name quoted with -Xlinker after \(arg)") break } - argument = next - } - else { + argument = next + } else { argument = next } await addFramework(macro, arg, argument) - } - else if let prefix = (StaticVars.libPrefixArgs.first { arg.hasPrefix($0) }) { + } else if let prefix = (StaticVars.libPrefixArgs.first { arg.hasPrefix($0) }) { let stem = String(arg.dropFirst(prefix.count)) await addLibrary(macro, prefix, stem) } @@ -1519,7 +1511,7 @@ public final class LdLinkerSpec : GenericLinkerSpec, SpecIdentifierType, @unchec if alternateLinker != "" && alternateLinker != "ld" && alternateLinker != "link" { linkerPath = Path(producer.hostOperatingSystem.imageFormat.executableName(basename: "ld.\(alternateLinker)")) } else if alternateLinker != "" { - linkerPath = Path(alternateLinker) + linkerPath = Path(alternateLinker) } // If the linker does not support multiple architectures update the path to include a subfolder based on the prefix map // to find the architecture specific executable. @@ -1572,10 +1564,10 @@ fileprivate extension LinkerSpec.LibrarySpecifier { case (.dynamic, .weak): return ["-weak-l" + name] case (.static, .weak), - (.textBased, .weak): + (.textBased, .weak): return ["-weak-l" + name] case (.static, _), - (.textBased, _): + (.textBased, _): // Other modes are not supported for these kinds. return ["-l" + name] case (.framework, .normal): @@ -1610,10 +1602,10 @@ fileprivate extension LinkerSpec.LibrarySpecifier { case (.dynamic, .weak): return ["-weak_library", path.str] case (.static, .weak), - (.textBased, .weak): + (.textBased, .weak): return ["-weak_library", path.str] case (.static, _), - (.textBased, _): + (.textBased, _): // Other modes are not supported for these kinds. return [path.str] // FIXME: When linking with absolute paths, we pass the path to library inside the framework using the appropriate -***_library option (or no option for normal mode). This is probably a mis-feature, I doubt it is a good idea to bypass the linker's notion of frameworkness, but this has been the behavior for a long time and it's not clear that the linker provides us with a better alternative. @@ -1641,7 +1633,7 @@ public struct DiscoveredLibtoolLinkerToolSpecInfo: DiscoveredCommandLineToolSpec public let toolVersion: Version? } -public final class LibtoolLinkerSpec : GenericLinkerSpec, SpecIdentifierType, @unchecked Sendable { +public final class LibtoolLinkerSpec: GenericLinkerSpec, SpecIdentifierType, @unchecked Sendable { public static let identifier = "com.apple.pbx.linkers.libtool" override public var payloadType: (any TaskPayload.Type)? { return LibtoolLinkerTaskPayload.self } @@ -1665,23 +1657,28 @@ public final class LibtoolLinkerSpec : GenericLinkerSpec, SpecIdentifierType, @u return DiscoveredLibtoolLinkerToolSpecInfo(toolPath: toolPath, toolVersion: nil) } - return try await producer.discoveredCommandLineToolSpecInfo(delegate, nil, [toolPath.str, producer.isApplePlatform ? 
"-V" : "--version"], { executionResult in - let outputString = String(decoding: executionResult.stdout, as: UTF8.self).trimmingCharacters(in: .whitespacesAndNewlines) - let regexes: [Regex<(Substring, libtool: Substring)>] - if producer.isApplePlatform { - regexes = [#/^Apple Inc\. version cctools(?:_[A-Za-z0-9_]+)?-(?[0-9\.]+)$/#] - } else { - regexes = [ - #/^libtool \(GNU libtool\) (?[0-9\.]+).*/#, - #/^LLD (?[0-9\.]+).*/#, - ] - } - guard let match = try regexes.compactMap({ try $0.firstMatch(in: outputString) }).first else { - throw StubError.error("Could not parse libtool version from: \(outputString)") - } + return try await producer.discoveredCommandLineToolSpecInfo( + delegate, + nil, + [toolPath.str, producer.isApplePlatform ? "-V" : "--version"], + { executionResult in + let outputString = String(decoding: executionResult.stdout, as: UTF8.self).trimmingCharacters(in: .whitespacesAndNewlines) + let regexes: [Regex<(Substring, libtool: Substring)>] + if producer.isApplePlatform { + regexes = [#/^Apple Inc\. version cctools(?:_[A-Za-z0-9_]+)?-(?[0-9\.]+)$/#] + } else { + regexes = [ + #/^libtool \(GNU libtool\) (?[0-9\.]+).*/#, + #/^LLD (?[0-9\.]+).*/#, + ] + } + guard let match = try regexes.compactMap({ try $0.firstMatch(in: outputString) }).first else { + throw StubError.error("Could not parse libtool version from: \(outputString)") + } - return try DiscoveredLibtoolLinkerToolSpecInfo(toolPath: toolPath, toolVersion: Version(String(match.output.libtool))) - }) + return try DiscoveredLibtoolLinkerToolSpecInfo(toolPath: toolPath, toolVersion: Version(String(match.output.libtool))) + } + ) } override public func discoveredCommandLineToolSpecInfo(_ producer: any CommandProducer, _ scope: MacroEvaluationScope, _ delegate: any CoreClientTargetDiagnosticProducingDelegate) async -> (any DiscoveredCommandLineToolSpecInfo)? { @@ -1716,43 +1713,44 @@ public final class LibtoolLinkerSpec : GenericLinkerSpec, SpecIdentifierType, @u } // Add arguments for the contents of the Link Binaries build phase. - specialArgs.append(contentsOf: libraries.flatMap { specifier -> [String] in - let basename = specifier.path.basename + specialArgs.append( + contentsOf: libraries.flatMap { specifier -> [String] in + let basename = specifier.path.basename + + switch specifier.kind { + case .static: + // A static library can build against another static library. + + // If directed to link it weakly, we emit a warning, since libtool can't perform weak linking (since it's not really linking). Then we pass it normally. + // We silently ignore other non-normal modes, since they are only set programmatically and there's nothing the user can do about them. + if specifier.mode == .weak { + delegate.warning("Product \(cbc.output.basename) cannot weak-link \(specifier.kind) \(basename)") + } - switch specifier.kind { - case .static: - // A static library can build against another static library. + if specifier.useSearchPaths, basename.hasPrefix("lib"), basename.hasSuffix(".a") { + // Locate using search paths: Add a -l option and *don't* add the path to the library as an input to the task. + return ["-l" + basename.withoutPrefix("lib").withoutSuffix(".a")] + } else { + // Locate using an absolute path: Add the path as an option and as an input to the task. + inputPaths.append(specifier.path) + return [specifier.path.str] + } - // If directed to link it weakly, we emit a warning, since libtool can't perform weak linking (since it's not really linking). Then we pass it normally. 
- // We silently ignore other non-normal modes, since they are only set programmatically and there's nothing the user can do about them. - if specifier.mode == .weak { - delegate.warning("Product \(cbc.output.basename) cannot weak-link \(specifier.kind) \(basename)") - } + case .object: + // Object files are added to linker inputs in the sources task producer and so end up in the link-file-list. + return [] - if specifier.useSearchPaths, basename.hasPrefix("lib"), basename.hasSuffix(".a") { - // Locate using search paths: Add a -l option and *don't* add the path to the library as an input to the task. - return ["-l" + basename.withoutPrefix("lib").withoutSuffix(".a")] - } - else { - // Locate using an absolute path: Add the path as an option and as an input to the task. + case .objectLibrary: inputPaths.append(specifier.path) - return [specifier.path.str] - } - - case .object: - // Object files are added to linker inputs in the sources task producer and so end up in the link-file-list. - return [] - - case .objectLibrary: - inputPaths.append(specifier.path) - return ["@\(specifier.path.join("args.resp").str)"] + return ["@\(specifier.path.join("args.resp").str)"] - case .dynamic, .textBased, .framework: - // A static library can't build against a dynamic library, or against a .tbd file, so we don't add any arguments here. But the inclusion of such a file in the Link Binaries build phase might be used to find implicit dependencies. - // We don't have a concrete example of this, and we used to emit an error here, but we removed it in . - return [] + case .dynamic, .textBased, .framework: + // A static library can't build against a dynamic library, or against a .tbd file, so we don't add any arguments here. But the inclusion of such a file in the Link Binaries build phase might be used to find implicit dependencies. + // We don't have a concrete example of this, and we used to emit an error here, but we removed it in . + return [] + } } - }) + ) var outputs: [any PlannedNode] = [delegate.createNode(cbc.output)] + cbc.commandOrderingOutputs @@ -1771,13 +1769,13 @@ public final class LibtoolLinkerSpec : GenericLinkerSpec, SpecIdentifierType, @u let commandLine = await commandLineFromTemplate(cbc, delegate, optionContext: optionContext, specialArgs: specialArgs).map(\.asString) // Compute the inputs and outputs. - var inputs = inputPaths.map{ delegate.createNode($0) } + var inputs = inputPaths.map { delegate.createNode($0) } // Add inputs for the (un)exports files, if we generated options for them. - if let idx = commandLine.firstIndex(of: "-exported_symbols_list"), idx+1 < commandLine.count { - inputs.append(delegate.createNode(Path(commandLine[idx+1]).normalize())) + if let idx = commandLine.firstIndex(of: "-exported_symbols_list"), idx + 1 < commandLine.count { + inputs.append(delegate.createNode(Path(commandLine[idx + 1]).normalize())) } - if let idx = commandLine.firstIndex(of: "-unexported_symbols_list"), idx+1 < commandLine.count { - inputs.append(delegate.createNode(Path(commandLine[idx+1]).normalize())) + if let idx = commandLine.firstIndex(of: "-unexported_symbols_list"), idx + 1 < commandLine.count { + inputs.append(delegate.createNode(Path(commandLine[idx + 1]).normalize())) } var payload: LibtoolLinkerTaskPayload? 
= nil @@ -1857,69 +1855,84 @@ public func discoveredLinkerToolsInfo(_ producer: any CommandProducer, _ delegat do { do { let commandLine = [toolPath.str, "-version_details"] - return try await producer.discoveredCommandLineToolSpecInfo(delegate, nil, commandLine, { executionResult in - let gnuLD = [ - #/GNU ld version (?<version>[\d.]+)-.*/#, - #/GNU ld \(GNU Binutils.*\) (?<version>[\d.]+)/#, - ] - if let match = try gnuLD.compactMap({ try $0.firstMatch(in: String(decoding: executionResult.stdout, as: UTF8.self)) }).first { - return DiscoveredLdLinkerToolSpecInfo(linker: .gnuld, toolPath: toolPath, toolVersion: try Version(String(match.output.version)), architectures: Set()) - } + return try await producer.discoveredCommandLineToolSpecInfo( + delegate, + nil, + commandLine, + { executionResult in + let gnuLD = [ + #/GNU ld version (?<version>[\d.]+)-.*/#, + #/GNU ld \(GNU Binutils.*\) (?<version>[\d.]+)/#, + ] + if let match = try gnuLD.compactMap({ try $0.firstMatch(in: String(decoding: executionResult.stdout, as: UTF8.self)) }).first { + return DiscoveredLdLinkerToolSpecInfo(linker: .gnuld, toolPath: toolPath, toolVersion: try Version(String(match.output.version)), architectures: Set()) + } - let goLD = [ - #/GNU gold version (?<version>[\d.]+)-.*/#, - #/GNU gold \(GNU Binutils.*\) (?<version>[\d.]+)/#, // Ubuntu "GNU gold (GNU Binutils for Ubuntu 2.38) 1.16", Debian "GNU gold (GNU Binutils for Debian 2.40) 1.16" - #/GNU gold \(version .*\) (?<version>[\d.]+)/#, // Fedora "GNU gold (version 2.40-14.fc39) 1.16", RHEL "GNU gold (version 2.35.2-54.el9) 1.16", Amazon "GNU gold (version 2.29.1-31.amzn2.0.1) 1.14" - ] + let goLD = [ + #/GNU gold version (?<version>[\d.]+)-.*/#, + #/GNU gold \(GNU Binutils.*\) (?<version>[\d.]+)/#, // Ubuntu "GNU gold (GNU Binutils for Ubuntu 2.38) 1.16", Debian "GNU gold (GNU Binutils for Debian 2.40) 1.16" + #/GNU gold \(version .*\) (?<version>[\d.]+)/#, // Fedora "GNU gold (version 2.40-14.fc39) 1.16", RHEL "GNU gold (version 2.35.2-54.el9) 1.16", Amazon "GNU gold (version 2.29.1-31.amzn2.0.1) 1.14" + ] - if let match = try goLD.compactMap({ try $0.firstMatch(in: String(decoding: executionResult.stdout, as: UTF8.self)) }).first { - return DiscoveredLdLinkerToolSpecInfo(linker: .gold, toolPath: toolPath, toolVersion: try Version(String(match.output.version)), architectures: Set()) - } + if let match = try goLD.compactMap({ try $0.firstMatch(in: String(decoding: executionResult.stdout, as: UTF8.self)) }).first { + return DiscoveredLdLinkerToolSpecInfo(linker: .gold, toolPath: toolPath, toolVersion: try Version(String(match.output.version)), architectures: Set()) + } - // link.exe has no option to simply dump the version; running the program with no arguments or an invalid one will dump a header that contains the version. - let linkExe = [ - #/Microsoft \(R\) Incremental Linker Version (?<version>[\d.]+)/# - ] - if let match = try linkExe.compactMap({ try $0.firstMatch(in: String(decoding: executionResult.stdout, as: UTF8.self)) }).first { - return DiscoveredLdLinkerToolSpecInfo(linker: .linkExe, toolPath: toolPath, toolVersion: try Version(String(match.output.version)), architectures: Set()) - } + // link.exe has no option to simply dump the version; running the program with no arguments or an invalid one will dump a header that contains the version.
+ let linkExe = [ + #/Microsoft \(R\) Incremental Linker Version (?<version>[\d.]+)/# + ] + if let match = try linkExe.compactMap({ try $0.firstMatch(in: String(decoding: executionResult.stdout, as: UTF8.self)) }).first { + return DiscoveredLdLinkerToolSpecInfo(linker: .linkExe, toolPath: toolPath, toolVersion: try Version(String(match.output.version)), architectures: Set()) + } - struct LDVersionDetails: Decodable { - let version: Version - let architectures: Set<String> - } + struct LDVersionDetails: Decodable { + let version: Version + let architectures: Set<String> + } - let details: LDVersionDetails - do { - details = try JSONDecoder().decode(LDVersionDetails.self, from: executionResult.stdout) - } catch { - throw CommandLineOutputJSONParsingError(commandLine: commandLine, data: executionResult.stdout) - } + let details: LDVersionDetails + do { + details = try JSONDecoder().decode(LDVersionDetails.self, from: executionResult.stdout) + } catch { + throw CommandLineOutputJSONParsingError(commandLine: commandLine, data: executionResult.stdout) + } - return DiscoveredLdLinkerToolSpecInfo(linker: .ld64, toolPath: toolPath, toolVersion: details.version, architectures: details.architectures) - }) + return DiscoveredLdLinkerToolSpecInfo(linker: .ld64, toolPath: toolPath, toolVersion: details.version, architectures: details.architectures) + } + ) } catch let e as CommandLineOutputJSONParsingError { let vCommandLine = [toolPath.str, "-v"] - return try await producer.discoveredCommandLineToolSpecInfo(delegate, nil, vCommandLine, { executionResult in - let lld = [ - #/LLD (?<version>[\d.]+).*/#, - ] - if let match = try lld.compactMap({ try $0.firstMatch(in: String(decoding: executionResult.stdout, as: UTF8.self)) }).first { - return DiscoveredLdLinkerToolSpecInfo(linker: .lld, toolPath: toolPath, toolVersion: try Version(String(match.output.version)), architectures: Set()) - } - - let versionCommandLine = [toolPath.str, "--version"] - return try await producer.discoveredCommandLineToolSpecInfo(delegate, nil, versionCommandLine, { executionResult in + return try await producer.discoveredCommandLineToolSpecInfo( + delegate, + nil, + vCommandLine, + { executionResult in let lld = [ - #/LLD (?<version>[\d.]+).*/#, + #/LLD (?<version>[\d.]+).*/# ] if let match = try lld.compactMap({ try $0.firstMatch(in: String(decoding: executionResult.stdout, as: UTF8.self)) }).first { return DiscoveredLdLinkerToolSpecInfo(linker: .lld, toolPath: toolPath, toolVersion: try Version(String(match.output.version)), architectures: Set()) } - throw e - }) - }) + let versionCommandLine = [toolPath.str, "--version"] + return try await producer.discoveredCommandLineToolSpecInfo( + delegate, + nil, + versionCommandLine, + { executionResult in + let lld = [ + #/LLD (?<version>[\d.]+).*/# + ] + if let match = try lld.compactMap({ try $0.firstMatch(in: String(decoding: executionResult.stdout, as: UTF8.self)) }).first { + return DiscoveredLdLinkerToolSpecInfo(linker: .lld, toolPath: toolPath, toolVersion: try Version(String(match.output.version)), architectures: Set()) + } + + throw e + } + ) + } + ) } } catch { delegate.error(error) @@ -1949,14 +1962,17 @@ fileprivate func enumerateLinkerCommandLine(arguments: [String], handleWl: Bool continue } - handle(arg, { (offset: Int) -> String? in - // The driver accepts both `-Xlinker -flag value` _and_ `-Xlinker -flag -Xlinker value`. - // So if the first argument used -Xlinker, we may or may not have more. - if let value = it.next(count: offset, transform: { it, arg in isXlinker && arg == "-Xlinker" ? it.next() : arg }).last ??
nil { - return value + handle( + arg, + { (offset: Int) -> String? in + // The driver accepts both `-Xlinker -flag value` _and_ `-Xlinker -flag -Xlinker value`. + // So if the first argument used -Xlinker, we may or may not have more. + if let value = it.next(count: offset, transform: { it, arg in isXlinker && arg == "-Xlinker" ? it.next() : arg }).last ?? nil { + return value + } + return nil } - return nil - }) + ) } } @@ -1980,8 +1996,7 @@ fileprivate func filterLinkerFlagsWhenUnderPreviewsDylib(_ flags: [String]) -> [ } } continue - } - else if flag == "-client_name" { + } else if flag == "-client_name" { // Filter out `-client_Name` when using the previews dylib, since this isn't // allowed on dylibs. Transition from `OTHER_LD_FLAGS` to the dedicated // `LD_CLIENT_NAME` (by defining both at once) in order to remain compatible with @@ -1995,16 +2010,13 @@ fileprivate func filterLinkerFlagsWhenUnderPreviewsDylib(_ flags: [String]) -> [ break } } - } - else { + } else { _ = it.next() } continue - } - else if flag == "-Wl,-no_exported_symbols" { + } else if flag == "-Wl,-no_exported_symbols" { continue - } - else if flag == "-no_exported_symbols" && newFlags.last == "-Xlinker" { + } else if flag == "-no_exported_symbols" && newFlags.last == "-Xlinker" { // Filter out `-no_exported_symbols` when using the previews dylib, since this // strips important symbols that are needed for the stub executor trampoline.. // Transition from `OTHER_LD_FLAGS` to the dedicated `LD_EXPORT_SYMBOLS` (by diff --git a/Sources/SWBCore/SpecImplementations/Tools/Lipo.swift b/Sources/SWBCore/SpecImplementations/Tools/Lipo.swift index d5d76c02..b1f6acd7 100644 --- a/Sources/SWBCore/SpecImplementations/Tools/Lipo.swift +++ b/Sources/SWBCore/SpecImplementations/Tools/Lipo.swift @@ -48,12 +48,12 @@ public final class LipoToolSpec: GenericCommandLineToolSpec, SpecIdentifierType, var commandLine = [String]() if cbc.scope.evaluate(BuiltinMacros.CREATE_UNIVERSAL_STATIC_LIBRARY_USING_LIBTOOL) && cbc.scope.evaluate(BuiltinMacros.MACH_O_TYPE) == "staticlib" { - commandLine.append(cbc.producer.libtoolLinkerSpec.libtoolToolPath(cbc).str) - commandLine.append("-static") - for input in cbc.inputs { - commandLine.append(input.absolutePath.str) - } - commandLine += ["-o", outputPath.str] + commandLine.append(cbc.producer.libtoolLinkerSpec.libtoolToolPath(cbc).str) + commandLine.append("-static") + for input in cbc.inputs { + commandLine.append(input.absolutePath.str) + } + commandLine += ["-o", outputPath.str] } else { commandLine.append(lipoToolPath(cbc).str) diff --git a/Sources/SWBCore/SpecImplementations/Tools/MkdirTool.swift b/Sources/SWBCore/SpecImplementations/Tools/MkdirTool.swift index 78feb6d3..a38b62f0 100644 --- a/Sources/SWBCore/SpecImplementations/Tools/MkdirTool.swift +++ b/Sources/SWBCore/SpecImplementations/Tools/MkdirTool.swift @@ -12,7 +12,7 @@ import SWBUtil -public final class MkdirToolSpec : CommandLineToolSpec, SpecIdentifierType, @unchecked Sendable { +public final class MkdirToolSpec: CommandLineToolSpec, SpecIdentifierType, @unchecked Sendable { public static let identifier = "com.apple.tools.mkdir" override public func constructTasks(_ cbc: CommandBuildContext, _ delegate: any TaskGenerationDelegate) async { @@ -20,11 +20,14 @@ public final class MkdirToolSpec : CommandLineToolSpec, SpecIdentifierType, @unc // We must create a virtual output usable for mutable node ordering. 
let outputs: [any PlannedNode] = [delegate.createNode(output), delegate.createVirtualNode("MkDir \(output.str)")] delegate.createTask( - type: self, ruleInfo: ["MkDir", output.str], + type: self, + ruleInfo: ["MkDir", output.str], commandLine: ["/bin/mkdir", "-p", output.str], environment: EnvironmentBindings(), workingDirectory: cbc.producer.defaultWorkingDirectory, - inputs: [], outputs: outputs, action: nil, + inputs: [], + outputs: outputs, + action: nil, execDescription: resolveExecutionDescription(cbc, delegate), preparesForIndexing: cbc.preparesForIndexing, enableSandboxing: enableSandboxing, diff --git a/Sources/SWBCore/SpecImplementations/Tools/ModulesVerifierTool.swift b/Sources/SWBCore/SpecImplementations/Tools/ModulesVerifierTool.swift index a584628e..d59ce3ee 100644 --- a/Sources/SWBCore/SpecImplementations/Tools/ModulesVerifierTool.swift +++ b/Sources/SWBCore/SpecImplementations/Tools/ModulesVerifierTool.swift @@ -12,7 +12,7 @@ public import SWBUtil -public final class ModulesVerifierToolSpec : GenericCommandLineToolSpec, SpecIdentifierType, @unchecked Sendable { +public final class ModulesVerifierToolSpec: GenericCommandLineToolSpec, SpecIdentifierType, @unchecked Sendable { public static let identifier = "com.apple.build-tools.modules-verifier" override public func constructTasks(_ cbc: CommandBuildContext, _ delegate: any TaskGenerationDelegate) async { @@ -29,17 +29,21 @@ public final class ModulesVerifierToolSpec : GenericCommandLineToolSpec, SpecIde let commandLine = await commandLineFromTemplate(cbc, delegate, optionContext: discoveredCommandLineToolSpecInfo(cbc.producer, cbc.scope, delegate), specialArgs: specialArguments).map(\.asString) - let inputs = cbc.inputs.map{ delegate.createNode($0.absolutePath) } + cbc.commandOrderingInputs + let inputs = cbc.inputs.map { delegate.createNode($0.absolutePath) } + cbc.commandOrderingInputs let outputs = cbc.outputs.map { delegate.createNode($0) } + cbc.commandOrderingOutputs - delegate.createTask(type: self, - ruleInfo: ruleInfo, - commandLine: commandLine, - environment: environmentFromSpec(cbc, delegate), - workingDirectory: cbc.producer.defaultWorkingDirectory, - inputs: inputs, outputs: outputs, action: nil, - execDescription: resolveExecutionDescription(cbc, delegate), - enableSandboxing: enableSandboxing, - alwaysExecuteTask: alwaysExecuteTask) + delegate.createTask( + type: self, + ruleInfo: ruleInfo, + commandLine: commandLine, + environment: environmentFromSpec(cbc, delegate), + workingDirectory: cbc.producer.defaultWorkingDirectory, + inputs: inputs, + outputs: outputs, + action: nil, + execDescription: resolveExecutionDescription(cbc, delegate), + enableSandboxing: enableSandboxing, + alwaysExecuteTask: alwaysExecuteTask + ) } } diff --git a/Sources/SWBCore/SpecImplementations/Tools/ObjectLibraryAssembler.swift b/Sources/SWBCore/SpecImplementations/Tools/ObjectLibraryAssembler.swift index d228388d..b13e40de 100644 --- a/Sources/SWBCore/SpecImplementations/Tools/ObjectLibraryAssembler.swift +++ b/Sources/SWBCore/SpecImplementations/Tools/ObjectLibraryAssembler.swift @@ -10,7 +10,7 @@ // //===----------------------------------------------------------------------===// -public final class ObjectLibraryAssemblerSpec : GenericLinkerSpec, SpecIdentifierType, @unchecked Sendable { +public final class ObjectLibraryAssemblerSpec: GenericLinkerSpec, SpecIdentifierType, @unchecked Sendable { public static let identifier: String = "org.swift.linkers.object-library-assembler" public override func createTaskAction(_ cbc: 
CommandBuildContext, _ delegate: any TaskGenerationDelegate) -> (any PlannedTaskAction)? { diff --git a/Sources/SWBCore/SpecImplementations/Tools/PLUtilTool.swift b/Sources/SWBCore/SpecImplementations/Tools/PLUtilTool.swift index c8bad67e..17569397 100644 --- a/Sources/SWBCore/SpecImplementations/Tools/PLUtilTool.swift +++ b/Sources/SWBCore/SpecImplementations/Tools/PLUtilTool.swift @@ -10,6 +10,6 @@ // //===----------------------------------------------------------------------===// -final class PLUtilToolSpec : CommandLineToolSpec, SpecIdentifierType, @unchecked Sendable { +final class PLUtilToolSpec: CommandLineToolSpec, SpecIdentifierType, @unchecked Sendable { static let identifier = "com.apple.tools.plutil" } diff --git a/Sources/SWBCore/SpecImplementations/Tools/PrelinkedObjectLink.swift b/Sources/SWBCore/SpecImplementations/Tools/PrelinkedObjectLink.swift index d2e17180..89f4ab08 100644 --- a/Sources/SWBCore/SpecImplementations/Tools/PrelinkedObjectLink.swift +++ b/Sources/SWBCore/SpecImplementations/Tools/PrelinkedObjectLink.swift @@ -36,9 +36,10 @@ public final class PrelinkedObjectLinkSpec: CommandLineToolSpec, SpecImplementat commandLine += ["-r", "-arch", arch] if let buildPlatform = cbc.producer.sdk?.targetBuildVersionPlatform(sdkVariant: cbc.producer.sdkVariant), - let deploymentTargetMacro = cbc.producer.platform?.deploymentTargetMacro, - let minDeploymentTarget = cbc.scope.evaluate(deploymentTargetMacro).nilIfEmpty, - let sdkVersion = cbc.producer.sdk?.version { + let deploymentTargetMacro = cbc.producer.platform?.deploymentTargetMacro, + let minDeploymentTarget = cbc.scope.evaluate(deploymentTargetMacro).nilIfEmpty, + let sdkVersion = cbc.producer.sdk?.version + { commandLine += ["-platform_version", "\(buildPlatform.rawValue)", minDeploymentTarget, sdkVersion.canonicalDeploymentTargetForm.description] } diff --git a/Sources/SWBCore/SpecImplementations/Tools/ProductPackaging.swift b/Sources/SWBCore/SpecImplementations/Tools/ProductPackaging.swift index 2fec4dd2..2f3eca5c 100644 --- a/Sources/SWBCore/SpecImplementations/Tools/ProductPackaging.swift +++ b/Sources/SWBCore/SpecImplementations/Tools/ProductPackaging.swift @@ -14,7 +14,7 @@ public import SWBUtil import SWBMacro import Foundation -public final class ProductPackagingToolSpec : GenericCommandLineToolSpec, SpecIdentifierType, @unchecked Sendable { +public final class ProductPackagingToolSpec: GenericCommandLineToolSpec, SpecIdentifierType, @unchecked Sendable { public static let identifier = "com.apple.tools.product-pkg-utility" public override func constructTasks(_ cbc: CommandBuildContext, _ delegate: any TaskGenerationDelegate) async { @@ -175,7 +175,7 @@ public final class ProductPackagingToolSpec : GenericCommandLineToolSpec, SpecId delegate.access(path: path) } - delegate.createTask(type: self, ruleInfo: ["ProcessProductPackaging", codeSignEntitlementsInput?.absolutePath.str ?? "", outputPath.str], commandLine: commandLine, additionalOutput: additionalOutput, environment: environmentFromSpec(cbc, delegate), workingDirectory: cbc.producer.defaultWorkingDirectory, inputs: inputs.map(\.absolutePath), outputs: [ outputPath ], action: action, execDescription: resolveExecutionDescription(cbc, delegate), enableSandboxing: enableSandboxing) + delegate.createTask(type: self, ruleInfo: ["ProcessProductPackaging", codeSignEntitlementsInput?.absolutePath.str ?? 
"", outputPath.str], commandLine: commandLine, additionalOutput: additionalOutput, environment: environmentFromSpec(cbc, delegate), workingDirectory: cbc.producer.defaultWorkingDirectory, inputs: inputs.map(\.absolutePath), outputs: [outputPath], action: action, execDescription: resolveExecutionDescription(cbc, delegate), enableSandboxing: enableSandboxing) } /// Construct a task to create the provisioning file (commonly named `embedded.mobileprovision`). @@ -202,10 +202,10 @@ public final class ProductPackagingToolSpec : GenericCommandLineToolSpec, SpecId let commandLine = await commandLineFromTemplate(cbc, delegate, optionContext: discoveredCommandLineToolSpecInfo(cbc.producer, cbc.scope, delegate), lookup: lookup).map(\.asString) let action = delegate.taskActionCreationDelegate.createProcessProductProvisioningProfileTaskAction() - delegate.createTask(type: self, ruleInfo: ["ProcessProductPackaging", inputPath.str, outputPath.str], commandLine: commandLine, environment: environmentFromSpec(cbc, delegate), workingDirectory: cbc.producer.defaultWorkingDirectory, inputs: cbc.inputs.map({ $0.absolutePath }), outputs: [ outputPath ], action: action, execDescription: resolveExecutionDescription(cbc, delegate), enableSandboxing: enableSandboxing) + delegate.createTask(type: self, ruleInfo: ["ProcessProductPackaging", inputPath.str, outputPath.str], commandLine: commandLine, environment: environmentFromSpec(cbc, delegate), workingDirectory: cbc.producer.defaultWorkingDirectory, inputs: cbc.inputs.map({ $0.absolutePath }), outputs: [outputPath], action: action, execDescription: resolveExecutionDescription(cbc, delegate), enableSandboxing: enableSandboxing) // FIXME: Need to add this signature info to the command once commands support signatures. (I think we probably only need to add the UUID.) -// producer.extraSignatureInfo = inputs.profileUUID ?: inputs.profilePath.pathString; + // producer.extraSignatureInfo = inputs.profileUUID ?: inputs.profilePath.pathString; } } @@ -230,7 +230,7 @@ private extension ProductPackagingToolSpec { message = "The '\(buildSettingName)' build setting is set to '\(buildSettingValue)', but entitlement '\(entitlement)' is set to '\(entitlementValue ? "YES" : "NO")' in your entitlements file." 
childDiagnostics = [ .init(behavior: .note, location: entitlementsLocation, data: .init("To enable '\(buildSettingName)', remove the entitlement from your entitlements file.")), - .init(behavior: .note, location: .buildSettings(names: [buildSettingName]), data: .init("To disable '\(buildSettingName)', remove the entitlement from your entitlements file and disable '\(buildSettingName)' in build settings.")) + .init(behavior: .note, location: .buildSettings(names: [buildSettingName]), data: .init("To disable '\(buildSettingName)', remove the entitlement from your entitlements file and disable '\(buildSettingName)' in build settings.")), ] delegate.warning(message, childDiagnostics: childDiagnostics) diff --git a/Sources/SWBCore/SpecImplementations/Tools/ShellScriptTool.swift b/Sources/SWBCore/SpecImplementations/Tools/ShellScriptTool.swift index 095d25df..24d5d621 100644 --- a/Sources/SWBCore/SpecImplementations/Tools/ShellScriptTool.swift +++ b/Sources/SWBCore/SpecImplementations/Tools/ShellScriptTool.swift @@ -12,7 +12,7 @@ public import SWBUtil -public final class ShellScriptToolSpec : CommandLineToolSpec, SpecIdentifierType, @unchecked Sendable { +public final class ShellScriptToolSpec: CommandLineToolSpec, SpecIdentifierType, @unchecked Sendable { public static let identifier = "com.apple.commands.shell-script" override public func constructTasks(_ cbc: CommandBuildContext, _ delegate: any TaskGenerationDelegate) async { @@ -21,30 +21,48 @@ public final class ShellScriptToolSpec : CommandLineToolSpec, SpecIdentifierType } public func constructShellScriptTasks( - _ cbc: CommandBuildContext, _ delegate: any TaskGenerationDelegate, - ruleInfo: [String], commandLine: [String], environment: EnvironmentBindings, - workingDirectory: Path? = nil, inputs: [any PlannedNode], outputs: [any PlannedNode], - dependencyData: DependencyDataStyle?, execDescription: String, - showEnvironment: Bool, alwaysExecuteTask: Bool, enableSandboxing: Bool, - payload: (any TaskPayload)? = nil, action: (any PlannedTaskAction)? = nil, + _ cbc: CommandBuildContext, + _ delegate: any TaskGenerationDelegate, + ruleInfo: [String], + commandLine: [String], + environment: EnvironmentBindings, + workingDirectory: Path? = nil, + inputs: [any PlannedNode], + outputs: [any PlannedNode], + dependencyData: DependencyDataStyle?, + execDescription: String, + showEnvironment: Bool, + alwaysExecuteTask: Bool, + enableSandboxing: Bool, + payload: (any TaskPayload)? = nil, + action: (any PlannedTaskAction)? = nil, repairViaOwnershipAnalysis: Bool = false ) { delegate.createTask( - type: self, dependencyData: dependencyData, payload: payload, - ruleInfo: ruleInfo, additionalSignatureData: "", + type: self, + dependencyData: dependencyData, + payload: payload, + ruleInfo: ruleInfo, + additionalSignatureData: "", commandLine: commandLine.map { ByteString(encodingAsUTF8: $0) }, - additionalOutput: [], environment: environment, + additionalOutput: [], + environment: environment, workingDirectory: workingDirectory ?? cbc.producer.defaultWorkingDirectory, - inputs: inputs, outputs: outputs, mustPrecede: [], - action: action ?? delegate.taskActionCreationDelegate + inputs: inputs, + outputs: outputs, + mustPrecede: [], + action: action + ?? 
delegate.taskActionCreationDelegate .createDeferredExecutionTaskActionIfRequested( userPreferences: delegate.userPreferences ), execDescription: execDescription, preparesForIndexing: cbc.preparesForIndexing, enableSandboxing: enableSandboxing, - llbuildControlDisabled: true, additionalTaskOrderingOptions: [], - alwaysExecuteTask: alwaysExecuteTask, showEnvironment: showEnvironment, + llbuildControlDisabled: true, + additionalTaskOrderingOptions: [], + alwaysExecuteTask: alwaysExecuteTask, + showEnvironment: showEnvironment, repairViaOwnershipAnalysis: repairViaOwnershipAnalysis ) } diff --git a/Sources/SWBCore/SpecImplementations/Tools/StripTool.swift b/Sources/SWBCore/SpecImplementations/Tools/StripTool.swift index 0f4a556d..ed97774e 100644 --- a/Sources/SWBCore/SpecImplementations/Tools/StripTool.swift +++ b/Sources/SWBCore/SpecImplementations/Tools/StripTool.swift @@ -13,7 +13,7 @@ import SWBMacro import SWBUtil -public final class StripToolSpec : GenericCommandLineToolSpec, SpecIdentifierType, @unchecked Sendable { +public final class StripToolSpec: GenericCommandLineToolSpec, SpecIdentifierType, @unchecked Sendable { public static let identifier = "com.apple.build-tools.strip" /// Custom override to inject an appropriate output path. diff --git a/Sources/SWBCore/SpecImplementations/Tools/SwiftABICheckerTool.swift b/Sources/SWBCore/SpecImplementations/Tools/SwiftABICheckerTool.swift index 8ed1aebe..3605cce1 100644 --- a/Sources/SWBCore/SpecImplementations/Tools/SwiftABICheckerTool.swift +++ b/Sources/SWBCore/SpecImplementations/Tools/SwiftABICheckerTool.swift @@ -13,7 +13,7 @@ public import SWBUtil public import SWBMacro -public final class SwiftABICheckerToolSpec : GenericCommandLineToolSpec, SpecIdentifierType, SwiftDiscoveredCommandLineToolSpecInfo, @unchecked Sendable { +public final class SwiftABICheckerToolSpec: GenericCommandLineToolSpec, SpecIdentifierType, SwiftDiscoveredCommandLineToolSpecInfo, @unchecked Sendable { public static let identifier = "com.apple.build-tools.swift-abi-checker" override public func discoveredCommandLineToolSpecInfo(_ producer: any CommandProducer, _ scope: MacroEvaluationScope, _ delegate: any CoreClientTargetDiagnosticProducingDelegate) async -> (any DiscoveredCommandLineToolSpecInfo)? 
{ @@ -105,18 +105,20 @@ public final class SwiftABICheckerToolSpec : GenericCommandLineToolSpec, SpecIde for searchPath in SwiftCompilerSpec.collectInputSearchPaths(cbc, toolInfo: toolSpecInfo) { commandLine += ["-I", searchPath] } - delegate.createTask(type: self, - payload: ABICheckerPayload( - serializedDiagnosticsPath: serializedDiagsPath, - downgradeErrors: downgradeErrors - ), - ruleInfo: defaultRuleInfo(cbc, delegate), - commandLine: commandLine, - environment: environmentFromSpec(cbc, delegate), - workingDirectory: cbc.producer.defaultWorkingDirectory, - inputs: allInputs, - outputs: [delegate.createNode(cbc.output)], - enableSandboxing: enableSandboxing) + delegate.createTask( + type: self, + payload: ABICheckerPayload( + serializedDiagnosticsPath: serializedDiagsPath, + downgradeErrors: downgradeErrors + ), + ruleInfo: defaultRuleInfo(cbc, delegate), + commandLine: commandLine, + environment: environmentFromSpec(cbc, delegate), + workingDirectory: cbc.producer.defaultWorkingDirectory, + inputs: allInputs, + outputs: [delegate.createNode(cbc.output)], + enableSandboxing: enableSandboxing + ) } } diff --git a/Sources/SWBCore/SpecImplementations/Tools/SwiftABIGenerationTool.swift b/Sources/SWBCore/SpecImplementations/Tools/SwiftABIGenerationTool.swift index b8e47fd7..c12c4fba 100644 --- a/Sources/SWBCore/SpecImplementations/Tools/SwiftABIGenerationTool.swift +++ b/Sources/SWBCore/SpecImplementations/Tools/SwiftABIGenerationTool.swift @@ -13,7 +13,7 @@ public import SWBUtil public import SWBMacro -public final class SwiftABIGenerationToolSpec : GenericCommandLineToolSpec, SpecIdentifierType, SwiftDiscoveredCommandLineToolSpecInfo, @unchecked Sendable { +public final class SwiftABIGenerationToolSpec: GenericCommandLineToolSpec, SpecIdentifierType, SwiftDiscoveredCommandLineToolSpecInfo, @unchecked Sendable { public static let identifier = "com.apple.build-tools.swift-abi-generation" override public func discoveredCommandLineToolSpecInfo(_ producer: any CommandProducer, _ scope: MacroEvaluationScope, _ delegate: any CoreClientTargetDiagnosticProducingDelegate) async -> (any DiscoveredCommandLineToolSpecInfo)? 
{ @@ -53,13 +53,15 @@ public final class SwiftABIGenerationToolSpec : GenericCommandLineToolSpec, Spec for searchPath in SwiftCompilerSpec.collectInputSearchPaths(cbc, toolInfo: toolSpecInfo) { commandLine += ["-I", searchPath] } - delegate.createTask(type: self, - ruleInfo: defaultRuleInfo(cbc, delegate), - commandLine: commandLine, - environment: environmentFromSpec(cbc, delegate), - workingDirectory: cbc.producer.defaultWorkingDirectory, - inputs: cbc.inputs.map { delegate.createNode($0.absolutePath) }, - outputs: [delegate.createNode(cbc.output)], - enableSandboxing: enableSandboxing) + delegate.createTask( + type: self, + ruleInfo: defaultRuleInfo(cbc, delegate), + commandLine: commandLine, + environment: environmentFromSpec(cbc, delegate), + workingDirectory: cbc.producer.defaultWorkingDirectory, + inputs: cbc.inputs.map { delegate.createNode($0.absolutePath) }, + outputs: [delegate.createNode(cbc.output)], + enableSandboxing: enableSandboxing + ) } } diff --git a/Sources/SWBCore/SpecImplementations/Tools/SwiftCompiler.swift b/Sources/SWBCore/SpecImplementations/Tools/SwiftCompiler.swift index d9ed7ad7..9cb6fe61 100644 --- a/Sources/SWBCore/SpecImplementations/Tools/SwiftCompiler.swift +++ b/Sources/SWBCore/SpecImplementations/Tools/SwiftCompiler.swift @@ -104,7 +104,7 @@ public struct SwiftSourceFileIndexingInfo: SourceFileIndexingInfo { result += ["-index-unit-output-path", indexOutputFile] } return result - } + } public var indexOutputFile: String? { outputFile.str } public var language: IndexingInfoLanguage? { .swift } @@ -142,7 +142,7 @@ public struct SwiftSourceFileIndexingInfo: SourceFileIndexingInfo { "-emit-module-interface", "-emit-objc-header", "-lto=llvm-thin", - "-lto=llvm-full" + "-lto=llvm-full", ] private static let removeArgs: Set = [ "-o", @@ -153,11 +153,12 @@ public struct SwiftSourceFileIndexingInfo: SourceFileIndexingInfo { "-emit-module-interface-path", "-emit-private-module-interface-path", "-emit-package-module-interface-path", - "-emit-objc-header-path" + "-emit-objc-header-path", ] private static let removeFrontendArgs: Set = [ "-experimental-skip-non-inlinable-function-bodies", - "-experimental-skip-all-function-bodies"] + "-experimental-skip-all-function-bodies", + ] // SourceKit uses the old driver to determine the frontend args. 
Remove all // new driver flags as a workaround for cases where corresponding no-op @@ -174,7 +175,8 @@ public struct SwiftSourceFileIndexingInfo: SourceFileIndexingInfo { "-emit-module-separately-wmo", "-no-emit-module-separately-wmo", "-use-frontend-parseable-output", - "-emit-digester-baseline"] + "-emit-digester-baseline", + ] private static let newDriverArgs: Set = [ "-emit-module-serialize-diagnostics-path", "-emit-module-dependencies-path", @@ -182,7 +184,8 @@ public struct SwiftSourceFileIndexingInfo: SourceFileIndexingInfo { "-compare-to-baseline-path", "-serialize-breaking-changes-path", "-digester-breakage-allowlist-path", - "-digester-mode"] + "-digester-mode", + ] private static func indexingCommandLine(commandLine: [ByteString], payload: SwiftIndexingPayload, enableIndexBuildArena: Bool, integratedDriver: Bool) -> [ByteString] { precondition(!commandLine.isEmpty) @@ -222,7 +225,8 @@ public struct SwiftSourceFileIndexingInfo: SourceFileIndexingInfo { // Swift tests are not being discovered, XCTest framework from the project fails to import correctly if !enableIndexBuildArena, UserDefaults.enableFixFor23297285, - arg == "-I" || arg == "-F" { + arg == "-I" || arg == "-F" + { result.append(contentsOf: ["-Xcc", arg, "-Xcc", nextArg]) } } @@ -263,7 +267,7 @@ public struct SwiftSourceFileIndexingInfo: SourceFileIndexingInfo { // FIXME: Convert to bytes. dict["LanguageDialect"] = PropertyListItem("swift") // FIXME: Convert to bytes. - dict["swiftASTCommandArguments"] = PropertyListItem(commandLine.map{ $0.asString }) + dict["swiftASTCommandArguments"] = PropertyListItem(commandLine.map { $0.asString }) dict["swiftASTBuiltProductsDir"] = PropertyListItem(builtProductsDir.str) dict["assetSymbolIndexPath"] = PropertyListItem(assetSymbolIndexPath.str) dict["toolchains"] = PropertyListItem(toolchains) @@ -338,7 +342,7 @@ public struct SwiftLocalizationPayload: Serializable, Sendable { self.architecture = architecture } - public func serialize<T>(to serializer: T) where T : Serializer { + public func serialize<T>(to serializer: T) where T: Serializer { serializer.serializeAggregate(3) { serializer.serialize(effectivePlatformName) serializer.serialize(buildVariant) @@ -420,7 +424,7 @@ public struct SwiftDriverPayload: Serializable, TaskPayload, Encodable { self.verifyScannerDependencies = try deserializer.deserialize() } - public func serialize<T>(to serializer: T) where T : Serializer { + public func serialize<T>(to serializer: T) where T: Serializer { serializer.serializeAggregate(19) { serializer.serialize(self.uniqueID) serializer.serialize(self.compilerLocation) @@ -528,8 +532,8 @@ public final class SwiftCommandOutputParser: TaskOutputParser { enum MessageKind: String { case began case finished - case abnormal = "abnormal-exit" // Windows exceptions - case signalled // POSIX signals + case abnormal = "abnormal-exit" // Windows exceptions + case signalled // POSIX signals case skipped } @@ -651,7 +655,7 @@ public final class SwiftCommandOutputParser: TaskOutputParser { // The encoding is an ASCII one: // \n // \n - var slice = buffer[buffer.startIndex ..< buffer.endIndex] + var slice = buffer[buffer.startIndex..<buffer.endIndex] String { - if let name = SubtaskName(rawValue: name) { + if let name = SubtaskName(rawValue: name) { switch name { case .compile: return "CompileSwift" @@ -788,7 +793,8 @@ public final class SwiftCommandOutputParser: TaskOutputParser { var result = [Path]() for item in outputContents { guard case let .plDict(contents) = item, - case let .plString(itemType)?
= contents["type"] else { + case let .plString(itemType)? = contents["type"] + else { delegate.diagnosticsEngine.emit(data: DiagnosticData("invalid item in Swift parseable output message (\(item))"), behavior: .error) return nil } @@ -827,9 +833,7 @@ public final class SwiftCommandOutputParser: TaskOutputParser { if case let .plArray(inputContents)? = contents["inputs"] { var hadInput = false for case let .plString(value) in inputContents { - if value.hasSuffix(".swift") || - value.hasSuffix(".swiftinterface") || - value.hasSuffix(".modulemap") { + if value.hasSuffix(".swift") || value.hasSuffix(".swiftinterface") || value.hasSuffix(".modulemap") { inputCount += 1 if !hadInput { hadInput = true @@ -851,7 +855,7 @@ public final class SwiftCommandOutputParser: TaskOutputParser { return error("missing pid") } if subtasks[pid] != nil { - return error("invalid pid \(pid) (already in use)") + return error("invalid pid \(pid) (already in use)") } // Compute the title. @@ -880,7 +884,8 @@ public final class SwiftCommandOutputParser: TaskOutputParser { additionalOutput: [], interestingPath: onlyInput, workingDirectory: workingDirectory, - serializedDiagnosticsPaths: serializedDiagnosticsPaths) + serializedDiagnosticsPaths: serializedDiagnosticsPaths + ) subtasks[pid] = Subtask(pid: pid, serializedDiagnosticsPaths: serializedDiagnosticsPaths, delegate: subtaskDelegate) if subtaskName == .compile, !usingSwiftIntegratedDriver { @@ -998,7 +1003,7 @@ public final class SwiftCommandOutputParser: TaskOutputParser { public struct SwiftBlocklists: Sendable { - public struct ExplicitModulesInfo : ProjectFailuresBlockList, Codable, Sendable { + public struct ExplicitModulesInfo: ProjectFailuresBlockList, Codable, Sendable { let KnownFailures: [String] enum CodingKeys: String, CodingKey { @@ -1008,14 +1013,14 @@ public struct SwiftBlocklists: Sendable { var explicitModules: ExplicitModulesInfo? = nil - public struct InstallAPILazyTypecheckInfo : Codable, Sendable { + public struct InstallAPILazyTypecheckInfo: Codable, Sendable { /// A blocklist of module names that do not support the `SWIFT_INSTALLAPI_LAZY_TYPECHECK` build setting. let Modules: [String] } var installAPILazyTypecheck: InstallAPILazyTypecheckInfo? = nil - public struct CachingBlockList : ProjectFailuresBlockList, Codable, Sendable { + public struct CachingBlockList: ProjectFailuresBlockList, Codable, Sendable { let KnownFailures: [String] /// A blocklist of module names that do not support the `SWIFT_ENABLE_COMPILE_CACHE` build setting. @@ -1024,7 +1029,7 @@ public struct SwiftBlocklists: Sendable { var caching: CachingBlockList? 
= nil - public struct LanguageFeatureEnablementInfo : Codable, Sendable { + public struct LanguageFeatureEnablementInfo: Codable, Sendable { public struct Feature: Codable, Sendable { public enum DiagnosticLevel: String, Codable, Sendable { case ignore @@ -1136,7 +1141,7 @@ public struct SwiftMacroImplementationDescriptor: Hashable, Comparable, Sendable } } -public final class SwiftCompilerSpec : CompilerSpec, SpecIdentifierType, SwiftDiscoveredCommandLineToolSpecInfo, @unchecked Sendable { +public final class SwiftCompilerSpec: CompilerSpec, SpecIdentifierType, SwiftDiscoveredCommandLineToolSpecInfo, @unchecked Sendable { @_spi(Testing) public static let parallelismLevel = ProcessInfo.processInfo.activeProcessorCount public static let identifier = "com.apple.xcode.tools.swift.compiler" @@ -1148,8 +1153,11 @@ public final class SwiftCompilerSpec : CompilerSpec, SpecIdentifierType, SwiftDi return true } - fileprivate func getABIBaselinePath(_ scope: MacroEvaluationScope, _ delegate: any TaskGenerationDelegate, - _ mode: SwiftCompilationMode) -> Path? { + fileprivate func getABIBaselinePath( + _ scope: MacroEvaluationScope, + _ delegate: any TaskGenerationDelegate, + _ mode: SwiftCompilationMode + ) -> Path? { switch mode { case .api, .prepareForIndex: return nil @@ -1398,14 +1406,13 @@ public final class SwiftCompilerSpec : CompilerSpec, SpecIdentifierType, SwiftDi cbc.scope.evaluate(BuiltinMacros.COMPILER_WORKING_DIRECTORY).nilIfEmpty.map { Path($0) } ?? cbc.producer.defaultWorkingDirectory } - private func getExplicitModuleBlocklist(_ producer: any CommandProducer, _ scope: MacroEvaluationScope, _ delegate: any TaskGenerationDelegate) async -> SwiftBlocklists.ExplicitModulesInfo? { + private func getExplicitModuleBlocklist(_ producer: any CommandProducer, _ scope: MacroEvaluationScope, _ delegate: any TaskGenerationDelegate) async -> SwiftBlocklists.ExplicitModulesInfo? { let specInfo = await (discoveredCommandLineToolSpecInfo(producer, scope, delegate) as? DiscoveredSwiftCompilerToolSpecInfo) return specInfo?.blocklists.explicitModules } func swiftExplicitModuleBuildEnabled(_ producer: any CommandProducer, _ scope: MacroEvaluationScope, _ delegate: any TaskGenerationDelegate) async -> Bool { - let buildSettingEnabled = scope.evaluate(BuiltinMacros.SWIFT_ENABLE_EXPLICIT_MODULES) == .enabled || - scope.evaluate(BuiltinMacros._EXPERIMENTAL_SWIFT_EXPLICIT_MODULES) == .enabled + let buildSettingEnabled = scope.evaluate(BuiltinMacros.SWIFT_ENABLE_EXPLICIT_MODULES) == .enabled || scope.evaluate(BuiltinMacros._EXPERIMENTAL_SWIFT_EXPLICIT_MODULES) == .enabled // If this project is on the blocklist, override the blocklist default enable for it if let explicitModuleBlocklist = await getExplicitModuleBlocklist(producer, scope, delegate), explicitModuleBlocklist.isProjectListed(scope) { @@ -1778,7 +1785,8 @@ public final class SwiftCompilerSpec : CompilerSpec, SpecIdentifierType, SwiftDi if cbc.scope.evaluate(BuiltinMacros.SWIFT_ENABLE_INCREMENTAL_COMPILATION) { args.append("-incremental") if LibSwiftDriver.supportsDriverFlag(spelled: "-incremental-dependency-scan"), - cbc.scope.evaluate(BuiltinMacros.SWIFT_ENABLE_INCREMENTAL_SCAN) { + cbc.scope.evaluate(BuiltinMacros.SWIFT_ENABLE_INCREMENTAL_SCAN) + { args.append("-incremental-dependency-scan") } } @@ -1800,8 +1808,7 @@ public final class SwiftCompilerSpec : CompilerSpec, SpecIdentifierType, SwiftDi } var indexObjectFileDir: Path? 
= nil - if toolSpecInfo.toolFeatures.has(.indexUnitOutputPathWithoutWarning) || - (toolSpecInfo.toolFeatures.has(.indexUnitOutputPath) && (args.contains("-index-store-path") || cbc.scope.evaluate(BuiltinMacros.INDEX_ENABLE_BUILD_ARENA))) { + if toolSpecInfo.toolFeatures.has(.indexUnitOutputPathWithoutWarning) || (toolSpecInfo.toolFeatures.has(.indexUnitOutputPath) && (args.contains("-index-store-path") || cbc.scope.evaluate(BuiltinMacros.INDEX_ENABLE_BUILD_ARENA))) { // Unlike CCompiler, the index unit path remapping is actually added to the output file map. So even though both *arguments* are ignored when determining tasks to re-run, the file itself is hashed and that will cause rebuilds. Thus, always add the output path if Swift is new enough to not generate a warning if it isn't used. let basePath = cbc.scope.evaluate(BuiltinMacros.OBJROOT) if let newPath = generateIndexOutputPath(from: objectFileDir, basePath: basePath) { @@ -1840,12 +1847,14 @@ public final class SwiftCompilerSpec : CompilerSpec, SpecIdentifierType, SwiftDi args.append(contentsOf: ["-module-cache-path", explicitDependencyOutputPath.str]) let moduleCacheDir = cbc.scope.evaluate(BuiltinMacros.MODULE_CACHE_DIR) if LibSwiftDriver.supportsDriverFlag(spelled: "-clang-scanner-module-cache-path"), - !moduleCacheDir.isEmpty { + !moduleCacheDir.isEmpty + { // Specify the Clang scanner cache separately as a shared cache among different projects args.append(contentsOf: ["-clang-scanner-module-cache-path", moduleCacheDir.str]) } if LibSwiftDriver.supportsDriverFlag(spelled: "-sdk-module-cache-path"), - !moduleCacheDir.isEmpty { + !moduleCacheDir.isEmpty + { args.append(contentsOf: ["-sdk-module-cache-path", moduleCacheDir.str]) } } @@ -1870,7 +1879,8 @@ public final class SwiftCompilerSpec : CompilerSpec, SpecIdentifierType, SwiftDi } // If the integrated cache queries is enabled, the remote service is handled by build system and no need to pass to compiler if !casOpts.enableIntegratedCacheQueries && casOpts.hasRemoteCache, - let remoteService = casOpts.remoteServicePath { + let remoteService = casOpts.remoteServicePath + { args += ["-cas-plugin-option", "remote-service-path=" + remoteService.str] } } @@ -1930,12 +1940,7 @@ public final class SwiftCompilerSpec : CompilerSpec, SpecIdentifierType, SwiftDi let sourceInfoPath = Path(moduleFilePath.withoutSuffix + ".swiftsourceinfo") moduleOutputPaths.append(sourceInfoPath) let usingLegacyDriver = cbc.scope.evaluate(BuiltinMacros.OTHER_SWIFT_FLAGS).contains("-disallow-use-new-driver") - let abiDescriptorPath: Path? = !usingLegacyDriver && - cbc.producer.isApplePlatform && - toolSpecInfo.toolFeatures.has(.emitABIDescriptor) && - compilationMode.canEmitABIDescriptor && - cbc.scope.evaluate(BuiltinMacros.SWIFT_INSTALL_MODULE_ABI_DESCRIPTOR) ? - Path(moduleFilePath.withoutSuffix + ".abi.json") : nil + let abiDescriptorPath: Path? = !usingLegacyDriver && cbc.producer.isApplePlatform && toolSpecInfo.toolFeatures.has(.emitABIDescriptor) && compilationMode.canEmitABIDescriptor && cbc.scope.evaluate(BuiltinMacros.SWIFT_INSTALL_MODULE_ABI_DESCRIPTOR) ? 
Path(moduleFilePath.withoutSuffix + ".abi.json") : nil if let abiDescriptorPath { moduleOutputPaths.append(abiDescriptorPath) } @@ -1961,8 +1966,9 @@ public final class SwiftCompilerSpec : CompilerSpec, SpecIdentifierType, SwiftDi let packageName = cbc.scope.evaluate(BuiltinMacros.SWIFT_PACKAGE_NAME) let emitPackageInterfacePath = "-emit-package-module-interface-path" if !packageName.isEmpty, - toolSpecInfo.toolFeatures.has(.emitPackageModuleInterfacePath), - LibSwiftDriver.supportsDriverFlag(spelled: emitPackageInterfacePath) { + toolSpecInfo.toolFeatures.has(.emitPackageModuleInterfacePath), + LibSwiftDriver.supportsDriverFlag(spelled: emitPackageInterfacePath) + { let path = Path(moduleFilePath.withoutSuffix + ".package.swiftinterface") packageModuleInterfaceFilePath = path args += [emitPackageInterfacePath, path.str] @@ -1971,8 +1977,7 @@ public final class SwiftCompilerSpec : CompilerSpec, SpecIdentifierType, SwiftDi packageModuleInterfaceFilePath = nil } } - } - else { + } else { moduleInterfaceFilePath = nil privateModuleInterfaceFilePath = nil packageModuleInterfaceFilePath = nil @@ -1985,19 +1990,22 @@ public final class SwiftCompilerSpec : CompilerSpec, SpecIdentifierType, SwiftDi let buildSessionFile = cbc.scope.evaluate(BuiltinMacros.CLANG_MODULES_BUILD_SESSION_FILE) if !buildSessionFile.isEmpty, - integratedDriverEnabled(scope: cbc.scope), - LibSwiftDriver.supportsDriverFlag(spelled: "-validate-clang-modules-once") && LibSwiftDriver.supportsDriverFlag(spelled: "-clang-build-session-file"), - cbc.scope.evaluate(BuiltinMacros.SWIFT_VALIDATE_CLANG_MODULES_ONCE_PER_BUILD_SESSION) { + integratedDriverEnabled(scope: cbc.scope), + LibSwiftDriver.supportsDriverFlag(spelled: "-validate-clang-modules-once") && LibSwiftDriver.supportsDriverFlag(spelled: "-clang-build-session-file"), + cbc.scope.evaluate(BuiltinMacros.SWIFT_VALIDATE_CLANG_MODULES_ONCE_PER_BUILD_SESSION) + { args += ["-validate-clang-modules-once", "-clang-build-session-file", buildSessionFile] } if toolSpecInfo.toolFeatures.has(.libraryLevel), - let libraryLevel = cbc.scope.evaluateAsString(BuiltinMacros.SWIFT_LIBRARY_LEVEL).nilIfEmpty { + let libraryLevel = cbc.scope.evaluateAsString(BuiltinMacros.SWIFT_LIBRARY_LEVEL).nilIfEmpty + { args += ["-library-level", libraryLevel] } if toolSpecInfo.toolFeatures.has(.packageName), - let packageName = cbc.scope.evaluate(BuiltinMacros.SWIFT_PACKAGE_NAME).nilIfEmpty { + let packageName = cbc.scope.evaluate(BuiltinMacros.SWIFT_PACKAGE_NAME).nilIfEmpty + { args += ["-package-name", packageName] } @@ -2016,8 +2024,7 @@ public final class SwiftCompilerSpec : CompilerSpec, SpecIdentifierType, SwiftDi // FIXME: Separate -I and its argument. 
args += ["-Xcc", "-I" + overridesHeadermapPath.str] } - } - else { + } else { objcHeaderFilePath = nil } @@ -2037,10 +2044,18 @@ public final class SwiftCompilerSpec : CompilerSpec, SpecIdentifierType, SwiftDi delegate.error(error) return } - cbc.producer.writeFileSpec.constructFileTasks(CommandBuildContext(producer: cbc.producer, scope: cbc.scope, inputs: [], output: protocolListPath), - delegate, contents: protocolListContents, permissions: nil, preparesForIndexing: true, additionalTaskOrderingOptions: [.immediate, .ignorePhaseOrdering]) - args += ["-Xfrontend", "-const-gather-protocols-file", - "-Xfrontend", protocolListPath.str] + cbc.producer.writeFileSpec.constructFileTasks( + CommandBuildContext(producer: cbc.producer, scope: cbc.scope, inputs: [], output: protocolListPath), + delegate, + contents: protocolListContents, + permissions: nil, + preparesForIndexing: true, + additionalTaskOrderingOptions: [.immediate, .ignorePhaseOrdering] + ) + args += [ + "-Xfrontend", "-const-gather-protocols-file", + "-Xfrontend", protocolListPath.str, + ] extraInputPaths.append(protocolListPath) } @@ -2097,14 +2112,16 @@ public final class SwiftCompilerSpec : CompilerSpec, SpecIdentifierType, SwiftDi let objcBridgingHeaderPath = Path(cbc.scope.evaluate(BuiltinMacros.SWIFT_OBJC_BRIDGING_HEADER)) if !objcBridgingHeaderPath.isEmpty { let objcBridgingHeaderNode = delegate.createNode(objcBridgingHeaderPath) - let flag = cbc.scope.evaluate(BuiltinMacros.SWIFT_BRIDGING_HEADER_IS_INTERNAL) + let flag = + cbc.scope.evaluate(BuiltinMacros.SWIFT_BRIDGING_HEADER_IS_INTERNAL) ? "-internal-import-bridging-header" : "-import-objc-header" args += [flag, objcBridgingHeaderNode.path.normalize().str] extraInputPaths.append(objcBridgingHeaderPath) let precompsPath = cbc.scope.evaluate(BuiltinMacros.SHARED_PRECOMPS_DIR) if !precompsPath.isEmpty, - !explicitModuleBuildEnabled { + !explicitModuleBuildEnabled + { args += ["-pch-output-dir", precompsPath.str] } } @@ -2141,7 +2158,7 @@ public final class SwiftCompilerSpec : CompilerSpec, SpecIdentifierType, SwiftDi "-Xfrontend", "-enable-implicit-dynamic", "-Xfrontend", "-enable-private-imports", "-Xfrontend", "-enable-dynamic-replacement-chaining", - "-Xfrontend", "-disable-previous-implementation-calls-in-dynamic-replacements" + "-Xfrontend", "-disable-previous-implementation-calls-in-dynamic-replacements", ]) } @@ -2188,8 +2205,14 @@ public final class SwiftCompilerSpec : CompilerSpec, SpecIdentifierType, SwiftDi inputs.append(input.absolutePath) // Compute and add the output object file path. - outputs.append(SwiftCompilerSpec.objectFileDirOutput(input: input, moduleBaseNameSuffix: compilationMode.moduleBaseNameSuffix, - objectFileDir: objectFileDir, fileExtension: ".\(outputObjectExtension)")) + outputs.append( + SwiftCompilerSpec.objectFileDirOutput( + input: input, + moduleBaseNameSuffix: compilationMode.moduleBaseNameSuffix, + objectFileDir: objectFileDir, + fileExtension: ".\(outputObjectExtension)" + ) + ) } return (inputs, outputs) }() @@ -2208,8 +2231,12 @@ public final class SwiftCompilerSpec : CompilerSpec, SpecIdentifierType, SwiftDi // Otherwise, there will be a const metadata file per-input (per-object-file-output) for input in cbc.inputs { // Compute and add the output supplementary const metadata file path. 
- let supplementaryConstMetadataOutputPath = SwiftCompilerSpec.objectFileDirOutput(input: input, moduleBaseNameSuffix: compilationMode.moduleBaseNameSuffix, - objectFileDir: objectFileDir, fileExtension: ".swiftconstvalues") + let supplementaryConstMetadataOutputPath = SwiftCompilerSpec.objectFileDirOutput( + input: input, + moduleBaseNameSuffix: compilationMode.moduleBaseNameSuffix, + objectFileDir: objectFileDir, + fileExtension: ".swiftconstvalues" + ) extraOutputPaths.append(supplementaryConstMetadataOutputPath) delegate.declareGeneratedSwiftConstMetadataFile(supplementaryConstMetadataOutputPath, architecture: arch) } @@ -2250,14 +2277,15 @@ public final class SwiftCompilerSpec : CompilerSpec, SpecIdentifierType, SwiftDi let additionalSignatureData = "SWIFTC: \(toolSpecInfo.swiftTag)" let environmentBindings = EnvironmentBindings(environment) - let indexingInputReplacements = Dictionary(uniqueKeysWithValues: cbc.inputs.compactMap { ftb -> (Path, Path)? in - if let repl = ftb.indexingInputReplacement { - return (ftb.absolutePath, repl) - } - else { - return nil + let indexingInputReplacements = Dictionary( + uniqueKeysWithValues: cbc.inputs.compactMap { ftb -> (Path, Path)? in + if let repl = ftb.indexingInputReplacement { + return (ftb.absolutePath, repl) + } else { + return nil + } } - }) + ) let dependencyInfoPath: Path? = await { guard await shouldEmitMakeStyleDependencies(cbc.producer, cbc.scope, delegate: delegate) else { @@ -2303,9 +2331,7 @@ public final class SwiftCompilerSpec : CompilerSpec, SpecIdentifierType, SwiftDi // 1. The platform must require it // 2. We must be compiling with debug info // 3. We must be emitting a module separately - if cbc.scope.evaluate(BuiltinMacros.PLATFORM_REQUIRES_SWIFT_MODULEWRAP) && - cbc.scope.evaluate(BuiltinMacros.GCC_GENERATE_DEBUGGING_SYMBOLS) && - emittingModuleSeparately { + if cbc.scope.evaluate(BuiltinMacros.PLATFORM_REQUIRES_SWIFT_MODULEWRAP) && cbc.scope.evaluate(BuiltinMacros.GCC_GENERATE_DEBUGGING_SYMBOLS) && emittingModuleSeparately { let moduleWrapOutput = Path(moduleFilePath.withoutSuffix + ".o") moduleOutputPaths.append(moduleWrapOutput) } @@ -2331,7 +2357,7 @@ public final class SwiftCompilerSpec : CompilerSpec, SpecIdentifierType, SwiftDi rule + [ variant, arch + compilationMode.moduleBaseNameSuffix, - self.identifier + self.identifier, ] } @@ -2346,7 +2372,7 @@ public final class SwiftCompilerSpec : CompilerSpec, SpecIdentifierType, SwiftDi cbc.scope.evaluate(BuiltinMacros.ASSETCATALOG_COMPILER_GENERATE_ASSET_SYMBOL_INDEX_PATH) ), objectFileDir: indexObjectFileDir ?? objectFileDir, - toolchains: cbc.producer.toolchains.map{ $0.identifier } + toolchains: cbc.producer.toolchains.map { $0.identifier } ), previewPayload: previewPayload, localizationPayload: localizationPayload, @@ -2470,13 +2496,13 @@ public final class SwiftCompilerSpec : CompilerSpec, SpecIdentifierType, SwiftDi // `fileSuffix` would only return .swiftinterface in this case. let privateModuleInterfaceSuffix = ".private.swiftinterface" if input.matchesFilenamePattern("*" + privateModuleInterfaceSuffix) { - inputFileSuffix = privateModuleInterfaceSuffix + inputFileSuffix = privateModuleInterfaceSuffix } // Check for .package.swiftinterface suffix of package module interfaces. let packageModuleInterfaceSuffix = ".package.swiftinterface" if input.matchesFilenamePattern("*" + packageModuleInterfaceSuffix) { - inputFileSuffix = packageModuleInterfaceSuffix + inputFileSuffix = packageModuleInterfaceSuffix } // Check for the longer suffix of abi descriptor. 
@@ -2552,13 +2578,13 @@ public final class SwiftCompilerSpec : CompilerSpec, SpecIdentifierType, SwiftDi let compilerLocation: LibSwiftDriver.CompilerLocation #if os(macOS) || !canImport(Darwin) - compilerLocation = .path(swiftc) + compilerLocation = .path(swiftc) #else - guard let libSwiftScanPath = cbc.producer.toolchains.map({ $0.path.join("usr/lib/swift/host/lib_InternalSwiftScan.dylib") }).first(where: { localFS.exists($0) }) else { - delegate.error("Could not find lib_InternalSwiftScan.dylib in toolchain") - return nil - } - compilerLocation = .library(libSwiftScanPath: libSwiftScanPath) + guard let libSwiftScanPath = cbc.producer.toolchains.map({ $0.path.join("usr/lib/swift/host/lib_InternalSwiftScan.dylib") }).first(where: { localFS.exists($0) }) else { + delegate.error("Could not find lib_InternalSwiftScan.dylib in toolchain") + return nil + } + compilerLocation = .library(libSwiftScanPath: libSwiftScanPath) #endif let explicitModuleBuildEnabled = await swiftExplicitModuleBuildEnabled(cbc.producer, cbc.scope, delegate) let verifyScannerDependencies = explicitModuleBuildEnabled && cbc.scope.evaluate(BuiltinMacros.SWIFT_DEPENDENCY_REGISTRATION_MODE) == .verifySwiftDependencyScanner @@ -2676,7 +2702,8 @@ public final class SwiftCompilerSpec : CompilerSpec, SpecIdentifierType, SwiftDi await constructSwiftCompilationTasks( compilationMode: .generateModule(triplePlatform: triplePlatform, tripleSuffix: tripleSuffix, moduleOnly: moduleOnly), inputMode: inputMode, - lookup: chainLookup) + lookup: chainLookup + ) } let hasEnabledIndexBuildArena = cbc.scope.evaluate(BuiltinMacros.INDEX_ENABLE_BUILD_ARENA) @@ -2708,7 +2735,8 @@ public final class SwiftCompilerSpec : CompilerSpec, SpecIdentifierType, SwiftDi case BuiltinMacros.DEPLOYMENT_TARGET_SETTING_NAME: if cbc.producer.sdkVariant?.isMacCatalyst == true { return cbc.scope.namespace.parseString( - BuildVersion.Platform.macCatalyst.deploymentTargetSettingName(infoLookup: cbc.producer)) + BuildVersion.Platform.macCatalyst.deploymentTargetSettingName(infoLookup: cbc.producer) + ) } return nil default: @@ -2727,7 +2755,8 @@ public final class SwiftCompilerSpec : CompilerSpec, SpecIdentifierType, SwiftDi await constructSwiftCompilationTasks( compilationMode: .generateModule(triplePlatform: triplePlatform, tripleSuffix: tripleSuffix, moduleOnly: true), inputMode: inputMode, - lookup: chainLookup) + lookup: chainLookup + ) } // Don't pass the module-only flag to the zippered variant if we passed it to the main variant above, because we don't want to create generated Objective-C headers from that task. It's unnecessary because zippering can't be distinguished at the API level, and would result in duplicate tasks creating the header file anyways. @@ -2747,8 +2776,7 @@ public final class SwiftCompilerSpec : CompilerSpec, SpecIdentifierType, SwiftDi return } compilationMode = .api - } - else { + } else { compilationMode = .compile } await constructSwiftCompilationTasks(compilationMode: compilationMode, inputMode: inputMode) @@ -2850,7 +2878,7 @@ public final class SwiftCompilerSpec : CompilerSpec, SpecIdentifierType, SwiftDi // Check if there should be an additional main symbol graph file for the zippered variant in this sub scope. 
if cbc.producer.platform?.familyName == "macOS", cbc.scope.evaluate(BuiltinMacros.IS_ZIPPERED), - let (_, triplePlatform, tripleSuffix) = zipperedSwiftModuleInfo(cbc.producer, arch: cbc.scope.evaluate(BuiltinMacros.CURRENT_ARCH)) + let (_, triplePlatform, tripleSuffix) = zipperedSwiftModuleInfo(cbc.producer, arch: cbc.scope.evaluate(BuiltinMacros.CURRENT_ARCH)) { paths.append(getMainSymbolGraphFile(cbc.scope, .generateModule(triplePlatform: triplePlatform, tripleSuffix: tripleSuffix, moduleOnly: false))) } @@ -2940,9 +2968,7 @@ public final class SwiftCompilerSpec : CompilerSpec, SpecIdentifierType, SwiftDi public static func shouldUseWholeModuleOptimization(for scope: MacroEvaluationScope) -> (result: Bool, isExplicitlyEnabled: Bool) { let isForAPI = scope.evaluate(BuiltinMacros.INSTALLAPI_MODE_ENABLED) let isExplicitlyEnabled = - scope.evaluate(BuiltinMacros.SWIFT_WHOLE_MODULE_OPTIMIZATION) || - (scope.evaluate(BuiltinMacros.SWIFT_COMPILATION_MODE) == "wholemodule") || - (scope.evaluate(BuiltinMacros.SWIFT_OPTIMIZATION_LEVEL) == "-Owholemodule") + scope.evaluate(BuiltinMacros.SWIFT_WHOLE_MODULE_OPTIMIZATION) || (scope.evaluate(BuiltinMacros.SWIFT_COMPILATION_MODE) == "wholemodule") || (scope.evaluate(BuiltinMacros.SWIFT_OPTIMIZATION_LEVEL) == "-Owholemodule") let isEnabled = isExplicitlyEnabled || isForAPI return (isEnabled, isExplicitlyEnabled) } @@ -2979,7 +3005,7 @@ public final class SwiftCompilerSpec : CompilerSpec, SpecIdentifierType, SwiftDi // Compute the executable path. let swiftc = swiftToolSpec.toolPath - var args: [[String]] = [] + var args: [[String]] = [] var inputPaths: [Path] = [] if !forTAPI { // TAPI can't use all of the additional linker options, and its spec has all of the build setting/option arguments that it can use. @@ -3001,17 +3027,27 @@ public final class SwiftCompilerSpec : CompilerSpec, SpecIdentifierType, SwiftDi if swiftLibraryPath.isEmpty { // Look next to the compiler and in the toolchains for one. if shouldStaticLinkStdlib { - swiftLibraryPath = findSearchPathForLibrary(executablePath: swiftc, possibleNames: [ - "swift_static/\(platformName)/lib\(swiftStdlibName).a", - "swift_static/lib\(swiftStdlibName).a", - "lib\(swiftStdlibName).a", - ], toolchains: producer.toolchains) ?? Path("") + swiftLibraryPath = + findSearchPathForLibrary( + executablePath: swiftc, + possibleNames: [ + "swift_static/\(platformName)/lib\(swiftStdlibName).a", + "swift_static/lib\(swiftStdlibName).a", + "lib\(swiftStdlibName).a", + ], + toolchains: producer.toolchains + ) ?? Path("") } else { - swiftLibraryPath = findSearchPathForLibrary(executablePath: swiftc, possibleNames: [ - "swift/\(platformName)/lib\(swiftStdlibName).\(dynamicLibraryExtension)", - "swift/lib\(swiftStdlibName).\(dynamicLibraryExtension)", - "lib\(swiftStdlibName).\(dynamicLibraryExtension)", - ], toolchains: producer.toolchains) ?? Path("") + swiftLibraryPath = + findSearchPathForLibrary( + executablePath: swiftc, + possibleNames: [ + "swift/\(platformName)/lib\(swiftStdlibName).\(dynamicLibraryExtension)", + "swift/lib\(swiftStdlibName).\(dynamicLibraryExtension)", + "lib\(swiftStdlibName).\(dynamicLibraryExtension)", + ], + toolchains: producer.toolchains + ) ?? 
Path("") } } @@ -3103,8 +3139,13 @@ public final class SwiftCompilerSpec : CompilerSpec, SpecIdentifierType, SwiftDi } private static func objectFileDirOutput(input: FileToBuild, moduleBaseNameSuffix: String, objectFileDir: Path, fileExtension: String) -> Path { - return objectFileDirOutput(inputPath: input.absolutePath, moduleBaseNameSuffix: moduleBaseNameSuffix, - uniquingSuffix: input.uniquingSuffix, objectFileDir: objectFileDir, fileExtension: fileExtension) + return objectFileDirOutput( + inputPath: input.absolutePath, + moduleBaseNameSuffix: moduleBaseNameSuffix, + uniquingSuffix: input.uniquingSuffix, + objectFileDir: objectFileDir, + fileExtension: fileExtension + ) } /// Generate the Swift output file map. @@ -3137,7 +3178,7 @@ public final class SwiftCompilerSpec : CompilerSpec, SpecIdentifierType, SwiftDi return (objectFilePath, fileMapEntry) } - // Add entries to the map indicating where to find the files the compiler generates. + // Add entries to the map indicating where to find the files the compiler generates. if !isUsingWholeModuleOptimization { // If we're not using WMO at all, then we produce an entry in the output file map for each file. for input in cbc.inputs { @@ -3201,8 +3242,7 @@ public final class SwiftCompilerSpec : CompilerSpec, SpecIdentifierType, SwiftDi // Add the global entry to the map. mapDict[""] = fileMapEntry } - } - else { + } else { // If we are using WMO, then we still generate entries for each file, but several files move to the global map since the source files aren't processed individually. for input in cbc.inputs { mapDict[input.absolutePath.str] = createCommonFileEntry(input: input).fileMapEntry @@ -3225,7 +3265,6 @@ public final class SwiftCompilerSpec : CompilerSpec, SpecIdentifierType, SwiftDi let emitModuleDependenciesFilePath = objectFileDir.join(primarySwiftBaseName + "-emit-module.d") fileMapEntry.emitModuleDependencies = emitModuleDependenciesFilePath.str - // The dependencies file, used to discover implicit dependencies. This file will be in Makefile format. let dependenciesFilePath = objectFileDir.join(primarySwiftBaseName + ".d") fileMapEntry.dependencies = dependenciesFilePath.str @@ -3280,8 +3319,13 @@ public final class SwiftCompilerSpec : CompilerSpec, SpecIdentifierType, SwiftDi let inputReplacementPath = payload.indexingPayload.inputReplacements[inputPath] ?? inputPath guard input.requestedSourceFiles.contains(inputReplacementPath) else { return nil } // FIXME: Getting the right uniquingSuffix requires having a FileToBuild, which we don't have here. I'm not sure whether ExecutableTask has enough information to be able to get the correct path to the output file when there are multiple input files with the same base name. - let outputFile = SwiftCompilerSpec.objectFileDirOutput(inputPath: inputPath, moduleBaseNameSuffix: "", uniquingSuffix: "", - objectFileDir: payload.indexingPayload.objectFileDir, fileExtension: ".o") + let outputFile = SwiftCompilerSpec.objectFileDirOutput( + inputPath: inputPath, + moduleBaseNameSuffix: "", + uniquingSuffix: "", + objectFileDir: payload.indexingPayload.objectFileDir, + fileExtension: ".o" + ) let indexingInfo: any SourceFileIndexingInfo if input.outputPathOnly { indexingInfo = OutputPathIndexingInfo(outputFile: outputFile, language: .swift) @@ -3405,7 +3449,7 @@ public final class SwiftCompilerSpec : CompilerSpec, SpecIdentifierType, SwiftDi "-explicit-module-build", // Strip until builder SDKs include a swift-driver with this flag. 
Do not remove without also removing -clang-build-session-file. - "-validate-clang-modules-once" + "-validate-clang-modules-once", ] { while let index = commandLine.firstIndex(of: arg) { commandLine.remove(at: index) @@ -3418,7 +3462,7 @@ public final class SwiftCompilerSpec : CompilerSpec, SpecIdentifierType, SwiftDi let argPrefix = "-Xfrontend" while let index = commandLine.firstIndex(of: arg) { guard index > 0, commandLine[index - 1] == argPrefix else { break } - commandLine.removeSubrange(index - 1 ... index) + commandLine.removeSubrange(index - 1...index) } } @@ -3430,7 +3474,7 @@ public final class SwiftCompilerSpec : CompilerSpec, SpecIdentifierType, SwiftDi "-enable-implicit-dynamic", "-enable-dynamic-replacement-chaining", "-enable-private-imports", - "-disable-previous-implementation-calls-in-dynamic-replacements" + "-disable-previous-implementation-calls-in-dynamic-replacements", ] { removeWithPrefix(arg) } @@ -3440,7 +3484,7 @@ public final class SwiftCompilerSpec : CompilerSpec, SpecIdentifierType, SwiftDi func removeWithParameter(_ arg: String) { while let index = commandLine.firstIndex(of: arg) { guard index + 1 < commandLine.count else { break } - commandLine.removeSubrange(index ... index + 1) + commandLine.removeSubrange(index...index + 1) } } for arg in [ @@ -3494,12 +3538,12 @@ public final class SwiftCompilerSpec : CompilerSpec, SpecIdentifierType, SwiftDi let argPrefix = "-Xfrontend" while let index = commandLine.firstIndex(of: arg) { guard index > 0, commandLine[index - 1] == argPrefix else { break } - commandLine.removeSubrange(index - 1 ... index + 2) + commandLine.removeSubrange(index - 1...index + 2) } } for arg in [ // Stripped because they emit sidecar data that Previews does not need. - "-const-gather-protocols-file", + "-const-gather-protocols-file" ] { removeWithPrefixAndParameter(arg) } @@ -3534,8 +3578,7 @@ public final class SwiftCompilerSpec : CompilerSpec, SpecIdentifierType, SwiftDi } else { newVFSOverlayPath = nil } - } - else { + } else { selectedInputPath = inputPath newVFSOverlayPath = nil commandLine.append(contentsOf: [inputPath.str]) @@ -3558,7 +3601,7 @@ public final class SwiftCompilerSpec : CompilerSpec, SpecIdentifierType, SwiftDi commandLine.append(contentsOf: [ "-Xfrontend", "-disable-modules-validate-system-headers", - ]) + ]) } // For XOJIT previews, we want the frontend (`swift-frontend`) invocation rather than the driver (`swiftc`) invocation, so ask libSwiftDriver for it and replace the command line with the result for propagation back to the request. @@ -3610,9 +3653,9 @@ public final class SwiftCompilerSpec : CompilerSpec, SpecIdentifierType, SwiftDi // The driver may have emitted an error even if it returned us a command line. In this case, don't return the command line since it likely won't work. 
if commandLine.isEmpty || outputDelegate.engine.hasErrors { #if canImport(os) - for diagnostic in outputDelegate.engine.diagnostics.filter({ $0.behavior == .error }) { - OSLog.log("Swift driver preview info error: \(diagnostic.data.description)") - } + for diagnostic in outputDelegate.engine.diagnostics.filter({ $0.behavior == .error }) { + OSLog.log("Swift driver preview info error: \(diagnostic.data.description)") + } #endif return [] } @@ -3643,19 +3686,21 @@ public final class SwiftCompilerSpec : CompilerSpec, SpecIdentifierType, SwiftDi return [] } - return [TaskGenerateLocalizationInfoOutput(producedStringsdataPaths: [ - LocalizationBuildPortion(effectivePlatformName: localizationPayload.effectivePlatformName, variant: localizationPayload.buildVariant, architecture: localizationPayload.architecture): stringsdataPaths - ])] + return [ + TaskGenerateLocalizationInfoOutput(producedStringsdataPaths: [ + LocalizationBuildPortion(effectivePlatformName: localizationPayload.effectivePlatformName, variant: localizationPayload.buildVariant, architecture: localizationPayload.architecture): stringsdataPaths + ]) + ] } /// Define the custom output parser. public override func customOutputParserType(for task: any ExecutableTask) -> (any TaskOutputParser.Type)? { switch task.ruleInfo.first { case "CompileSwiftSources", - "GenerateSwiftModule": - return SwiftCommandOutputParser.self + "GenerateSwiftModule": + return SwiftCommandOutputParser.self default: - return nil + return nil } } @@ -3687,75 +3732,79 @@ extension SwiftCompilerSpec { /// /// This is global and public because it is used by `SWBTaskExecution` and `CoreBasedTests`, which is the basis of many of our tests (so caching this info across tests is desirable), and which is used in some performance tests. If we discover that the info for a compiler at a given path can change during an instance of Swift Build (e.g., if a downloadable toolchain can replace an existing compiler) then this may need to be revisited. public func discoveredSwiftCompilerInfo(_ producer: any CommandProducer, _ delegate: any CoreClientTargetDiagnosticProducingDelegate, at toolPath: Path, blocklistsPathOverride: Path?) async throws -> DiscoveredSwiftCompilerToolSpecInfo { - try await producer.discoveredCommandLineToolSpecInfo(delegate, nil, [toolPath.str, "--version"], { executionResult in - let outputString = String(decoding: executionResult.stdout, as: UTF8.self) - - // Values we will parse. If we end up not parsing any values, then we return an empty info struct. - var swiftVersion: Version? = nil - var swiftTag: String? = nil - var swiftABIVersion: String? = nil - - let versionRegex = #/Swift version (?[\d.]+).*\((?.*)\)/# - let abiVersionRegex = #/ABI version: (?[\d.]+)/# - - // Iterate over each line and add any discovered info to the info object. - for line in outputString.components(separatedBy: "\n") { - if swiftVersion == nil { - if let groups = try versionRegex.firstMatch(in: line) { - swiftVersion = try? Version(String(groups.output.swiftVersion)) - swiftTag = String(groups.output.swiftTag) + try await producer.discoveredCommandLineToolSpecInfo( + delegate, + nil, + [toolPath.str, "--version"], + { executionResult in + let outputString = String(decoding: executionResult.stdout, as: UTF8.self) + + // Values we will parse. If we end up not parsing any values, then we return an empty info struct. + var swiftVersion: Version? = nil + var swiftTag: String? = nil + var swiftABIVersion: String? 
= nil + + let versionRegex = #/Swift version (?[\d.]+).*\((?.*)\)/# + let abiVersionRegex = #/ABI version: (?[\d.]+)/# + + // Iterate over each line and add any discovered info to the info object. + for line in outputString.components(separatedBy: "\n") { + if swiftVersion == nil { + if let groups = try versionRegex.firstMatch(in: line) { + swiftVersion = try? Version(String(groups.output.swiftVersion)) + swiftTag = String(groups.output.swiftTag) + } } - } - if swiftABIVersion == nil { - if let groups = try abiVersionRegex.firstMatch(in: line) { - swiftABIVersion = groups.output.abiVersion.trimmingCharacters(in: .whitespaces) + if swiftABIVersion == nil { + if let groups = try abiVersionRegex.firstMatch(in: line) { + swiftABIVersion = groups.output.abiVersion.trimmingCharacters(in: .whitespaces) + } } } - } - guard let swiftVersion, let swiftTag else { - throw StubError.error("Could not parse Swift versions from: \(outputString)") - } + guard let swiftVersion, let swiftTag else { + throw StubError.error("Could not parse Swift versions from: \(outputString)") + } - func getFeatures(at toolPath: Path) -> ToolFeatures { - let featuresPath = toolPath.dirname.dirname.join("share").join("swift").join("features.json") - do { - let features: ToolFeatures = try .init(path: featuresPath, fs: localFS) - if features.has(.experimentalAllowModuleWithCompilerErrors) { - // FIXME: Need to add this flag into Swift's features.json - return .init(features.flags.union([.vfsDirectoryRemap])) + func getFeatures(at toolPath: Path) -> ToolFeatures { + let featuresPath = toolPath.dirname.dirname.join("share").join("swift").join("features.json") + do { + let features: ToolFeatures = try .init(path: featuresPath, fs: localFS) + if features.has(.experimentalAllowModuleWithCompilerErrors) { + // FIXME: Need to add this flag into Swift's features.json + return .init(features.flags.union([.vfsDirectoryRemap])) + } + return features + } catch { + // FIXME: Consider about reporting this as error, lest users silently get surprising behavior if we fail to read the features file for any reason. + return ToolFeatures.none } - return features - } catch { - // FIXME: Consider about reporting this as error, lest users silently get surprising behavior if we fail to read the features file for any reason. - return ToolFeatures.none } - } - let blocklistPaths = CompilerSpec.findToolchainBlocklists(producer, directoryOverride: blocklistsPathOverride) + let blocklistPaths = CompilerSpec.findToolchainBlocklists(producer, directoryOverride: blocklistsPathOverride) - func getBlocklist(type: T.Type, toolchainFilename: String, delegate: any TargetDiagnosticProducingDelegate) -> T? { - return CompilerSpec.getBlocklist( - type: type, - toolchainFilename: toolchainFilename, - blocklistPaths: blocklistPaths, - fs: localFS, - delegate: delegate - ) - } + func getBlocklist(type: T.Type, toolchainFilename: String, delegate: any TargetDiagnosticProducingDelegate) -> T? 
{ + return CompilerSpec.getBlocklist( + type: type, + toolchainFilename: toolchainFilename, + blocklistPaths: blocklistPaths, + fs: localFS, + delegate: delegate + ) + } - var blocklists = SwiftBlocklists() - blocklists.explicitModules = getBlocklist(type: SwiftBlocklists.ExplicitModulesInfo.self, toolchainFilename: "swift-explicit-modules.json", delegate: delegate) - blocklists.installAPILazyTypecheck = getBlocklist(type: SwiftBlocklists.InstallAPILazyTypecheckInfo.self, toolchainFilename: "swift-lazy-installapi.json", delegate: delegate) - blocklists.caching = getBlocklist(type: SwiftBlocklists.CachingBlockList.self, toolchainFilename: "swift-caching.json", delegate: delegate) - blocklists.languageFeatureEnablement = getBlocklist(type: SwiftBlocklists.LanguageFeatureEnablementInfo.self, toolchainFilename: "swift-language-feature-enablement.json", delegate: delegate) - return DiscoveredSwiftCompilerToolSpecInfo(toolPath: toolPath, swiftVersion: swiftVersion, swiftTag: swiftTag, swiftABIVersion: swiftABIVersion, blocklists: blocklists, toolFeatures: getFeatures(at: toolPath)) - }) + var blocklists = SwiftBlocklists() + blocklists.explicitModules = getBlocklist(type: SwiftBlocklists.ExplicitModulesInfo.self, toolchainFilename: "swift-explicit-modules.json", delegate: delegate) + blocklists.installAPILazyTypecheck = getBlocklist(type: SwiftBlocklists.InstallAPILazyTypecheckInfo.self, toolchainFilename: "swift-lazy-installapi.json", delegate: delegate) + blocklists.caching = getBlocklist(type: SwiftBlocklists.CachingBlockList.self, toolchainFilename: "swift-caching.json", delegate: delegate) + blocklists.languageFeatureEnablement = getBlocklist(type: SwiftBlocklists.LanguageFeatureEnablementInfo.self, toolchainFilename: "swift-language-feature-enablement.json", delegate: delegate) + return DiscoveredSwiftCompilerToolSpecInfo(toolPath: toolPath, swiftVersion: swiftVersion, swiftTag: swiftTag, swiftABIVersion: swiftABIVersion, blocklists: blocklists, toolFeatures: getFeatures(at: toolPath)) + } + ) } extension SwiftCompilerSpec: GCCCompatibleCompilerCommandLineBuilder { - package func searchPathArguments(_ entry: SearchPathEntry, _ scope: MacroEvaluationScope) -> [String] - { + package func searchPathArguments(_ entry: SearchPathEntry, _ scope: MacroEvaluationScope) -> [String] { var args = [String]() switch entry { @@ -3769,7 +3818,7 @@ extension SwiftCompilerSpec: GCCCompatibleCompilerCommandLineBuilder { args.append(contentsOf: ["-isystem", path.str]) case .headerSearchPathSplitter: - args.append(contentsOf: ["-I-"]) // states that clang has never supported this option. + args.append(contentsOf: ["-I-"]) // states that clang has never supported this option. case .frameworkSearchPath(let path, let separateArgs): args.append(contentsOf: separateArgs ? ["-F", path.str] : ["-F" + path.str]) @@ -3779,8 +3828,7 @@ extension SwiftCompilerSpec: GCCCompatibleCompilerCommandLineBuilder { // We need to use -Fsystem for the public iOSSupport directories (Frameworks, SubFrameworks), so we special-case doing so. c.f. We *don't* pass -Fsystem to the equivalent PrivateFrameworks directory, for reasons described in . 
if path.ends(with: "System/iOSSupport/System/Library/Frameworks") || path.ends(with: "System/iOSSupport/System/Library/SubFrameworks") || scope.evaluate(BuiltinMacros.SYSTEM_FRAMEWORK_SEARCH_PATHS_USE_FSYSTEM) { args.append(contentsOf: ["-Fsystem", path.str]) - } - else { + } else { args.append(contentsOf: ["-F", path.str]) } @@ -3823,8 +3871,9 @@ public extension BuildPhaseWithBuildFiles { return buildFiles.contains { buildFile -> Bool in // We only need to consider file references. guard case let .reference(guid) = buildFile.buildableItem, - let reference = referenceLookupContext.lookupReference(for: guid), - let fileRef = reference as? FileReference else { return false } + let reference = referenceLookupContext.lookupReference(for: guid), + let fileRef = reference as? FileReference + else { return false } let path = filePathResolver.resolveAbsolutePath(fileRef) guard !filteringContext.isExcluded(path, filters: buildFile.platformFilters) else { return false } @@ -3914,10 +3963,13 @@ extension SwiftDiscoveredCommandLineToolSpecInfo { let compilerFileName = producer.hostOperatingSystem.imageFormat.executableName(basename: "swiftc") // Get the path to the compiler. - let path = scope.evaluate(BuiltinMacros.SWIFT_TOOLS_DIR).nilIfEmpty.map(Path.init)?.join(compilerFileName) - ?? scope.evaluate(BuiltinMacros.SWIFT_EXEC).nilIfEmpty.map({ $0.isAbsolute - ? $0 - : Path(producer.hostOperatingSystem.imageFormat.executableName(basename: $0.str)) }) + let path = + scope.evaluate(BuiltinMacros.SWIFT_TOOLS_DIR).nilIfEmpty.map(Path.init)?.join(compilerFileName) + ?? scope.evaluate(BuiltinMacros.SWIFT_EXEC).nilIfEmpty.map({ + $0.isAbsolute + ? $0 + : Path(producer.hostOperatingSystem.imageFormat.executableName(basename: $0.str)) + }) ?? Path(compilerFileName) let userSpecifiedBlocklists = scope.evaluate(BuiltinMacros.BLOCKLISTS_PATH).nilIfEmpty.map { Path($0) } let toolPath = self.resolveExecutablePath(producer, path) diff --git a/Sources/SWBCore/SpecImplementations/Tools/SwiftHeaderTool.swift b/Sources/SWBCore/SpecImplementations/Tools/SwiftHeaderTool.swift index df6c840b..f0b67fde 100644 --- a/Sources/SWBCore/SpecImplementations/Tools/SwiftHeaderTool.swift +++ b/Sources/SWBCore/SpecImplementations/Tools/SwiftHeaderTool.swift @@ -13,7 +13,7 @@ public import SWBUtil public import SWBMacro -public final class SwiftHeaderToolSpec : CommandLineToolSpec, SpecImplementationType, @unchecked Sendable { +public final class SwiftHeaderToolSpec: CommandLineToolSpec, SpecImplementationType, @unchecked Sendable { public static let identifier = "com.apple.build-tools.swift-header-tool" public override func resolveExecutionDescription(_ cbc: CommandBuildContext, _ delegate: any DiagnosticProducingDelegate, lookup: ((MacroDeclaration) -> MacroExpression?)? 
= nil) -> String { diff --git a/Sources/SWBCore/SpecImplementations/Tools/SwiftStdLibTool.swift b/Sources/SWBCore/SpecImplementations/Tools/SwiftStdLibTool.swift index 985a6988..f7edf365 100644 --- a/Sources/SWBCore/SpecImplementations/Tools/SwiftStdLibTool.swift +++ b/Sources/SWBCore/SpecImplementations/Tools/SwiftStdLibTool.swift @@ -13,7 +13,7 @@ import SWBUtil public import SWBMacro -public final class SwiftStdLibToolSpec : GenericCommandLineToolSpec, SpecIdentifierType, @unchecked Sendable { +public final class SwiftStdLibToolSpec: GenericCommandLineToolSpec, SpecIdentifierType, @unchecked Sendable { public static let identifier = "com.apple.build-tools.swift-stdlib-tool" override public func constructTasks(_ cbc: CommandBuildContext, _ delegate: any TaskGenerationDelegate) async { @@ -22,7 +22,7 @@ public final class SwiftStdLibToolSpec : GenericCommandLineToolSpec, SpecIdentif } /// Construct a new task to run the Swift standard library tool. - public func constructSwiftStdLibraryToolTask(_ cbc:CommandBuildContext, _ delegate: any TaskGenerationDelegate, foldersToScan: MacroStringListExpression?, filterForSwiftOS: Bool, backDeploySwiftConcurrency: Bool, backDeploySwiftSpan: Bool) async { + public func constructSwiftStdLibraryToolTask(_ cbc: CommandBuildContext, _ delegate: any TaskGenerationDelegate, foldersToScan: MacroStringListExpression?, filterForSwiftOS: Bool, backDeploySwiftConcurrency: Bool, backDeploySwiftSpan: Bool) async { precondition(cbc.outputs.isEmpty, "Unexpected output paths \(cbc.outputs.map { "'\($0.str)'" }) passed to \(type(of: self)).") let input = cbc.input @@ -32,19 +32,19 @@ public final class SwiftStdLibToolSpec : GenericCommandLineToolSpec, SpecIdentif // Create a lookup closure for build setting overrides. let lookup: ((MacroDeclaration) -> MacroExpression?) = - { macro in - switch macro - { - case BuiltinMacros.SWIFT_STDLIB_TOOL_FOLDERS_TO_SCAN: - return foldersToScan - - case BuiltinMacros.OutputPath, BuiltinMacros.OutputFile: - return wrapperPathMacroExpression - - default: - return nil + { macro in + switch macro + { + case BuiltinMacros.SWIFT_STDLIB_TOOL_FOLDERS_TO_SCAN: + return foldersToScan + + case BuiltinMacros.OutputPath, BuiltinMacros.OutputFile: + return wrapperPathMacroExpression + + default: + return nil + } } - } // Compute the rule info. 
let ruleInfo = defaultRuleInfo(cbc, delegate, lookup: lookup) @@ -92,6 +92,6 @@ public final class SwiftStdLibToolSpec : GenericCommandLineToolSpec, SpecIdentif let outputs = [delegate.createVirtualNode("CopySwiftStdlib \(wrapperPathString.str)")] - delegate.createTask(type: self, dependencyData: .dependencyInfo(dependencyInfoFilePath), ruleInfo: ruleInfo, commandLine: commandLine, environment: EnvironmentBindings(environment.map { ($0, $1) }), workingDirectory: cbc.producer.defaultWorkingDirectory, inputs: [ delegate.createNode(input.absolutePath) ], outputs: outputs, mustPrecede: [], action: action, execDescription: resolveExecutionDescription(cbc, delegate, lookup: lookup), enableSandboxing: enableSandboxing) + delegate.createTask(type: self, dependencyData: .dependencyInfo(dependencyInfoFilePath), ruleInfo: ruleInfo, commandLine: commandLine, environment: EnvironmentBindings(environment.map { ($0, $1) }), workingDirectory: cbc.producer.defaultWorkingDirectory, inputs: [delegate.createNode(input.absolutePath)], outputs: outputs, mustPrecede: [], action: action, execDescription: resolveExecutionDescription(cbc, delegate, lookup: lookup), enableSandboxing: enableSandboxing) } } diff --git a/Sources/SWBCore/SpecImplementations/Tools/SwiftSymbolExtractor.swift b/Sources/SWBCore/SpecImplementations/Tools/SwiftSymbolExtractor.swift index 8692342e..436dc60d 100644 --- a/Sources/SWBCore/SpecImplementations/Tools/SwiftSymbolExtractor.swift +++ b/Sources/SWBCore/SpecImplementations/Tools/SwiftSymbolExtractor.swift @@ -91,20 +91,20 @@ final class SwiftSymbolExtractor: GenericCompilerSpec, GCCCompatibleCompilerComm let userHeaderSearchPaths = cbc.scope.evaluate(BuiltinMacros.SYMBOL_GRAPH_EXTRACTOR_SEARCH_PATHS).map { return "-I" + $0 } - let defaultFrameworkSearchPaths = frameworkSearchPaths.searchPathArguments(for: self, scope:cbc.scope) + sparseSDKSearchPaths.searchPathArguments(for: self, scope: cbc.scope) + let defaultFrameworkSearchPaths = frameworkSearchPaths.searchPathArguments(for: self, scope: cbc.scope) + sparseSDKSearchPaths.searchPathArguments(for: self, scope: cbc.scope) // swift-symbolgraph-extract doesn't expect the `-iquote`, `-isystem`, or `-iframework` flags so we map those to `-I` and `-Fsystem` instead. 
let allSearchPaths = (defaultHeaderSearchPaths + userHeaderSearchPaths + defaultFrameworkSearchPaths) .map { (argument: String) -> String in - switch argument { - case "-iquote", "-isystem": - return "-I" - case "-iframework": - return "-Fsystem" - default: - return argument + switch argument { + case "-iquote", "-isystem": + return "-I" + case "-iframework": + return "-Fsystem" + default: + return argument + } } - } return cbc.scope.namespace.parseLiteralStringList(allSearchPaths) diff --git a/Sources/SWBCore/SpecImplementations/Tools/SymlinkTool.swift b/Sources/SWBCore/SpecImplementations/Tools/SymlinkTool.swift index 0d889a54..10838f8a 100644 --- a/Sources/SWBCore/SpecImplementations/Tools/SymlinkTool.swift +++ b/Sources/SWBCore/SpecImplementations/Tools/SymlinkTool.swift @@ -12,7 +12,7 @@ public import SWBUtil -public final class SymlinkToolSpec : CommandLineToolSpec, SpecIdentifierType, @unchecked Sendable { +public final class SymlinkToolSpec: CommandLineToolSpec, SpecIdentifierType, @unchecked Sendable { public static let identifier = "com.apple.tools.symlink" override public func constructTasks(_ cbc: CommandBuildContext, _ delegate: any TaskGenerationDelegate) async { @@ -42,10 +42,14 @@ public final class SymlinkToolSpec : CommandLineToolSpec, SpecIdentifierType, @u let commandLine = ["/bin/ln", "-sfh", toPath, outputPath.str] delegate.createTask( - type: self, ruleInfo: ["SymLink", outputPath.str, toPath], - commandLine: commandLine, environment: environmentFromSpec(cbc, delegate), + type: self, + ruleInfo: ["SymLink", outputPath.str, toPath], + commandLine: commandLine, + environment: environmentFromSpec(cbc, delegate), workingDirectory: cbc.producer.defaultWorkingDirectory, - inputs: [], outputs: [ outputPath ], action: nil, + inputs: [], + outputs: [outputPath], + action: nil, execDescription: resolveExecutionDescription(cbc, delegate), preparesForIndexing: cbc.preparesForIndexing, enableSandboxing: enableSandboxing, diff --git a/Sources/SWBCore/SpecImplementations/Tools/TAPISymbolExtractor.swift b/Sources/SWBCore/SpecImplementations/Tools/TAPISymbolExtractor.swift index 6ed9acc8..7e3043c1 100644 --- a/Sources/SWBCore/SpecImplementations/Tools/TAPISymbolExtractor.swift +++ b/Sources/SWBCore/SpecImplementations/Tools/TAPISymbolExtractor.swift @@ -26,7 +26,7 @@ final public class TAPISymbolExtractor: GenericCompilerSpec, GCCCompatibleCompil /// - cbc: The command build context /// - clangCompilerInfo: Optional information about the installed copy of clang. Pass nil if clang is not being used. /// - static public func shouldConstructSymbolExtractionTask(_ cbc: CommandBuildContext, clangCompilerInfo: (any DiscoveredCommandLineToolSpecInfo)? ) async -> Bool { + static public func shouldConstructSymbolExtractionTask(_ cbc: CommandBuildContext, clangCompilerInfo: (any DiscoveredCommandLineToolSpecInfo)?) 
async -> Bool { let (canGenerateCXXTasks, hasPlusPlusHeaders) = await (canGenerateCXXTasks(cbc), hasPlusPlusHeaders(cbc)) return ((supportsPlusPlus(cbc: cbc, clangCompilerInfo: clangCompilerInfo) && canGenerateCXXTasks) || !hasPlusPlusHeaders) && DocumentationCompilerSpec.shouldConstructSymbolGenerationTask(cbc) } @@ -98,9 +98,9 @@ final public class TAPISymbolExtractor: GenericCompilerSpec, GCCCompatibleCompil } return predicate(foundFileType) }) - || headers.publicHeaders.contains(where: { predicate(cbc.producer.lookupFileType(identifier: $0.fileTypeIdentifier)) }) - || headers.privateHeaders.contains(where: { predicate(cbc.producer.lookupFileType(identifier: $0.fileTypeIdentifier)) }) - || headers.projectHeaders.contains(where: { predicate(cbc.producer.lookupFileType(identifier: $0.fileTypeIdentifier)) }) + || headers.publicHeaders.contains(where: { predicate(cbc.producer.lookupFileType(identifier: $0.fileTypeIdentifier)) }) + || headers.privateHeaders.contains(where: { predicate(cbc.producer.lookupFileType(identifier: $0.fileTypeIdentifier)) }) + || headers.projectHeaders.contains(where: { predicate(cbc.producer.lookupFileType(identifier: $0.fileTypeIdentifier)) }) } /// A list of headers to consider for documentation @@ -149,9 +149,9 @@ final public class TAPISymbolExtractor: GenericCompilerSpec, GCCCompatibleCompil for buildFile in target.headersBuildPhase?.buildFiles ?? [] { guard headerVisibilityToProcess.contains(buildFile.headerVisibility), - case let .reference(guid) = buildFile.buildableItem, - let fileRef = cbc.producer.lookupReference(for: guid) as? FileReference, - let path = fileRef.path.asLiteralString + case let .reference(guid) = buildFile.buildableItem, + let fileRef = cbc.producer.lookupReference(for: guid) as? FileReference, + let path = fileRef.path.asLiteralString else { continue } fileReferencePlatformFilters[fileRef] = buildFile.platformFilters @@ -174,9 +174,9 @@ final public class TAPISymbolExtractor: GenericCompilerSpec, GCCCompatibleCompil // The developer has opted in to build multi-language documentation. return SwiftCompilerSpec.generatedObjectiveCHeaderOutputPath(cbc.scope) } - guard willProcessAnyHeaders, // Only process the Swift interface header if some other headers are also processed ... - // ... and if the target has Swift code that can end up in the Swift interface header. - target.sourcesBuildPhase?.containsSwiftSources(cbc.producer, cbc.producer, cbc.scope, cbc.producer.filePathResolver) == true + guard willProcessAnyHeaders, // Only process the Swift interface header if some other headers are also processed ... + // ... and if the target has Swift code that can end up in the Swift interface header. + target.sourcesBuildPhase?.containsSwiftSources(cbc.producer, cbc.producer, cbc.scope, cbc.producer.filePathResolver) == true else { return nil } @@ -208,7 +208,7 @@ final public class TAPISymbolExtractor: GenericCompilerSpec, GCCCompatibleCompil // // If this target is an executable, continue with a heuristic for finding what headers to extract symbol information from for documentation. guard case .executable = DocumentationCompilerSpec.DocumentationType(from: cbc), - let cFamilySourceFileType = cbc.producer.lookupFileType(identifier: "sourcecode.c") + let cFamilySourceFileType = cbc.producer.lookupFileType(identifier: "sourcecode.c") else { // Otherwise, if it's not an executable, don't look for headers to process for documentation. 
return .init(publicHeaders: [], privateHeaders: [], projectHeaders: [], generatedSwiftHeader: generatedSwiftHeaderPath(willProcessAnyHeaders: false), headerBuildFiles: []) @@ -226,9 +226,9 @@ final public class TAPISymbolExtractor: GenericCompilerSpec, GCCCompatibleCompil for file in target.sourcesBuildPhase?.buildFiles ?? [] { guard case let .reference(guid) = file.buildableItem, - let fileRef = cbc.producer.lookupReference(for: guid) as? FileReference, - let fileType = cbc.producer.lookupFileType(identifier: fileRef.fileTypeIdentifier), - fileType.conformsTo(cFamilySourceFileType) + let fileRef = cbc.producer.lookupReference(for: guid) as? FileReference, + let fileType = cbc.producer.lookupFileType(identifier: fileRef.fileTypeIdentifier), + fileType.conformsTo(cFamilySourceFileType) else { continue } let path = cbc.producer.filePathResolver.resolveAbsolutePath(fileRef) @@ -277,12 +277,12 @@ final public class TAPISymbolExtractor: GenericCompilerSpec, GCCCompatibleCompil var commandLine = [ clangPath.str, - "-extract-api" + "-extract-api", ] - if let compatibilitySymbolsPath = swiftCompilerInfo?.toolPath.dirname.dirname.join("share").join("swift").join("compatibility-symbols"), let ignoresFlagAvailable = clangCompilerInfo?.hasFeature("extract-api-ignores"), - localFS.exists(compatibilitySymbolsPath) && ignoresFlagAvailable { + localFS.exists(compatibilitySymbolsPath) && ignoresFlagAvailable + { commandLine.append("--extract-api-ignores=\(compatibilitySymbolsPath.str)") } @@ -302,7 +302,7 @@ final public class TAPISymbolExtractor: GenericCompilerSpec, GCCCompatibleCompil let userHeaderSearchPaths = cbc.scope.evaluate(BuiltinMacros.TAPI_EXTRACT_API_SEARCH_PATHS).map { return "-I" + $0 } - let defaultFrameworkSearchPaths = frameworkSearchPaths.searchPathArguments(for: self, scope:cbc.scope) + sparseSDKSearchPaths.searchPathArguments(for: self, scope: cbc.scope) + let defaultFrameworkSearchPaths = frameworkSearchPaths.searchPathArguments(for: self, scope: cbc.scope) + sparseSDKSearchPaths.searchPathArguments(for: self, scope: cbc.scope) let moduleMapSearchPaths = dependenciesModuleMaps.map { "-fmodule-map-file=\($0.str)" } @@ -342,7 +342,8 @@ final public class TAPISymbolExtractor: GenericCompilerSpec, GCCCompatibleCompil } else { commandLine += headerList.compactMap { if let language = $0.language, - GCCCompatibleLanguageDialect(dialectName: language).isPlusPlus { + GCCCompatibleLanguageDialect(dialectName: language).isPlusPlus + { return nil } return $0.path.str @@ -370,8 +371,9 @@ final public class TAPISymbolExtractor: GenericCompilerSpec, GCCCompatibleCompil return } - let inputs = cbc.inputs.map({ delegate.createNode($0.absolutePath) }) as [PlannedPathNode] - + cbc.commandOrderingInputs + let inputs = + cbc.inputs.map({ delegate.createNode($0.absolutePath) }) as [PlannedPathNode] + + cbc.commandOrderingInputs let symbolGraphFile = Self.getMainSymbolGraphFile(cbc.scope) @@ -393,7 +395,7 @@ final public class TAPISymbolExtractor: GenericCompilerSpec, GCCCompatibleCompil let userHeaderSearchPaths = cbc.scope.evaluate(BuiltinMacros.TAPI_EXTRACT_API_SEARCH_PATHS).map { return "-I" + $0 } - let defaultFrameworkSearchPaths = frameworkSearchPaths.searchPathArguments(for: self, scope:cbc.scope) + sparseSDKSearchPaths.searchPathArguments(for: self, scope: cbc.scope) + let defaultFrameworkSearchPaths = frameworkSearchPaths.searchPathArguments(for: self, scope: cbc.scope) + sparseSDKSearchPaths.searchPathArguments(for: self, scope: cbc.scope) let moduleMapSearchPaths = dependenciesModuleMaps.map { 
"-fmodule-map-file=\($0.str)" } diff --git a/Sources/SWBCore/SpecImplementations/Tools/TAPITools.swift b/Sources/SWBCore/SpecImplementations/Tools/TAPITools.swift index f21a525d..97f02e9e 100644 --- a/Sources/SWBCore/SpecImplementations/Tools/TAPITools.swift +++ b/Sources/SWBCore/SpecImplementations/Tools/TAPITools.swift @@ -19,7 +19,7 @@ public struct DiscoveredTAPIToolSpecInfo: DiscoveredCommandLineToolSpecInfo { public let toolVersion: Version? } -public final class TAPIToolSpec : GenericCommandLineToolSpec, GCCCompatibleCompilerCommandLineBuilder, SpecIdentifierType, @unchecked Sendable { +public final class TAPIToolSpec: GenericCommandLineToolSpec, GCCCompatibleCompilerCommandLineBuilder, SpecIdentifierType, @unchecked Sendable { public static let identifier = "com.apple.build-tools.tapi.installapi" public static let dSYMSupportRequiredVersion = try! FuzzyVersion("1500.*.7") @@ -48,12 +48,12 @@ public final class TAPIToolSpec : GenericCommandLineToolSpec, GCCCompatibleCompi let frameworkSearchPaths = GCCCompatibleCompilerSpecSupport.frameworkSearchPathArguments(cbc.producer, cbc.scope) let sparseSDKSearchPaths = GCCCompatibleCompilerSpecSupport.sparseSDKSearchPathArguments(cbc.producer.sparseSDKs, headerSearchPaths.headerSearchPaths, frameworkSearchPaths.frameworkSearchPaths) - let defaultHeaderSearchPaths = headerSearchPaths.searchPathArguments(for:self, scope:cbc.scope) + let defaultHeaderSearchPaths = headerSearchPaths.searchPathArguments(for: self, scope: cbc.scope) let userHeaderSearchPaths = cbc.scope.evaluate(BuiltinMacros.TAPI_HEADER_SEARCH_PATHS, lookup: lookup).map { return "-I" + $0 } - let defaultFrameworkSearchPaths = frameworkSearchPaths.searchPathArguments(for: self, scope:cbc.scope) + sparseSDKSearchPaths.searchPathArguments(for:self, scope:cbc.scope) + let defaultFrameworkSearchPaths = frameworkSearchPaths.searchPathArguments(for: self, scope: cbc.scope) + sparseSDKSearchPaths.searchPathArguments(for: self, scope: cbc.scope) return cbc.scope.namespace.parseLiteralStringList(defaultHeaderSearchPaths + userHeaderSearchPaths + defaultFrameworkSearchPaths) @@ -82,7 +82,7 @@ public final class TAPIToolSpec : GenericCommandLineToolSpec, GCCCompatibleCompi let lookup: ((MacroDeclaration) -> MacroExpression?) = { macro in switch macro { case BuiltinMacros.TAPI_INPUTS: - return useOnlyFilelist ? nil : scope.namespace.parseLiteralStringList(cbc.inputs.map{ $0.absolutePath.str }) + return useOnlyFilelist ? nil : scope.namespace.parseLiteralStringList(cbc.inputs.map { $0.absolutePath.str }) case BuiltinMacros.BuiltBinaryPath: return scope.namespace.parseLiteralString(builtBinaryPath?.normalize().str ?? "") @@ -107,9 +107,10 @@ public final class TAPIToolSpec : GenericCommandLineToolSpec, GCCCompatibleCompi var commandLine: [String] = await commandLineFromTemplate(cbc, delegate, optionContext: toolInfo, lookup: lookup).map(\.asString) // Compute inputs. - var inputs = cbc.inputs.map({ delegate.createNode($0.absolutePath) }) as [PlannedPathNode] - + (fileListPath.flatMap({ [delegate.createNode($0)] }) ?? []) as [PlannedPathNode] - + generatedTBDFiles.map({ delegate.createNode($0) }) as [PlannedPathNode] + var inputs = + cbc.inputs.map({ delegate.createNode($0.absolutePath) }) as [PlannedPathNode] + + (fileListPath.flatMap({ [delegate.createNode($0)] }) ?? []) as [PlannedPathNode] + + generatedTBDFiles.map({ delegate.createNode($0) }) as [PlannedPathNode] + cbc.commandOrderingInputs // Compute swift aware arguments for installapi consumption and verification. 
@@ -124,8 +125,10 @@ public final class TAPIToolSpec : GenericCommandLineToolSpec, GCCCompatibleCompi if !scope.evaluate(BuiltinMacros.SWIFT_OBJC_INTERFACE_HEADER_NAME).isEmpty && scope.evaluate(BuiltinMacros.SWIFT_INSTALL_OBJC_HEADER) { let generatedHeaderPath = SwiftCompilerSpec.generatedObjectiveCHeaderOutputPath(scope).str - commandLine.append(contentsOf: ["-exclude-public-header", - generatedHeaderPath]) + commandLine.append(contentsOf: [ + "-exclude-public-header", + generatedHeaderPath, + ]) inputs.append(delegate.createNode(Path(generatedHeaderPath))) } } @@ -138,7 +141,8 @@ public final class TAPIToolSpec : GenericCommandLineToolSpec, GCCCompatibleCompi inputs.append(contentsOf: (dsymPath.flatMap({ [delegate.createNode($0)] }) ?? []) as [PlannedPathNode]) if let version = toolInfo?.toolVersion, version >= TAPIToolSpec.dSYMSupportRequiredVersion, - dsymPath != nil, scope.evaluate(BuiltinMacros.TAPI_READ_DSYM) { + dsymPath != nil, scope.evaluate(BuiltinMacros.TAPI_READ_DSYM) + { let dsymBundle = scope.evaluate(BuiltinMacros.DWARF_DSYM_FOLDER_PATH) .join(scope.evaluate(BuiltinMacros.DWARF_DSYM_FILE_NAME)) commandLine.append(contentsOf: ["--dsym=" + dsymBundle.str]) @@ -166,7 +170,7 @@ public final class TAPIToolSpec : GenericCommandLineToolSpec, GCCCompatibleCompi } } -final class TAPIMergeToolSpec : CommandLineToolSpec, SpecImplementationType, @unchecked Sendable { +final class TAPIMergeToolSpec: CommandLineToolSpec, SpecImplementationType, @unchecked Sendable { static let identifier = "com.apple.build-tools.tapi.merge" class func construct(registry: SpecRegistry, proxy: SpecProxy) -> Spec { @@ -181,7 +185,7 @@ final class TAPIMergeToolSpec : CommandLineToolSpec, SpecImplementationType, @un // FIXME: We don't have a spec to work with here, we should get one. var commandLine = [resolveExecutablePath(cbc.producer, Path(cbc.scope.tapiExecutablePath())).str] commandLine += ["archive", "--merge", "--allow-arch-merges"] - commandLine += cbc.inputs.map{ $0.absolutePath.str } + commandLine += cbc.inputs.map { $0.absolutePath.str } commandLine += ["-o", outputPath.str] let outputs: [any PlannedNode] = [delegate.createNode(outputPath)] + cbc.commandOrderingOutputs diff --git a/Sources/SWBCore/SpecImplementations/Tools/TiffUtilTool.swift b/Sources/SWBCore/SpecImplementations/Tools/TiffUtilTool.swift index ed93f968..80aca6f7 100644 --- a/Sources/SWBCore/SpecImplementations/Tools/TiffUtilTool.swift +++ b/Sources/SWBCore/SpecImplementations/Tools/TiffUtilTool.swift @@ -13,7 +13,7 @@ import SWBUtil import SWBMacro -final class TiffUtilToolSpec : GenericCompilerSpec, SpecIdentifierType, @unchecked Sendable { +final class TiffUtilToolSpec: GenericCompilerSpec, SpecIdentifierType, @unchecked Sendable { static let identifier = "com.apple.compilers.tiffutil" private class func deferredSpec(_ cbc: CommandBuildContext) -> CommandLineToolSpec? 
{ diff --git a/Sources/SWBCore/SpecImplementations/Tools/TouchTool.swift b/Sources/SWBCore/SpecImplementations/Tools/TouchTool.swift index 57ab5f1a..e41dc526 100644 --- a/Sources/SWBCore/SpecImplementations/Tools/TouchTool.swift +++ b/Sources/SWBCore/SpecImplementations/Tools/TouchTool.swift @@ -13,7 +13,7 @@ import SWBUtil import SWBMacro -final class TouchToolSpec : CommandLineToolSpec, SpecIdentifierType, @unchecked Sendable { +final class TouchToolSpec: CommandLineToolSpec, SpecIdentifierType, @unchecked Sendable { static let identifier = "com.apple.tools.touch" override func constructTasks(_ cbc: CommandBuildContext, _ delegate: any TaskGenerationDelegate) async { diff --git a/Sources/SWBCore/SpecImplementations/Tools/UnifdefTool.swift b/Sources/SWBCore/SpecImplementations/Tools/UnifdefTool.swift index 99b2517b..3ccc70a8 100644 --- a/Sources/SWBCore/SpecImplementations/Tools/UnifdefTool.swift +++ b/Sources/SWBCore/SpecImplementations/Tools/UnifdefTool.swift @@ -13,14 +13,15 @@ public import SWBUtil import SWBMacro -public final class UnifdefToolSpec : CommandLineToolSpec, SpecIdentifierType, @unchecked Sendable { +public final class UnifdefToolSpec: CommandLineToolSpec, SpecIdentifierType, @unchecked Sendable { public static let identifier = "public.build-task.unifdef" public override func evaluatedOutputs(_ cbc: CommandBuildContext, _ delegate: any DiagnosticProducingDelegate) -> [(path: Path, isDirectory: Bool)]? { let outputs = cbc.outputs.map { ($0, false) } - return outputs.nilIfEmpty ?? cbc.resourcesDir.map { resourcesDir in - cbc.inputs.map { (resourcesDir.join($0.absolutePath.basename), false) } - } + return outputs.nilIfEmpty + ?? cbc.resourcesDir.map { resourcesDir in + cbc.inputs.map { (resourcesDir.join($0.absolutePath.basename), false) } + } } public override func constructTasks(_ cbc: CommandBuildContext, _ delegate: any TaskGenerationDelegate) async { diff --git a/Sources/SWBCore/SpecImplementations/Tools/WriteFile.swift b/Sources/SWBCore/SpecImplementations/Tools/WriteFile.swift index eccf5cf1..3eb66967 100644 --- a/Sources/SWBCore/SpecImplementations/Tools/WriteFile.swift +++ b/Sources/SWBCore/SpecImplementations/Tools/WriteFile.swift @@ -32,7 +32,7 @@ public final class WriteFileSpec: CommandLineToolSpec, SpecImplementationType, @ let execDescription = resolveExecutionDescription(cbc, delegate) let action = delegate.taskActionCreationDelegate.createAuxiliaryFileTaskAction(AuxiliaryFileTaskActionContext(output: outputNode.path, input: fileContentsPath, permissions: permissions, forceWrite: forceWrite, diagnostics: diagnostics, logContents: logContents)) let ruleName = ruleName ?? 
"WriteAuxiliaryFile" - delegate.createTask(type: self, ruleInfo: [ruleName, outputNode.path.str], commandLine: ["write-file", outputNode.path.str], environment: EnvironmentBindings(), workingDirectory: cbc.producer.defaultWorkingDirectory, inputs: cbc.commandOrderingInputs, outputs: [ outputNode ], mustPrecede: [], action: action, execDescription: execDescription, preparesForIndexing: preparesForIndexing, enableSandboxing: enableSandboxing, additionalTaskOrderingOptions: additionalTaskOrderingOptions, priority: .unblocksDownstreamTasks) + delegate.createTask(type: self, ruleInfo: [ruleName, outputNode.path.str], commandLine: ["write-file", outputNode.path.str], environment: EnvironmentBindings(), workingDirectory: cbc.producer.defaultWorkingDirectory, inputs: cbc.commandOrderingInputs, outputs: [outputNode], mustPrecede: [], action: action, execDescription: execDescription, preparesForIndexing: preparesForIndexing, enableSandboxing: enableSandboxing, additionalTaskOrderingOptions: additionalTaskOrderingOptions, priority: .unblocksDownstreamTasks) return fileContentsPath } } diff --git a/Sources/SWBCore/TargetDependencyResolver.swift b/Sources/SWBCore/TargetDependencyResolver.swift index b3e37419..72185ac3 100644 --- a/Sources/SWBCore/TargetDependencyResolver.swift +++ b/Sources/SWBCore/TargetDependencyResolver.swift @@ -50,7 +50,7 @@ public struct ResolvedTargetDependency: Hashable, Encodable, Sendable { } extension ResolvedTargetDependency: Serializable { - public func serialize(to serializer: T) where T : SWBUtil.Serializer { + public func serialize(to serializer: T) where T: SWBUtil.Serializer { serializer.serializeAggregate(2) { serializer.serialize(target) serializer.serialize(reason) @@ -110,12 +110,12 @@ public struct TargetBuildGraph: TargetGraph, Sendable { /// The result closure guarantees that all targets a target depends on appear in the returned array before that target. Any detected dependency cycles will be broken. public init(workspaceContext: WorkspaceContext, buildRequest: BuildRequest, buildRequestContext: BuildRequestContext, delegate: any TargetDependencyResolverDelegate, purpose: Purpose = .build) async { let (allTargets, targetDependencies, targetsToLinkedReferencesToProducingTargets, dynamicallyBuildingTargets) = - await MacroNamespace.withExpressionInterningEnabled { - await buildRequestContext.keepAliveSettingsCache { - let resolver = TargetDependencyResolver(workspaceContext: workspaceContext, buildRequest: buildRequest, buildRequestContext: buildRequestContext, delegate: delegate, purpose: purpose) - return await resolver.computeGraph() + await MacroNamespace.withExpressionInterningEnabled { + await buildRequestContext.keepAliveSettingsCache { + let resolver = TargetDependencyResolver(workspaceContext: workspaceContext, buildRequest: buildRequest, buildRequestContext: buildRequestContext, delegate: delegate, purpose: purpose) + return await resolver.computeGraph() + } } - } self.init(workspaceContext: workspaceContext, buildRequest: buildRequest, buildRequestContext: buildRequestContext, allTargets: allTargets, targetDependencies: targetDependencies, targetsToLinkedReferencesToProducingTargets: targetsToLinkedReferencesToProducingTargets, dynamicallyBuildingTargets: dynamicallyBuildingTargets) } @@ -196,31 +196,41 @@ public struct TargetBuildGraph: TargetGraph, Sendable { } private var _dependencyGraphDiagnostic: LazyCache = LazyCache { instance in // .allTargets is sorted in topological order. 
Reverse this so that targets appear before their dependencies in the list. - return Diagnostic(behavior: .note, location: .unknown, data: DiagnosticData("Target dependency graph (\(instance.allTargets.count) target" + (instance.allTargets.count > 1 ? "s" : "") + ")"), childDiagnostics: instance.allTargets.reversed().map { configuredTarget in - let project = instance.workspaceContext.workspace.project(for: configuredTarget.target) - let resolvedDependencies = instance.resolvedDependencies(of: configuredTarget) - let parts = [ - "Target '\(configuredTarget.target.name)' in project '\(project.name)'", - instance.buildRequest.shouldSkipExecution(target: configuredTarget.target) ? " (skipped due to 'Skip Dependencies' scheme option)" : "", - resolvedDependencies.isEmpty ? " (no dependencies)" : "", - ] - return Diagnostic(behavior: .note, location: .unknown, data: DiagnosticData(parts.joined(separator: "")), childDiagnostics: resolvedDependencies.map { dependency in - let project = instance.workspaceContext.workspace.project(for: dependency.target.target) - let dependencyDescription = "target '\(dependency.target.target.name)' in project '\(project.name)'" - let dependencyString: String - switch dependency.reason { - case .explicit: - dependencyString = "Explicit dependency on \(dependencyDescription)" - case .implicitBuildPhaseLinkage(filename: let filename, buildableItem: _, buildPhase: let buildPhase): - dependencyString = "Implicit dependency on \(dependencyDescription) via file '\(filename)' in build phase '\(buildPhase)'" - case .implicitBuildSetting(settingName: let settingName, options: let options): - dependencyString = "Implicit dependency on \(dependencyDescription) via options '\(options.joined(separator: " "))' in build setting '\(settingName)'" - case .impliedByTransitiveDependencyViaRemovedTargets(let intermediateTargetName): - dependencyString = "Dependency on \(dependencyDescription) via transitive dependency through '\(intermediateTargetName)'" - } - return Diagnostic(behavior: .note, location: .unknown, data: DiagnosticData("➜ " + dependencyString)) - }) - }) + return Diagnostic( + behavior: .note, + location: .unknown, + data: DiagnosticData("Target dependency graph (\(instance.allTargets.count) target" + (instance.allTargets.count > 1 ? "s" : "") + ")"), + childDiagnostics: instance.allTargets.reversed().map { configuredTarget in + let project = instance.workspaceContext.workspace.project(for: configuredTarget.target) + let resolvedDependencies = instance.resolvedDependencies(of: configuredTarget) + let parts = [ + "Target '\(configuredTarget.target.name)' in project '\(project.name)'", + instance.buildRequest.shouldSkipExecution(target: configuredTarget.target) ? " (skipped due to 'Skip Dependencies' scheme option)" : "", + resolvedDependencies.isEmpty ? 
" (no dependencies)" : "", + ] + return Diagnostic( + behavior: .note, + location: .unknown, + data: DiagnosticData(parts.joined(separator: "")), + childDiagnostics: resolvedDependencies.map { dependency in + let project = instance.workspaceContext.workspace.project(for: dependency.target.target) + let dependencyDescription = "target '\(dependency.target.target.name)' in project '\(project.name)'" + let dependencyString: String + switch dependency.reason { + case .explicit: + dependencyString = "Explicit dependency on \(dependencyDescription)" + case .implicitBuildPhaseLinkage(filename: let filename, buildableItem: _, buildPhase: let buildPhase): + dependencyString = "Implicit dependency on \(dependencyDescription) via file '\(filename)' in build phase '\(buildPhase)'" + case .implicitBuildSetting(settingName: let settingName, options: let options): + dependencyString = "Implicit dependency on \(dependencyDescription) via options '\(options.joined(separator: " "))' in build setting '\(settingName)'" + case .impliedByTransitiveDependencyViaRemovedTargets(let intermediateTargetName): + dependencyString = "Dependency on \(dependencyDescription) via transitive dependency through '\(intermediateTargetName)'" + } + return Diagnostic(behavior: .note, location: .unknown, data: DiagnosticData("➜ " + dependencyString)) + } + ) + } + ) } } @@ -400,8 +410,7 @@ fileprivate extension TargetDependencyResolver { for configuredTarget in configuredTargets { if let newConfiguredTarget = configuredTargetsToReplace[configuredTarget] { newConfiguredTargets.insert(newConfiguredTarget) - } - else { + } else { newConfiguredTargets.insert(configuredTarget) } } @@ -418,8 +427,7 @@ fileprivate extension TargetDependencyResolver { if let newDependencyTarget = configuredTargetsToReplace[dependency.target] { let newDependency = ResolvedTargetDependency(target: newDependencyTarget, reason: dependency.reason) newDependencies.append(newDependency) - } - else { + } else { newDependencies.append(dependency) } } @@ -718,7 +726,8 @@ fileprivate extension TargetDependencyResolver { /// - dependencyPath: The ordered list of dependencies added along the path to this target, for detecting recursion. /// - imposedParameters: Additional build parameter overrides which should be imposed upon all targets along this path (for specialization purposes). private func addDependencies(forConfiguredTarget configuredTarget: ConfiguredTarget, toDependencyClosure dependencyClosure: inout OrderedSet, dependencyPath: inout OrderedSet, imposedParameters: SpecializationParameters? = nil) async { - let statusMessage = workspaceContext.userPreferences.activityTextShorteningLevel >= .allDynamicText + let statusMessage = + workspaceContext.userPreferences.activityTextShorteningLevel >= .allDynamicText ? "Computing dependencies" : "Computing dependencies for '\(configuredTarget.target.name)'" delegate.updateProgress(statusMessage: statusMessage, showInLog: false) @@ -779,7 +788,6 @@ fileprivate extension TargetDependencyResolver { await addDependencies(forConfiguredTarget: dependency.target, toDependencyClosure: &dependencyClosure, dependencyPath: &dependencyPath, imposedParameters: dependencyImposedParameters) } - // Visit all of the immediate dependencies. 
for configuredDependency in discoveredInfo.immediateDependencies { if Task.isCancelled { break } diff --git a/Sources/SWBCore/TaskGeneration.swift b/Sources/SWBCore/TaskGeneration.swift index c944d4fd..8c9440c3 100644 --- a/Sources/SWBCore/TaskGeneration.swift +++ b/Sources/SWBCore/TaskGeneration.swift @@ -353,10 +353,13 @@ extension SpecRegistry { func effectiveFlattenedBuildOptions(_ spec: PropertyDomainSpec) -> [String: BuildOption] { var options = spec.flattenedBuildOptions for extensionSpec in findSpecs(BuildSettingsExtensionSpec.self) where spec.conformsTo(identifier: extensionSpec.extendsConformsTo) { - options.merge(extensionSpec.flattenedBuildOptions, uniquingKeysWith: { _, new in - // Should duplicates be an error? - return new - }) + options.merge( + extensionSpec.flattenedBuildOptions, + uniquingKeysWith: { _, new in + // Should duplicates be an error? + return new + } + ) } return options } @@ -439,26 +442,32 @@ public struct CommandBuildContext { public let validityCriteria: (any TaskValidityCriteria)? public init( - producer: any CommandProducer, scope: MacroEvaluationScope, - inputs: [FileToBuild], isPreferredArch: Bool = true, + producer: any CommandProducer, + scope: MacroEvaluationScope, + inputs: [FileToBuild], + isPreferredArch: Bool = true, currentArchSpec: ArchitectureSpec? = nil, output: Path? = nil, commandOrderingInputs: [any PlannedNode] = [], commandOrderingOutputs: [any PlannedNode] = [], buildPhaseInfo: (any BuildPhaseInfoForToolSpec)? = nil, - resourcesDir: Path? = nil, tmpResourcesDir: Path? = nil, + resourcesDir: Path? = nil, + tmpResourcesDir: Path? = nil, unlocalizedResourcesDir: Path? = nil, preparesForIndexing: Bool = false, validityCriteria: (any TaskValidityCriteria)? = nil ) { self.init( - producer: producer, scope: scope, inputs: inputs, + producer: producer, + scope: scope, + inputs: inputs, isPreferredArch: isPreferredArch, currentArchSpec: currentArchSpec, outputs: output.map { [$0] } ?? [], commandOrderingInputs: commandOrderingInputs, commandOrderingOutputs: commandOrderingOutputs, - buildPhaseInfo: buildPhaseInfo, resourcesDir: resourcesDir, + buildPhaseInfo: buildPhaseInfo, + resourcesDir: resourcesDir, tmpResourcesDir: tmpResourcesDir, unlocalizedResourcesDir: unlocalizedResourcesDir, preparesForIndexing: preparesForIndexing, @@ -467,14 +476,17 @@ public struct CommandBuildContext { } public init( - producer: any CommandProducer, scope: MacroEvaluationScope, - inputs: [FileToBuild], isPreferredArch: Bool = true, + producer: any CommandProducer, + scope: MacroEvaluationScope, + inputs: [FileToBuild], + isPreferredArch: Bool = true, currentArchSpec: ArchitectureSpec? = nil, outputs: [Path], commandOrderingInputs: [any PlannedNode] = [], commandOrderingOutputs: [any PlannedNode] = [], buildPhaseInfo: (any BuildPhaseInfoForToolSpec)? = nil, - resourcesDir: Path? = nil, tmpResourcesDir: Path? = nil, + resourcesDir: Path? = nil, + tmpResourcesDir: Path? = nil, unlocalizedResourcesDir: Path? = nil, preparesForIndexing: Bool = false, validityCriteria: (any TaskValidityCriteria)? = nil @@ -584,22 +596,23 @@ public struct TaskOrderingOptions: OptionSet, CustomDebugStringConvertible, Send public static let ignorePhaseOrdering = TaskOrderingOptions(1 << 10) public var debugDescription: String { - return " String? in return self.contains(rawValue) ? 
description : nil - }.joined(separator: ", ") + "]>" + }.joined(separator: ", ") + "]>" } } @@ -893,12 +906,12 @@ public extension TaskGenerationDelegate { /// Create a new task taking a command line as a `String` array, and inputs and outputs as `Path` arrays. The command line will be implicitly marshalled into a `ByteString` array, and the inputs and outputs into `PlannedNode` arrays. func createTask(type: any TaskTypeDescription, dependencyData: DependencyDataStyle? = nil, payload: (any TaskPayload)? = nil, ruleInfo: [String], additionalSignatureData: String = "", commandLine: [String], additionalOutput: [String] = [], environment: EnvironmentBindings, workingDirectory: Path, inputs: [Path], outputs: [Path], mustPrecede: [any PlannedTask] = [], action: (any PlannedTaskAction)? = nil, execDescription: String? = nil, preparesForIndexing: Bool = false, enableSandboxing: Bool, llbuildControlDisabled: Bool = false, additionalTaskOrderingOptions: TaskOrderingOptions = [], usesExecutionInputs: Bool = false, isGate: Bool = false, alwaysExecuteTask: Bool = false, showInLog: Bool = true, showCommandLineInLog: Bool = true, priority: TaskPriority = .unspecified, repairViaOwnershipAnalysis: Bool = false, validityCriteria: (any TaskValidityCriteria)? = nil) { - return createTask(type: type, dependencyData: dependencyData, payload: payload, ruleInfo: ruleInfo, additionalSignatureData: additionalSignatureData, commandLine: commandLine.map{ ByteString(encodingAsUTF8: $0) }, additionalOutput: additionalOutput, environment: environment, workingDirectory: workingDirectory, inputs: inputs.map(createNode), outputs: outputs.map(createNode), mustPrecede: mustPrecede, action: action, execDescription: execDescription, preparesForIndexing: preparesForIndexing, enableSandboxing: enableSandboxing, llbuildControlDisabled: llbuildControlDisabled, additionalTaskOrderingOptions: additionalTaskOrderingOptions, usesExecutionInputs: usesExecutionInputs, isGate: isGate, alwaysExecuteTask: alwaysExecuteTask, showInLog: showInLog, showCommandLineInLog: showCommandLineInLog, priority: priority, repairViaOwnershipAnalysis: repairViaOwnershipAnalysis, validityCriteria: validityCriteria) + return createTask(type: type, dependencyData: dependencyData, payload: payload, ruleInfo: ruleInfo, additionalSignatureData: additionalSignatureData, commandLine: commandLine.map { ByteString(encodingAsUTF8: $0) }, additionalOutput: additionalOutput, environment: environment, workingDirectory: workingDirectory, inputs: inputs.map(createNode), outputs: outputs.map(createNode), mustPrecede: mustPrecede, action: action, execDescription: execDescription, preparesForIndexing: preparesForIndexing, enableSandboxing: enableSandboxing, llbuildControlDisabled: llbuildControlDisabled, additionalTaskOrderingOptions: additionalTaskOrderingOptions, usesExecutionInputs: usesExecutionInputs, isGate: isGate, alwaysExecuteTask: alwaysExecuteTask, showInLog: showInLog, showCommandLineInLog: showCommandLineInLog, priority: priority, repairViaOwnershipAnalysis: repairViaOwnershipAnalysis, validityCriteria: validityCriteria) } /// Create a new task taking a command line as a `String` array. It will be implicitly marshalled into a `ByteString` array. func createTask(type: any TaskTypeDescription, dependencyData: DependencyDataStyle? = nil, payload: (any TaskPayload)? 
= nil, ruleInfo: [String], additionalSignatureData: String = "", commandLine: [String], additionalOutput: [String] = [], environment: EnvironmentBindings, workingDirectory: Path, inputs: [any PlannedNode], outputs: [any PlannedNode], mustPrecede: [any PlannedTask] = [], action: (any PlannedTaskAction)? = nil, execDescription: String? = nil, preparesForIndexing: Bool = false, enableSandboxing: Bool, llbuildControlDisabled: Bool = false, additionalTaskOrderingOptions: TaskOrderingOptions = [], usesExecutionInputs: Bool = false, isGate: Bool = false, alwaysExecuteTask: Bool = false, showInLog: Bool = true, showCommandLineInLog: Bool = true, showEnvironment: Bool = false, priority: TaskPriority = .unspecified, repairViaOwnershipAnalysis: Bool = false, validityCriteria: (any TaskValidityCriteria)? = nil) { - return createTask(type: type, dependencyData: dependencyData, payload: payload, ruleInfo: ruleInfo, additionalSignatureData: additionalSignatureData, commandLine: commandLine.map{ ByteString(encodingAsUTF8: $0) }, additionalOutput: additionalOutput, environment: environment, workingDirectory: workingDirectory, inputs: inputs, outputs: outputs, mustPrecede: mustPrecede, action: action, execDescription: execDescription, preparesForIndexing: preparesForIndexing, enableSandboxing: enableSandboxing, llbuildControlDisabled: llbuildControlDisabled, additionalTaskOrderingOptions: additionalTaskOrderingOptions, usesExecutionInputs: usesExecutionInputs, isGate: isGate, alwaysExecuteTask: alwaysExecuteTask, showInLog: showInLog, showCommandLineInLog: showCommandLineInLog, showEnvironment: showEnvironment, priority: priority, repairViaOwnershipAnalysis: repairViaOwnershipAnalysis, validityCriteria: validityCriteria) + return createTask(type: type, dependencyData: dependencyData, payload: payload, ruleInfo: ruleInfo, additionalSignatureData: additionalSignatureData, commandLine: commandLine.map { ByteString(encodingAsUTF8: $0) }, additionalOutput: additionalOutput, environment: environment, workingDirectory: workingDirectory, inputs: inputs, outputs: outputs, mustPrecede: mustPrecede, action: action, execDescription: execDescription, preparesForIndexing: preparesForIndexing, enableSandboxing: enableSandboxing, llbuildControlDisabled: llbuildControlDisabled, additionalTaskOrderingOptions: additionalTaskOrderingOptions, usesExecutionInputs: usesExecutionInputs, isGate: isGate, alwaysExecuteTask: alwaysExecuteTask, showInLog: showInLog, showCommandLineInLog: showCommandLineInLog, showEnvironment: showEnvironment, priority: priority, repairViaOwnershipAnalysis: repairViaOwnershipAnalysis, validityCriteria: validityCriteria) } func createGateTask(inputs: [any PlannedNode], output: any PlannedNode, name: String? = nil, mustPrecede: [any PlannedTask] = [], payload: (any TaskPayload)? = nil, additionalSignatureData: String = "") { @@ -1078,9 +1091,11 @@ public struct OutputPathIndexingInfo: SourceFileIndexingInfo { /// The indexing info is packaged and sent to the client in a property list format. public var propertyListItem: PropertyListItem { - return .plDict([ - "outputFilePath": .plString(outputFile.str), - ] as [String: PropertyListItem]) + return .plDict( + [ + "outputFilePath": .plString(outputFile.str) + ] as [String: PropertyListItem] + ) } } @@ -1276,8 +1291,10 @@ public struct TaskGenerateLocalizationInfoOutput { /// - Parameters: /// - compilableXCStringsPaths: Paths to input source .xcstrings files. /// - producedStringsdataPaths: Paths to output .stringsdata files. 
- public init(compilableXCStringsPaths: [Path] = [], - producedStringsdataPaths: [LocalizationBuildPortion: [Path]] = [:]) { + public init( + compilableXCStringsPaths: [Path] = [], + producedStringsdataPaths: [LocalizationBuildPortion: [Path]] = [:] + ) { self.compilableXCStringsPaths = compilableXCStringsPaths self.producedStringsdataPaths = producedStringsdataPaths } diff --git a/Sources/SWBCore/TaskResult.swift b/Sources/SWBCore/TaskResult.swift index e52dd640..452c4ce9 100644 --- a/Sources/SWBCore/TaskResult.swift +++ b/Sources/SWBCore/TaskResult.swift @@ -80,10 +80,14 @@ public extension Optional where Wrapped == TaskResult { } public struct CommandMetrics: Hashable, Sendable { - public let utime: UInt64 /// User time (in μs) - public let stime: UInt64 /// Sys time (in μs) - public let maxRSS: UInt64 /// Max RSS (in bytes) - public let wcDuration: ElapsedTimerInterval? /// Wall time duration (in μs). + public let utime: UInt64 + /// User time (in μs) + public let stime: UInt64 + /// Sys time (in μs) + public let maxRSS: UInt64 + /// Max RSS (in bytes) + public let wcDuration: ElapsedTimerInterval? + /// Wall time duration (in μs). public init(utime: UInt64, stime: UInt64, maxRSS: UInt64, wcDuration: ElapsedTimerInterval?) { self.utime = utime diff --git a/Sources/SWBCore/TaskValidityCriteria.swift b/Sources/SWBCore/TaskValidityCriteria.swift index 280793cc..32736609 100644 --- a/Sources/SWBCore/TaskValidityCriteria.swift +++ b/Sources/SWBCore/TaskValidityCriteria.swift @@ -21,7 +21,6 @@ public protocol TaskValidityCriteria: Sendable { func isValid(_ task: any PlannedTask, _ context: any TaskValidationContext) -> Bool } - /// A task validation context exposes information a task needs to determine whether it should be included in the build. public protocol TaskValidationContext: AnyObject { /// The set of paths which have been declared as inputs to one or more tasks. diff --git a/Sources/SWBCore/ToolInfo/ClangToolInfo.swift b/Sources/SWBCore/ToolInfo/ClangToolInfo.swift index 4e4ba77d..336e7862 100644 --- a/Sources/SWBCore/ToolInfo/ClangToolInfo.swift +++ b/Sources/SWBCore/ToolInfo/ClangToolInfo.swift @@ -14,9 +14,9 @@ public import SWBMacro public import SWBUtil import Foundation -public struct ClangBlocklists : Sendable { +public struct ClangBlocklists: Sendable { - public struct CachingBlocklistInfo : ProjectFailuresBlockList, Codable, Sendable { + public struct CachingBlocklistInfo: ProjectFailuresBlockList, Codable, Sendable { /// A blocklist of project names that do not support the `CLANG_ENABLE_COMPILE_CACHE` build setting. let KnownFailures: [String] @@ -27,7 +27,7 @@ public struct ClangBlocklists : Sendable { var caching: CachingBlocklistInfo? = nil - public struct BuiltinModuleVerifierInfo : ProjectFailuresBlockList, Codable, Sendable { + public struct BuiltinModuleVerifierInfo: ProjectFailuresBlockList, Codable, Sendable { /// A blocklist of project names that do not support the `MODULE_VERIFIER_KIND=builtin` build setting. 
let KnownFailures: [String] enum CodingKeys: String, CodingKey { @@ -100,7 +100,7 @@ public struct DiscoveredClangToolSpecInfo: DiscoveredCommandLineToolSpecInfo { public func isCachingBlocked(_ scope: MacroEvaluationScope) -> Bool { return blocklists.isBlocked(scope, info: blocklists.caching) - } + } public func isBuiltinModuleVerifyBlocked(_ scope: MacroEvaluationScope) -> Bool { return blocklists.isBlocked(scope, info: blocklists.builtinModuleVerify) @@ -140,78 +140,82 @@ public func discoveredClangToolInfo( commandLine.append("-c") commandLine.append(Path.null.str) - return try await producer.discoveredCommandLineToolSpecInfo(delegate, nil, commandLine, { executionResult in - let outputString = String(decoding: executionResult.stdout, as: UTF8.self).trimmingCharacters(in: .whitespacesAndNewlines) - - var clangVersion: Version? = nil - var llvmVersion: Version? = nil - var isAppleClang = false - - for line in outputString.components(separatedBy: "\n") { - if line.hasPrefix("#define ") { - // Parse out the macro name and value. - let macroAssignment = line.withoutPrefix("#define ") - guard !macroAssignment.isEmpty else { continue } - - let (macroName, macroValue) = macroAssignment.split(" ") - guard !macroValue.isEmpty else { continue } - - // If the #define is __clang_version__, then we try to extract the LLVM and clang versions. The value of this macro will look something like one of the following (including the quote characters): - // "8.1.0 (clang-802.1.38)" - // "12.0.0 (clang-1200.0.22.5) [ptrauth objc isa mode: sign-and-strip]" - if macroName == "__clang_version__" { - if let match: RegEx.MatchResult = clangVersionRe.firstMatch(in: macroValue) { - llvmVersion = match["llvm"].map { try? Version($0) } ?? nil - clangVersion = match["clang"].map { try? Version($0) } ?? nil - } else if let match = try? swiftOSSToolchainClangVersionRe.regex.firstMatch(in: macroValue) { - llvmVersion = try? Version(String(match.llvm)) + return try await producer.discoveredCommandLineToolSpecInfo( + delegate, + nil, + commandLine, + { executionResult in + let outputString = String(decoding: executionResult.stdout, as: UTF8.self).trimmingCharacters(in: .whitespacesAndNewlines) + + var clangVersion: Version? = nil + var llvmVersion: Version? = nil + var isAppleClang = false + + for line in outputString.components(separatedBy: "\n") { + if line.hasPrefix("#define ") { + // Parse out the macro name and value. + let macroAssignment = line.withoutPrefix("#define ") + guard !macroAssignment.isEmpty else { continue } + + let (macroName, macroValue) = macroAssignment.split(" ") + guard !macroValue.isEmpty else { continue } + + // If the #define is __clang_version__, then we try to extract the LLVM and clang versions. The value of this macro will look something like one of the following (including the quote characters): + // "8.1.0 (clang-802.1.38)" + // "12.0.0 (clang-1200.0.22.5) [ptrauth objc isa mode: sign-and-strip]" + if macroName == "__clang_version__" { + if let match: RegEx.MatchResult = clangVersionRe.firstMatch(in: macroValue) { + llvmVersion = match["llvm"].map { try? Version($0) } ?? nil + clangVersion = match["clang"].map { try? Version($0) } ?? nil + } else if let match = try? swiftOSSToolchainClangVersionRe.regex.firstMatch(in: macroValue) { + llvmVersion = try? 
Version(String(match.llvm)) + } } - } - if macroName == "__apple_build_version__" { - isAppleClang = true + if macroName == "__apple_build_version__" { + isAppleClang = true + } } } - } - func getFeatures(at toolPath: Path) -> ToolFeatures { - let featuresPath = toolPath.dirname.dirname.join("share").join("clang").join("features.json") - do { - return try ToolFeatures(path: featuresPath, fs: localFS) - } catch { - // Clang was missing its own 'features.json' for a while (see rdar://72387110). Use the presence of the Swift 'features.json' for the features that were supported when it was added. - // Note that clang's is still missing on Windows: https://github.com/swiftlang/swift-installer-scripts/issues/337 - let swiftFeaturesPath = toolPath.dirname.dirname.join("share").join("swift").join("features.json") - if localFS.exists(swiftFeaturesPath) { - return .init([ .allowPcmWithCompilerErrors, .vfsDirectoryRemap, .indexUnitOutputPath]) + func getFeatures(at toolPath: Path) -> ToolFeatures { + let featuresPath = toolPath.dirname.dirname.join("share").join("clang").join("features.json") + do { + return try ToolFeatures(path: featuresPath, fs: localFS) + } catch { + // Clang was missing its own 'features.json' for a while (see rdar://72387110). Use the presence of the Swift 'features.json' for the features that were supported when it was added. + // Note that clang's is still missing on Windows: https://github.com/swiftlang/swift-installer-scripts/issues/337 + let swiftFeaturesPath = toolPath.dirname.dirname.join("share").join("swift").join("features.json") + if localFS.exists(swiftFeaturesPath) { + return .init([.allowPcmWithCompilerErrors, .vfsDirectoryRemap, .indexUnitOutputPath]) + } + return .init([]) } - return .init([]) } - } - let blocklistPaths = CompilerSpec.findToolchainBlocklists(producer, directoryOverride: blocklistsPathOverride) + let blocklistPaths = CompilerSpec.findToolchainBlocklists(producer, directoryOverride: blocklistsPathOverride) - func getBlocklist(type: T.Type, toolchainFilename: String, delegate: any TargetDiagnosticProducingDelegate) -> T? { - return CompilerSpec.getBlocklist( - type: type, - toolchainFilename: toolchainFilename, - blocklistPaths: blocklistPaths, - fs: localFS, - delegate: delegate + func getBlocklist(type: T.Type, toolchainFilename: String, delegate: any TargetDiagnosticProducingDelegate) -> T? 
{ + return CompilerSpec.getBlocklist( + type: type, + toolchainFilename: toolchainFilename, + blocklistPaths: blocklistPaths, + fs: localFS, + delegate: delegate + ) + } + var blocklists = ClangBlocklists() + blocklists.caching = getBlocklist(type: ClangBlocklists.CachingBlocklistInfo.self, toolchainFilename: "clang-caching.json", delegate: delegate) + blocklists.builtinModuleVerify = getBlocklist(type: ClangBlocklists.BuiltinModuleVerifierInfo.self, toolchainFilename: "clang-builtin-module-verify.json", delegate: delegate) + + return DiscoveredClangToolSpecInfo( + toolPath: toolPath, + clangVersion: clangVersion, + llvmVersion: llvmVersion, + isAppleClang: isAppleClang, + blocklists: blocklists, + toolFeatures: getFeatures(at: toolPath) ) } - var blocklists = ClangBlocklists() - blocklists.caching = getBlocklist(type: ClangBlocklists.CachingBlocklistInfo.self, toolchainFilename: "clang-caching.json", delegate: delegate) - blocklists.builtinModuleVerify = getBlocklist(type: ClangBlocklists.BuiltinModuleVerifierInfo.self, toolchainFilename: "clang-builtin-module-verify.json", delegate: delegate) - - - return DiscoveredClangToolSpecInfo( - toolPath: toolPath, - clangVersion: clangVersion, - llvmVersion: llvmVersion, - isAppleClang: isAppleClang, - blocklists: blocklists, - toolFeatures: getFeatures(at: toolPath) - ) - }) + ) } diff --git a/Sources/SWBCore/ToolchainRegistry.swift b/Sources/SWBCore/ToolchainRegistry.swift index 1279156e..8984b3ed 100644 --- a/Sources/SWBCore/ToolchainRegistry.swift +++ b/Sources/SWBCore/ToolchainRegistry.swift @@ -27,8 +27,7 @@ private extension ToolchainRegistryDelegate { func issue(strict: Bool, _ path: Path, _ message: String) { if strict { error(path, message) - } - else { + } else { warning(path, message) } } @@ -79,7 +78,7 @@ public final class Toolchain: Hashable, Sendable { self.identifier = identifier self.version = version - assert(!aliases.contains{ $0.lowercased() != $0 }) + assert(!aliases.contains { $0.lowercased() != $0 }) self.aliases = aliases self.path = path @@ -165,14 +164,12 @@ public final class Toolchain: Hashable, Sendable { throw StubError.error("if the 'Name' key is present, it must be a string") } displayName = infoDisplayNameString - } - else if let infoDisplayName = items["DisplayName"] { + } else if let infoDisplayName = items["DisplayName"] { guard case .plString(let infoDisplayNameString) = infoDisplayName else { throw StubError.error("if the 'DisplayName' key is present, it must be a string") } displayName = infoDisplayNameString - } - else { + } else { displayName = Toolchain.deriveDisplayName(identifier: identifier) } @@ -185,12 +182,10 @@ public final class Toolchain: Hashable, Sendable { do { version = try Version(infoVersionString) - } - catch { + } catch { throw StubError.error("'Version' parse error: \(error)") } - } - else { + } else { // No version specified in the plist, so we derive the version from the identifier. version = Toolchain.deriveVersion(identifier: identifier) } @@ -210,8 +205,7 @@ public final class Toolchain: Hashable, Sendable { guard !aliasStr.isEmpty else { continue } aliases.insert(aliasStr.lowercased()) } - } - else { + } else { // No aliases specified in the plist, so we derive them from the identifier. 
aliases = Toolchain.deriveAliases(path: path, identifier: identifier) } @@ -285,7 +279,7 @@ public final class Toolchain: Hashable, Sendable { defaultSettingsWhenPrimary["TOOLCHAIN_VERSION"] = .plString(version.description) var executableSearchPaths = [ - path.join("usr").join("bin"), + path.join("usr").join("bin") ] for platformExtension in pluginManager.extensions(of: PlatformInfoExtensionPoint.self) { @@ -294,18 +288,21 @@ public final class Toolchain: Hashable, Sendable { executableSearchPaths.append(contentsOf: [ path.join("usr").join("local").join("bin"), - path.join("usr").join("libexec") + path.join("usr").join("libexec"), ]) // Testing library platform names let testingLibrarySearchDir = path.join("usr").join("lib").join("swift") - let testingLibraryPlatformNames: Set<String> = if let platformRegistry, fs.exists(testingLibrarySearchDir) { - Set(try fs.listdir(testingLibrarySearchDir).filter { - platformRegistry.lookup(name: $0) != nil && fs.exists(testingLibrarySearchDir.join($0).join("testing")) - }) - } else { - [] - } + let testingLibraryPlatformNames: Set<String> = + if let platformRegistry, fs.exists(testingLibrarySearchDir) { + Set( + try fs.listdir(testingLibrarySearchDir).filter { + platformRegistry.lookup(name: $0) != nil && fs.exists(testingLibrarySearchDir.join($0).join("testing")) + } + ) + } else { + [] + } // Construct the toolchain self.init(identifier: identifier, displayName: displayName, version: version, aliases: aliases, path: path, frameworkPaths: frameworkSearchPaths, libraryPaths: librarySearchPaths, defaultSettings: defaultSettings, overrideSettings: overrideSettings, defaultSettingsWhenPrimary: defaultSettingsWhenPrimary, executableSearchPaths: executableSearchPaths, testingLibraryPlatformNames: testingLibraryPlatformNames, fs: fs) @@ -315,18 +312,16 @@ public final class Toolchain: Hashable, Sendable { hasher.combine(ObjectIdentifier(self)) } - public static func ==(lhs: Toolchain, rhs: Toolchain) -> Bool { + public static func == (lhs: Toolchain, rhs: Toolchain) -> Bool { return lhs === rhs } @_spi(Testing) public static func deriveDisplayName(identifier: String) -> String { if identifier == ToolchainRegistry.defaultToolchainIdentifier { return "Xcode Default" - } - else if identifier.hasPrefix(ToolchainRegistry.appleToolchainIdentifierPrefix) { + } else if identifier.hasPrefix(ToolchainRegistry.appleToolchainIdentifierPrefix) { return identifier.withoutPrefix(ToolchainRegistry.appleToolchainIdentifierPrefix) - } - else { + } else { return identifier } } @@ -382,7 +377,7 @@ public final class Toolchain: Hashable, Sendable { let groups: [[String]] = Static.deriveVersionPattern.matchGroups(in: identifier) // error: ambiguous use of 'prefix' - let numbers: [UInt] = Array(groups.prefix(3)).map{ $0.first! }.map{ UInt($0) ?? 0 } + let numbers: [UInt] = Array(groups.prefix(3)).map { $0.first! }.map { UInt($0) ?? 
0 } return Version(numbers) } @@ -466,8 +461,7 @@ public final class ToolchainRegistry: @unchecked Sendable { do { try await registerToolchainsInDirectory(path, strict: strict, aliases: searchPath.aliases, operatingSystem: hostOperatingSystem, delegate: delegate) - } - catch let err { + } catch let err { delegate.issue(strict: strict, path, "failed to load toolchains in \(path.str): \(err)") } } diff --git a/Sources/SWBCore/WorkspaceContext.swift b/Sources/SWBCore/WorkspaceContext.swift index 5f0e4f6b..b464e57f 100644 --- a/Sources/SWBCore/WorkspaceContext.swift +++ b/Sources/SWBCore/WorkspaceContext.swift @@ -111,9 +111,9 @@ public struct UserPreferences: Sendable { public static var allowsExternalToolExecutionDefaultValue: Bool { #if RC_PLAYGROUNDS - return true + return true #else - return false + return false #endif } @@ -317,7 +317,7 @@ public final class WorkspaceContext: Sendable { return sdkRegistryCache.getValue(self) } private let sdkRegistryCache = LazyCache { (workspaceContext: WorkspaceContext) -> WorkspaceContextSDKRegistry in - let overridingSDKsDir: Path? = workspaceContext.userInfo?.processEnvironment["XCODE_OVERRIDING_SDKS_DIRECTORY"].flatMap{Path($0)} + let overridingSDKsDir: Path? = workspaceContext.userInfo?.processEnvironment["XCODE_OVERRIDING_SDKS_DIRECTORY"].flatMap { Path($0) } return WorkspaceContextSDKRegistry(coreSDKRegistry: workspaceContext.core.sdkRegistry, delegate: workspaceContext.core.registryDelegate, userNamespace: workspaceContext.workspace.userNamespace, overridingSDKsDir: overridingSDKsDir) } @@ -361,8 +361,7 @@ public final class WorkspaceContext: Sendable { if let p = parameters.arena?.derivedDataPath { // If the arena defines a derived data path, then use it. cacheFolderPath = p - } - else { + } else { // Otherwise use the path to the default clang user cache directory. This will mainly used when running xcodebuild without -scheme. // First see if CCHROOT is defined in the environment. If it is, and if it does *not* start with "/Library/Caches/com.apple.Xcode", then we use it. if let CCHROOT = self.userInfo?.processEnvironment["CCHROOT"], !CCHROOT.isEmpty { @@ -386,10 +385,10 @@ public final class WorkspaceContext: Sendable { extension FSProxy { private static var CreatedByBuildSystemAttribute: String { #if os(Linux) || os(Android) - // On Linux, "the name [of an extended attribute] must be a null-terminated string prefixed by a namespace identifier and a dot character" and only the "user" namespace is available for unrestricted access. - "user.org.swift.swift-build.CreatedByBuildSystem" + // On Linux, "the name [of an extended attribute] must be a null-terminated string prefixed by a namespace identifier and a dot character" and only the "user" namespace is available for unrestricted access. 
+ "user.org.swift.swift-build.CreatedByBuildSystem" #else - "com.apple.xcode.CreatedByBuildSystem" + "com.apple.xcode.CreatedByBuildSystem" #endif } @@ -450,4 +449,4 @@ extension MachOInfo: FileSystemInitializable { } } -extension XCFramework: FileSystemInitializable { } +extension XCFramework: FileSystemInitializable {} diff --git a/Sources/SWBCore/XCFramework.swift b/Sources/SWBCore/XCFramework.swift index 1326f5fe..02cd57c0 100644 --- a/Sources/SWBCore/XCFramework.swift +++ b/Sources/SWBCore/XCFramework.swift @@ -293,7 +293,7 @@ public struct XCFramework: Hashable, Sendable { self.libraryIdentifier = libraryIdentifier self.supportedPlatform = supportedPlatform self.supportedArchitectures = supportedArchitectures - self.platformVariant = platformVariant?.nilIfEmpty // remove the property if it is empty + self.platformVariant = platformVariant?.nilIfEmpty // remove the property if it is empty self.libraryPath = libraryPath self.binaryPath = binaryPath self.headersPath = headersPath @@ -482,7 +482,7 @@ extension XCFramework.Library: Hashable { } /// Returns `true` iff the `libraryIdentifiers` are equal. The rest of the library components are not used for equality. - static public func ==(lhs: XCFramework.Library, rhs: XCFramework.Library) -> Bool { + static public func == (lhs: XCFramework.Library, rhs: XCFramework.Library) -> Bool { return lhs.libraryIdentifier == rhs.libraryIdentifier } } @@ -544,8 +544,7 @@ extension XCFramework.Library: Hashable { if platformVariant == "macabi" { try container.encode("maccatalyst", forKey: .platformVariant) - } - else { + } else { try container.encodeIfPresent(platformVariant, forKey: .platformVariant) } @@ -622,8 +621,7 @@ extension XCFramework { let libraries: [XCFramework.Library] do { version = try Version(other.version) - } - catch { + } catch { throw XCFrameworkValidationError.unsupportedVersion(version: other.version) } @@ -672,8 +670,7 @@ extension XCFramework { let subfolder: Path if scope.evaluate(BuiltinMacros.DEPLOYMENT_LOCATION) { subfolder = scope.evaluate(BuiltinMacros.BUILT_PRODUCTS_DIR) - } - else { + } else { subfolder = scope.unmodifiedTargetBuildDir } @@ -693,13 +690,11 @@ extension XCFramework { case internalDistribution var libraryPath: Path? { - if case let .library(path, _, _) = self { return path } - else { return nil } + if case let .library(path, _, _) = self { return path } else { return nil } } var headersPath: Path? { - if case let .library(_, path, _) = self { return path } - else { return nil } + if case let .library(_, path, _) = self { return path } else { return nil } } var debugSymbolPaths: [Path] { @@ -753,7 +748,6 @@ extension XCFramework { } } - // If the '-archive' flag is used, then all -framework/-library usages will be prefixed with their corresponding path into the archive. Also, the '-headers' and '-debug-symbols' will be added pointing into the archive. This function does not handle any of the error handling, but lets the rest of the system deal with duplicate or improper usage. 
var newCommandLine: [String] = [] @@ -776,8 +770,7 @@ extension XCFramework { newCommandLine.append(archiveRoot.join(root).join(name).str) rewriteDebugSymbolCommandLine(archiveRoot, name, fs, &newCommandLine) - } - else { + } else { newCommandLine.append(entry) } @@ -796,8 +789,7 @@ extension XCFramework { } rewriteDebugSymbolCommandLine(archiveRoot, name, fs, &newCommandLine) - } - else { + } else { newCommandLine.append(entry) } @@ -841,7 +833,7 @@ extension XCFramework { precondition(currentWorkingDirectory.isAbsolute, "path '\(currentWorkingDirectory.str)' is not absolute") // The -archive flag is handled in a very special way; it re-writes the user's entered command line by emitting the corresponding -framework/-library, -headers, -debug-symbols arguments. - let commandLine = rewriteCommandLine(commandLine, cwd: currentWorkingDirectory, fs: fs) + let commandLine = rewriteCommandLine(commandLine, cwd: currentWorkingDirectory, fs: fs) var arguments = [Argument]() var argumentIndex = commandLine.startIndex @@ -869,8 +861,8 @@ extension XCFramework { switch parseState { case .next: switch arg { - case "createXCFramework": parseState = .next // the main command from Swift Build - case "-create-xcframework": parseState = .next // passed through via xcodebuild's parameter passing splat + case "createXCFramework": parseState = .next // the main command from Swift Build + case "-create-xcframework": parseState = .next // passed through via xcodebuild's parameter passing splat case "-framework": parseState = .framework case "-library": parseState = .library case "-headers": parseState = .libraryHeader @@ -884,8 +876,7 @@ extension XCFramework { // When running via `xcodebuild`, there are additional arguments passed that we want to safely ignore. if arg.hasPrefix("-DVT") || arg.hasPrefix("-ExtraPlugInFolders") { parseState = .next - } - else { + } else { return .failure(XCFrameworkCreationError(message: "error: invalid argument '\(arg)'.")) } } @@ -932,7 +923,7 @@ extension XCFramework { arguments.append(.output(path: normalize(path: arg, cwd: currentWorkingDirectory))) parseState = .next - case .end: break // do nothing + case .end: break // do nothing } // Time to grab the next index. @@ -941,8 +932,8 @@ extension XCFramework { let (frameworkCount, libraryCount, outputCount) = arguments.reduce((0, 0, 0)) { (acc, arg) in switch arg { - case .framework(_,_): return (acc.0 + 1, acc.1, acc.2) - case .library(_,_,_): return (acc.0, acc.1 + 1, acc.2) + case .framework(_, _): return (acc.0 + 1, acc.1, acc.2) + case .library(_, _, _): return (acc.0, acc.1 + 1, acc.2) case .output(_): return (acc.0, acc.1, acc.2 + 1) case .internalDistribution: return acc } @@ -1030,8 +1021,7 @@ extension XCFramework { mergeableMetadataArchs.insert(slice.arch) } } - } - catch { + } catch { return .failure(XCFrameworkCreationError(message: "error: unable to determine mergeability of the binary at '\(binaryPath.str)': \(error)")) } if !mergeableMetadataArchs.isEmpty, Set(supportedArchs) != mergeableMetadataArchs { @@ -1087,8 +1077,8 @@ extension XCFramework { let fs = localFS // Utility function for constructing the array of `XCFramework.Library` based on the command line arguments. A mapping of the path and the resulting library is returned upon success. 
- func xcframeworkLibraries(from arguments: [Argument]) -> Result<[LibraryPathsKey:XCFramework.Library], XCFrameworkCreationError> { - var libraryMap = [LibraryPathsKey:XCFramework.Library]() + func xcframeworkLibraries(from arguments: [Argument]) -> Result<[LibraryPathsKey: XCFramework.Library], XCFrameworkCreationError> { + var libraryMap = [LibraryPathsKey: XCFramework.Library]() do { for arg in arguments { @@ -1124,8 +1114,7 @@ extension XCFramework { default: continue } } - } - catch { + } catch { return .failure(XCFrameworkCreationError(message: error.localizedDescription)) } @@ -1147,10 +1136,12 @@ extension XCFramework { let xcframework = try XCFramework(libraries: libraryMap.values.map { $0 }) - guard let outputPath = parsedCommandLineArgs.filter({ - if case .output(_) = $0 { return true } - return false - }).first?.outputPath else { + guard + let outputPath = parsedCommandLineArgs.filter({ + if case .output(_) = $0 { return true } + return false + }).first?.outputPath + else { // this is a fatalError() as `parseCommandLine` should have already handled this error. fatalError("no output path found.") } @@ -1209,8 +1200,7 @@ extension XCFramework { } return (true, "xcframework successfully written out to: \(outputPath.str)\n") - } - catch { + } catch { return (false, "\(error.localizedDescription)\n") } } @@ -1320,7 +1310,7 @@ extension XCFramework { guard fs.exists(copyHeadersFromPath) else { throw XCFrameworkValidationError.missingPathEntry(xcframeworkPath: xcframeworkPath, libraryIdentifier: library.libraryIdentifier, plistKey: XCFrameworkInfoPlist_V1.Library.CodingKeys.headersPath.stringValue, plistValue: library.headersPath?.str ?? "") } - let copyHeadersToPath = copyLibraryToPath.join(Path("include")) // this is the path that is added by default from the compile process. + let copyHeadersToPath = copyLibraryToPath.join(Path("include")) // this is the path that is added by default from the compile process. // Ensure the headers path actually exists on disk. if !dryRun { diff --git a/Sources/SWBGenericUnixPlatform/Plugin.swift b/Sources/SWBGenericUnixPlatform/Plugin.swift index 5e103d24..8491969c 100644 --- a/Sources/SWBGenericUnixPlatform/Plugin.swift +++ b/Sources/SWBGenericUnixPlatform/Plugin.swift @@ -27,7 +27,7 @@ final class GenericUnixPlugin: Sendable { func swiftExecutablePath(fs: any FSProxy) -> Path? 
{ [ Environment.current["SWIFT_EXEC"].map(Path.init), - StackedSearchPath(environment: .current, fs: fs).lookup(Path("swift")) + StackedSearchPath(environment: .current, fs: fs).lookup(Path("swift")), ].compactMap { $0 }.first(where: fs.exists) } @@ -81,15 +81,18 @@ struct GenericUnixPlatformInfoExtension: PlatformInfoExtension { guard operatingSystem.createFallbackSystemToolchain || operatingSystem != context.hostOperatingSystem else { return nil } - return try (.root, [ - "Type": .plString("Platform"), - "Name": .plString(operatingSystem.xcodePlatformName), - "Identifier": .plString(operatingSystem.xcodePlatformName), - "Description": .plString(operatingSystem.xcodePlatformName), - "FamilyName": .plString(operatingSystem.xcodePlatformName.capitalized), - "FamilyIdentifier": .plString(operatingSystem.xcodePlatformName), - "IsDeploymentPlatform": .plString("YES"), - ]) + return try ( + .root, + [ + "Type": .plString("Platform"), + "Name": .plString(operatingSystem.xcodePlatformName), + "Identifier": .plString(operatingSystem.xcodePlatformName), + "Description": .plString(operatingSystem.xcodePlatformName), + "FamilyName": .plString(operatingSystem.xcodePlatformName.capitalized), + "FamilyIdentifier": .plString(operatingSystem.xcodePlatformName), + "IsDeploymentPlatform": .plString("YES"), + ] + ) } } } @@ -195,11 +198,16 @@ struct GenericUnixSDKRegistryExtension: SDKRegistryExtension { sysroot = swiftSDK.path architectures = try swiftSDK.targetTriples.keys.map { try LLVMTriple($0).arch }.sorted() tripleVersion = try Set(swiftSDK.targetTriples.keys.compactMap { try LLVMTriple($0).systemVersion }).only?.description - customProperties = try Dictionary(uniqueKeysWithValues: swiftSDK.targetTriples.map { targetTriple in - try ("__SYSROOT_\(LLVMTriple(targetTriple.key).arch)", .plString(swiftSDK.path.join(targetTriple.value.sdkRootPath).str)) - }).merging([ - "SYSROOT": "$(__SYSROOT_$(CURRENT_ARCH))", - ], uniquingKeysWith: { _, new in new }) + customProperties = try Dictionary( + uniqueKeysWithValues: swiftSDK.targetTriples.map { targetTriple in + try ("__SYSROOT_\(LLVMTriple(targetTriple.key).arch)", .plString(swiftSDK.path.join(targetTriple.value.sdkRootPath).str)) + } + ).merging( + [ + "SYSROOT": "$(__SYSROOT_$(CURRENT_ARCH))" + ], + uniquingKeysWith: { _, new in new } + ) } catch { // FIXME: Handle errors? 
return nil @@ -221,7 +229,7 @@ struct GenericUnixSDKRegistryExtension: SDKRegistryExtension { } else if let tripleVersion { realTripleVersion = tripleVersion } else { - return nil // couldn't compute triple version for FreeBSD + return nil // couldn't compute triple version for FreeBSD } deploymentTargetSettings = [ "DeploymentTargetSettingName": .plString("FREEBSD_DEPLOYMENT_TARGET"), @@ -233,24 +241,31 @@ struct GenericUnixSDKRegistryExtension: SDKRegistryExtension { deploymentTargetSettings = [:] } - return try (sysroot, platform, [ - "Type": .plString("SDK"), - "Version": .plString(Version(ProcessInfo.processInfo.operatingSystemVersion).zeroTrimmed.description), - "CanonicalName": .plString(operatingSystem.xcodePlatformName), - "IsBaseSDK": .plBool(true), - "DefaultProperties": .plDict([ - "PLATFORM_NAME": .plString(operatingSystem.xcodePlatformName), - ].merging(defaultProperties, uniquingKeysWith: { _, new in new })), - "CustomProperties": .plDict(customProperties), - "SupportedTargets": .plDict([ - operatingSystem.xcodePlatformName: .plDict([ - "Archs": .plArray(architectures.map { .plString($0) }), - "LLVMTargetTripleEnvironment": .plString(tripleEnvironment), - "LLVMTargetTripleSys": .plString(operatingSystem.xcodePlatformName), - "LLVMTargetTripleVendor": .plString("unknown"), - ].merging(deploymentTargetSettings, uniquingKeysWith: { _, new in new })) - ]), - ]) + return try ( + sysroot, platform, + [ + "Type": .plString("SDK"), + "Version": .plString(Version(ProcessInfo.processInfo.operatingSystemVersion).zeroTrimmed.description), + "CanonicalName": .plString(operatingSystem.xcodePlatformName), + "IsBaseSDK": .plBool(true), + "DefaultProperties": .plDict( + [ + "PLATFORM_NAME": .plString(operatingSystem.xcodePlatformName) + ].merging(defaultProperties, uniquingKeysWith: { _, new in new }) + ), + "CustomProperties": .plDict(customProperties), + "SupportedTargets": .plDict([ + operatingSystem.xcodePlatformName: .plDict( + [ + "Archs": .plArray(architectures.map { .plString($0) }), + "LLVMTargetTripleEnvironment": .plString(tripleEnvironment), + "LLVMTargetTripleSys": .plString(operatingSystem.xcodePlatformName), + "LLVMTargetTripleVendor": .plString("unknown"), + ].merging(deploymentTargetSettings, uniquingKeysWith: { _, new in new }) + ) + ]), + ] + ) }.compactMap { $0 } } } @@ -295,7 +310,8 @@ struct GenericUnixToolchainRegistryExtension: ToolchainRegistryExtension { defaultSettingsWhenPrimary: [:], executableSearchPaths: realSwiftPath.dirname.relativeSubpath(from: path).map { [path.join($0).join("bin")] } ?? [], testingLibraryPlatformNames: [], - fs: fs) + fs: fs + ) ] } } diff --git a/Sources/SWBLLBuild/LowLevelBuildSystem.swift b/Sources/SWBLLBuild/LowLevelBuildSystem.swift index 42487c84..8fb65182 100644 --- a/Sources/SWBLLBuild/LowLevelBuildSystem.swift +++ b/Sources/SWBLLBuild/LowLevelBuildSystem.swift @@ -17,7 +17,7 @@ public import SWBLibc @_exported public import llbuild #if !LLBUILD_FRAMEWORK -@_exported public import llbuildSwift + @_exported public import llbuildSwift #endif // Filesystem adaptors for SWBLLBuild.FileSystem. 
@@ -36,15 +36,15 @@ extension SWBUtil.FileInfo: SWBLLBuild.FileInfo { statBuf.st_mode = numericCast(self.permissions) statBuf.st_size = numericCast(self.size) #if canImport(Darwin) - statBuf.st_mtimespec.tv_sec = numericCast(self.modificationTimestamp) - statBuf.st_mtimespec.tv_nsec = self.modificationNanoseconds + statBuf.st_mtimespec.tv_sec = numericCast(self.modificationTimestamp) + statBuf.st_mtimespec.tv_nsec = self.modificationNanoseconds #elseif os(Windows) - statBuf.st_mtime = self.modificationTimestamp + statBuf.st_mtime = self.modificationTimestamp #elseif canImport(Glibc) || canImport(Musl) || canImport(Android) - statBuf.st_mtim.tv_sec = numericCast(self.modificationTimestamp) - statBuf.st_mtim.tv_nsec = self.modificationNanoseconds + statBuf.st_mtim.tv_sec = numericCast(self.modificationTimestamp) + statBuf.st_mtim.tv_nsec = self.modificationNanoseconds #else - #error("Not implemented for this platform") + #error("Not implemented for this platform") #endif return statBuf } @@ -132,9 +132,9 @@ public enum BuildValueKind: UInt32 { extension llbuild_pid_t { public static var invalid: Self { #if os(Windows) - INVALID_HANDLE_VALUE + INVALID_HANDLE_VALUE #else - -1 + -1 #endif } } @@ -142,9 +142,9 @@ extension llbuild_pid_t { extension llbuild_pid_t { public var pid: pid_t { #if os(Windows) - return Int32(GetProcessId(self)) + return Int32(GetProcessId(self)) #else - return self + return self #endif } } diff --git a/Sources/SWBLibc/libc.swift b/Sources/SWBLibc/libc.swift index 6f980ee6..1323d31d 100644 --- a/Sources/SWBLibc/libc.swift +++ b/Sources/SWBLibc/libc.swift @@ -11,19 +11,19 @@ //===----------------------------------------------------------------------===// #if canImport(Darwin) -@_exported import Darwin.C + @_exported import Darwin.C #elseif os(Windows) -@_exported import ucrt -@_exported import CRT -@_exported import WinSDK + @_exported import ucrt + @_exported import CRT + @_exported import WinSDK #elseif canImport(Glibc) -@_exported import Glibc + @_exported import Glibc #elseif canImport(Musl) -@_exported import Musl + @_exported import Musl #elseif canImport(Android) -@_exported import Android + @_exported import Android #else -#error("Missing libc or equivalent") + #error("Missing libc or equivalent") #endif @_exported import SWBCLibc diff --git a/Sources/SWBMacro/MacroCondition.swift b/Sources/SWBMacro/MacroCondition.swift index 9d1d8bb7..d2715567 100644 --- a/Sources/SWBMacro/MacroCondition.swift +++ b/Sources/SWBMacro/MacroCondition.swift @@ -34,7 +34,7 @@ public final class MacroCondition: Serializable, Hashable, CustomStringConvertib hasher.combine(valuePattern) } - public static func ==(lhs: MacroCondition, rhs: MacroCondition) -> Bool { + public static func == (lhs: MacroCondition, rhs: MacroCondition) -> Bool { return lhs.parameter == rhs.parameter && lhs.valuePattern == rhs.valuePattern } @@ -100,8 +100,7 @@ public final class MacroCondition: Serializable, Hashable, CustomStringConvertib let parmName: String = try deserializer.deserialize() if let aParm = delegate.namespace.lookupConditionParameter(parmName) { self.parameter = aParm - } - else { + } else { self.parameter = delegate.namespace.declareConditionParameter(parmName) } diff --git a/Sources/SWBMacro/MacroConditionExpression.swift b/Sources/SWBMacro/MacroConditionExpression.swift index 38ed351a..0a73ffea 100644 --- a/Sources/SWBMacro/MacroConditionExpression.swift +++ b/Sources/SWBMacro/MacroConditionExpression.swift @@ -14,21 +14,17 @@ import SWBUtil // MARK: MacroConditionExpression classes - /// A 
parsed condition which can be evaluated in the context of a scope to return a boolean or string result. This is used in build options to define conditions under which the option should contribute arguments to a command line. -public class MacroConditionExpression: CustomStringConvertible, @unchecked Sendable -{ +public class MacroConditionExpression: CustomStringConvertible, @unchecked Sendable { /// Parse a ``MacroConditionExpression`` object from ``string``. /// /// - parameter string: The string from which the expression will be parsed. /// - parameter macroNamespace: If passed, this will be used to parse any strings which might contain build settings. If not passed, then the ``BuiltinMacros`` namespace will be used. /// - parameter diagnosticsHandler: If any errors are encountered during parsing, an error string will be passed to this block. Defaults to an empty block. /// - returns: The parsed expression, or nil if any errors were encountered. - public class func fromString(_ string: String, macroNamespace: MacroNamespace, diagnosticsHandler: @escaping (((String) -> Void)) = {_ in }) -> MacroConditionExpression? - { + public class func fromString(_ string: String, macroNamespace: MacroNamespace, diagnosticsHandler: @escaping (((String) -> Void)) = { _ in }) -> MacroConditionExpression? { // If the string is empty, then so is the expression. - if(string.isEmpty) - { + if (string.isEmpty) { return MacroConditionStringConstantExpression(macroNamespace.parseString(string)) } @@ -42,8 +38,7 @@ public class MacroConditionExpression: CustomStringConvertible, @unchecked Senda error = diagnostic diagnosticsHandler(diagnostic) } - if parser.nextToken.type != .eof && error == nil - { + if parser.nextToken.type != .eof && error == nil { // No error was detected but we still have more, unparsed tokens at the end of the string this early termination indicates an error. error = "expected operator or end-of-string, but found '\(parser.scanner.stringForToken(parser.nextToken))' at offset \(parser.nextToken.offset)" diagnosticsHandler(error!) @@ -55,53 +50,45 @@ public class MacroConditionExpression: CustomStringConvertible, @unchecked Senda } /// Evaluate the receiver as a string. - public func evaluateAsString(_ scope: MacroEvaluationScope, lookup: ((MacroDeclaration) -> MacroExpression?)? = nil) -> String - { + public func evaluateAsString(_ scope: MacroEvaluationScope, lookup: ((MacroDeclaration) -> MacroExpression?)? = nil) -> String { fatalError("This method is a subclass responsibility.") } /// Evaluate the receiver as a boolean. - public func evaluateAsBoolean(_ scope: MacroEvaluationScope, lookup: ((MacroDeclaration) -> MacroExpression?)? = nil) -> Bool - { + public func evaluateAsBoolean(_ scope: MacroEvaluationScope, lookup: ((MacroDeclaration) -> MacroExpression?)? = nil) -> Bool { fatalError("This method is a subclass responsibility.") } /** - Parse an expression, which has the following format: - - expression: - ternary-conditional-expression - ; - */ - private class func parseExpression(_ parser: ParserState, _ diagnosticsHandler: @escaping ((String) -> Void)) -> MacroConditionExpression? - { + * Parse an expression, which has the following format: + * + * expression: + * ternary-conditional-expression + * ; + **/ + private class func parseExpression(_ parser: ParserState, _ diagnosticsHandler: @escaping ((String) -> Void)) -> MacroConditionExpression? 
{ return parseTernaryConditionalExpression(parser, diagnosticsHandler) } /** - Parse a ternary conditional expression, which has the following format: - - ternary-conditional-expression: - logical-OR-expression - | logical-OR-expression '?' expression ':' ternary-conditional-expression - ; - */ - private class func parseTernaryConditionalExpression(_ parser: ParserState, _ diagnosticsHandler: @escaping ((String) -> Void)) -> MacroConditionExpression? - { + * Parse a ternary conditional expression, which has the following format: + * + * ternary-conditional-expression: + * logical-OR-expression + * | logical-OR-expression '?' expression ':' ternary-conditional-expression + * ; + **/ + private class func parseTernaryConditionalExpression(_ parser: ParserState, _ diagnosticsHandler: @escaping ((String) -> Void)) -> MacroConditionExpression? { var expr = parseLogicalORExpression(parser, diagnosticsHandler) - if parser.nextToken.type == .questionMark - { + if parser.nextToken.type == .questionMark { // logical-OR-expression '?' expression ':' ternary-conditional-expression parser.step() let thenExpr = (parser.nextToken.type == .colon) ? expr : parseExpression(parser, diagnosticsHandler) - if parser.nextToken.type == .colon - { + if parser.nextToken.type == .colon { parser.step() let elseExpr = parseExpression(parser, diagnosticsHandler) expr = MacroConditionTernaryConditionalExpression(condExpr: expr, thenExpr: thenExpr, elseExpr: elseExpr) - } - else - { + } else { diagnosticsHandler("expected ':' but found '\(parser.scanner.stringForToken(parser.nextToken))' at offset \(parser.nextToken.offset)") } } @@ -109,23 +96,20 @@ public class MacroConditionExpression: CustomStringConvertible, @unchecked Senda } /** - Parse a logical-OR expression, which has the following format: - - logical-OR-expression: - logical-AND-expression - | logical-OR-expression '||' logical-AND-expression - ; - - '``or``' is a synonym for '``||``'. - */ - private class func parseLogicalORExpression(_ parser: ParserState, _ diagnosticsHandler: @escaping ((String) -> Void)) -> MacroConditionExpression? - { + * Parse a logical-OR expression, which has the following format: + * + * logical-OR-expression: + * logical-AND-expression + * | logical-OR-expression '||' logical-AND-expression + * ; + * + * '``or``' is a synonym for '``||``'. + **/ + private class func parseLogicalORExpression(_ parser: ParserState, _ diagnosticsHandler: @escaping ((String) -> Void)) -> MacroConditionExpression? { var expr = parseLogicalANDExpression(parser, diagnosticsHandler) - if parser.nextToken.type == .or - { + if parser.nextToken.type == .or { // logical-AND-expression '&&' equality-expression - repeat - { + repeat { parser.step() expr = MacroConditionLogicalORExpression(leftExpr: expr, rightExpr: parseLogicalANDExpression(parser, diagnosticsHandler)) } while parser.nextToken.type == .or @@ -134,23 +118,20 @@ public class MacroConditionExpression: CustomStringConvertible, @unchecked Senda } /** - Parse a logical-AND expression, which has the following format: - - logical-AND-expression: - equality-expression - | logical-AND-expression '&&' equality-expression - ; - - '``and``' is a synonym for '``&&``'. - */ - private class func parseLogicalANDExpression(_ parser: ParserState, _ diagnosticsHandler: @escaping ((String) -> Void)) -> MacroConditionExpression? 
- { + * Parse a logical-AND expression, which has the following format: + * + * logical-AND-expression: + * equality-expression + * | logical-AND-expression '&&' equality-expression + * ; + * + * '``and``' is a synonym for '``&&``'. + **/ + private class func parseLogicalANDExpression(_ parser: ParserState, _ diagnosticsHandler: @escaping ((String) -> Void)) -> MacroConditionExpression? { var expr = parseEqualityExpression(parser, diagnosticsHandler) - if parser.nextToken.type == .and - { - // logical-AND-expression '&&' equality-expression - repeat - { + if parser.nextToken.type == .and { + // logical-AND-expression '&&' equality-expression + repeat { parser.step() expr = MacroConditionLogicalANDExpression(leftExpr: expr, rightExpr: parseEqualityExpression(parser, diagnosticsHandler)) } while parser.nextToken.type == .and @@ -159,27 +140,24 @@ public class MacroConditionExpression: CustomStringConvertible, @unchecked Senda } /** - Parse an equality expression, which has the following format: - - equality-expression: - relational-expression - | equality-expression '==' relational-expression - | equality-expression '!=' relational-expression - | equality-expression 'contains' relational-expression - ; - - '``is``' is a synonym for '``==``', and '``isnot``' is a synonym for '``!=``'. - */ - private class func parseEqualityExpression(_ parser: ParserState, _ diagnosticsHandler: @escaping ((String) -> Void)) -> MacroConditionExpression? - { + * Parse an equality expression, which has the following format: + * + * equality-expression: + * relational-expression + * | equality-expression '==' relational-expression + * | equality-expression '!=' relational-expression + * | equality-expression 'contains' relational-expression + * ; + * + * '``is``' is a synonym for '``==``', and '``isnot``' is a synonym for '``!=``'. + **/ + private class func parseEqualityExpression(_ parser: ParserState, _ diagnosticsHandler: @escaping ((String) -> Void)) -> MacroConditionExpression? { var expr = parseRelationalExpression(parser, diagnosticsHandler) - if parser.nextToken.type == .equals || parser.nextToken.type == .notEquals || parser.nextToken.type == .contains - { + if parser.nextToken.type == .equals || parser.nextToken.type == .notEquals || parser.nextToken.type == .contains { // equality-expression '==' relational-expression // | equality-expression '!=' relational-expression // | equality-expression 'CONTAINS' relational-expression - repeat - { + repeat { let tokenType = parser.nextToken.type parser.step() switch tokenType @@ -194,26 +172,25 @@ public class MacroConditionExpression: CustomStringConvertible, @unchecked Senda // Kind of an odd default, but it reproduces the original behavior. expr = MacroConditionInequalityExpression(leftExpr: expr, rightExpr: parseRelationalExpression(parser, diagnosticsHandler)) } - } while parser.nextToken.type == .equals || parser.nextToken.type == .notEquals || parser.nextToken.type == .contains + } while parser.nextToken.type == .equals || parser.nextToken.type == .notEquals || parser.nextToken.type == .contains } return expr } /** - Parse a relational expression, which has the following format: - - relational-expression: - unary-expression - | relational-expression '<' unary-expression - | relational-expression '>' unary-expression - | relational-expression '<=' unary-expression - | relational-expression '>=' unary-expression - ; - - At present relational comparisons are not supported, so this method just calls through to ``parseUnaryExpression()``. 
- */ - private class func parseRelationalExpression(_ parser: ParserState, _ diagnosticsHandler: @escaping ((String) -> Void)) -> MacroConditionExpression? - { + * Parse a relational expression, which has the following format: + * + * relational-expression: + * unary-expression + * | relational-expression '<' unary-expression + * | relational-expression '>' unary-expression + * | relational-expression '<=' unary-expression + * | relational-expression '>=' unary-expression + * ; + * + * At present relational comparisons are not supported, so this method just calls through to ``parseUnaryExpression()``. + **/ + private class func parseRelationalExpression(_ parser: ParserState, _ diagnosticsHandler: @escaping ((String) -> Void)) -> MacroConditionExpression? { let expr = parseUnaryExpression(parser, diagnosticsHandler) // TODO: Relational expressions were not implemented in the native build system support for macro condition expressions, so they're not implemented here yet either. @@ -222,77 +199,65 @@ public class MacroConditionExpression: CustomStringConvertible, @unchecked Senda } /** - Parse a unary expression, which has the following format: - - unary-expression: - primary-expression - | 'not' primary-expression - ; - */ - private class func parseUnaryExpression(_ parser: ParserState, _ diagnosticsHandler: @escaping ((String) -> Void)) -> MacroConditionExpression? - { - if parser.nextToken.type == .not - { + * Parse a unary expression, which has the following format: + * + * unary-expression: + * primary-expression + * | 'not' primary-expression + * ; + **/ + private class func parseUnaryExpression(_ parser: ParserState, _ diagnosticsHandler: @escaping ((String) -> Void)) -> MacroConditionExpression? { + if parser.nextToken.type == .not { // 'not' primary-expression parser.step() let expr = parsePrimaryExpression(parser, diagnosticsHandler) return expr != nil ? MacroConditionLogicalNOTExpression(expr!) : nil - } - else - { + } else { // primary-expression return parsePrimaryExpression(parser, diagnosticsHandler) } } /** - Parse a primary expression, which has the following format: - - primary-expression: - constant - | '(' expression ')' - ; - - Note that a quirk in the scanner (carried over from the native build system) requires there to be a space between the last token in ``expression`` and the closing paren ``)``, or else the closing paren will be scanned as part of that last token. - */ - private class func parsePrimaryExpression(_ parser: ParserState, _ diagnosticsHandler: @escaping ((String) -> Void)) -> MacroConditionExpression? - { + * Parse a primary expression, which has the following format: + * + * primary-expression: + * constant + * | '(' expression ')' + * ; + * + * Note that a quirk in the scanner (carried over from the native build system) requires there to be a space between the last token in ``expression`` and the closing paren ``)``, or else the closing paren will be scanned as part of that last token. + **/ + private class func parsePrimaryExpression(_ parser: ParserState, _ diagnosticsHandler: @escaping ((String) -> Void)) -> MacroConditionExpression? { var expr: MacroConditionExpression? = nil switch parser.nextToken.type { case .leftParen: // '(' expression ')' - parser.step() // Skip the '(' + parser.step() // Skip the '(' let e = parseExpression(parser, diagnosticsHandler) - if parser.nextToken.type == .rightParen - { + if parser.nextToken.type == .rightParen { // Found a ')', so the enclosed expression is our expression. 
expr = e - parser.step() // Skip the ')' - } - else - { + parser.step() // Skip the ')' + } else { diagnosticsHandler("expected ')' but found '\(parser.scanner.stringForToken(parser.nextToken))' at offset \(parser.nextToken.offset)") } default: // constant (we treat every token other than '(' and EOF as a string constant) let nextToken = parser.nextToken - if nextToken.type != .eof - { + if nextToken.type != .eof { // The token string might contain backslashes, which have already served their purpose in not ending quoted strings etc. But we need to strip them out now. var unescapedChars = [UInt8]() var i = 0 - while i < nextToken.length - { - var ch = parser.scanner.bytes[nextToken.offset+i] - if ch == 92 /* '\\' */ && i < nextToken.length-1 - { + while i < nextToken.length { + var ch = parser.scanner.bytes[nextToken.offset + i] + if ch == 92 /* '\\' */ && i < nextToken.length - 1 { i += 1 - ch = parser.scanner.bytes[nextToken.offset+i] + ch = parser.scanner.bytes[nextToken.offset + i] } - if i < nextToken.length - { + if i < nextToken.length { unescapedChars.append(ch) } i += 1 @@ -303,15 +268,13 @@ public class MacroConditionExpression: CustomStringConvertible, @unchecked Senda let unescapedString = String(decoding: unescapedChars, as: UTF8.self) let parsedString = parser.macroNamespace.parseString(unescapedString) { diagnostic in // The parse failed - if diagnostic.level == MacroExpressionDiagnostic.Level.error - { + if diagnostic.level == MacroExpressionDiagnostic.Level.error { parseSucceeded = false diagnosticsHandler("Unable to parse string: \(unescapedString)") } } // If the parse succeeded, then set the expression to return. - if parseSucceeded - { + if parseSucceeded { expr = MacroConditionStringConstantExpression(parsedString) } @@ -322,280 +285,226 @@ public class MacroConditionExpression: CustomStringConvertible, @unchecked Senda return expr } - public var description: String - { + public var description: String { return "[\(type(of: self))]" } } - // MARK: Concrete subclasses - /// Abstract base class for expressions that whose natural return type is a string. These can still be converted to booleans using evaluateAsBoolean() -private class MacroConditionStringExpression: MacroConditionExpression, @unchecked Sendable -{ - override func evaluateAsString(_ scope: MacroEvaluationScope, lookup: ((MacroDeclaration) -> MacroExpression?)? = nil) -> String - { +private class MacroConditionStringExpression: MacroConditionExpression, @unchecked Sendable { + override func evaluateAsString(_ scope: MacroEvaluationScope, lookup: ((MacroDeclaration) -> MacroExpression?)? = nil) -> String { fatalError("This method is a subclass responsibility.") } - override func evaluateAsBoolean(_ scope: MacroEvaluationScope, lookup: ((MacroDeclaration) -> MacroExpression?)? = nil) -> Bool - { + override func evaluateAsBoolean(_ scope: MacroEvaluationScope, lookup: ((MacroDeclaration) -> MacroExpression?)? = nil) -> Bool { return evaluateAsString(scope, lookup: lookup).boolValue } } /// A constant string condition expression. 
-private final class MacroConditionStringConstantExpression: MacroConditionStringExpression, @unchecked Sendable -{ +private final class MacroConditionStringConstantExpression: MacroConditionStringExpression, @unchecked Sendable { let macroExpr: MacroStringExpression - init(_ macroExpr: MacroStringExpression) - { + init(_ macroExpr: MacroStringExpression) { self.macroExpr = macroExpr } - override func evaluateAsString(_ scope: MacroEvaluationScope, lookup: ((MacroDeclaration) -> MacroExpression?)? = nil) -> String - { + override func evaluateAsString(_ scope: MacroEvaluationScope, lookup: ((MacroDeclaration) -> MacroExpression?)? = nil) -> String { return scope.evaluate(macroExpr, lookup: lookup) } - override var description: String - { + override var description: String { return "'\(macroExpr.stringRep)'" } } /// Abstract base class for expressions that whose natural return type is a boolean. These can still be converted to strings using evaluateAsString(). -private class MacroConditionBooleanExpression: MacroConditionExpression, @unchecked Sendable -{ - override func evaluateAsString(_ scope: MacroEvaluationScope, lookup: ((MacroDeclaration) -> MacroExpression?)? = nil) -> String - { +private class MacroConditionBooleanExpression: MacroConditionExpression, @unchecked Sendable { + override func evaluateAsString(_ scope: MacroEvaluationScope, lookup: ((MacroDeclaration) -> MacroExpression?)? = nil) -> String { return evaluateAsBoolean(scope, lookup: lookup) ? "YES" : "NO" } - override func evaluateAsBoolean(_ scope: MacroEvaluationScope, lookup: ((MacroDeclaration) -> MacroExpression?)? = nil) -> Bool - { + override func evaluateAsBoolean(_ scope: MacroEvaluationScope, lookup: ((MacroDeclaration) -> MacroExpression?)? = nil) -> Bool { fatalError("This method is a subclass responsibility.") } } // True and False constant expressions are not presently used. @available(*, unavailable) -private final class MacroConditionTrueConstantExpression: MacroConditionBooleanExpression, @unchecked Sendable -{ - override func evaluateAsBoolean(_ scope: MacroEvaluationScope, lookup: ((MacroDeclaration) -> MacroExpression?)? = nil) -> Bool - { +private final class MacroConditionTrueConstantExpression: MacroConditionBooleanExpression, @unchecked Sendable { + override func evaluateAsBoolean(_ scope: MacroEvaluationScope, lookup: ((MacroDeclaration) -> MacroExpression?)? = nil) -> Bool { return true } - override var description: String - { + override var description: String { return "YES" } } // True and False constant expressions are not presently used. @available(*, unavailable) -private final class MacroConditionFalseConstantExpression: MacroConditionBooleanExpression, @unchecked Sendable -{ - override func evaluateAsBoolean(_ scope: MacroEvaluationScope, lookup: ((MacroDeclaration) -> MacroExpression?)? = nil) -> Bool - { +private final class MacroConditionFalseConstantExpression: MacroConditionBooleanExpression, @unchecked Sendable { + override func evaluateAsBoolean(_ scope: MacroEvaluationScope, lookup: ((MacroDeclaration) -> MacroExpression?)? = nil) -> Bool { return false } - override var description: String - { + override var description: String { return "NO" } } /// Abstract base class for boolean expressions that operate on a single operand (either boolean or string). 
-private class MacroConditionUnaryBooleanExpression: MacroConditionExpression, @unchecked Sendable -{ +private class MacroConditionUnaryBooleanExpression: MacroConditionExpression, @unchecked Sendable { let expr: MacroConditionExpression? - init(_ expr: MacroConditionExpression?) - { + init(_ expr: MacroConditionExpression?) { self.expr = expr } - override var description: String - { + override var description: String { return "(\(type(of: self)) \(String(describing: expr)) )" } } -private final class MacroConditionLogicalNOTExpression: MacroConditionUnaryBooleanExpression, @unchecked Sendable -{ - override func evaluateAsBoolean(_ scope: MacroEvaluationScope, lookup: ((MacroDeclaration) -> MacroExpression?)? = nil) -> Bool - { +private final class MacroConditionLogicalNOTExpression: MacroConditionUnaryBooleanExpression, @unchecked Sendable { + override func evaluateAsBoolean(_ scope: MacroEvaluationScope, lookup: ((MacroDeclaration) -> MacroExpression?)? = nil) -> Bool { let value = (expr != nil ? expr!.evaluateAsBoolean(scope, lookup: lookup) : false) return value == false } - override var description: String - { + override var description: String { return "(NOT \(String(describing: expr)) )" } } /// Abstract base class for boolean expressions that operate on two operands (either booleans or strings). -private class MacroConditionBinaryBooleanExpression: MacroConditionBooleanExpression, @unchecked Sendable -{ +private class MacroConditionBinaryBooleanExpression: MacroConditionBooleanExpression, @unchecked Sendable { let leftExpr: MacroConditionExpression? let rightExpr: MacroConditionExpression? - init(leftExpr: MacroConditionExpression?, rightExpr: MacroConditionExpression?) - { + init(leftExpr: MacroConditionExpression?, rightExpr: MacroConditionExpression?) { self.leftExpr = leftExpr self.rightExpr = rightExpr } - override var description: String - { + override var description: String { return "(\(type(of: self)) \(String(describing: leftExpr)) \(String(describing: rightExpr)))" } } -private final class MacroConditionEqualityExpression: MacroConditionBinaryBooleanExpression, @unchecked Sendable -{ - override func evaluateAsBoolean(_ scope: MacroEvaluationScope, lookup: ((MacroDeclaration) -> MacroExpression?)? = nil) -> Bool - { +private final class MacroConditionEqualityExpression: MacroConditionBinaryBooleanExpression, @unchecked Sendable { + override func evaluateAsBoolean(_ scope: MacroEvaluationScope, lookup: ((MacroDeclaration) -> MacroExpression?)? = nil) -> Bool { // We don't know if the left and right expressions are booleans or strings (in fact, each could be different), but string comparison encompasses everything boolean comparison does and also lets us do things like "$(X) == 'YES'", so we go with that here. Any subexpression that is boolean will be converted to a string for the purposes of the comparison. let leftString = leftExpr != nil ? leftExpr!.evaluateAsString(scope, lookup: lookup) : "" let rightString = rightExpr != nil ? rightExpr!.evaluateAsString(scope, lookup: lookup) : "" return (leftString == rightString) } - override var description: String - { + override var description: String { return "(\(String(describing: leftExpr)) EQ \(String(describing: rightExpr)) )" } } -private final class MacroConditionInequalityExpression: MacroConditionBinaryBooleanExpression, @unchecked Sendable -{ - override func evaluateAsBoolean(_ scope: MacroEvaluationScope, lookup: ((MacroDeclaration) -> MacroExpression?)? 
= nil) -> Bool - { +private final class MacroConditionInequalityExpression: MacroConditionBinaryBooleanExpression, @unchecked Sendable { + override func evaluateAsBoolean(_ scope: MacroEvaluationScope, lookup: ((MacroDeclaration) -> MacroExpression?)? = nil) -> Bool { // We don't know if the left and right expressions are booleans or strings (in fact, each could be different), but string comparison encompasses everything boolean comparison does and also lets us do things like "$(X) == 'YES'", so we go with that here. Any subexpression that is boolean will be converted to a string for the purposes of the comparison. let leftString = leftExpr != nil ? leftExpr!.evaluateAsString(scope, lookup: lookup) : "" let rightString = rightExpr != nil ? rightExpr!.evaluateAsString(scope, lookup: lookup) : "" return (leftString != rightString) } - override var description: String - { + override var description: String { return "(\(String(describing: leftExpr)) NEQ \(String(describing: rightExpr)) )" } } -private final class MacroConditionLogicalANDExpression: MacroConditionBinaryBooleanExpression, @unchecked Sendable -{ - override func evaluateAsBoolean(_ scope: MacroEvaluationScope, lookup: ((MacroDeclaration) -> MacroExpression?)? = nil) -> Bool - { +private final class MacroConditionLogicalANDExpression: MacroConditionBinaryBooleanExpression, @unchecked Sendable { + override func evaluateAsBoolean(_ scope: MacroEvaluationScope, lookup: ((MacroDeclaration) -> MacroExpression?)? = nil) -> Bool { // We implicitly treat any subexpressions that are actually strings as boolean evaluations, e.g. "'NO' ^ ( 'X' == 'X')" evaluates to false ('NO' is treated as boolean false). let leftBoolValue = leftExpr != nil ? leftExpr!.evaluateAsBoolean(scope, lookup: lookup) : false let rightBoolValue = rightExpr != nil ? rightExpr!.evaluateAsBoolean(scope, lookup: lookup) : false return leftBoolValue && rightBoolValue } - override var description: String - { + override var description: String { return "(\(String(describing: leftExpr)) AND \(String(describing: rightExpr)) )" } } -private final class MacroConditionLogicalORExpression: MacroConditionBinaryBooleanExpression, @unchecked Sendable -{ - override func evaluateAsBoolean(_ scope: MacroEvaluationScope, lookup: ((MacroDeclaration) -> MacroExpression?)? = nil) -> Bool - { +private final class MacroConditionLogicalORExpression: MacroConditionBinaryBooleanExpression, @unchecked Sendable { + override func evaluateAsBoolean(_ scope: MacroEvaluationScope, lookup: ((MacroDeclaration) -> MacroExpression?)? = nil) -> Bool { // We implicitly treat any subexpressions that are actually strings as boolean evaluations, e.g. "'YES' || ( 'X' == 'X')" evaluates to true ('YES' is treated as boolean true). let leftBoolValue = leftExpr != nil ? leftExpr!.evaluateAsBoolean(scope, lookup: lookup) : false let rightBoolValue = rightExpr != nil ? rightExpr!.evaluateAsBoolean(scope, lookup: lookup) : false return leftBoolValue || rightBoolValue } - override var description: String - { + override var description: String { return "(\(String(describing: leftExpr)) OR \(String(describing: rightExpr)) )" } } // XOR is not presently used. @available(*, unavailable) -private final class MacroConditionLogicalXORExpression: MacroConditionBinaryBooleanExpression, @unchecked Sendable -{ - override func evaluateAsBoolean(_ scope: MacroEvaluationScope, lookup: ((MacroDeclaration) -> MacroExpression?)? 
= nil) -> Bool - { +private final class MacroConditionLogicalXORExpression: MacroConditionBinaryBooleanExpression, @unchecked Sendable { + override func evaluateAsBoolean(_ scope: MacroEvaluationScope, lookup: ((MacroDeclaration) -> MacroExpression?)? = nil) -> Bool { // We implicitly treat any subexpressions that are actually strings as boolean evaluations, e.g. "'NO' ^ ( 'X' == 'X')" evaluates to false ('NO' is treated as boolean false). let leftBoolValue = leftExpr != nil ? leftExpr!.evaluateAsBoolean(scope, lookup: lookup) : false let rightBoolValue = rightExpr != nil ? rightExpr!.evaluateAsBoolean(scope, lookup: lookup) : false return leftBoolValue == rightBoolValue } - override var description: String - { + override var description: String { return "(\(String(describing: leftExpr)) XOR \(String(describing: rightExpr)) )" } } -private final class MacroConditionContainsExpression: MacroConditionBinaryBooleanExpression, @unchecked Sendable -{ - override func evaluateAsBoolean(_ scope: MacroEvaluationScope, lookup: ((MacroDeclaration) -> MacroExpression?)? = nil) -> Bool - { +private final class MacroConditionContainsExpression: MacroConditionBinaryBooleanExpression, @unchecked Sendable { + override func evaluateAsBoolean(_ scope: MacroEvaluationScope, lookup: ((MacroDeclaration) -> MacroExpression?)? = nil) -> Bool { // As the contains operator works on strings, we're assuming that both expressions, left and right, will be strings. Also of note, we currently assume the substring search will be case insensitive. It might be good in the future to allow the user to specify case sensitivity vs. non-sensitivity. As a point of comparison, NSPredicate does this by appending '[c]' to the operator. let leftString = leftExpr != nil ? leftExpr!.evaluateAsString(scope, lookup: lookup) : "" let rightString = rightExpr != nil ? rightExpr!.evaluateAsString(scope, lookup: lookup) : "" return leftString.contains(rightString) } - override var description: String - { + override var description: String { return "(\(String(describing: leftExpr)) CONTAINS \(String(describing: rightExpr)) )" } } /// A ternary conditional expression. -private final class MacroConditionTernaryConditionalExpression: MacroConditionExpression, @unchecked Sendable -{ +private final class MacroConditionTernaryConditionalExpression: MacroConditionExpression, @unchecked Sendable { let condExpr: MacroConditionExpression? let thenExpr: MacroConditionExpression? let elseExpr: MacroConditionExpression? - init(condExpr: MacroConditionExpression?, thenExpr: MacroConditionExpression?, elseExpr: MacroConditionExpression?) - { + init(condExpr: MacroConditionExpression?, thenExpr: MacroConditionExpression?, elseExpr: MacroConditionExpression?) { self.condExpr = condExpr self.thenExpr = thenExpr self.elseExpr = elseExpr } - override func evaluateAsString(_ scope: MacroEvaluationScope, lookup: ((MacroDeclaration) -> MacroExpression?)? = nil) -> String - { + override func evaluateAsString(_ scope: MacroEvaluationScope, lookup: ((MacroDeclaration) -> MacroExpression?)? = nil) -> String { // We implicitly treat the condition expression as a boolean expression; if it's a string, it will be evaluated as a boolean. We treat the then or else expression as a string, however, since that is non-lossy while a boolean would be lossy. If the then or else expression, respectively, is a boolean expression, it will be converted to a string. let condBoolValue = condExpr != nil ? 
condExpr!.evaluateAsBoolean(scope, lookup: lookup) : false let resultExpr = condBoolValue ? thenExpr : elseExpr return resultExpr != nil ? resultExpr!.evaluateAsString(scope, lookup: lookup) : "" } - override func evaluateAsBoolean(_ scope: MacroEvaluationScope, lookup: ((MacroDeclaration) -> MacroExpression?)? = nil) -> Bool - { + override func evaluateAsBoolean(_ scope: MacroEvaluationScope, lookup: ((MacroDeclaration) -> MacroExpression?)? = nil) -> Bool { return evaluateAsString(scope, lookup: lookup).boolValue } - override var description: String - { + override var description: String { return "(\(String(describing: condExpr)) ? \(String(describing: thenExpr)) : \(String(describing: elseExpr)) )" } } - // MARK: Scanning - -private enum TokenType: String -{ +private enum TokenType: String { // Not all of these are presently used. case invalid case string @@ -617,8 +526,7 @@ private enum TokenType: String case eof } -private struct Token: CustomStringConvertible -{ +private struct Token: CustomStringConvertible { /// The token type. var type: TokenType @@ -631,29 +539,25 @@ private struct Token: CustomStringConvertible /// Byte length of token in the string. var length: Int - fileprivate var description: String - { + fileprivate var description: String { return "<\(type.rawValue):o=\(offset),l=\(length)>" } } -private class ScannerState -{ +private class ScannerState { /// UTF-8 byte buffer that’s being scanned. let bytes: [UInt8] /// Index of the next character to read in the string. var currIdx: Int - init(_ string: String) - { + init(_ string: String) { self.bytes = [UInt8](string.utf8) self.currIdx = 0 } /// Scans ``bytes`` until the next token has been created, and returns it. - func getNextToken() -> Token - { + func getNextToken() -> Token { // Skip whitespace. scanUntil({ !self.isAtWhitespace }) @@ -661,106 +565,96 @@ private class ScannerState var token = Token(type: .invalid, flags: 0, offset: currIdx, length: 0) switch currChar { - case 0: // EOF + case 0: // EOF token.type = .eof token.length = 0 break - case 61: // '=' + case 61: // '=' advance() - if currChar == 61 - { + if currChar == 61 { // We found a '==' token.type = .equals token.length = 2 advance() } - // Otherwise we just found a '=' + // Otherwise we just found a '=' - case 33: // '!' + case 33: // '!' advance() - if currChar == 61 - { + if currChar == 61 { // We found a '!=' token.type = .notEquals token.length = 2 advance() - } - else - { + } else { // We found a '!' token.type = .not token.length = 1 } - case 38: // '&' + case 38: // '&' advance() - if currChar == 38 - { + if currChar == 38 { // We found a '&&' token.type = .and token.length = 2 advance() } - // Otherwise we just found a '&' + // Otherwise we just found a '&' - case 124: // '|' + case 124: // '|' advance() - if currChar == 124 - { + if currChar == 124 { // We found a '||' token.type = .or token.length = 2 advance() } - // Otherwise we just found a '|' + // Otherwise we just found a '|' - case 63: // '?' + case 63: // '?' token.type = .questionMark token.length = 1 advance() - case 58: // ':' + case 58: // ':' token.type = .colon token.length = 1 advance() - case 40: // '(' + case 40: // '(' token.type = .leftParen token.length = 1 advance() - case 41: // ')' + case 41: // ')' token.type = .rightParen token.length = 1 advance() - case 39, 34: // '\'' and '\"' + case 39, 34: // '\'' and '\"' // Scanning a quoted string. // Save the quote character and advance the token's index beyond it. 
let quoteChar = currChar let startIdx = currIdx token.offset += 1 // Scan until the matching quote, if there is one. - repeat - { + repeat { advance() - if currChar == 92 { advance() } // Skip '\\' - } while currChar != quoteChar && !isAtEndOfStream + if currChar == 92 { advance() } // Skip '\\' + } while currChar != quoteChar && !isAtEndOfStream var endIdx = currIdx - if currChar == quoteChar - { + if currChar == quoteChar { // Skip over the trailing quote. advance() endIdx = currIdx - } - else - { + } else { // unterminated quote... what to do? - endIdx += 1 // Simulate a quote so that the math works out when setting token.length below + endIdx += 1 // Simulate a quote so that the math works out when setting token.length below } token.type = .string - token.length = ((endIdx - 2) - startIdx) // excludes the leading and trailing quotes + token.length = ((endIdx - 2) - startIdx) // excludes the leading and trailing quotes default: // Scan until we reach whitespace or end-of-string. Then record the range of what we scanned. @@ -770,40 +664,26 @@ private class ScannerState token.length = (currIdx - startIdx) // Recognize keywords - if token.length == 2 && bytes[startIdx ..< currIdx] == "or" - { + if token.length == 2 && bytes[startIdx..<currIdx] == "or" { - while ((token.offset+token.length) < bytes.count) && bytes[token.offset+token.length] > 32 // ' ' + while ((token.offset + token.length) < bytes.count) && bytes[token.offset + token.length] > 32 // ' ' { token.length += 1 } @@ -815,87 +695,73 @@ private class ScannerState } /// Returns the UTF-8 byte at the current position, which is zero if the cursor is currently at the very end of the string. - private var currChar: UInt8 - { + private var currChar: UInt8 { assert(currIdx <= bytes.count) return (currIdx < bytes.count) ? bytes[currIdx] : 0 } /// Returns the UTF-8 byte at the position immediately after the current position, which is zero if the cursor is currently at either the very end of the string or the position before the very end. - private var nextChar: UInt8 - { + private var nextChar: UInt8 { assert(currIdx <= bytes.count) return (currIdx + 1 < bytes.count) ? bytes[currIdx + 1] : 0 } /// Returns the UTF-8 byte at the position immediately after the position immediately after the current position, which is zero if the cursor is currently at either the very end of the string, at the position before the very end, or at the position before that. - private var nextNextChar: UInt8 - { + private var nextNextChar: UInt8 { assert(currIdx <= bytes.count) return (currIdx + 2 < bytes.count) ? bytes[currIdx + 2] : 0 } /// Advances the character to the next position, or does nothing if the cursor is currently already at end-of-string. - private func advance(_ offset: Int = 1) - { + private func advance(_ offset: Int = 1) { assert(currIdx <= bytes.count) currIdx = min(currIdx + offset, bytes.count) } /// Returns true if and only if the cursor is currently at end-of-string. - private var isAtEndOfStream: Bool - { + private var isAtEndOfStream: Bool { assert(currIdx <= bytes.count) return currIdx == bytes.count } /// Returns true if and only if the cursor is currently at a whitespace character. The characters considered whitespace are the same as the `isspace()` function, i.e. space (` `), horizontal tab (`\t`), vertical tab (`\v`), carriage return (`\r`), newline (`\n`), and form feed (`\f`).
- private var isAtWhitespace: Bool - { + private var isAtWhitespace: Bool { return currChar == /* ' ' */ 32 || (currChar >= /* '\t' */ 9 && currChar <= /* '\r' */ 13) } /// Advances the cursor until it reaches end-of-string or until the block (which is invoked for each character) returns true. Leaves the cursor at the character (if any) that caused the scan to stop. Returns the (possibly empty) substring from the starting position to (but not including) the stop position. - private func scanUntil(_ block: (UInt8) -> Bool) - { + private func scanUntil(_ block: (UInt8) -> Bool) { // Record the starting index, and advance until we reach end-of-string or one of the specified stop characters. After that we return the (possibly empty) substring. while !(isAtEndOfStream || block(currChar)) { advance() } } - private func scanUntil(_ block: () -> Bool) - { + private func scanUntil(_ block: () -> Bool) { // Record the starting index, and advance until we reach end-of-string or one of the specified stop characters. After that we return the (possibly empty) substring. while !(isAtEndOfStream || block()) { advance() } } - func stringForToken(_ token: Token) -> String - { - return String(decoding: bytes[token.offset ..< token.offset+token.length], as: UTF8.self) + func stringForToken(_ token: Token) -> String { + return String(decoding: bytes[token.offset..<token.offset + token.length], as: UTF8.self) - public static func ==(lhs: MacroConditionParameter, rhs: MacroConditionParameter) -> Bool { + public static func == (lhs: MacroConditionParameter, rhs: MacroConditionParameter) -> Bool { return lhs === rhs } diff --git a/Sources/SWBMacro/MacroConditionSet.swift b/Sources/SWBMacro/MacroConditionSet.swift index 8894baf8..b323298c 100644 --- a/Sources/SWBMacro/MacroConditionSet.swift +++ b/Sources/SWBMacro/MacroConditionSet.swift @@ -12,7 +12,7 @@ public import SWBUtil -public final class MacroConditionSet : Serializable, CustomStringConvertible, Sendable { +public final class MacroConditionSet: Serializable, CustomStringConvertible, Sendable { /// The conditions, ordered from highest to lowest priority. public let conditions: Array<MacroCondition> @@ -23,7 +23,7 @@ public final class MacroConditionSet : Serializable, CustomStringConvertible, Se /// Returns the condition for the parameter, if any. public subscript(_ parameter: MacroConditionParameter) -> MacroCondition? { - return conditions.first{ $0.parameter === parameter } + return conditions.first { $0.parameter === parameter } } /// Evaluates the condition against the dictionary of parameter values, returning `true` if there’s a match and `false` if not. Missing values are interpreted as the empty value, and only match the condition if the `fnmatch()`-style pattern is `*` (meaning that it matches everything).
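Note (illustrative, not part of the patch): the condition-expression grammar documented in the hunks above accepts strings like the ones below. The setting names are only for illustration, and the public entry point that parses such strings is not shown in this section, so this is just a plain Swift array of sample inputs.

// Per the doc comments above: 'and' is a synonym for '&&', 'is' for '==', 'isnot' for '!=',
// boolean results are bridged to the strings "YES"/"NO", and 'contains' is currently a
// case-insensitive substring test on the string forms of its operands.
let conditionExamples: [String] = [
    "$(CONFIGURATION) == 'Debug' && $(ENABLE_TESTABILITY) is 'YES'",
    "$(ARCHS) contains 'arm64'",
    // 'not' negates a primary expression; note the scanner quirk that requires a space
    // before the closing ')' so it is not scanned as part of the preceding token.
    "not ( $(SKIP_INSTALL) )",
    // Ternary: the condition is read as a boolean, the chosen branch as a string.
    "$(WRAPPER_EXTENSION) == 'app' ? 'YES' : 'NO'",
]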
@@ -33,7 +33,7 @@ public final class MacroConditionSet : Serializable, CustomStringConvertible, Se } public var description: String { - return conditions.map{ "[\($0)]" }.joined(separator: "") + return conditions.map { "[\($0)]" }.joined(separator: "") } // Serialization @@ -51,7 +51,7 @@ public final class MacroConditionSet : Serializable, CustomStringConvertible, Se } extension MacroConditionSet: Equatable { - public static func ==(lhs: MacroConditionSet, rhs: MacroConditionSet) -> Bool { + public static func == (lhs: MacroConditionSet, rhs: MacroConditionSet) -> Bool { return lhs.conditions == rhs.conditions } } diff --git a/Sources/SWBMacro/MacroConfigFileDiagnostic.swift b/Sources/SWBMacro/MacroConfigFileDiagnostic.swift index ccfb3811..9154619c 100644 --- a/Sources/SWBMacro/MacroConfigFileDiagnostic.swift +++ b/Sources/SWBMacro/MacroConfigFileDiagnostic.swift @@ -72,7 +72,7 @@ public struct MacroConfigFileDiagnostic: Sendable { } extension MacroConfigFileDiagnostic: Equatable { - public static func ==(lhs: MacroConfigFileDiagnostic, rhs: MacroConfigFileDiagnostic) -> Bool { + public static func == (lhs: MacroConfigFileDiagnostic, rhs: MacroConfigFileDiagnostic) -> Bool { if lhs.kind != rhs.kind { return false } if lhs.level != rhs.level { return false } if lhs.message != rhs.message { return false } diff --git a/Sources/SWBMacro/MacroConfigFileParser.swift b/Sources/SWBMacro/MacroConfigFileParser.swift index 63610a08..8611109d 100644 --- a/Sources/SWBMacro/MacroConfigFileParser.swift +++ b/Sources/SWBMacro/MacroConfigFileParser.swift @@ -53,7 +53,6 @@ public final class MacroConfigFileParser { /// Index of the start of the current line in the byte array. private var currentLineStartIdx: Int = 0 - /// Initializes the macro expression parser with the given string and delegate. How the string is parsed depends on the particular parse method that’s invoked, such as `parseAsString()` or `parseAsStringList()`, and not on the configuration of the parser. public init(byteString: ByteString, path: Path, delegate: (any MacroConfigFileParserDelegate)?) { self.delegate = delegate @@ -70,78 +69,71 @@ public final class MacroConfigFileParser { /// Returns the current column number of the parser. Column numbers are one-based. public var columnNumber: Int { return currIdx - currentLineStartIdx + 1 } - /* - - Grammar: - - xcconfig: - line - - line: - "" // empty - "#" directive - assignment ';'? - - directive: - "include" '\"' string '\"' - "include?" '\"' string '\"' - - assignment: - macro '=' - macro '=' value - macro conditions '=' value - - conditions: - condition - conditions condition - - condition: - '[' condition-parameter '=' condition-pattern ']' - - condition-parameter: - anything-except-equals-sign-and-right-bracket+ - - condition-pattern: - anything-except-right-bracket+ - - comment: - '/' '/' anything-except-newline '\n' - - end-of-line: - '\n' - '\r' - '\r' '\n' - - - - */ - + /** + * Grammar: + * + * xcconfig: + * line + * + * line: + * "" // empty + * "#" directive + * assignment ';'? + * + * directive: + * "include" '\"' string '\"' + * "include?" 
'\"' string '\"' + * + * assignment: + * macro '=' + * macro '=' value + * macro conditions '=' value + * + * conditions: + * condition + * conditions condition + * + * condition: + * '[' condition-parameter '=' condition-pattern ']' + * + * condition-parameter: + * anything-except-equals-sign-and-right-bracket+ + * + * condition-pattern: + * anything-except-right-bracket+ + * + * comment: + * '/' '/' anything-except-newline '\n' + * + * end-of-line: + * '\n' + * '\r' + * '\r' '\n' + * + * + * + **/ /// Checks whether the cursor is at the end of a line, i.e. either at a line terminator (\n, \r, \r\n, or a UTF8-coded Unicode Line Separator or Paragraph Separator) or at the end of the entire input. private var isAtEndOfLine: Bool { return isAtEndOfStream || currChar == /* '\n' */ 10 || currChar == /* '\r' */ 13 || (currChar == 0xE2 && nextChar == 0x80 && (nextNextChar == /* Unicode Line Separator */ 0xA8 || nextNextChar == /* Unicode Paragraph Separator */ 0xA9)) } - /// Checks whether the cursor is at the start of a `//` style comment. private var isAtStartOfComment: Bool { return currChar == /* '/' */ 47 && nextChar == /* '/' */ 47 } - /// If the cursor is at an end-of-line (as determined by `isAtEOL()`), it is advanced past the end-of-line and `currLine` is incremented. Otherwise this function does nothing. Note that an end-of-line may be one, two, or three bytes in length. private func scanEOL() { var advancement = 0 if currChar == /* '\n' */ 10 { advancement = 1 - } - else if currChar == /* '\r' */ 13 { + } else if currChar == /* '\r' */ 13 { advancement = (nextChar == /* '\n' */ 10) ? 2 : 1 - } - else if currChar == 0xE2 && nextChar == 0x80 && (nextNextChar == 0xA8 || nextNextChar == 0xA9) { + } else if currChar == 0xE2 && nextChar == 0x80 && (nextNextChar == 0xA8 || nextNextChar == 0xA9) { advancement = 3 - } - else { + } else { return } advance(advancement) @@ -149,25 +141,21 @@ public final class MacroConfigFileParser { currentLineStartIdx = currIdx } - /// Advances the cursor until it reaches the next line terminator or end-of-stream. Does nothing if the cursor is already at the end of a line or of the stream. private func skipRestOfLine() { while !isAtEndOfLine { advance() } } - /// Advances the cursor past spaces, tabs, and comments (but doesn’t go past an end-of-line). private func skipWhitespaceAndComments() { while !isAtEndOfStream { if currChar.isASCIISpace { // Normal whitespace — skip to the next byte. advance() - } - else if isAtStartOfComment { + } else if isAtStartOfComment { // Start of single-line comment. Skip the rest of the line. skipRestOfLine() - } - else { + } else { break } } @@ -183,7 +171,7 @@ public final class MacroConfigFileParser { // FIXME: We should really handle escapes here too. let markIdx = currIdx scanUntil({ self.isAtEndOfLine || $0 == /* '\"' */ 34 }) - let fileName = String(decoding: bytes[markIdx ..< currIdx], as: UTF8.self) + let fileName = String(decoding: bytes[markIdx..<currIdx], as: UTF8.self) - while endIdx > valueMarkIdx && bytes[endIdx-1].isASCIISpace { + while endIdx > valueMarkIdx && bytes[endIdx - 1].isASCIISpace { endIdx -= 1 } // Trim any trailing semicolon (and preceding whitespace). - if bytes[endIdx-1] == /* ';' */ 59 { endIdx -= 1 - while endIdx > valueMarkIdx && bytes[endIdx-1].isASCIISpace { + while endIdx > valueMarkIdx && bytes[endIdx - 1].isASCIISpace { endIdx -= 1 } } // Make the value into a string.
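Note (illustrative, not part of the patch): a small fragment in the xcconfig grammar documented at the top of this file's hunks, embedded here as a Swift string literal; the file names and values are only for illustration.

// Exercises the grammar above: '#include' / '#include?' directives, assignments with an
// optional [param=pattern] condition and an optional trailing ';', and '//' comments.
let sampleXCConfig = """
    // Shared settings; the '?' form does not complain if the file is missing.
    #include "Shared.xcconfig"
    #include? "Optional.xcconfig"

    SWIFT_VERSION = 6.0
    OTHER_LDFLAGS = $(inherited) -lz;
    EXCLUDED_ARCHS[sdk=iphonesimulator*] = arm64
    """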
- let value = String(decoding: bytes[valueMarkIdx ..< endIdx], as: UTF8.self) + let value = String(decoding: bytes[valueMarkIdx..<endIdx], as: UTF8.self) (String?, [(param: String, pattern: String)]?) { // We do this using our existing parsing code. - class SingleMacroNameDelegate : MacroConfigFileParserDelegate { + class SingleMacroNameDelegate: MacroConfigFileParserDelegate { var macroName: String? var conditions: [(param: String, pattern: String)]? func foundPreprocessorInclusion(_ fileName: String, optional: Bool, parser: MacroConfigFileParser) -> MacroConfigFileParser? { @@ -556,7 +532,6 @@ public final class MacroConfigFileParser { } } - /// Returns true if and only if `ch` is an uppercase or lowercase letter in the ASCII range (i.e., it matches '[A-Za-z]'). private func isAlpha(_ ch: UInt8) -> Bool { return (ch >= /* 'A' */ 65 && ch <= /* 'Z' */ 90) || (ch >= /* 'a' */ 97 && ch <= /* 'z' */ 122) @@ -577,7 +552,6 @@ private func isValidMacroNameNthChar(_ ch: UInt8) -> Bool { return isAlpha(ch) || isDigit(ch) || ch == /* '_' */ 95 } - /// Encapsulates the callbacks that a .xcconfig file parser invokes during a parse. All methods are optional. Separating the actions into a protocol allows the .xcconfig parser to be used for a variety of tasks, and makes it easier to test and profile. The parser is passed to each of the delegate methods, and its `position` property can be used to access the current index in the original string. The parser is as lenient as possible, and tries to recover from errors as well as possible in order to preserve the Xcode semantics. The delegate is guaranteed to see the entire contents of the input string, regardless of how many errors are discovered (some of that contents might be misparsed in the wake of errors, however). public protocol MacroConfigFileParserDelegate { diff --git a/Sources/SWBMacro/MacroDeclaration.swift b/Sources/SWBMacro/MacroDeclaration.swift index 9ae5d4a9..46c76399 100644 --- a/Sources/SWBMacro/MacroDeclaration.swift +++ b/Sources/SWBMacro/MacroDeclaration.swift @@ -33,7 +33,7 @@ public class MacroDeclaration: Hashable, CustomStringConvertible, Encodable, @un } /// Tests for quality based on the identities of the two macro declarations. - public static func ==(lhs: MacroDeclaration, rhs: MacroDeclaration) -> Bool { + public static func == (lhs: MacroDeclaration, rhs: MacroDeclaration) -> Bool { return lhs === rhs } diff --git a/Sources/SWBMacro/MacroEvaluationProgram.swift b/Sources/SWBMacro/MacroEvaluationProgram.swift index 530329c5..e6b97fcb 100644 --- a/Sources/SWBMacro/MacroEvaluationProgram.swift +++ b/Sources/SWBMacro/MacroEvaluationProgram.swift @@ -377,33 +377,32 @@ final class MacroEvaluationProgram: Serializable, Sendable { for instr in instructions { switch instr { - case .appendLiteral(let s): + case .appendLiteral(let s): // Emit a literal sequence of characters to the result buffer. Even an empty string can have significant meaning if it causes a pending list element separator to be made real, so we don’t take any shortcuts here by checking for empty string or anything like that. Any instruction that is actually unnecessary should have already been optimized out by the instruction generation logic anyway. (subresults.last ?? resultBuilder).append(s) - case .appendStringFormOnlyLiteral(let s): + case .appendStringFormOnlyLiteral(let s): // Emit a literal sequence of characters to the result buffer, as with `.appendLiteral`, but only if 'alwaysEvalAsString' is true.
This is used for whitespace, quotes, and escape characters that appear in the string form but not the string list form. This allows the same macro evaluation program to be used for both the string form and the string list form. if alwaysEvalAsString { (subresults.last ?? resultBuilder).append(s) } - case .setNeedsListSeparator(let s): + case .setNeedsListSeparator(let s): // Either set a list separator or add a string-list-form-only substring (such as whitespace) to the result buffer, depending on whether or not the caller wants us to always execute the evaluation program as a string. Note that we don’t look at `allEvalsAreStrings` here — that refers to evaluation of any embedded macro references. if alwaysEvalAsString { // Add the whitespace which was captured for this separator in the string form. (subresults.last ?? resultBuilder).append(s) - } - else { + } else { // Tell the result builder that we’ll need a list element separator. This doesn’t add one immediately, but rather sets a flag so that the next `.appendLiteral` instruction will cause a list separator to be added. This allows us to, for example, concatenate a completely empty array without getting extraneous list separators. (subresults.last ?? resultBuilder).setNeedsListElementSeparator() } - case .beginSubresult: + case .beginSubresult: // Push a new, empty buffer onto the top of the subresult stack. This must be balanced by one of the below instructions that use and pop result buffers off the stack. let subresult = MacroEvaluationResultBuilder() subresults.append(subresult) - case .evalNamedMacro(let asString, let preservesOriginal): + case .evalNamedMacro(let asString, let preservesOriginal): // Pop the topmost subresult buffer, and use its contents as the name of a macro to evaluate. It’s an internal error if the subresult stack is empty. let nb = subresults.popLast()! let s = nb.buildString() @@ -413,8 +412,7 @@ final class MacroEvaluationProgram: Serializable, Sendable { if let value = context.nextValueForMacro(macro) { // We found a value, so we evaluate its associated "macro evaluation program” into it the topmost subresult buffer. Note that multiple programs often contribute to the same buffer, e.g. in "$(X)/$(Y)". value.expression.evaluate(context: MacroEvaluationContext(scope: context.scope, macro: macro, value: value, parent: context), resultBuilder: subresults.last!, alwaysEvalAsString: asString || alwaysEvalAsString) - } - else { + } else { if preservesOriginal { // If we are preserving the original string, we append it now. (subresults.last ?? resultBuilder).append("$" + s) @@ -425,20 +423,19 @@ final class MacroEvaluationProgram: Serializable, Sendable { } } } - } - else { + } else { // It’s an unknown macro, so we cannot possibly have any definition for it — this should really be reported back as an error, and we should refine the API so that we can tell the calling context about it. For now we silently append either the original string, if we've been asked to preserve it. if preservesOriginal { (subresults.last ?? resultBuilder).append("$" + s) } } - case .mergeSubresult: + case .mergeSubresult: // Pop the topmost subresult buffer, and merge its contents into the buffer below it buffer. It’s an internal error if the subresult stack is empty. let nb = subresults.popLast()! (subresults.last ?? 
resultBuilder).appendContentsOfResultBuilder(nb) - case .applyRetrievalOperator(let op): + case .applyRetrievalOperator(let op): // Pop the topmost subresult buffer, and apply the retrieval operator to each of its elements. This results in a new equivalent subresult buffer, which we then push. It’s an internal error if the subresult stack is empty. let sb = subresults.popLast()! let nb = MacroEvaluationResultBuilder() @@ -448,7 +445,7 @@ final class MacroEvaluationProgram: Serializable, Sendable { } subresults.append(nb) - case .applyReplacementOperator(let op): + case .applyReplacementOperator(let op): // Pop the topmost subresult buffer, and apply the retrieval operator to each of its elements. This results in a new equivalent subresult buffer, which we then push. It’s an internal error if the subresult stack is empty. let operand = subresults.popLast()!.buildString() let sb = subresults.popLast()! @@ -458,8 +455,7 @@ final class MacroEvaluationProgram: Serializable, Sendable { nb.append(op.apply(to: elem, withReplacement: operand)) nb.setNeedsListElementSeparator() } - } - else { + } else { // Special case: If the subresult buffer is empty, but the operator wants to be applied even to empty results, then we do so here, applying it to an empty string. if op.applyToEmptyResult { nb.append(op.apply(to: "", withReplacement: operand)) @@ -508,7 +504,6 @@ final class MacroEvaluationProgram: Serializable, Sendable { } } - /// A helper class to build a MacroEvaluationProgram. Regular clients of macro evaluation don’t need to be aware of this class. final class MacroEvaluationProgramBuilder { @@ -526,7 +521,6 @@ final class MacroEvaluationProgramBuilder { } } - /// Lets a macro expression evaluator program build a result (either a string or a string list). Conceptually, the result consists of a sequence of literal string fragments separated by “list element separators”, which demarcate the subranges of the result string that form the string list elements. Both string and string list results are supported using the same function (a string and a single-element string list are the same thing, in practical terms). This approach of a single string demarcated by separators is conceptually quite similar to how, for example, a continuous stream of audio data is separated into tracks using “cue sheet split points” on a CD. In the case of macro expression evaluation, this approach avoids many of the special cases that would otherwise occur when evaluating a string list that refers to a mixture of string and string list subexpressions. final class MacroEvaluationResultBuilder { @@ -545,7 +539,7 @@ final class MacroEvaluationResultBuilder { fileprivate private(set) var hasHadAnyTextAppended = false /// Create a macro result builder. - init() { } + init() {} /// If the “needs list element separator” flag has been set, this function adds a list separator for the index corresponding to the current end of the accumulator string, and clears the flag. Otherwise, this function does nothing. Clients never invoke this function directly — instead, they note the need for a list element separator and let it be created the next time anything is appended. 
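Note (hypothetical, not part of the patch): a rough picture of how a reference like "$(X)/$(Y)" might flow through the instruction set described above. The EvalInstr cases are internal and the real instruction stream is produced by the expression parser, so this only names the steps rather than constructing actual instructions.

// Conceptual sketch only; ordering and details may differ from the real generated program.
let roughSteps: [String] = [
    "beginSubresult",         // push a buffer that will collect the macro *name*
    "appendLiteral(\"X\")",   // the referenced name
    "evalNamedMacro",         // pop the name, evaluate X's value into the enclosing buffer
    "appendLiteral(\"/\")",   // literal text between the two references
    "beginSubresult",
    "appendLiteral(\"Y\")",
    "evalNamedMacro",
]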
private func applyPendingListElementSeparatorIfNeeded() { diff --git a/Sources/SWBMacro/MacroEvaluationScope.swift b/Sources/SWBMacro/MacroEvaluationScope.swift index 0c4c5e14..8157c7d8 100644 --- a/Sources/SWBMacro/MacroEvaluationScope.swift +++ b/Sources/SWBMacro/MacroEvaluationScope.swift @@ -27,12 +27,18 @@ private extension MacroValueAssignmentTable { /// A lightweight parameterized “view” of a MacroValueAssignmentTable, allowing clients to evaluate macro expressions under a particular set of conditions. In the future a MacroEvaluationScope will also bind conditions to affect the values that are found in the table. Unlike many of the other classes in the macro evaluation subsystem, MacroEvaluationScope is a prominent class from a client perspective — after declaring macros, parsing macro expressions, and creating macro-to-expression tables, all actual evaluation occurs through a MacroEvaluationScope. public final class MacroEvaluationScope: Serializable, Sendable { - static let evaluations = Statistic("MacroEvaluationScope.evaluations", - "The number of evaluation requests.") - static let evaluationsComputed = Statistic("MacroEvaluationScope.evaluationsComputed", - "The number of evaluations which were actually computed (not cached).") - static let exprEvaluations = Statistic("MacroEvaluationScope.exprEvaluations", - "The number of expression evaluation requests.") + static let evaluations = Statistic( + "MacroEvaluationScope.evaluations", + "The number of evaluation requests." + ) + static let evaluationsComputed = Statistic( + "MacroEvaluationScope.evaluationsComputed", + "The number of evaluations which were actually computed (not cached)." + ) + static let exprEvaluations = Statistic( + "MacroEvaluationScope.exprEvaluations", + "The number of expression evaluation requests." + ) private struct SubscopeKey: Hashable { let parameter: MacroConditionParameter @@ -227,8 +233,6 @@ public final class MacroEvaluationScope: Serializable, Sendable { return compute() } - - /// Evaluate the given string macro and return the result. /// /// - Parameter lookup: If provided, this closure will be invoked for each initial macro lookup to potentially supply an alternate expression to evaluate. @@ -274,9 +278,9 @@ public final class MacroEvaluationScope: Serializable, Sendable { serializer.serialize(parm.name) serializer.serialize(values) } - } // key-value pair - } // conditionParameterValues - } // the whole scope + } // key-value pair + } // conditionParameterValues + } // the whole scope } public init(from deserializer: any Deserializer) throws { @@ -297,8 +301,7 @@ public final class MacroEvaluationScope: Serializable, Sendable { let parm: MacroConditionParameter if let aParm = delegate.namespace.lookupConditionParameter(parmName) { parm = aParm - } - else { + } else { parm = delegate.namespace.declareConditionParameter(parmName) } diff --git a/Sources/SWBMacro/MacroExpression.swift b/Sources/SWBMacro/MacroExpression.swift index e54655c5..5fba8eb2 100644 --- a/Sources/SWBMacro/MacroExpression.swift +++ b/Sources/SWBMacro/MacroExpression.swift @@ -13,7 +13,7 @@ public import SWBUtil /// A macro value expression is a parsed representation of a string that might contain macro references. Parsed macro expressions are immutable, and depend only on the contents of the input string. Macro names are currently bound after parsing, so macro expressions are in fact independent of the namespace that was used to parse them. 
There is no public API on MacroExpression to evaluate it in a MacroEvaluationScope — instead, use the `evaluate()` method on MacroEvaluationScope. -public class MacroExpression : PolymorphicSerializable, CustomStringConvertible, @unchecked Sendable { +public class MacroExpression: PolymorphicSerializable, CustomStringConvertible, @unchecked Sendable { /// String representation from which the expression was instantiated. public let stringRep: String @@ -64,23 +64,23 @@ public class MacroExpression : PolymorphicSerializable, CustomStringConvertible, public static let implementations: [SerializableTypeCode: any PolymorphicSerializable.Type] = [ 0: MacroExpression.self, 1: MacroStringExpression.self, - 2: MacroStringListExpression.self - ] + 2: MacroStringListExpression.self, + ] } extension MacroExpression: Equatable { - public static func ==(lhs: MacroExpression, rhs: MacroExpression) -> Bool { + public static func == (lhs: MacroExpression, rhs: MacroExpression) -> Bool { // Two MacroExpressions are the same if they are of the same type and have the same string representation. return type(of: lhs) == type(of: rhs) && lhs.stringRep == rhs.stringRep } } /// Represents a macro expression that can be evaluated as a string. -public final class MacroStringExpression : MacroExpression, Encodable, @unchecked Sendable { +public final class MacroStringExpression: MacroExpression, Encodable, @unchecked Sendable { } /// Represents a macro expression that can be evaluated as a string list. -public final class MacroStringListExpression : MacroExpression, Encodable, @unchecked Sendable { +public final class MacroStringListExpression: MacroExpression, Encodable, @unchecked Sendable { } /// Support static storage of parsed string expressions. diff --git a/Sources/SWBMacro/MacroExpressionDiagnostic.swift b/Sources/SWBMacro/MacroExpressionDiagnostic.swift index d67ae774..43d03652 100644 --- a/Sources/SWBMacro/MacroExpressionDiagnostic.swift +++ b/Sources/SWBMacro/MacroExpressionDiagnostic.swift @@ -72,7 +72,7 @@ public struct MacroExpressionDiagnostic: CustomDebugStringConvertible, Sendable } extension MacroExpressionDiagnostic: Equatable { - public static func ==(lhs: MacroExpressionDiagnostic, rhs: MacroExpressionDiagnostic) -> Bool { + public static func == (lhs: MacroExpressionDiagnostic, rhs: MacroExpressionDiagnostic) -> Bool { if lhs.string != rhs.string { return false } if lhs.range != rhs.range { return false } if lhs.kind != rhs.kind { return false } diff --git a/Sources/SWBMacro/MacroExpressionParsing.swift b/Sources/SWBMacro/MacroExpressionParsing.swift index 601927a4..93039216 100644 --- a/Sources/SWBMacro/MacroExpressionParsing.swift +++ b/Sources/SWBMacro/MacroExpressionParsing.swift @@ -34,11 +34,9 @@ public final class MacroExpressionParser { self.currIdx = utf8.startIndex } - /// Returns the current position of the scanner, as an index into the input string (which can be accessed through the `string` property). This is commonly used by custom implementations of the parser delegate function callbacks — the description of each callback function specifies the guaranteed position of the parser in the input string at the time the callback function is invoked. private var position: Input.Index { return currIdx } - /// Parses the contents of the input string as a macro expression using string semantics, i.e. one that doesn’t attach special significance to quotes and whitespace. 
The parser’s delegate methods are invoked as the expression is parsed — this includes methods for detecting warnings and errors. In accordance with historical semantics, parsing tries to recover after errors, so that as many errors as possible can be detected in one parse. public func parseAsString() { // Check for the edge case of a completely empty string. @@ -53,8 +51,7 @@ public final class MacroExpressionParser { if currChar == UInt8(ascii: "$") { // We found a substitution subexpression of some kind. We parse it, invoking delegate methods and advancing the cursor as we go. parseSubstitutionSubexpression(alwaysEvalAsString: true) - } - else { + } else { // Collect literal characters until we either find another substitution subexpression or reach end-of-string. if let literal = scanUntil(UInt8(ascii: "$")) { // We found at least one literal character, so tell the delegate about it. @@ -67,7 +64,6 @@ public final class MacroExpressionParser { assert(isAtEnd) } - /// Parses the contents of the input string as a macro expression using string list semantics, i.e. one that respects quotes and backslashes, and that treats unquoted whitespace as string list element separators. The parser’s delegate methods are invoked as the expression is parsed — this includes methods for detecting warnings and errors. Of special note is that the delegate’s “list element separator” callback is invoked between list elements. In accordance with historical semantics, parsing tries to recover after errors, so that as many errors as possible can be detected in one parse. public func parseAsStringList() { // Capture unquoted unescaped whitespace to be emitted only when evaluating as a string. We don't capture it as a list separator because we never have a separator before the first element of the list. @@ -98,7 +94,6 @@ public final class MacroExpressionParser { assert(isAtEnd) } - /// Private function that parses a chunk of whitespace, stopping at end-of-string or at the first non-whitespace character. The cursor is expected to already be positioned at the first whitespace character of the chunk. The parser’s delegate methods are invoked as the whitespace is traversed — this includes methods for detecting warnings and errors. private func parseWhitespace(asListSeparator parseAsListSeparator: Bool) { // If we're not at a whitespace character then we return. (In principle this should never happen.) @@ -110,14 +105,12 @@ public final class MacroExpressionParser { // If directed to capture the whitespace as a list separator, then we do so - *except* that if the whitespace is at the end of the string, then we don't treat it as a list separator. if parseAsListSeparator && !isAtEnd { - delegate.foundListElementSeparator(utf8[markIdx ..< currIdx], parser: self) - } - else { - delegate.foundStringFormOnlyLiteralStringFragment(utf8[markIdx ..< currIdx], parser: self) + delegate.foundListElementSeparator(utf8[markIdx..<currIdx], parser: self) + } else { + delegate.foundStringFormOnlyLiteralStringFragment(utf8[markIdx..<currIdx], parser: self) assert(currIdx > nameIdx) // Tell the delegate about the literal string we found. - delegate.foundLiteralStringFragment(utf8[nameIdx ..< currIdx], parser: self) + delegate.foundLiteralStringFragment(utf8[nameIdx..<currIdx], parser: self) and . if currChar == UInt8(ascii: "$") && nextChar == UInt8(ascii: "(") { @@ -281,16 +266,14 @@ public final class MacroExpressionParser { // Tell the delegate that we’ve found the end of the macro name. delegate.foundEndOfMacroName(wasBracketed: false, parser: self) - } - else { + } else { // We found a ‘$’ character followed by something other than parentheses or an alphanumeric character.
We treat it as a literal ‘$’. let fragment = utf8[origIdx.. origIdx ? utf8[origIdx ..< currIdx] : nil + return currIdx > origIdx ? utf8[origIdx.. UInt8? { // The set of delimiters we support is historical. We do emit warnings about delimiters other than ‘(’. switch ch { - case UInt8(ascii: "("): return UInt8(ascii: ")") - case UInt8(ascii: "{"): return UInt8(ascii: "}") - case UInt8(ascii: "["): return UInt8(ascii: "]") + case UInt8(ascii: "("): return UInt8(ascii: ")") + case UInt8(ascii: "{"): return UInt8(ascii: "}") + case UInt8(ascii: "["): return UInt8(ascii: "]") default: return nil } } @@ -530,7 +500,6 @@ private func isValidOperatorNameChar(_ chOpt: UInt8?) -> Bool { return isAlpha(ch) || (ch >= UInt8(ascii: "0") && ch <= UInt8(ascii: "9")) || ch == UInt8(ascii: "-") || ch == UInt8(ascii: "+") || ch == UInt8(ascii: ".") || ch == UInt8(ascii: "_") } - /// Encapsulates the callbacks that a macro expression parser invokes during a parse. All methods are optional. Separating the actions into a protocol allows the macro expression parser to be used for a variety of tasks, and makes it easier to test and profile. The parser is passed to each of the delegate methods, and its `position` property can be used to access the current index in the original string. The parser is as lenient as possible, and tries to recover from errors as well as possible in order to preserve the Xcode semantics. The delegate is guaranteed to see the entire contents of the input string, regardless of how many errors are discovered (some of that contents might be misparsed as literals after errors have been found, however). public protocol MacroExpressionParserDelegate { typealias Input = MacroExpressionParser.Input diff --git a/Sources/SWBMacro/MacroNamespace.swift b/Sources/SWBMacro/MacroNamespace.swift index eb42c54b..8f675991 100644 --- a/Sources/SWBMacro/MacroNamespace.swift +++ b/Sources/SWBMacro/MacroNamespace.swift @@ -17,16 +17,20 @@ import Synchronization /// /// This class *is* thread-safe (macros may be declared and looked up concurrently). public final class MacroNamespace: CustomDebugStringConvertible, Encodable, Sendable { - static let parsedStrings = Statistic("MacroNamespace.parsedStrings", - "The number of strings which were parsed into macro expressions.") - static let parsedLists = Statistic("MacroNamespace.parsedLists", - "The number of lists which were parsed into macro expression.") + static let parsedStrings = Statistic( + "MacroNamespace.parsedStrings", + "The number of strings which were parsed into macro expressions." + ) + static let parsedLists = Statistic( + "MacroNamespace.parsedLists", + "The number of lists which were parsed into macro expression." + ) /// Parent namespace. All declarations in the parent namespace are visible to this namespace, and the same rules regarding type conflicts apply. let parentNamespace: MacroNamespace? /// Maps macro names to declarations. Each declaration is of one of the concrete subclasses of MacroDeclaration, based on its type. - private let macroRegistry = LockedValue>([:]) + private let macroRegistry = LockedValue>([:]) private enum CodingKeys: String, CodingKey { case parentNamespace @@ -135,7 +139,7 @@ public final class MacroNamespace: CustomDebugStringConvertible, Encodable, Send } /// Maps condition parameter names to condition parameters. Each declaration is an instance of MacroConditionParameter. 
- private let conditionParameters = LockedValue>([:]) + private let conditionParameters = LockedValue>([:]) /// Looks up and returns the macro condition parameter that's associated with ‘name’, if any. The name is not allowed to be the empty string. public func lookupConditionParameter(_ name: String) -> MacroConditionParameter? { @@ -216,8 +220,7 @@ public final class MacroNamespace: CustomDebugStringConvertible, Encodable, Send // Emit an instruction to replace the subresult buffer at the top of the buffer stack with the result of applying a ‘retrieval’ operator to it. if let op = MacroEvaluationProgram.MacroEvaluationRetrievalOperator(String(operatorName)!) { programBuilder.emit(.applyRetrievalOperator(op)) - } - else { + } else { // The operator was unrecognized, so emit an error. handleDiagnostic(MacroExpressionDiagnostic(string: parser.string, range: parser.currIdx.. MacroStringExpression { -#if DEBUG - // We make it an error in debug builds to try and parse '$(' as a literal unless we've explicitly specified it's ok. Otherwise, it is almost certainly (but not necessarily) a programmatic error if it ever does. - if !allowSubstitutionPrefix, string.contains("$(") { - fatalError("pushing literal string containing a possible macro reference: \(string)')") - } -#endif + #if DEBUG + // We make it an error in debug builds to try and parse '$(' as a literal unless we've explicitly specified it's ok. Otherwise, it is almost certainly (but not necessarily) a programmatic error if it ever does. + if !allowSubstitutionPrefix, string.contains("$(") { + fatalError("pushing literal string containing a possible macro reference: \(string)')") + } + #endif let emitter = MacroEvaluationProgramBuilder() emitter.emit(MacroEvaluationProgram.EvalInstr.appendLiteral(string)) @@ -277,14 +279,14 @@ public final class MacroNamespace: CustomDebugStringConvertible, Encodable, Send /// "Parses" `strings` as a list of literal strings. public func parseLiteralStringList(_ strings: [String]) -> MacroStringListExpression { -#if DEBUG - for string in strings { - // We make it an error in debug builds to try and parse '$(' as a literal. This never comes up in our test suite, and it is almost certainly (but not necessarily) a programmatic error when if it ever does. - if string.contains("$(") { - fatalError("pushing literal string containing a possible macro reference: \(string)')") + #if DEBUG + for string in strings { + // We make it an error in debug builds to try and parse '$(' as a literal. This never comes up in our test suite, and it is almost certainly (but not necessarily) a programmatic error when if it ever does. 
+ if string.contains("$(") { + fatalError("pushing literal string containing a possible macro reference: \(string)')") + } } - } -#endif + #endif let emitter = MacroEvaluationProgramBuilder() var needsListSeparator = false @@ -378,7 +380,7 @@ public final class MacroNamespace: CustomDebugStringConvertible, Encodable, Send return nil } } - return parseForMacro(macro, value:strings, diagnosticsHandler: diagnosticsHandler) + return parseForMacro(macro, value: strings, diagnosticsHandler: diagnosticsHandler) default: return nil @@ -404,14 +406,14 @@ public final class MacroNamespace: CustomDebugStringConvertible, Encodable, Send // if this setting key is matched by any `associatedTypesForKeysMatching`, use the associatedType provided if let associatedType = associatedTypesForKeysMatching?.first(where: { key.contains($0.key) })?.value { switch associatedType { - case .boolean: return lookupOrDeclareMacro(BooleanMacroDeclaration.self, macroName) - case .string: return lookupOrDeclareMacro(StringMacroDeclaration.self, macroName) - case .stringList: return lookupOrDeclareMacro(StringListMacroDeclaration.self, macroName) - case .userDefined: return lookupOrDeclareMacro(UserDefinedMacroDeclaration.self, macroName) - case .path: return lookupOrDeclareMacro(PathMacroDeclaration.self, macroName) - case .pathList: return lookupOrDeclareMacro(PathListMacroDeclaration.self, macroName) + case .boolean: return lookupOrDeclareMacro(BooleanMacroDeclaration.self, macroName) + case .string: return lookupOrDeclareMacro(StringMacroDeclaration.self, macroName) + case .stringList: return lookupOrDeclareMacro(StringListMacroDeclaration.self, macroName) + case .userDefined: return lookupOrDeclareMacro(UserDefinedMacroDeclaration.self, macroName) + case .path: return lookupOrDeclareMacro(PathMacroDeclaration.self, macroName) + case .pathList: return lookupOrDeclareMacro(PathListMacroDeclaration.self, macroName) } - // If this is a user defined table, unknown settings should be treated as user defined. + // If this is a user defined table, unknown settings should be treated as user defined. } else if allowUserDefined { return lookupOrDeclareMacro(UserDefinedMacroDeclaration.self, macroName) } else { @@ -427,13 +429,11 @@ public final class MacroNamespace: CustomDebugStringConvertible, Encodable, Send if let parsedMacroName = parsedName { macroName = parsedMacroName if let conditions = parsedConditions { - conditionSet = MacroConditionSet(conditions: conditions.map{ MacroCondition(parameter: declareConditionParameter($0.0), valuePattern: $0.1) }) - } - else { + conditionSet = MacroConditionSet(conditions: conditions.map { MacroCondition(parameter: declareConditionParameter($0.0), valuePattern: $0.1) }) + } else { conditionSet = nil } - } - else { + } else { // If we can't parse a name and condition, then use the original key as the name. c.f. macroName = key conditionSet = nil @@ -458,7 +458,7 @@ public final class MacroNamespace: CustomDebugStringConvertible, Encodable, Send /// This just takes a `[String: String]` dictionary and marshals the values as `PropertyListItem.string` values, then calls `parseTable([String: PropertyListItem], ...)` to parse the resulting table. public func parseTable(_ settings: [String: String], allowUserDefined: Bool, associatedTypesForKeysMatching: [String: MacroType]? = nil, diagnosticsHandler: ((MacroExpressionDiagnostic) -> Void)? 
= nil) throws -> MacroValueAssignmentTable { var settingsCopy = [String: PropertyListItem]() - for (key,value) in settings { + for (key, value) in settings { settingsCopy[key] = .plString(value) } return try self.parseTable(settingsCopy, allowUserDefined: allowUserDefined, associatedTypesForKeysMatching: associatedTypesForKeysMatching, diagnosticsHandler: diagnosticsHandler) @@ -531,7 +531,7 @@ public final class MacroNamespace: CustomDebugStringConvertible, Encodable, Send /// The table to use for parsed string expression interning, if installed. static var stringExpressionInterningTable: Registry? { - return interningState.withLock{ $0._stringExpressionInterningTable } + return interningState.withLock { $0._stringExpressionInterningTable } } } diff --git a/Sources/SWBMacro/MacroValueAssignmentTable.swift b/Sources/SWBMacro/MacroValueAssignmentTable.swift index 6357b827..6ce4061f 100644 --- a/Sources/SWBMacro/MacroValueAssignmentTable.swift +++ b/Sources/SWBMacro/MacroValueAssignmentTable.swift @@ -76,7 +76,6 @@ public struct MacroValueAssignmentTable: Serializable, Sendable { push(macro, namespace.parseLiteralStringList(literal), conditions: conditions) } - /// Adds a mapping from `macro` to `value`, inserting it ahead of any already existing assignment for the same macro. Unless the value refers to the lower-precedence expression (using `$(inherited)` notation), any existing assignments are shadowed but not removed. package mutating func push(_ macro: MacroDeclaration, _ value: MacroExpression, conditions: MacroConditionSet? = nil, locationRef: InternedMacroValueAssignmentLocation? = nil) { assert(namespace.lookupMacroDeclaration(macro.name) === macro) @@ -182,14 +181,12 @@ public struct MacroValueAssignmentTable: Serializable, Sendable { // Assignment is conditioned on the specified parameter; we need to evaluate it in order to decide what to do. if effectiveConditionValue.evaluate(condition) == true { // Condition evaluates to true, so we push an assignment with a condition set that excludes the condition. - let filteredConditions = conditions.conditions.filter{ $0.parameter != parameter } + let filteredConditions = conditions.conditions.filter { $0.parameter != parameter } table.push(macro, assignment.expression, conditions: filteredConditions.isEmpty ? nil : MacroConditionSet(conditions: filteredConditions), locationRef: assignment._location) - } - else { + } else { // Condition evaluates to false, so we elide the assignment. } - } - else { + } else { // Assignment isn't conditioned on the specified parameter, so we just push it as-is. table.push(macro, assignment.expression, conditions: assignment.conditions, locationRef: assignment._location) } @@ -245,14 +242,14 @@ public struct MacroValueAssignmentTable: Serializable, Sendable { case .path: serializer.serialize(4) case .pathList: serializer.serialize(5) } - serializer.endAggregate() // MacroDeclaration key + serializer.endAggregate() // MacroDeclaration key // Serialize the MacroValueAssignment. 
serializer.serialize(asgn) - serializer.endAggregate() // key-value pair + serializer.endAggregate() // key-value pair } - serializer.endAggregate() // valueAssignments + serializer.endAggregate() // valueAssignments - serializer.endAggregate() // the whole table + serializer.endAggregate() // the whole table } public init(from deserializer: any Deserializer) throws { @@ -289,8 +286,7 @@ public struct MacroValueAssignmentTable: Serializable, Sendable { } guard aDecl.type == type else { throw DeserializerError.incorrectType("Mismatched type for MacroDeclaration \(name): expected '\(type)', found '\(aDecl.type)' from code '\(typeCode)'.") } decl = aDecl - } - else { + } else { // Declare the declaration using the type we deserialized. switch typeCode { case 0: decl = delegate.namespace.lookupOrDeclareMacro(BooleanMacroDeclaration.self, name) @@ -452,7 +448,7 @@ package struct InternedMacroValueAssignmentLocation: Serializable, Sendable { self.endColumn = endColumn } - public func serialize(to serializer: T) where T : SWBUtil.Serializer { + public func serialize(to serializer: T) where T: SWBUtil.Serializer { serializer.beginAggregate(5) serializer.serialize(pathRef) serializer.serialize(startLine) @@ -487,10 +483,9 @@ private func insertCopiesOfMacroValueAssignmentNodes(_ srcAsgn: MacroValueAssign } if let srcNext = srcAsgn.next { - return MacroValueAssignment(expression: srcAsgn.expression, conditions:srcAsgn.conditions, next: insertCopiesOfMacroValueAssignmentNodes(srcNext, inFrontOf: dstAsgn), locationRef: srcAsgn._location) - } - else { - return MacroValueAssignment(expression: srcAsgn.expression, conditions:srcAsgn.conditions, next: dstAsgn, locationRef: srcAsgn._location) + return MacroValueAssignment(expression: srcAsgn.expression, conditions: srcAsgn.conditions, next: insertCopiesOfMacroValueAssignmentNodes(srcNext, inFrontOf: dstAsgn), locationRef: srcAsgn._location) + } else { + return MacroValueAssignment(expression: srcAsgn.expression, conditions: srcAsgn.conditions, next: dstAsgn, locationRef: srcAsgn._location) } } diff --git a/Sources/SWBProjectModel/IDE/IDEPIFGUID.swift b/Sources/SWBProjectModel/IDE/IDEPIFGUID.swift index 1e516d74..8955d1ff 100644 --- a/Sources/SWBProjectModel/IDE/IDEPIFGUID.swift +++ b/Sources/SWBProjectModel/IDE/IDEPIFGUID.swift @@ -13,7 +13,7 @@ import Foundation /// A unique identifier for a PIF object. It doesn't have to be unique globally; just within the workspace. -public class IDEPIFGUID : CustomStringConvertible { +public class IDEPIFGUID: CustomStringConvertible { /// Immutable string containing the string representation; right now, this is the only representation we carry. public let stringRepresentation: String diff --git a/Sources/SWBProjectModel/IDE/IDEPIFObject.swift b/Sources/SWBProjectModel/IDE/IDEPIFObject.swift index 68fba333..97e04106 100644 --- a/Sources/SWBProjectModel/IDE/IDEPIFObject.swift +++ b/Sources/SWBProjectModel/IDE/IDEPIFObject.swift @@ -12,9 +12,8 @@ import Foundation - /// Protocol describing top-level PIF objects. -public protocol IDEPIFObject : IDEPIFGenerating { +public protocol IDEPIFObject: IDEPIFGenerating { /// The name of the PIF object type (workspace, project, or target). static var pifObjectTypeName: String { get } /// Returns the list of subobjects. 
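Note: the hunks above and below apply the same mechanical conventions over and over — `Type : Protocol` becomes `Type: Protocol`, `[Key : Value]` becomes `[Key: Value]`, `map{ ... }` becomes `map { ... }`, and multi-line collections keep a trailing comma. A minimal before/after sketch of those conventions, using a hypothetical `Widget` type that is not part of this patch:

```swift
// Before formatting, the equivalent declarations would read:
//
//     extension Widget : CustomStringConvertible {
//         var names: [String] { counts.keys.sorted().map{ $0.uppercased() } }
//         var description: String { names.joined(separator: ", ") }
//     }
//
//     let widget = Widget(counts: ["gear" : 2, "spring" : 4])
//
// After formatting, matching the style of the added lines in this patch:
struct Widget {
    let counts: [String: Int]
}

extension Widget: CustomStringConvertible {
    var names: [String] { counts.keys.sorted().map { $0.uppercased() } }
    var description: String { names.joined(separator: ", ") }
}

let widget = Widget(counts: [
    "gear": 2,
    "spring": 4,  // multi-element collections keep a trailing comma
])
print(widget)  // GEAR, SPRING
```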
diff --git a/Sources/SWBProjectModel/IDE/IDEPIFObjectInfo.swift b/Sources/SWBProjectModel/IDE/IDEPIFObjectInfo.swift index 4078a5a2..ad88c48a 100644 --- a/Sources/SWBProjectModel/IDE/IDEPIFObjectInfo.swift +++ b/Sources/SWBProjectModel/IDE/IDEPIFObjectInfo.swift @@ -22,8 +22,10 @@ public class IDEPIFObjectInfo { public internal(set) var generatePIF: PIFGeneratingClosure /// Initializes the IDEPIFObjectInfo with a \p signature (a sort of digest that indicates whether the PIF needs to be regenerated) and a PIF generation \p block that will be invoked if it does. The generator block might not be invoked if the signature is the same as in a previous invocation. - public init(signature: String, - generatePIF: @escaping PIFGeneratingClosure) { + public init( + signature: String, + generatePIF: @escaping PIFGeneratingClosure + ) { self.signature = signature self.generatePIF = generatePIF } diff --git a/Sources/SWBProjectModel/IDE/IDESwiftPackageExtensions.swift b/Sources/SWBProjectModel/IDE/IDESwiftPackageExtensions.swift index 1d4d4d26..45c29cc6 100644 --- a/Sources/SWBProjectModel/IDE/IDESwiftPackageExtensions.swift +++ b/Sources/SWBProjectModel/IDE/IDESwiftPackageExtensions.swift @@ -22,20 +22,19 @@ public protocol PIFRepresentable { func serialize(to serializer: any IDEPIFSerializer) -> PIFDict } - -extension PIF.Project : PIFRepresentable { +extension PIF.Project: PIFRepresentable { public func serialize(to serializer: any IDEPIFSerializer) -> PIFDict { - var dict : PIFDict = [ + var dict: PIFDict = [ PIFKey_guid: id, PIFKey_Project_name: name, - PIFKey_Project_isPackage: isPackage ? "true" : "false" , + PIFKey_Project_isPackage: isPackage ? "true" : "false", PIFKey_path: path, PIFKey_Project_projectDirectory: projectDir, - PIFKey_buildConfigurations: buildConfigs.map{ $0.serialize(to: serializer) }, + PIFKey_buildConfigurations: buildConfigs.map { $0.serialize(to: serializer) }, PIFKey_Project_defaultConfigurationName: "Release", PIFKey_Project_groupTree: mainGroup.serialize(to: serializer), - PIFKey_Project_targets: targets.compactMap{ $0.signature }, + PIFKey_Project_targets: targets.compactMap { $0.signature }, ] if let developmentRegion { @@ -46,7 +45,7 @@ extension PIF.Project : PIFRepresentable { } } -extension PIF.Group : PIFRepresentable { +extension PIF.Group: PIFRepresentable { public func serialize(to serializer: any IDEPIFSerializer) -> PIFDict { return [ @@ -55,12 +54,12 @@ extension PIF.Group : PIFRepresentable { PIFKey_Reference_sourceTree: pathBase.asString, PIFKey_path: path, PIFKey_name: name ?? path, - PIFKey_Reference_children: subitems.map{ ($0 as! (any PIFRepresentable)).serialize(to: serializer) } + PIFKey_Reference_children: subitems.map { ($0 as! (any PIFRepresentable)).serialize(to: serializer) }, ] } } -extension PIF.FileReference : PIFRepresentable { +extension PIF.FileReference: PIFRepresentable { private func fileTypeIdentifier(for path: String) -> String { // FIXME: We need real logic here. [SwiftPM] When generating PIF, we need a standard way to determine the file type @@ -93,7 +92,6 @@ extension PIF.FileReference : PIFRepresentable { case "y", "ym", "ymm", "ypp", "yp", "yxx": return "sourcecode.yacc" - // FIXME: This is probably now more important because of resources support. case "xcassets": return "folder.assetcatalog" @@ -118,7 +116,7 @@ extension PIF.FileReference : PIFRepresentable { return "folder.rkassets" default: - return SwiftBuildFileType.all.first{ $0.fileTypes.contains(pathExtension) }?.fileTypeIdentifier ?? 
"file" + return SwiftBuildFileType.all.first { $0.fileTypes.contains(pathExtension) }?.fileTypeIdentifier ?? "file" } } @@ -129,12 +127,12 @@ extension PIF.FileReference : PIFRepresentable { PIFKey_Reference_sourceTree: pathBase.asString, PIFKey_path: path, // FIXME: We need a real solution here (or: could we not omit the file type and let it be inferred?) - PIFKey_Reference_fileType: fileType ?? fileTypeIdentifier(for: path) + PIFKey_Reference_fileType: fileType ?? fileTypeIdentifier(for: path), ] } } -extension PIF.BaseTarget : PIFRepresentable { +extension PIF.BaseTarget: PIFRepresentable { public func serialize(to serializer: any IDEPIFSerializer) -> PIFDict { _serialize(to: serializer) } @@ -143,7 +141,7 @@ extension PIF.BaseTarget : PIFRepresentable { extension PIF.PlatformFilter: PIFRepresentable { public func serialize(to serializer: any IDEPIFSerializer) -> PIFDict { if environment.isEmpty { - return [ "platform": platform ] + return ["platform": platform] } else { return [ "platform": platform, @@ -153,46 +151,46 @@ extension PIF.PlatformFilter: PIFRepresentable { } } -extension PIF.HeadersBuildPhase : PIFRepresentable { +extension PIF.HeadersBuildPhase: PIFRepresentable { public func serialize(to serializer: any IDEPIFSerializer) -> PIFDict { return [ PIFKey_type: "com.apple.buildphase.headers", PIFKey_guid: id, - PIFKey_BuildPhase_buildFiles: files.map{ $0.serialize(to: serializer) }, + PIFKey_BuildPhase_buildFiles: files.map { $0.serialize(to: serializer) }, ] } } -extension PIF.SourcesBuildPhase : PIFRepresentable { +extension PIF.SourcesBuildPhase: PIFRepresentable { public func serialize(to serializer: any IDEPIFSerializer) -> PIFDict { return [ PIFKey_type: "com.apple.buildphase.sources", PIFKey_guid: id, - PIFKey_BuildPhase_buildFiles: files.map{ $0.serialize(to: serializer) }, + PIFKey_BuildPhase_buildFiles: files.map { $0.serialize(to: serializer) }, ] } } -extension PIF.FrameworksBuildPhase : PIFRepresentable { +extension PIF.FrameworksBuildPhase: PIFRepresentable { public func serialize(to serializer: any IDEPIFSerializer) -> PIFDict { return [ PIFKey_type: "com.apple.buildphase.frameworks", PIFKey_guid: id, - PIFKey_BuildPhase_buildFiles: files.map{ $0.serialize(to: serializer) }, + PIFKey_BuildPhase_buildFiles: files.map { $0.serialize(to: serializer) }, ] } } -extension PIF.CopyFilesBuildPhase : PIFRepresentable { +extension PIF.CopyFilesBuildPhase: PIFRepresentable { public func serialize(to serializer: any IDEPIFSerializer) -> PIFDict { return [ PIFKey_type: "com.apple.buildphase.copy-files", PIFKey_guid: id, - PIFKey_BuildPhase_buildFiles: files.map{ $0.serialize(to: serializer) }, + PIFKey_BuildPhase_buildFiles: files.map { $0.serialize(to: serializer) }, PIFKey_BuildPhase_destinationSubfolder: destinationSubfolder.pathString, PIFKey_BuildPhase_destinationSubpath: destinationSubpath, PIFKey_BuildPhase_runOnlyForDeploymentPostprocessing: (runOnlyForDeploymentPostprocessing ? 
"true" : "false"), @@ -200,7 +198,7 @@ extension PIF.CopyFilesBuildPhase : PIFRepresentable { } } -extension PIF.ShellScriptBuildPhase : PIFRepresentable { +extension PIF.ShellScriptBuildPhase: PIFRepresentable { public func serialize(to serializer: any IDEPIFSerializer) -> PIFDict { return [ @@ -209,14 +207,14 @@ extension PIF.ShellScriptBuildPhase : PIFRepresentable { PIFKey_BuildPhase_name: name, PIFKey_BuildPhase_shellPath: shellPath, PIFKey_BuildPhase_scriptContents: scriptContents, - PIFKey_BuildPhase_buildFiles: files.map{ $0.serialize(to: serializer) }, + PIFKey_BuildPhase_buildFiles: files.map { $0.serialize(to: serializer) }, PIFKey_BuildPhase_inputFilePaths: inputPaths, PIFKey_BuildPhase_outputFilePaths: outputPaths, PIFKey_BuildPhase_emitEnvironment: emitEnvironment ? "true" : "false", PIFKey_BuildPhase_sandboxingOverride: sandboxingOverride.valueForPIF, PIFKey_BuildPhase_alwaysOutOfDate: alwaysOutOfDate ? "true" : "false", PIFKey_BuildPhase_runOnlyForDeploymentPostprocessing: runOnlyForDeploymentPostprocessing ? "true" : "false", - PIFKey_BuildPhase_originalObjectID: originalObjectID + PIFKey_BuildPhase_originalObjectID: originalObjectID, ] } } @@ -226,12 +224,12 @@ extension PIF.CopyBundleResourcesBuildPhase: PIFRepresentable { return [ PIFKey_type: "com.apple.buildphase.resources", PIFKey_guid: id, - PIFKey_BuildPhase_buildFiles: files.map{ $0.serialize(to: serializer) }, + PIFKey_BuildPhase_buildFiles: files.map { $0.serialize(to: serializer) }, ] } } -extension PIF.BuildRule : PIFRepresentable { +extension PIF.BuildRule: PIFRepresentable { public func serialize(to serializer: any IDEPIFSerializer) -> PIFDict { var dict = PIFDict() @@ -294,7 +292,7 @@ extension PIF.CustomTask: PIFRepresentable { } } -extension PIF.BuildFile : PIFRepresentable { +extension PIF.BuildFile: PIFRepresentable { public func serialize(to serializer: any IDEPIFSerializer) -> PIFDict { switch self.ref { @@ -303,7 +301,7 @@ extension PIF.BuildFile : PIFRepresentable { dict[PIFKey_guid] = id dict[PIFKey_BuildFile_fileReference] = refId dict[PIFKey_BuildFile_headerVisibility] = headerVisibility?.rawValue - dict[PIFKey_platformFilters] = platformFilters.map{ $0.serialize(to: serializer) } + dict[PIFKey_platformFilters] = platformFilters.map { $0.serialize(to: serializer) } dict[PIFKey_BuildFile_codeSignOnCopy] = codeSignOnCopy ? "true" : "false" dict[PIFKey_BuildFile_removeHeadersOnCopy] = removeHeadersOnCopy ? 
"true" : "false" @@ -331,13 +329,13 @@ extension PIF.BuildFile : PIFRepresentable { return [ PIFKey_guid: id, PIFKey_BuildFile_targetReference: refId, - PIFKey_platformFilters: platformFilters.map{ $0.serialize(to: serializer) } + PIFKey_platformFilters: platformFilters.map { $0.serialize(to: serializer) }, ] } } } -extension PIF.BuildConfig : PIFRepresentable { +extension PIF.BuildConfig: PIFRepresentable { public func serialize(to serializer: any IDEPIFSerializer) -> PIFDict { return [ @@ -349,16 +347,16 @@ extension PIF.BuildConfig : PIFRepresentable { } } -extension PIF.ImpartedBuildProperties : PIFRepresentable { +extension PIF.ImpartedBuildProperties: PIFRepresentable { public func serialize(to serializer: any IDEPIFSerializer) -> PIFDict { return [ - PIFKey_BuildConfiguration_buildSettings: settings.serialize(to: serializer), + PIFKey_BuildConfiguration_buildSettings: settings.serialize(to: serializer) ] } } -extension PIF.BuildSettings : PIFRepresentable { +extension PIF.BuildSettings: PIFRepresentable { public func serialize(to serializer: any IDEPIFSerializer) -> PIFDict { // Borderline hacky, but the main thing is that adding or changing a build setting does not require any changes to the property list representation code. Using a hand-coded serializer might be more efficient but not even remotely as robust, and robustness is the key factor for this use case, as there aren't going to be millions of BuildSettings structs. diff --git a/Sources/SWBProjectModel/PIFGenerationModel.swift b/Sources/SWBProjectModel/PIFGenerationModel.swift index fdbe73a4..e0d3f2b8 100644 --- a/Sources/SWBProjectModel/PIFGenerationModel.swift +++ b/Sources/SWBProjectModel/PIFGenerationModel.swift @@ -102,7 +102,7 @@ public enum PIF { @discardableResult public func addAggregateTarget(id idOpt: String? = nil, name: String) -> AggregateTarget { let id = idOpt ?? nextTargetId precondition(!id.isEmpty) - precondition(!targets.contains{ $0.id == id }) + precondition(!targets.contains { $0.id == id }) precondition(!name.isEmpty) let target = AggregateTarget(id: id, name: name) targets.append(target) @@ -112,7 +112,7 @@ public enum PIF { /// Creates and adds a new empty build configuration, i.e. one that does not initially have any build settings. The name must not be empty and must not be equal to the name of any existing build configuration in the project. @discardableResult public func addBuildConfig(name: String, settings: BuildSettings = BuildSettings(), impartedBuildSettings: BuildSettings) -> BuildConfig { precondition(!name.isEmpty) - precondition(!buildConfigs.contains{ $0.name == name }) + precondition(!buildConfigs.contains { $0.name == name }) let id = "\(self.id)::BUILDCONFIG_\(buildConfigs.count)" let buildConfig = BuildConfig(id: id, name: name, settings: settings, impartedBuildSettings: impartedBuildSettings) buildConfigs.append(buildConfig) @@ -164,7 +164,7 @@ public enum PIF { } /// A reference to a file system entity (a file, folder, etc). - public class FileReference : Reference { + public class FileReference: Reference { public var fileType: String? public var expectedSignature: String? @@ -176,7 +176,7 @@ public enum PIF { } /// A group that can contain References (FileReferences and other Groups). The resolved path of a group is used as the base path for any child references whose source tree type is GroupRelative. 
- public class Group : Reference { + public class Group: Reference { public var subitems = [Reference]() private var nextRefId: String { @@ -224,7 +224,7 @@ public enum PIF { /// Creates and adds a new empty build configuration, i.e. one that does not initially have any build settings. The name must not be empty and must not be equal to the name of any existing build configuration in the target. @discardableResult public func addBuildConfig(name: String, settings: BuildSettings = BuildSettings(), impartedBuildSettings: BuildSettings) -> BuildConfig { precondition(!name.isEmpty) - precondition(!buildConfigs.contains{ $0.name == name }) + precondition(!buildConfigs.contains { $0.name == name }) let id = "\(self.id)::BUILDCONFIG_\(buildConfigs.count)" let buildConfig = BuildConfig(id: id, name: name, settings: settings, impartedBuildSettings: impartedBuildSettings) buildConfigs.append(buildConfig) @@ -260,20 +260,22 @@ public enum PIF { PIFKey_type: "aggregate", PIFKey_guid: id, PIFKey_name: name, - PIFKey_Target_dependencies: dependencies.map { [ - PIFKey_guid: $0.targetId, - PIFKey_platformFilters: $0.platformFilters.map{ $0.serialize(to: serializer) } - ] }, - PIFKey_Target_buildRules: buildRules.map{ $0.serialize(to: serializer) }, - PIFKey_Target_buildPhases: buildPhases.map{ ($0 as! (any PIFRepresentable)).serialize(to: serializer) }, + PIFKey_Target_dependencies: dependencies.map { + [ + PIFKey_guid: $0.targetId, + PIFKey_platformFilters: $0.platformFilters.map { $0.serialize(to: serializer) }, + ] + }, + PIFKey_Target_buildRules: buildRules.map { $0.serialize(to: serializer) }, + PIFKey_Target_buildPhases: buildPhases.map { ($0 as! (any PIFRepresentable)).serialize(to: serializer) }, PIFKey_Target_customTasks: customTasks.map { $0.serialize(to: serializer) }, - PIFKey_buildConfigurations: buildConfigs.map{ $0.serialize(to: serializer) }, + PIFKey_buildConfigurations: buildConfigs.map { $0.serialize(to: serializer) }, ] } } /// An Xcode target, representing a single entity to build. - public class Target : BaseTarget { + public class Target: BaseTarget { public var productName: String public var productType: ProductType public var productReference: FileReference? @@ -338,10 +340,12 @@ public enum PIF { /// Adds a "copy files" build phase, i.e. one that copies files to an arbitrary location relative to the product. @discardableResult public func addCopyFilesBuildPhase(destinationSubfolder: CopyFilesBuildPhase.DestinationSubfolder, destinationSubpath: String = "", runOnlyForDeploymentPostprocessing: Bool = false) -> CopyFilesBuildPhase { - let phase = CopyFilesBuildPhase(id: nextBuildPhaseId, - destinationSubfolder: destinationSubfolder, - destinationSubpath: destinationSubpath, - runOnlyForDeploymentPostprocessing: runOnlyForDeploymentPostprocessing) + let phase = CopyFilesBuildPhase( + id: nextBuildPhaseId, + destinationSubfolder: destinationSubfolder, + destinationSubpath: destinationSubpath, + runOnlyForDeploymentPostprocessing: runOnlyForDeploymentPostprocessing + ) buildPhases.append(phase) return phase } @@ -375,7 +379,8 @@ public enum PIF { alwaysOutOfDate: alwaysOutOfDate, runOnlyForDeploymentPostprocessing: runOnlyForDeploymentPostprocessing, originalObjectID: originalObjectID, - insertAtFront: insertAtFront) + insertAtFront: insertAtFront + ) } /// Adds a "shell script" build phase, i.e. one that runs a custom shell script as part of the build. 
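Aside: the convenience methods reformatted above and below (`addDependency(on:)`, `addHeaderFile(ref:)`, `addSourceFile(ref:)`, and friends) all use the same find-or-create idiom: take the first existing build phase of the wanted kind, or append a new one. A simplified, hypothetical sketch of that idiom — not code from this patch, and with a deliberately reduced API:

```swift
// Simplified stand-ins for the real PIF model types.
class BuildPhase {
    func addBuildFile(productOf targetId: String) {
        // Stand-in for the real build-file bookkeeping.
    }
}

final class FrameworksBuildPhase: BuildPhase {}

final class Target {
    var buildPhases: [BuildPhase] = []

    @discardableResult
    func addFrameworksBuildPhase() -> FrameworksBuildPhase {
        let phase = FrameworksBuildPhase()
        buildPhases.append(phase)
        return phase
    }

    func addDependency(on targetId: String, linkProduct: Bool) {
        guard linkProduct else { return }
        // Reuse an existing frameworks phase if present, otherwise create one.
        let frameworksPhase = buildPhases.first { $0 is FrameworksBuildPhase } ?? addFrameworksBuildPhase()
        frameworksPhase.addBuildFile(productOf: targetId)
    }
}
```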
@@ -405,7 +410,8 @@ public enum PIF { runOnlyForDeploymentPostprocessing: runOnlyForDeploymentPostprocessing, originalObjectID: originalObjectID, sandboxingOverride: sandboxingOverride, - alwaysRunForInstallHdrs: alwaysRunForInstallHdrs) + alwaysRunForInstallHdrs: alwaysRunForInstallHdrs + ) if insertAtFront { buildPhases.insert(phase, at: 0) } else { @@ -429,7 +435,7 @@ public enum PIF { public func addDependency(on targetId: String, platformFilters: Set, linkProduct: Bool) { super.addDependency(on: targetId, platformFilters: platformFilters) if linkProduct { - let frameworksPhase = buildPhases.first{ $0 is FrameworksBuildPhase } ?? addFrameworksBuildPhase() + let frameworksPhase = buildPhases.first { $0 is FrameworksBuildPhase } ?? addFrameworksBuildPhase() frameworksPhase.addBuildFile(productOf: targetId, platformFilters: platformFilters) } } @@ -440,24 +446,24 @@ public enum PIF { /// Convenience function to add a file reference to the Headers build phase, after creating it if needed. @discardableResult public func addHeaderFile(ref: FileReference) -> BuildFile { - let headerPhase = buildPhases.first{ $0 is HeadersBuildPhase } ?? addHeadersBuildPhase() + let headerPhase = buildPhases.first { $0 is HeadersBuildPhase } ?? addHeadersBuildPhase() return headerPhase.addBuildFile(fileRef: ref) } /// Convenience function to add a file reference to the Sources build phase, after creating it if needed. @discardableResult public func addSourceFile(ref: FileReference, generatedCodeVisibility: BuildFile.GeneratedCodeVisibility?) -> BuildFile { - let sourcesPhase = buildPhases.first{ $0 is SourcesBuildPhase } ?? addSourcesBuildPhase() + let sourcesPhase = buildPhases.first { $0 is SourcesBuildPhase } ?? addSourcesBuildPhase() return sourcesPhase.addBuildFile(fileRef: ref, generatedCodeVisibility: generatedCodeVisibility) } @discardableResult public func addSourceFile(ref: FileReference) -> BuildFile { - let sourcesPhase = buildPhases.first{ $0 is SourcesBuildPhase } ?? addSourcesBuildPhase() + let sourcesPhase = buildPhases.first { $0 is SourcesBuildPhase } ?? addSourcesBuildPhase() return sourcesPhase.addBuildFile(fileRef: ref, generatedCodeVisibility: nil) } /// Convenience function to add a file reference to the Frameworks build phase, after creating it if needed. @discardableResult public func addLibrary(ref: FileReference, platformFilters: Set, codeSignOnCopy: Bool = false, removeHeadersOnCopy: Bool = false) -> BuildFile { - let frameworksPhase = buildPhases.first{ $0 is FrameworksBuildPhase } ?? addFrameworksBuildPhase() + let frameworksPhase = buildPhases.first { $0 is FrameworksBuildPhase } ?? addFrameworksBuildPhase() return frameworksPhase.addBuildFile(fileRef: ref, platformFilters: platformFilters, codeSignOnCopy: codeSignOnCopy, removeHeadersOnCopy: removeHeadersOnCopy) } @@ -470,7 +476,7 @@ public enum PIF { } @discardableResult public func addResourceFile(ref: FileReference, platformFilters: Set, resourceRule: BuildFile.ResourceRule? = nil) -> BuildFile { - let resourcesPhase = buildPhases.first{ $0 is CopyBundleResourcesBuildPhase } ?? addCopyBundleResourcesBuildPhase() + let resourcesPhase = buildPhases.first { $0 is CopyBundleResourcesBuildPhase } ?? 
addCopyBundleResourcesBuildPhase() return resourcesPhase.addBuildFile(fileRef: ref, platformFilters: platformFilters, resourceRule: resourceRule) } @@ -489,11 +495,13 @@ public enum PIF { PIFKey_guid: id, PIFKey_name: name, PIFKey_Target_customTasks: customTasks.map { $0.serialize(to: serializer) }, - PIFKey_Target_dependencies: dependencies.map{ [ - PIFKey_guid: $0.targetId, - PIFKey_platformFilters: $0.platformFilters.map{ $0.serialize(to: serializer) } - ] }, - PIFKey_buildConfigurations: buildConfigs.map{ $0.serialize(to: serializer) }, + PIFKey_Target_dependencies: dependencies.map { + [ + PIFKey_guid: $0.targetId, + PIFKey_platformFilters: $0.platformFilters.map { $0.serialize(to: serializer) }, + ] + }, + PIFKey_buildConfigurations: buildConfigs.map { $0.serialize(to: serializer) }, ] // Add the framework build phase, if present. if let phase = buildPhases.first as? PIF.FrameworksBuildPhase { @@ -509,17 +517,17 @@ public enum PIF { PIFKey_type: "standard", PIFKey_guid: id, PIFKey_name: name, - PIFKey_Target_dependencies: dependencies.map{ ["guid": $0.targetId, "platformFilters": $0.platformFilters.map{ $0.serialize(to: serializer) }] }, + PIFKey_Target_dependencies: dependencies.map { ["guid": $0.targetId, "platformFilters": $0.platformFilters.map { $0.serialize(to: serializer) }] }, PIFKey_Target_productTypeIdentifier: productType.asString, PIFKey_Target_productReference: [ PIFKey_type: "file", PIFKey_guid: "PRODUCTREF-\(id)", PIFKey_name: productName, ], - PIFKey_Target_buildRules: buildRules.map{ $0.serialize(to: serializer) }, - PIFKey_Target_buildPhases: buildPhases.map{ ($0 as! (any PIFRepresentable)).serialize(to: serializer) }, + PIFKey_Target_buildRules: buildRules.map { $0.serialize(to: serializer) }, + PIFKey_Target_buildPhases: buildPhases.map { ($0 as! (any PIFRepresentable)).serialize(to: serializer) }, PIFKey_Target_customTasks: customTasks.map { $0.serialize(to: serializer) }, - PIFKey_buildConfigurations: buildConfigs.map{ $0.serialize(to: serializer) }, + PIFKey_buildConfigurations: buildConfigs.map { $0.serialize(to: serializer) }, ] if let dynamicTargetVariant { result[PIFKey_Target_dynamicTargetVariantGuid] = dynamicTargetVariant.id @@ -581,28 +589,28 @@ public enum PIF { } /// A "headers" build phase, i.e. one that copies headers into a directory of the product, after suitable processing. - public class HeadersBuildPhase : BuildPhase { + public class HeadersBuildPhase: BuildPhase { public override init(id: String) { super.init(id: id) } } /// A "sources" build phase, i.e. one that compiles sources and provides them to be linked into the executable code of the product. - public class SourcesBuildPhase : BuildPhase { + public class SourcesBuildPhase: BuildPhase { public override init(id: String) { super.init(id: id) } } /// A "frameworks" build phase, i.e. one that links compiled code and libraries into the executable of the product. - public class FrameworksBuildPhase : BuildPhase { + public class FrameworksBuildPhase: BuildPhase { public override init(id: String) { super.init(id: id) } } /// A "copy files" build phase, i.e. one that copies files to an arbitrary location relative to the product. - public class CopyFilesBuildPhase : BuildPhase { + public class CopyFilesBuildPhase: BuildPhase { public let destinationSubfolder: DestinationSubfolder public let destinationSubpath: String public let runOnlyForDeploymentPostprocessing: Bool @@ -642,7 +650,7 @@ public enum PIF { } /// A "shell script" build phase, i.e. one that runs a custom shell script. 
- public class ShellScriptBuildPhase : BuildPhase { + public class ShellScriptBuildPhase: BuildPhase { public var name: String public var scriptContents: String public var shellPath: String @@ -693,7 +701,7 @@ public enum PIF { } } - public class CopyBundleResourcesBuildPhase : BuildPhase { + public class CopyBundleResourcesBuildPhase: BuildPhase { public override init(id: String) { super.init(id: id) } @@ -1049,7 +1057,7 @@ public enum PIF { public var WATCHOS_DEPLOYMENT_TARGET: String? @available(*, deprecated, renamed: "GENERATE_PRELINK_OBJECT_FILE") - public var GENERATE_MASTER_OBJECT_FILE: String? // ignore-unacceptable-language + public var GENERATE_MASTER_OBJECT_FILE: String? // ignore-unacceptable-language } } diff --git a/Sources/SWBProtocol/AsyncSequence.swift b/Sources/SWBProtocol/AsyncSequence.swift index 55ac8e6c..743cdb75 100644 --- a/Sources/SWBProtocol/AsyncSequence.swift +++ b/Sources/SWBProtocol/AsyncSequence.swift @@ -101,7 +101,7 @@ extension AsyncSequence where Self.Element == UInt8 { } } -extension AsyncIPCMessageSequence: Sendable where Base: Sendable { } +extension AsyncIPCMessageSequence: Sendable where Base: Sendable {} @available(*, unavailable) -extension AsyncIPCMessageSequence.AsyncIterator: Sendable { } +extension AsyncIPCMessageSequence.AsyncIterator: Sendable {} diff --git a/Sources/SWBProtocol/BuildAction.swift b/Sources/SWBProtocol/BuildAction.swift index 443bc4f0..ad852f34 100644 --- a/Sources/SWBProtocol/BuildAction.swift +++ b/Sources/SWBProtocol/BuildAction.swift @@ -13,8 +13,8 @@ public import SWBUtil public enum BuildAction: String, Serializable, Codable, CaseIterable, Comparable, Sendable { - case analyze = "analyze" // used by legacy target-based builds using xcodebuild - case archive = "archive" // used by legacy target-based builds using xcodebuild + case analyze = "analyze" // used by legacy target-based builds using xcodebuild + case archive = "archive" // used by legacy target-based builds using xcodebuild case clean = "clean" case build = "build" case exportLoc = "exportloc" @@ -72,7 +72,7 @@ public enum BuildAction: String, Serializable, Codable, CaseIterable, Comparable } } - public func serialize(to serializer: T) where T : Serializer { + public func serialize(to serializer: T) where T: Serializer { serializer.serialize(rawValue) } diff --git a/Sources/SWBProtocol/BuildOperationMessages.swift b/Sources/SWBProtocol/BuildOperationMessages.swift index 2f7c774d..e7677ad5 100644 --- a/Sources/SWBProtocol/BuildOperationMessages.swift +++ b/Sources/SWBProtocol/BuildOperationMessages.swift @@ -80,18 +80,20 @@ public enum BuildOperationTargetType: SerializableCodable, Equatable, Sendable { public func encode(to encoder: any Swift.Encoder) throws { var container = encoder.singleValueContainer() - try container.encode({ () -> CodingKeys in - switch self { - case .aggregate: - return .aggregate - case .external: - return .external - case .packageProduct: - return .packageProduct - case .standard: - return .standard - } - }().rawValue) + try container.encode( + { () -> CodingKeys in + switch self { + case .aggregate: + return .aggregate + case .external: + return .external + case .packageProduct: + return .packageProduct + case .standard: + return .standard + } + }().rawValue + ) } } @@ -562,7 +564,7 @@ public struct BuildOperationMetrics: Equatable, Codable, Sendable { counters.isEmpty && taskCounters.isEmpty } - public init(counters: [Counter : Int], taskCounters: [String: [TaskCounter: Int]]) { + public init(counters: [Counter: Int], taskCounters: 
[String: [TaskCounter: Int]]) { self.counters = counters self.taskCounters = taskCounters } @@ -930,10 +932,10 @@ public struct BuildOperationConsoleOutputEmitted: Message, Equatable, Serializab } public enum CodingKeys: CodingKey { - case data // legacy key + case data // legacy key case data2 case taskID - case taskSignature // legacy key + case taskSignature // legacy key case taskSignature2 case targetID } @@ -991,7 +993,7 @@ public struct BuildOperationDiagnosticEmitted: Message, Equatable, SerializableC enum TaskCodingKeys: CodingKey { case taskID - case taskSignature // legacy key + case taskSignature // legacy key case taskSignature2 case targetID } @@ -1002,7 +1004,7 @@ public struct BuildOperationDiagnosticEmitted: Message, Equatable, SerializableC enum GlobalTaskCodingKeys: CodingKey { case taskID - case taskSignature // legacy key + case taskSignature // legacy key case taskSignature2 } diff --git a/Sources/SWBProtocol/DependencyGraphMessages.swift b/Sources/SWBProtocol/DependencyGraphMessages.swift index bb2b6815..232f78fc 100644 --- a/Sources/SWBProtocol/DependencyGraphMessages.swift +++ b/Sources/SWBProtocol/DependencyGraphMessages.swift @@ -71,10 +71,8 @@ public struct DependencyGraphResponse: Message, SerializableCodable, Equatable { } } - // MARK: Getting declared dependency info - public struct DumpBuildDependencyInfoRequest: SessionChannelBuildMessage, RequestMessage, SerializableCodable, Equatable { public typealias ResponseMessage = VoidResponse @@ -100,7 +98,6 @@ public struct DumpBuildDependencyInfoRequest: SessionChannelBuildMessage, Reques } } - // MARK: Registering messages let dependencyGraphMessageTypes: [any Message.Type] = [ diff --git a/Sources/SWBProtocol/LocalizationMessages.swift b/Sources/SWBProtocol/LocalizationMessages.swift index 5e61a679..0e5fcfc7 100644 --- a/Sources/SWBProtocol/LocalizationMessages.swift +++ b/Sources/SWBProtocol/LocalizationMessages.swift @@ -54,5 +54,5 @@ public struct LocalizationInfoResponse: Message, Equatable, SerializableCodable let localizationMessageTypes: [any Message.Type] = [ LocalizationInfoRequest.self, - LocalizationInfoResponse.self + LocalizationInfoResponse.self, ] diff --git a/Sources/SWBProtocol/MacroEvaluationMessages.swift b/Sources/SWBProtocol/MacroEvaluationMessages.swift index faea9a3a..c37a8142 100644 --- a/Sources/SWBProtocol/MacroEvaluationMessages.swift +++ b/Sources/SWBProtocol/MacroEvaluationMessages.swift @@ -12,14 +12,12 @@ public import SWBUtil - public enum MacroEvaluationRequestLevel: Equatable, Sendable, SerializableCodable { case defaults case project(_ guid: String) case target(_ guid: String) } - /// Requests a macro evaluation scope handle in a session. /// /// This is a flexible message in that the project, target, and build parameters are all optional, but the handler `CreateMacroEvaluationScopeMsg.handle()` will request the handle for the appropriate scope based on which fields were passed. @@ -58,7 +56,6 @@ public struct DiscardMacroEvaluationScope: SessionMessage, RequestMessage, Equat } } - /// The context within which a macro evaluation should occur. public enum MacroEvaluationRequestContext: Equatable, Sendable, SerializableCodable { /// A `Settings` handle, if called by something which is holding on to a handle. 
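Aside: the `BuildOperationTargetType.encode(to:)` hunk a few files above reflows an immediately invoked closure that maps each enum case to its coding key and then encodes that key's raw value. A self-contained sketch of the same pattern on a hypothetical enum (not part of this patch):

```swift
// Hypothetical enum, shown only to illustrate the encode-via-closure shape.
enum TransportKind: Codable, Equatable {
    case local
    case remote

    private enum CodingKeys: String, CodingKey {
        case local
        case remote
    }

    func encode(to encoder: any Encoder) throws {
        var container = encoder.singleValueContainer()
        // Map the case to its coding key inside an immediately invoked
        // closure, then encode that key's raw string value.
        try container.encode(
            { () -> CodingKeys in
                switch self {
                case .local: return .local
                case .remote: return .remote
                }
            }().rawValue
        )
    }

    init(from decoder: any Decoder) throws {
        let container = try decoder.singleValueContainer()
        switch try container.decode(String.self) {
        case CodingKeys.local.rawValue: self = .local
        case CodingKeys.remote.rawValue: self = .remote
        default:
            throw DecodingError.dataCorruptedError(in: container, debugDescription: "unrecognized transport kind")
        }
    }
}
```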
diff --git a/Sources/SWBProtocol/Message.swift b/Sources/SWBProtocol/Message.swift index c703e376..8fa8bd8c 100644 --- a/Sources/SWBProtocol/Message.swift +++ b/Sources/SWBProtocol/Message.swift @@ -369,7 +369,7 @@ public struct AppleSystemFrameworkNamesRequest: RequestMessage, Equatable, Pendi self.xcodeAppPath = developerPath?.dirname.dirname } - public func legacySerialize(to serializer: T) where T : SWBUtil.Serializer { + public func legacySerialize(to serializer: T) where T: SWBUtil.Serializer { serializer.serializeAggregate(2) { serializer.serialize(self.xcodeAppPath) serializer.serialize(self.developerPath) @@ -398,7 +398,7 @@ public struct ProductTypeSupportsMacCatalystRequest: RequestMessage, Equatable, self.productTypeIdentifier = productTypeIdentifier } - public func legacySerialize(to serializer: T) where T : SWBUtil.Serializer { + public func legacySerialize(to serializer: T) where T: SWBUtil.Serializer { serializer.serializeAggregate(3) { serializer.serialize(self.xcodeAppPath) serializer.serialize(self.developerPath) @@ -433,17 +433,17 @@ public struct CreateSessionRequest: RequestMessage, Equatable, SerializableCodab public let appPath: Path? public let cachePath: Path? public let inferiorProductsPath: Path? - public let environment: [String:String]? + public let environment: [String: String]? public init(name: String, developerPath: Path?, cachePath: Path?, inferiorProductsPath: Path?) { // ABI compatibility self.init(name: name, developerPath: developerPath, cachePath: cachePath, inferiorProductsPath: inferiorProductsPath, environment: nil) } - public init(name: String, developerPath: Path?, cachePath: Path?, inferiorProductsPath: Path?, environment: [String:String]?) { // ABI Compatibility + public init(name: String, developerPath: Path?, cachePath: Path?, inferiorProductsPath: Path?, environment: [String: String]?) { // ABI Compatibility self.init(name: name, developerPath: developerPath, resourceSearchPaths: [], cachePath: cachePath, inferiorProductsPath: inferiorProductsPath, environment: environment) } - public init(name: String, developerPath: Path?, resourceSearchPaths: [Path], cachePath: Path?, inferiorProductsPath: Path?, environment: [String:String]?) { // API/ABI compatibility + public init(name: String, developerPath: Path?, resourceSearchPaths: [Path], cachePath: Path?, inferiorProductsPath: Path?, environment: [String: String]?) { // API/ABI compatibility self.name = name self.developerPath = developerPath self.developerPath2 = nil @@ -454,7 +454,7 @@ public struct CreateSessionRequest: RequestMessage, Equatable, SerializableCodab self.environment = environment } - public init(name: String, developerPath: DeveloperPath?, resourceSearchPaths: [Path], cachePath: Path?, inferiorProductsPath: Path?, environment: [String:String]?) { + public init(name: String, developerPath: DeveloperPath?, resourceSearchPaths: [Path], cachePath: Path?, inferiorProductsPath: Path?, environment: [String: String]?) 
{ self.name = name self.developerPath2 = developerPath switch developerPath { @@ -500,7 +500,7 @@ public struct CreateSessionResponse: Message { self.diagnostics = try deserializer.deserialize() } - public func serialize(to serializer: T) where T : Serializer { + public func serialize(to serializer: T) where T: Serializer { serializer.beginAggregate(2) serializer.serialize(self.sessionID) serializer.serialize(self.diagnostics) @@ -549,9 +549,9 @@ public struct SetSessionSystemInfoRequest: SessionMessage, RequestMessage, Equat serializer.beginAggregate(3) serializer.serialize(sessionHandle) let osVersion = self.operatingSystemVersion.zeroPadded(toMinimumNumberOfComponents: 3) - serializer.serialize(osVersion.rawValue[0]) // major - serializer.serialize(osVersion.rawValue[1]) // minor - serializer.serialize(osVersion.rawValue[2]) // update + serializer.serialize(osVersion.rawValue[0]) // major + serializer.serialize(osVersion.rawValue[1]) // minor + serializer.serialize(osVersion.rawValue[2]) // update serializer.serialize(self.productBuildVersion) serializer.serialize(self.nativeArchitecture) } @@ -662,7 +662,7 @@ public struct ListSessionsRequest: RequestMessage, Equatable { public static let name = "LIST_SESSIONS" - public init() { } + public init() {} public init(from deserializer: any Deserializer) throws { try deserializer.beginAggregate(0) @@ -902,13 +902,13 @@ public struct TransferSessionPIFObjectsLegacyRequest: SessionMessage, RequestMes public init(from deserializer: any Deserializer) throws { try deserializer.beginAggregate(2) self.sessionHandle = try deserializer.deserialize() - self.objects = (try deserializer.deserialize() as [ObjectData]).map{ $0.data } + self.objects = (try deserializer.deserialize() as [ObjectData]).map { $0.data } } public func serialize(to serializer: T) { serializer.beginAggregate(2) serializer.serialize(self.sessionHandle) - serializer.serialize(self.objects.map{ ObjectData($0) }) + serializer.serialize(self.objects.map { ObjectData($0) }) } } @@ -1185,69 +1185,70 @@ public struct IPCMessage: Serializable, Sendable { static let extraMessageTypes: [any Message.Type] = [] /// All known message types. 
- static let messageTypes: [any Message.Type] = [ - PingRequest.self, - SetConfigItemRequest.self, - ClearAllCachesRequest.self, - - GetPlatformsRequest.self, - GetSDKsRequest.self, - GetSpecsRequest.self, - GetStatisticsRequest.self, - GetToolchainsRequest.self, - GetBuildSettingsDescriptionRequest.self, - ExecuteCommandLineToolRequest.self, - - CreateSessionRequest.self, - CreateSessionResponse.self, - SetSessionSystemInfoRequest.self, - SetSessionUserInfoRequest.self, - SetSessionUserPreferencesRequest.self, - LookupToolchainRequest.self, - LookupToolchainResponse.self, - ListSessionsRequest.self, - ListSessionsResponse.self, - WaitForQuiescenceRequest.self, - DeleteSessionRequest.self, - - SetSessionWorkspaceContainerPathRequest.self, - SetSessionPIFRequest.self, - TransferSessionPIFRequest.self, - TransferSessionPIFResponse.self, - TransferSessionPIFObjectsRequest.self, - TransferSessionPIFObjectsLegacyRequest.self, - AuditSessionPIFRequest.self, - IncrementalPIFLookupFailureRequest.self, - - WorkspaceInfoRequest.self, - WorkspaceInfoResponse.self, - - CreateXCFrameworkRequest.self, - - AppleSystemFrameworkNamesRequest.self, - ProductTypeSupportsMacCatalystRequest.self, - DeveloperPathRequest.self, - - // TODO: Delete once all clients are no longer calling the public APIs which invoke this message - AvailableAppExtensionPointIdentifiersRequest.self, - MacCatalystUnavailableFrameworkNamesRequest.self, - - ErrorResponse.self, - BoolResponse.self, - StringResponse.self, - StringListResponse.self - ] + buildOperationMessageTypes - + macroEvaluationMessageTypes - + planningOperationMessageTypes - + taskConstructionMessageTypes - + indexingMessageTypes - + previewInfoMessageTypes - + projectDescriptorMessageTypes - + documentationMessageTypes - + localizationMessageTypes - + dependencyClosureMessageTypes - + dependencyGraphMessageTypes - + buildDescriptionMessages + static let messageTypes: [any Message.Type] = + [ + PingRequest.self, + SetConfigItemRequest.self, + ClearAllCachesRequest.self, + + GetPlatformsRequest.self, + GetSDKsRequest.self, + GetSpecsRequest.self, + GetStatisticsRequest.self, + GetToolchainsRequest.self, + GetBuildSettingsDescriptionRequest.self, + ExecuteCommandLineToolRequest.self, + + CreateSessionRequest.self, + CreateSessionResponse.self, + SetSessionSystemInfoRequest.self, + SetSessionUserInfoRequest.self, + SetSessionUserPreferencesRequest.self, + LookupToolchainRequest.self, + LookupToolchainResponse.self, + ListSessionsRequest.self, + ListSessionsResponse.self, + WaitForQuiescenceRequest.self, + DeleteSessionRequest.self, + + SetSessionWorkspaceContainerPathRequest.self, + SetSessionPIFRequest.self, + TransferSessionPIFRequest.self, + TransferSessionPIFResponse.self, + TransferSessionPIFObjectsRequest.self, + TransferSessionPIFObjectsLegacyRequest.self, + AuditSessionPIFRequest.self, + IncrementalPIFLookupFailureRequest.self, + + WorkspaceInfoRequest.self, + WorkspaceInfoResponse.self, + + CreateXCFrameworkRequest.self, + + AppleSystemFrameworkNamesRequest.self, + ProductTypeSupportsMacCatalystRequest.self, + DeveloperPathRequest.self, + + // TODO: Delete once all clients are no longer calling the public APIs which invoke this message + AvailableAppExtensionPointIdentifiersRequest.self, + MacCatalystUnavailableFrameworkNamesRequest.self, + + ErrorResponse.self, + BoolResponse.self, + StringResponse.self, + StringListResponse.self, + ] + buildOperationMessageTypes + + macroEvaluationMessageTypes + + planningOperationMessageTypes + + taskConstructionMessageTypes 
+ + indexingMessageTypes + + previewInfoMessageTypes + + projectDescriptorMessageTypes + + documentationMessageTypes + + localizationMessageTypes + + dependencyClosureMessageTypes + + dependencyGraphMessageTypes + + buildDescriptionMessages /// Reverse name mapping. static let messageNameToID: [String: any Message.Type] = { diff --git a/Sources/SWBProtocol/MessageSupport.swift b/Sources/SWBProtocol/MessageSupport.swift index 65e79bfc..3326b6c0 100644 --- a/Sources/SWBProtocol/MessageSupport.swift +++ b/Sources/SWBProtocol/MessageSupport.swift @@ -72,8 +72,8 @@ public enum BuildCommandMessagePayload: SerializableCodable, Equatable, Sendable case .migrate: break case let .generateAssemblyCode(buildOnlyTheseFiles), - let .generatePreprocessedFile(buildOnlyTheseFiles), - let .singleFileBuild(buildOnlyTheseFiles): + let .generatePreprocessedFile(buildOnlyTheseFiles), + let .singleFileBuild(buildOnlyTheseFiles): try container.encode(buildOnlyTheseFiles, forKey: .files) case let .prepareForIndexing(buildOnlyTheseTargets, enableIndexBuildArena): try container.encode(buildOnlyTheseTargets, forKey: .targets) @@ -457,7 +457,7 @@ public struct PreviewInfoTargetDependencyInfo: Codable, Equatable, Sendable { public init( productModuleName: String, - objectFileInputMap: [String : Set], + objectFileInputMap: [String: Set], linkCommandLine: [String], linkerWorkingDirectory: String?, swiftEnableOpaqueTypeErasure: Bool, @@ -561,10 +561,12 @@ public struct LocalizationInfoMessagePayload: SerializableCodable, Equatable, Se /// Paths to generated source code files holding string symbols, keyed by xcstrings file path. public var generatedSymbolFilesByXCStringsPath = [Path: Set]() - public init(targetIdentifier: String, - compilableXCStringsPaths: Set, - producedStringsdataPaths: [LocalizationInfoBuildPortion: Set], - effectivePlatformName: String?) { + public init( + targetIdentifier: String, + compilableXCStringsPaths: Set, + producedStringsdataPaths: [LocalizationInfoBuildPortion: Set], + effectivePlatformName: String? 
+ ) { self.targetIdentifier = targetIdentifier self.compilableXCStringsPaths = compilableXCStringsPaths self.producedStringsdataPaths = producedStringsdataPaths diff --git a/Sources/SWBProtocol/PIFKeyConstants.swift b/Sources/SWBProtocol/PIFKeyConstants.swift index 59292d3c..4f215c1c 100644 --- a/Sources/SWBProtocol/PIFKeyConstants.swift +++ b/Sources/SWBProtocol/PIFKeyConstants.swift @@ -15,127 +15,127 @@ import Foundation // MARK: PIF key constant definitions // Generic keys used by multiple kinds of objects -public let PIFKey_guid = "guid" -public let PIFKey_name = "name" -public let PIFKey_path = "path" -public let PIFKey_type = "type" -public let PIFKey_buildConfigurations = "buildConfigurations" -public let PIFKey_impartedBuildProperties = "impartedBuildProperties" -public let PIFKey_platformFilters = "platformFilters" +public let PIFKey_guid = "guid" +public let PIFKey_name = "name" +public let PIFKey_path = "path" +public let PIFKey_type = "type" +public let PIFKey_buildConfigurations = "buildConfigurations" +public let PIFKey_impartedBuildProperties = "impartedBuildProperties" +public let PIFKey_platformFilters = "platformFilters" // Keys specific to workspaces -public let PIFKey_Workspace_projects = "projects" +public let PIFKey_Workspace_projects = "projects" // Keys specific to projects -public let PIFKey_Project_projectDirectory = "projectDirectory" -public let PIFKey_Project_name = "projectName" -public let PIFKey_Project_isPackage = "projectIsPackage" -public let PIFKey_Project_targets = "targets" -public let PIFKey_Project_groupTree = "groupTree" -public let PIFKey_Project_defaultConfigurationName = "defaultConfigurationName" -public let PIFKey_Project_developmentRegion = "developmentRegion" -public let PIFKey_Project_classPrefix = "classPrefix" -public let PIFKey_Project_appPreferencesBuildSettings = "appPreferencesBuildSettings" +public let PIFKey_Project_projectDirectory = "projectDirectory" +public let PIFKey_Project_name = "projectName" +public let PIFKey_Project_isPackage = "projectIsPackage" +public let PIFKey_Project_targets = "targets" +public let PIFKey_Project_groupTree = "groupTree" +public let PIFKey_Project_defaultConfigurationName = "defaultConfigurationName" +public let PIFKey_Project_developmentRegion = "developmentRegion" +public let PIFKey_Project_classPrefix = "classPrefix" +public let PIFKey_Project_appPreferencesBuildSettings = "appPreferencesBuildSettings" // Keys specific to references -public let PIFKey_Reference_sourceTree = "sourceTree" -public let PIFKey_Reference_fileType = "fileType" -public let PIFKey_Reference_producingTarget = "producingTarget" -public let PIFKey_Reference_regionVariantName = "regionVariantName" -public let PIFKey_Reference_remoteReference = "remoteReference" -public let PIFKey_Reference_children = "children" -public let PIFKey_Reference_fileTextEncoding = "fileTextEncoding" -public let PIFKey_Reference_expectedSignature = "expectedSignature" +public let PIFKey_Reference_sourceTree = "sourceTree" +public let PIFKey_Reference_fileType = "fileType" +public let PIFKey_Reference_producingTarget = "producingTarget" +public let PIFKey_Reference_regionVariantName = "regionVariantName" +public let PIFKey_Reference_remoteReference = "remoteReference" +public let PIFKey_Reference_children = "children" +public let PIFKey_Reference_fileTextEncoding = "fileTextEncoding" +public let PIFKey_Reference_expectedSignature = "expectedSignature" // Keys specific to targets -public let PIFKey_Target_productReference = "productReference" -public 
let PIFKey_Target_productTypeIdentifier = "productTypeIdentifier" -public let PIFKey_Target_dependencies = "dependencies" -public let PIFKey_Target_buildPhases = "buildPhases" -public let PIFKey_Target_buildRules = "buildRules" -public let PIFKey_Target_customTasks = "customTasks" +public let PIFKey_Target_productReference = "productReference" +public let PIFKey_Target_productTypeIdentifier = "productTypeIdentifier" +public let PIFKey_Target_dependencies = "dependencies" +public let PIFKey_Target_buildPhases = "buildPhases" +public let PIFKey_Target_buildRules = "buildRules" +public let PIFKey_Target_customTasks = "customTasks" public let PIFKey_Target_performanceTestsBaselinesPath = "performanceTestsBaselinesPath" public let PIFKey_Target_predominantSourceCodeLanguage = "predominantSourceCodeLanguage" -public let PIFKey_Target_isPackageTarget = "isPackageTarget" -public let PIFKey_Target_provisioningSourceData = "provisioningSourceData" +public let PIFKey_Target_isPackageTarget = "isPackageTarget" +public let PIFKey_Target_provisioningSourceData = "provisioningSourceData" public let PIFKey_Target_developmentTeamIdentifier = "developmentTeamIdentifier" -public let PIFKey_Target_frameworksBuildPhase = "frameworksBuildPhase" -public let PIFKey_Target_dynamicTargetVariantGuid = "dynamicTargetVariantGuid" -public let PIFKey_Target_approvedByUser = "approvedByUser" +public let PIFKey_Target_frameworksBuildPhase = "frameworksBuildPhase" +public let PIFKey_Target_dynamicTargetVariantGuid = "dynamicTargetVariantGuid" +public let PIFKey_Target_approvedByUser = "approvedByUser" // Keys specific to external targets -public let PIFKey_ExternalTarget_toolPath = "toolPath" -public let PIFKey_ExternalTarget_arguments = "arguments" -public let PIFKey_ExternalTarget_workingDirectory = "workingDirectory" -public let PIFKey_ExternalTarget_passBuildSettingsInEnvironment = "passBuildSettingsInEnvironment" +public let PIFKey_ExternalTarget_toolPath = "toolPath" +public let PIFKey_ExternalTarget_arguments = "arguments" +public let PIFKey_ExternalTarget_workingDirectory = "workingDirectory" +public let PIFKey_ExternalTarget_passBuildSettingsInEnvironment = "passBuildSettingsInEnvironment" // Keys specific to build phases -public let PIFKey_BuildPhase_buildFiles = "buildFiles" -public let PIFKey_BuildPhase_destinationSubfolder = "destinationSubfolder" -public let PIFKey_BuildPhase_destinationSubpath = "destinationSubpath" -public let PIFKey_BuildPhase_emitEnvironment = "emitEnvironment" -public let PIFKey_BuildPhase_sandboxingOverride = "sandboxingOverride" -public let PIFKey_BuildPhase_inputFilePaths = "inputFilePaths" -public let PIFKey_BuildPhase_inputFileListPaths = "inputFileListPaths" -public let PIFKey_BuildPhase_name = "name" -public let PIFKey_BuildPhase_originalObjectID = "originalObjectID" -public let PIFKey_BuildPhase_outputFilePaths = "outputFilePaths" -public let PIFKey_BuildPhase_outputFileListPaths = "outputFileListPaths" -public let PIFKey_BuildPhase_runOnlyForDeploymentPostprocessing = "runOnlyForDeploymentPostprocessing" -public let PIFKey_BuildPhase_shellPath = "shellPath" -public let PIFKey_BuildPhase_scriptContents = "scriptContents" -public let PIFKey_BuildPhase_dependencyFileFormat = "dependencyFileFormat" -public let PIFKey_BuildPhase_dependencyFilePaths = "dependencyFilePaths" -public let PIFKey_BuildPhase_alwaysOutOfDate = "alwaysOutOfDate" -public let PIFKey_BuildPhase_alwaysRunForInstallHdrs = "alwaysRunForInstallHdrs" +public let PIFKey_BuildPhase_buildFiles = "buildFiles" +public let 
PIFKey_BuildPhase_destinationSubfolder = "destinationSubfolder" +public let PIFKey_BuildPhase_destinationSubpath = "destinationSubpath" +public let PIFKey_BuildPhase_emitEnvironment = "emitEnvironment" +public let PIFKey_BuildPhase_sandboxingOverride = "sandboxingOverride" +public let PIFKey_BuildPhase_inputFilePaths = "inputFilePaths" +public let PIFKey_BuildPhase_inputFileListPaths = "inputFileListPaths" +public let PIFKey_BuildPhase_name = "name" +public let PIFKey_BuildPhase_originalObjectID = "originalObjectID" +public let PIFKey_BuildPhase_outputFilePaths = "outputFilePaths" +public let PIFKey_BuildPhase_outputFileListPaths = "outputFileListPaths" +public let PIFKey_BuildPhase_runOnlyForDeploymentPostprocessing = "runOnlyForDeploymentPostprocessing" +public let PIFKey_BuildPhase_shellPath = "shellPath" +public let PIFKey_BuildPhase_scriptContents = "scriptContents" +public let PIFKey_BuildPhase_dependencyFileFormat = "dependencyFileFormat" +public let PIFKey_BuildPhase_dependencyFilePaths = "dependencyFilePaths" +public let PIFKey_BuildPhase_alwaysOutOfDate = "alwaysOutOfDate" +public let PIFKey_BuildPhase_alwaysRunForInstallHdrs = "alwaysRunForInstallHdrs" // Keys specific to build files -public let PIFKey_BuildFile_fileReference = "fileReference" -public let PIFKey_BuildFile_targetReference = "targetReference" -public let PIFKey_BuildFile_additionalCompilerOptions = "additionalCompilerOptions" -public let PIFKey_BuildFile_assetTags = "assetTags" -public let PIFKey_BuildFile_decompress = "decompress" -public let PIFKey_BuildFile_headerVisibility = "headerVisibility" -public let PIFKey_BuildFile_migCodegenFiles = "migCodegenFiles" -public let PIFKey_BuildFile_intentsCodegenFiles = "intentsCodegenFiles" -public let PIFKey_BuildFile_intentsCodegenVisibility = "intentsCodegenVisibility" -public let PIFKey_BuildFile_codeSignOnCopy = "codeSignOnCopy" -public let PIFKey_BuildFile_removeHeadersOnCopy = "removeHeadersOnCopy" -public let PIFKey_BuildFile_linkUsingSearchPath = "linkUsingSearchPath" -public let PIFKey_BuildFile_shouldLinkWeakly = "shouldLinkWeakly" +public let PIFKey_BuildFile_fileReference = "fileReference" +public let PIFKey_BuildFile_targetReference = "targetReference" +public let PIFKey_BuildFile_additionalCompilerOptions = "additionalCompilerOptions" +public let PIFKey_BuildFile_assetTags = "assetTags" +public let PIFKey_BuildFile_decompress = "decompress" +public let PIFKey_BuildFile_headerVisibility = "headerVisibility" +public let PIFKey_BuildFile_migCodegenFiles = "migCodegenFiles" +public let PIFKey_BuildFile_intentsCodegenFiles = "intentsCodegenFiles" +public let PIFKey_BuildFile_intentsCodegenVisibility = "intentsCodegenVisibility" +public let PIFKey_BuildFile_codeSignOnCopy = "codeSignOnCopy" +public let PIFKey_BuildFile_removeHeadersOnCopy = "removeHeadersOnCopy" +public let PIFKey_BuildFile_linkUsingSearchPath = "linkUsingSearchPath" +public let PIFKey_BuildFile_shouldLinkWeakly = "shouldLinkWeakly" public let PIFKey_BuildFile_shouldWarnIfNoRuleToProcess = "shouldWarnIfNoRuleToProcess" -public let PIFKey_BuildFile_resourceRule = "resourceRule" +public let PIFKey_BuildFile_resourceRule = "resourceRule" // Keys specific to build rules -public let PIFKey_BuildRule_fileTypeIdentifier = "fileTypeIdentifier" -public let PIFKey_BuildRule_filePatterns = "filePatterns" -public let PIFKey_BuildRule_compilerSpecificationIdentifier = "compilerSpecificationIdentifier" -public let PIFKey_BuildRule_scriptContents = "scriptContents" -public let PIFKey_BuildRule_inputFilePaths = 
"inputFilePaths" -public let PIFKey_BuildRule_inputFileListPaths = "inputFileListPaths" -public let PIFKey_BuildRule_outputFilePaths = "outputFilePaths" -public let PIFKey_BuildRule_outputFileListPaths = "outputFileListPaths" -public let PIFKey_BuildRule_outputFilesCompilerFlags = "outputFilesCompilerFlags" -public let PIFKey_BuildRule_dependencyFileFormat = "dependencyFileFormat" -public let PIFKey_BuildRule_dependencyFilePaths = "dependencyFilePaths" -public let PIFKey_BuildRule_runOncePerArchitecture = "runOncePerArchitecture" +public let PIFKey_BuildRule_fileTypeIdentifier = "fileTypeIdentifier" +public let PIFKey_BuildRule_filePatterns = "filePatterns" +public let PIFKey_BuildRule_compilerSpecificationIdentifier = "compilerSpecificationIdentifier" +public let PIFKey_BuildRule_scriptContents = "scriptContents" +public let PIFKey_BuildRule_inputFilePaths = "inputFilePaths" +public let PIFKey_BuildRule_inputFileListPaths = "inputFileListPaths" +public let PIFKey_BuildRule_outputFilePaths = "outputFilePaths" +public let PIFKey_BuildRule_outputFileListPaths = "outputFileListPaths" +public let PIFKey_BuildRule_outputFilesCompilerFlags = "outputFilesCompilerFlags" +public let PIFKey_BuildRule_dependencyFileFormat = "dependencyFileFormat" +public let PIFKey_BuildRule_dependencyFilePaths = "dependencyFilePaths" +public let PIFKey_BuildRule_runOncePerArchitecture = "runOncePerArchitecture" // Keys specific to custom tasks -public let PIFKey_CustomTask_commandLine = "commandLine" -public let PIFKey_CustomTask_environment = "environment" -public let PIFKey_CustomTask_workingDirectory = "workingDirectory" -public let PIFKey_CustomTask_executionDescription = "executionDescription" -public let PIFKey_CustomTask_inputFilePaths = "inputFilePaths" -public let PIFKey_CustomTask_outputFilePaths = "outputFilePaths" -public let PIFKey_CustomTask_enableSandboxing = "enableSandboxing" -public let PIFKey_CustomTask_preparesForIndexing = "preparesForIndexing" +public let PIFKey_CustomTask_commandLine = "commandLine" +public let PIFKey_CustomTask_environment = "environment" +public let PIFKey_CustomTask_workingDirectory = "workingDirectory" +public let PIFKey_CustomTask_executionDescription = "executionDescription" +public let PIFKey_CustomTask_inputFilePaths = "inputFilePaths" +public let PIFKey_CustomTask_outputFilePaths = "outputFilePaths" +public let PIFKey_CustomTask_enableSandboxing = "enableSandboxing" +public let PIFKey_CustomTask_preparesForIndexing = "preparesForIndexing" // Special value for PIFKey_BuildRule_fileTypeIdentifier -public let PIFKey_BuildRule_fileTypeIdentifier_pattern_proxy = "pattern.proxy" +public let PIFKey_BuildRule_fileTypeIdentifier_pattern_proxy = "pattern.proxy" // Special value for PIFKey_BuildRule_compilerSpecificationIdentifier -public let PIFKey_BuildRule_compilerSpecificationIdentifier_com_apple_compilers_proxy_script = "com.apple.compilers.proxy.script" +public let PIFKey_BuildRule_compilerSpecificationIdentifier_com_apple_compilers_proxy_script = "com.apple.compilers.proxy.script" // Keys specific to build configurations -public let PIFKey_BuildConfiguration_buildSettings = "buildSettings" -public let PIFKey_BuildConfiguration_baseConfigurationFileReference = "baseConfigurationFileReference" +public let PIFKey_BuildConfiguration_buildSettings = "buildSettings" +public let PIFKey_BuildConfiguration_baseConfigurationFileReference = "baseConfigurationFileReference" diff --git a/Sources/SWBProtocol/PlanningOperationMessages.swift 
b/Sources/SWBProtocol/PlanningOperationMessages.swift index 80f3fa91..385abea4 100644 --- a/Sources/SWBProtocol/PlanningOperationMessages.swift +++ b/Sources/SWBProtocol/PlanningOperationMessages.swift @@ -15,7 +15,6 @@ import Foundation // MARK: General planning operation messages - /// Inform the client that a planning operation will start. public struct PlanningOperationWillStart: SessionMessage, Equatable { public static let name = "PLANNING_OPERATION_WILL_START" @@ -68,7 +67,6 @@ public struct PlanningOperationDidFinish: SessionMessage, Equatable { } } - // MARK: Getting provisioning task inputs from the client /// The source data to send to the client for it to generate the provisioning task inputs for the service. @@ -142,7 +140,7 @@ public struct ProvisioningTaskInputsSourceData: Serializable, Equatable, Sendabl self.projectEntitlementsFile = try deserializer.deserialize() let projectEntitlementsBytes: [UInt8]? = try deserializer.deserialize() self.projectEntitlements = try projectEntitlementsBytes.map { try PropertyList.fromBytes($0) } - _ = try deserializer.deserialize() as Bool // Deprecated field, kept for compatibility + _ = try deserializer.deserialize() as Bool // Deprecated field, kept for compatibility self.signingCertificateIdentifier = try deserializer.deserialize() self.signingRequiresTeam = try deserializer.deserialize() self.sdkRoot = try deserializer.deserialize() @@ -157,7 +155,7 @@ public struct ProvisioningTaskInputsSourceData: Serializable, Equatable, Sendabl public func serialize(to serializer: T) { serializer.serializeAggregate(21) { serializer.serialize(configurationName) - serializer.serialize(false) // Deprecated field, kept for compatibility + serializer.serialize(false) // Deprecated field, kept for compatibility serializer.serialize(provisioningProfileSupport) serializer.serialize(provisioningProfileSpecifier) serializer.serialize(provisioningProfileUUID) @@ -170,7 +168,7 @@ public struct ProvisioningTaskInputsSourceData: Serializable, Equatable, Sendabl serializer.serialize(projectEntitlementsFile) // FIXME: We have no way to handle any errors in PropertyListItem.asBytes() here. serializer.serialize(projectEntitlements.map { try? $0.asBytes(.binary) } ?? nil) - serializer.serialize(false) // Deprecated field, kept for compatibility + serializer.serialize(false) // Deprecated field, kept for compatibility serializer.serialize(signingCertificateIdentifier) serializer.serialize(signingRequiresTeam) serializer.serialize(sdkRoot) @@ -212,7 +210,7 @@ public struct GetProvisioningTaskInputsRequest: SessionMessage, Equatable { self.targetGUID = try deserializer.deserialize() self.configuredTargetHandle = try deserializer.deserialize() // This used to be the settingsHandle, but it hasn't been used for a long time. 
- _ = try deserializer.deserialize() as String + _ = try deserializer.deserialize() as String self.sourceData = try deserializer.deserialize() } @@ -261,7 +259,7 @@ public struct ProvisioningTaskInputsResponse: SessionMessage, RequestMessage, Eq public let errors: [[String: String]] public let warnings: [String] - public init(sessionHandle: String, planningOperationHandle: String, configuredTargetHandle: String, identityHash: String?, identitySerialNumber:String?, identityName: String?, profileName: String?, profileUUID: String?, profilePath: String?, designatedRequirements: String?, signedEntitlements: PropertyListItem?, simulatedEntitlements: PropertyListItem?, appIdentifierPrefix: String?, teamIdentifierPrefix: String?, isEnterpriseTeam: Bool?, useSigningTool: Bool?, signingToolKeyPath: String?, signingToolKeyID: String?, signingToolKeyIssuerID: String?, keychainPath: String?, errors: [[String: String]], warnings: [String]) { + public init(sessionHandle: String, planningOperationHandle: String, configuredTargetHandle: String, identityHash: String?, identitySerialNumber: String?, identityName: String?, profileName: String?, profileUUID: String?, profilePath: String?, designatedRequirements: String?, signedEntitlements: PropertyListItem?, simulatedEntitlements: PropertyListItem?, appIdentifierPrefix: String?, teamIdentifierPrefix: String?, isEnterpriseTeam: Bool?, useSigningTool: Bool?, signingToolKeyPath: String?, signingToolKeyID: String?, signingToolKeyIssuerID: String?, keychainPath: String?, errors: [[String: String]], warnings: [String]) { self.sessionHandle = sessionHandle self.planningOperationHandle = planningOperationHandle self.configuredTargetHandle = configuredTargetHandle @@ -366,8 +364,6 @@ public struct ProvisioningTaskInputsResponse: SessionMessage, RequestMessage, Eq } } - - let planningOperationMessageTypes: [any Message.Type] = [ PlanningOperationWillStart.self, PlanningOperationDidFinish.self, diff --git a/Sources/SWBProtocol/ProjectDescriptorTypes.swift b/Sources/SWBProtocol/ProjectDescriptorTypes.swift index 8b1b51b8..ed66e7e3 100644 --- a/Sources/SWBProtocol/ProjectDescriptorTypes.swift +++ b/Sources/SWBProtocol/ProjectDescriptorTypes.swift @@ -202,7 +202,7 @@ public struct DestinationInfo: Codable, Equatable, Hashable, Comparable, Seriali self.isSimulator = isSimulator } - public static func <(lhs: DestinationInfo, rhs: DestinationInfo) -> Bool { + public static func < (lhs: DestinationInfo, rhs: DestinationInfo) -> Bool { return lhs.platformName.localizedCompare(rhs.platformName) == .orderedAscending } @@ -525,7 +525,7 @@ public struct ProductDescription: Equatable, Hashable, Serializable, Sendable { team: String?, infoPlistPath: String?, iconPath: String? 
- ) { + ) { self.displayName = displayName self.productName = productName self.identifier = identifier diff --git a/Sources/SWBProtocol/ProjectModel/BuildPhase.swift b/Sources/SWBProtocol/ProjectModel/BuildPhase.swift index 38fd1b07..b462c7df 100644 --- a/Sources/SWBProtocol/ProjectModel/BuildPhase.swift +++ b/Sources/SWBProtocol/ProjectModel/BuildPhase.swift @@ -13,7 +13,7 @@ public import SWBUtil public class BuildPhase: PolymorphicSerializable, @unchecked Sendable { - public static let implementations: [SerializableTypeCode : any PolymorphicSerializable.Type] = [ + public static let implementations: [SerializableTypeCode: any PolymorphicSerializable.Type] = [ 0: AppleScriptBuildPhase.self, 1: CopyFilesBuildPhase.self, 2: FrameworksBuildPhase.self, diff --git a/Sources/SWBProtocol/ProjectModel/BuildRule.swift b/Sources/SWBProtocol/ProjectModel/BuildRule.swift index 5a33fe71..9437b2f1 100644 --- a/Sources/SWBProtocol/ProjectModel/BuildRule.swift +++ b/Sources/SWBProtocol/ProjectModel/BuildRule.swift @@ -30,7 +30,8 @@ public struct BuildRule: Sendable { outputs: [ShellScriptOutputInfo], outputFileLists: [MacroExpressionSource], dependencyInfo: DependencyInfo?, - runOncePerArchitecture: Bool) + runOncePerArchitecture: Bool + ) } public struct ShellScriptOutputInfo: Sendable { diff --git a/Sources/SWBProtocol/ProjectModel/CustomTask.swift b/Sources/SWBProtocol/ProjectModel/CustomTask.swift index 68380303..de309de9 100644 --- a/Sources/SWBProtocol/ProjectModel/CustomTask.swift +++ b/Sources/SWBProtocol/ProjectModel/CustomTask.swift @@ -72,4 +72,3 @@ public struct CustomTask: SerializableCodable, Sendable { self.preparesForIndexing = try container.decode(Bool.self, forKey: .preparesForIndexing) } } - diff --git a/Sources/SWBProtocol/ProjectModel/ProvisioningSourceData.swift b/Sources/SWBProtocol/ProjectModel/ProvisioningSourceData.swift index f7dadb64..9bf51dc2 100644 --- a/Sources/SWBProtocol/ProjectModel/ProvisioningSourceData.swift +++ b/Sources/SWBProtocol/ProjectModel/ProvisioningSourceData.swift @@ -42,7 +42,6 @@ extension ProvisioningSourceData: Encodable, Decodable { } } - // MARK: SerializableCodable extension ProvisioningSourceData: PendingSerializableCodable { @@ -59,10 +58,10 @@ extension ProvisioningSourceData: PendingSerializableCodable { self.configurationName = try deserializer.deserialize() self.provisioningStyle = try deserializer.deserialize() if count > 3 { - _ = try deserializer.deserialize() as Bool // appIDHasFeaturesEnabled + _ = try deserializer.deserialize() as Bool // appIDHasFeaturesEnabled } if count > 4 { - _ = try deserializer.deserialize() as String // legacyTeamID + _ = try deserializer.deserialize() as String // legacyTeamID } self.bundleIdentifierFromInfoPlist = try deserializer.deserialize() } diff --git a/Sources/SWBProtocol/ProjectModel/Reference.swift b/Sources/SWBProtocol/ProjectModel/Reference.swift index 7d6b02b5..87c0392b 100644 --- a/Sources/SWBProtocol/ProjectModel/Reference.swift +++ b/Sources/SWBProtocol/ProjectModel/Reference.swift @@ -13,7 +13,7 @@ public import SWBUtil public class Reference: PolymorphicSerializable, @unchecked Sendable { - public static let implementations: [SerializableTypeCode : any PolymorphicSerializable.Type] = [ + public static let implementations: [SerializableTypeCode: any PolymorphicSerializable.Type] = [ 0: FileReference.self, 1: VersionGroup.self, 2: VariantGroup.self, @@ -74,13 +74,13 @@ public struct FileTextEncoding: Hashable, CustomStringConvertible, Sendable { self.rawValue = rawValue } - public static let utf8 
= FileTextEncoding("utf-8") // 4 - public static let utf16 = FileTextEncoding("utf-16") // 10 - public static let utf16be = FileTextEncoding("utf-16be") // 0x90000100 - public static let utf16le = FileTextEncoding("utf-16le") // 0x94000100 - public static let utf32 = FileTextEncoding("utf-32") // 0x8c000100 - public static let utf32be = FileTextEncoding("utf-32be") // 0x98000100 - public static let utf32le = FileTextEncoding("utf-32le") // 0x9c000100 + public static let utf8 = FileTextEncoding("utf-8") // 4 + public static let utf16 = FileTextEncoding("utf-16") // 10 + public static let utf16be = FileTextEncoding("utf-16be") // 0x90000100 + public static let utf16le = FileTextEncoding("utf-16le") // 0x94000100 + public static let utf32 = FileTextEncoding("utf-32") // 0x8c000100 + public static let utf32be = FileTextEncoding("utf-32be") // 0x98000100 + public static let utf32le = FileTextEncoding("utf-32le") // 0x9c000100 public var description: String { return rawValue diff --git a/Sources/SWBProtocol/ProjectModel/Target.swift b/Sources/SWBProtocol/ProjectModel/Target.swift index 5be1d244..95fc32f8 100644 --- a/Sources/SWBProtocol/ProjectModel/Target.swift +++ b/Sources/SWBProtocol/ProjectModel/Target.swift @@ -21,7 +21,7 @@ public struct TargetGUID: RawRepresentable, Hashable, Sendable, Codable { } public class Target: PolymorphicSerializable, @unchecked Sendable { - public static let implementations: [SerializableTypeCode : any PolymorphicSerializable.Type] = [ + public static let implementations: [SerializableTypeCode: any PolymorphicSerializable.Type] = [ 0: StandardTarget.self, 1: AggregateTarget.self, 2: ExternalTarget.self, diff --git a/Sources/SWBQNXPlatform/Plugin.swift b/Sources/SWBQNXPlatform/Plugin.swift index 37b9bde2..fd418759 100644 --- a/Sources/SWBQNXPlatform/Plugin.swift +++ b/Sources/SWBQNXPlatform/Plugin.swift @@ -44,7 +44,7 @@ struct QNXPlatformSpecsExtension: SpecificationsExtension { struct QNXEnvironmentExtension: EnvironmentExtension { let plugin: QNXPlugin - func additionalEnvironmentVariables(context: any EnvironmentExtensionAdditionalEnvironmentVariablesContext) async throws -> [String : String] { + func additionalEnvironmentVariables(context: any EnvironmentExtensionAdditionalEnvironmentVariablesContext) async throws -> [String: String] { if let latest = try? 
await plugin.cachedQNXSDPInstallations(host: context.hostOperatingSystem).first { return .init(latest.environment) } @@ -55,15 +55,18 @@ struct QNXEnvironmentExtension: EnvironmentExtension { struct QNXPlatformExtension: PlatformInfoExtension { func additionalPlatforms(context: any PlatformInfoExtensionAdditionalPlatformsContext) throws -> [(path: Path, data: [String: PropertyListItem])] { [ - (.root, [ - "Type": .plString("Platform"), - "Name": .plString("qnx"), - "Identifier": .plString("qnx"), - "Description": .plString("qnx"), - "FamilyName": .plString("QNX"), - "FamilyIdentifier": .plString("qnx"), - "IsDeploymentPlatform": .plString("YES"), - ]) + ( + .root, + [ + "Type": .plString("Platform"), + "Name": .plString("qnx"), + "Identifier": .plString("qnx"), + "Description": .plString("qnx"), + "FamilyName": .plString("QNX"), + "FamilyIdentifier": .plString("qnx"), + "IsDeploymentPlatform": .plString("YES"), + ] + ) ] } } @@ -71,7 +74,7 @@ struct QNXPlatformExtension: PlatformInfoExtension { struct QNXSDKRegistryExtension: SDKRegistryExtension { let plugin: QNXPlugin - func additionalSDKs(context: any SDKRegistryExtensionAdditionalSDKsContext) async throws -> [(path: Path, platform: SWBCore.Platform?, data: [String : PropertyListItem])] { + func additionalSDKs(context: any SDKRegistryExtensionAdditionalSDKsContext) async throws -> [(path: Path, platform: SWBCore.Platform?, data: [String: PropertyListItem])] { guard let qnxPlatform = context.platformRegistry.lookup(name: "qnx") else { return [] } @@ -105,34 +108,41 @@ struct QNXSDKRegistryExtension: SDKRegistryExtension { "LINKER_DRIVER": "qcc", ] - return [(qnxSdk.sysroot, qnxPlatform, [ - "Type": .plString("SDK"), - "Version": .plString(qnxSdk.version?.description ?? "0.0.0"), - "CanonicalName": .plString("qnx"), - "IsBaseSDK": .plBool(true), - "DefaultProperties": .plDict([ - "PLATFORM_NAME": .plString("qnx"), - "QNX_TARGET": .plString(qnxSdk.path.str), - "QNX_HOST": .plString(qnxSdk.hostPath?.str ?? ""), - ].merging(defaultProperties, uniquingKeysWith: { _, new in new })), - "CustomProperties": .plDict([ - // Unlike most platforms, the QNX version goes on the environment field rather than the system field - // FIXME: Make this configurable in a better way so we don't need to push build settings at the SDK definition level - "LLVM_TARGET_TRIPLE_OS_VERSION": .plString("nto"), - "LLVM_TARGET_TRIPLE_SUFFIX": .plString("-qnx"), - ]), - "SupportedTargets": .plDict([ - "qnx": .plDict([ - "Archs": .plArray([.plString("aarch64"), .plString("x86_64")]), - "LLVMTargetTripleEnvironment": .plString("qnx\(qnxSdk.version?.description ?? "0.0.0")"), - "LLVMTargetTripleSys": .plString("nto"), - "LLVMTargetTripleVendor": .plString("unknown"), // FIXME: pc for x86_64! - ]) - ]), - "Toolchains": .plArray([ - .plString("qnx") - ]) - ])] + return [ + ( + qnxSdk.sysroot, qnxPlatform, + [ + "Type": .plString("SDK"), + "Version": .plString(qnxSdk.version?.description ?? "0.0.0"), + "CanonicalName": .plString("qnx"), + "IsBaseSDK": .plBool(true), + "DefaultProperties": .plDict( + [ + "PLATFORM_NAME": .plString("qnx"), + "QNX_TARGET": .plString(qnxSdk.path.str), + "QNX_HOST": .plString(qnxSdk.hostPath?.str ?? 
""), + ].merging(defaultProperties, uniquingKeysWith: { _, new in new }) + ), + "CustomProperties": .plDict([ + // Unlike most platforms, the QNX version goes on the environment field rather than the system field + // FIXME: Make this configurable in a better way so we don't need to push build settings at the SDK definition level + "LLVM_TARGET_TRIPLE_OS_VERSION": .plString("nto"), + "LLVM_TARGET_TRIPLE_SUFFIX": .plString("-qnx"), + ]), + "SupportedTargets": .plDict([ + "qnx": .plDict([ + "Archs": .plArray([.plString("aarch64"), .plString("x86_64")]), + "LLVMTargetTripleEnvironment": .plString("qnx\(qnxSdk.version?.description ?? "0.0.0")"), + "LLVMTargetTripleSys": .plString("nto"), + "LLVMTargetTripleVendor": .plString("unknown"), // FIXME: pc for x86_64! + ]) + ]), + "Toolchains": .plArray([ + .plString("qnx") + ]), + ] + ) + ] } } @@ -158,7 +168,8 @@ struct QNXToolchainRegistryExtension: ToolchainRegistryExtension { defaultSettingsWhenPrimary: [:], executableSearchPaths: [toolchainPath.join("usr").join("bin")], testingLibraryPlatformNames: [], - fs: context.fs) + fs: context.fs + ) ] } } diff --git a/Sources/SWBQNXPlatform/QNXSDP.swift b/Sources/SWBQNXPlatform/QNXSDP.swift index 67a71c49..b1cd2518 100644 --- a/Sources/SWBQNXPlatform/QNXSDP.swift +++ b/Sources/SWBQNXPlatform/QNXSDP.swift @@ -69,11 +69,11 @@ struct QNXSDP: Sendable { case .windows: path.join("host").join("win64").join("x86_64") case .macOS: - path.join("host").join("darwin").join("x86_64") // only supported in QNX SDP 7 + path.join("host").join("darwin").join("x86_64") // only supported in QNX SDP 7 case .linux: path.join("host").join("linux").join("x86_64") default: - nil // unsupported host + nil // unsupported host } } diff --git a/Sources/SWBServiceCore/Request.swift b/Sources/SWBServiceCore/Request.swift index 25f6725c..92868d1b 100644 --- a/Sources/SWBServiceCore/Request.swift +++ b/Sources/SWBServiceCore/Request.swift @@ -47,16 +47,16 @@ public final class Request: Sendable { self.name = name } -#if DEBUG - /// Validate that every request is completed, in debug builds. - deinit { - completed.withLock { completed in - if !completed { - fatalError("unexpected incomplete request: \(self)") + #if DEBUG + /// Validate that every request is completed, in debug builds. + deinit { + completed.withLock { completed in + if !completed { + fatalError("unexpected incomplete request: \(self)") + } } } - } -#endif + #endif /// Send a message to the client. public func send(_ message: any Message) { diff --git a/Sources/SWBServiceCore/Service.swift b/Sources/SWBServiceCore/Service.swift index 4dce3f68..f92843b6 100644 --- a/Sources/SWBServiceCore/Service.swift +++ b/Sources/SWBServiceCore/Service.swift @@ -15,9 +15,9 @@ public import SWBProtocol public import SWBUtil #if canImport(System) -public import System + public import System #else -public import SystemPackage + public import SystemPackage #endif /// A generic named message handler. 
diff --git a/Sources/SWBServiceCore/ServiceEntryPoint.swift b/Sources/SWBServiceCore/ServiceEntryPoint.swift index 8e3964cf..49a3356e 100644 --- a/Sources/SWBServiceCore/ServiceEntryPoint.swift +++ b/Sources/SWBServiceCore/ServiceEntryPoint.swift @@ -14,19 +14,19 @@ import SWBLibc import SWBUtil #if canImport(System) -public import System + public import System #else -public import SystemPackage + public import SystemPackage #endif extension Service { public static func main(_ setupFileDescriptors: (_ inputFD: FileDescriptor, _ outputFD: FileDescriptor) async throws -> Void) async throws { #if os(macOS) - do { - try await Debugger.waitForXcodeAutoAttachIfEnabled() - } catch { - throw StubError.error("Failed to attach debugger: \(error)") - } + do { + try await Debugger.waitForXcodeAutoAttachIfEnabled() + } catch { + throw StubError.error("Failed to attach debugger: \(error)") + } #endif // When launched as a subprocess, we expect our standard input and output to be the message stream, and standard error to be a console output stream. diff --git a/Sources/SWBServiceCore/ServiceHostConnection.swift b/Sources/SWBServiceCore/ServiceHostConnection.swift index 59d1a172..c35f6f6d 100644 --- a/Sources/SWBServiceCore/ServiceHostConnection.swift +++ b/Sources/SWBServiceCore/ServiceHostConnection.swift @@ -15,9 +15,9 @@ import SWBUtil import Foundation #if canImport(System) -import System + import System #else -import SystemPackage + import SystemPackage #endif /// Convert a count to a UInt8 buffer. @@ -94,12 +94,12 @@ final class ServiceHostConnection: @unchecked Sendable { let totalSize = headerSize + payloadSize #if DEBUG - // A well behaved client would not send a negative payloadSize, but this can happen when we hit . - // In that case, consider all remaining bytes to be bogus and drop them. - if payloadSize < 0 { - count = 0 - break - } + // A well behaved client would not send a negative payloadSize, but this can happen when we hit . + // In that case, consider all remaining bytes to be bogus and drop them. + if payloadSize < 0 { + count = 0 + break + } #endif // If we do not have a complete message, we are done. @@ -141,10 +141,10 @@ final class ServiceHostConnection: @unchecked Sendable { var error: (any Error)? while !self.isSuspended.withLock({ $0 }) { #if DEBUG - // Work around read syscall can sometimes return a value greater than the count parameter - // We set the whole buffer to 0xFF so that parts that are not written will be interpreted as a payload with a negative length. - // This is only a mitigation that may not catch all cases. - tmp.update(repeating: 0xFF) + // Work around read syscall can sometimes return a value greater than the count parameter + // We set the whole buffer to 0xFF so that parts that are not written will be interpreted as a payload with a negative length. + // This is only a mitigation that may not catch all cases. + tmp.update(repeating: 0xFF) #endif // Read data. @@ -164,9 +164,9 @@ final class ServiceHostConnection: @unchecked Sendable { break } #if DEBUG - if result > tmpBufferSize { - log("warning: read returned more bytes than requested: \(result) > \(tmpBufferSize)") - } + if result > tmpBufferSize { + log("warning: read returned more bytes than requested: \(result) > \(tmpBufferSize)") + } #endif // Extract all the messages, combining into a contiguous buffer first if necessary. 
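The ServiceHostConnection hunks above re-indent a DEBUG workaround for reads that report more bytes than requested: the buffer is pre-filled with 0xFF so that any region the read did not actually write decodes as a negative payload length and the bogus bytes are dropped. A rough, self-contained sketch of that length-prefixed framing check follows; it assumes a hypothetical 12-byte header whose last four bytes hold a little-endian Int32 payload size (the real header layout is not shown in this hunk).

func extractMessage(from buffer: [UInt8], headerSize: Int = 12) -> (payload: ArraySlice<UInt8>, consumed: Int)? {
    guard buffer.count >= headerSize else { return nil }
    // Hypothetical layout: the last 4 header bytes are a little-endian Int32 payload size.
    let sizeBytes = buffer[(headerSize - 4)..<headerSize]
    let payloadSize = sizeBytes.enumerated().reduce(Int32(0)) { acc, pair in
        acc | (Int32(pair.element) << (8 * pair.offset))
    }
    // Mirrors the DEBUG guard above: a 0xFF-filled (unwritten) size field decodes as a
    // negative number, so all remaining bytes are treated as bogus and dropped.
    guard payloadSize >= 0 else { return nil }
    let totalSize = headerSize + Int(payloadSize)
    // If the complete message has not arrived yet, signal the caller to read more data.
    guard buffer.count >= totalSize else { return nil }
    return (buffer[headerSize..<totalSize], totalSize)
}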
diff --git a/Sources/SWBTaskConstruction/DiagnosticSupport.swift b/Sources/SWBTaskConstruction/DiagnosticSupport.swift index 640d210f..6a6dca94 100644 --- a/Sources/SWBTaskConstruction/DiagnosticSupport.swift +++ b/Sources/SWBTaskConstruction/DiagnosticSupport.swift @@ -17,10 +17,10 @@ package import SWBUtil func auxiliaryFileCommand(_ path: Path) -> String { switch path.fileExtension { case "hmap": - break // Headermaps are per target, so we can't add any information here. + break // Headermaps are per target, so we can't add any information here. case "LinkFileList": return "produces product '\(path.basenameWithoutSuffix)'" - case "modulemap", "yaml": // VFS or module overlay + case "modulemap", "yaml": // VFS or module overlay if let moduleName = potentialModuleNameFromPath(path) { return "produces module '\(moduleName)'" } diff --git a/Sources/SWBTaskConstruction/ProductPlanning/BuildPlan.swift b/Sources/SWBTaskConstruction/ProductPlanning/BuildPlan.swift index 81649982..4192538b 100644 --- a/Sources/SWBTaskConstruction/ProductPlanning/BuildPlan.swift +++ b/Sources/SWBTaskConstruction/ProductPlanning/BuildPlan.swift @@ -94,11 +94,11 @@ package final class BuildPlan: StaleFileRemovalContext { // Compute a collated list of result contexts and task producers, so we can do a single parallel dispatch. // // This computation is cheap, so this is overall more efficient than trying to interleave them with our current infrastructure. - let messageShortening = planRequest.workspaceContext.userPreferences.activityTextShorteningLevel + let messageShortening = planRequest.workspaceContext.userPreferences.activityTextShorteningLevel let (productPlans, globalProductPlan) = await planner.productPlans() - let productPlanResultContexts = productPlans.map{ ProductPlanResultContext(for: $0) } - let producersToEvaluate = productPlanResultContexts.flatMap{ context in - return context.productPlan.taskProducers.map{ (resultContext: context, producer: $0) } + let productPlanResultContexts = productPlans.map { ProductPlanResultContext(for: $0) } + let producersToEvaluate = productPlanResultContexts.flatMap { context in + return context.productPlan.taskProducers.map { (resultContext: context, producer: $0) } } // Due to the nature of task producers having no relationship with respect to ordering amongst other task producers, it is necessary to allow task producers within a particular context to build up any information that may be necessary for other task producers to consume. It is important to note that this mechanism is not intended to share information across individual task producers, but rather, it is to be used to build up contextual information that can be used from the `generateTasks()` phase. @@ -119,7 +119,8 @@ package final class BuildPlan: StaleFileRemovalContext { var preplannedCount = 0 for await _ in progressStream { preplannedCount += 1 - let statusMessage = messageShortening >= .allDynamicText + let statusMessage = + messageShortening >= .allDynamicText ? "Pre-planning \(activityMessageFractionString(preplannedCount, over: producersToEvaluate.count))" : "Pre-Planning from \(preplannedCount) of \(producersToEvaluate.count) task producers" @@ -156,7 +157,8 @@ package final class BuildPlan: StaleFileRemovalContext { var evaluatedCount = 0 for await _ in progressStream { evaluatedCount += 1 - let statusMessage = messageShortening >= .allDynamicText + let statusMessage = + messageShortening >= .allDynamicText ? 
"Planning \(activityMessageFractionString(evaluatedCount, over: producersToEvaluate.count))" : "Constructing from \(evaluatedCount) of \(producersToEvaluate.count) task producers" @@ -186,7 +188,7 @@ package final class BuildPlan: StaleFileRemovalContext { await group.waitForAll() } - await aggregationQueue.sync{ } + await aggregationQueue.sync {} if delegate.cancelled { // Reset any deferred producers, which may participate in cycles. for context in productPlanResultContexts { @@ -239,7 +241,7 @@ package final class BuildPlan: StaleFileRemovalContext { } // Wait for task validation. - await aggregationQueue.sync{ } + await aggregationQueue.sync {} if delegate.cancelled { return nil } @@ -289,15 +291,17 @@ package final class BuildPlan: StaleFileRemovalContext { } static func unexpectedDuplicateTasksWithIdentifier(_ tasks: [any PlannedTask], _ workspace: Workspace, _ delegate: any TaskPlanningDelegate) { - delegate.emit(Diagnostic(behavior: .error, - location: .unknown, - data: DiagnosticData("Unexpected duplicate tasks"), - childDiagnostics: tasks.map({ .task($0.execTask) }).richFormattedRuleInfo(workspace: workspace))) + delegate.emit( + Diagnostic( + behavior: .error, + location: .unknown, + data: DiagnosticData("Unexpected duplicate tasks"), + childDiagnostics: tasks.map({ .task($0.execTask) }).richFormattedRuleInfo(workspace: workspace) + ) + ) } } - - /// This context stores the results of task generation for a product plan. It is used by a build plan to collect results of task generation, and once task generation is complete to compute the final set of planned tasks to be used for a product plan by evaluating task validity criteria.. /// /// This class is not thread-safe; the build plan is expected to build up the context in a manner that accounts for that. diff --git a/Sources/SWBTaskConstruction/ProductPlanning/BuildPlanDumping.swift b/Sources/SWBTaskConstruction/ProductPlanning/BuildPlanDumping.swift index a321ccc9..c26da16d 100644 --- a/Sources/SWBTaskConstruction/ProductPlanning/BuildPlanDumping.swift +++ b/Sources/SWBTaskConstruction/ProductPlanning/BuildPlanDumping.swift @@ -28,7 +28,7 @@ package extension BuildPlan { // Emit a separate file for each target, inside a separate subdirectory for each project. for (target, plannedTasks) in tasksByTarget { // Get a hold of the product plan and macro evaluation scope. - let productPlan = productPlans.first{ $0.forTarget == target }! + let productPlan = productPlans.first { $0.forTarget == target }! let scope = target.map { target in productPlan.taskProducerContext.globalProductPlan.getTargetSettings(target).globalScope } ?? productPlan.taskProducerContext.globalProductPlan.getWorkspaceSettings().globalScope // Figure out the name of the project. @@ -65,9 +65,12 @@ package extension BuildPlan { // Emit a version of the command line arguments array that back-maps as many strings as possible to macro refs. let commandLine = task.execTask.commandLineAsStrings.map { (commandLineArg: String) -> String in - literalPathsFromLongestToShortest.reduce(commandLineArg, { (result: String, path: String) -> String in - result.replacingOccurrences(of: path, with: literalPathsToMacroRefs[path]!) - }) + literalPathsFromLongestToShortest.reduce( + commandLineArg, + { (result: String, path: String) -> String in + result.replacingOccurrences(of: path, with: literalPathsToMacroRefs[path]!) 
+ } + ) } output <<< "command: \(commandLine.quotedStringListRepresentation)\n" } diff --git a/Sources/SWBTaskConstruction/ProductPlanning/ProductPlan.swift b/Sources/SWBTaskConstruction/ProductPlanning/ProductPlan.swift index db7e6c7e..a0c16198 100644 --- a/Sources/SWBTaskConstruction/ProductPlanning/ProductPlan.swift +++ b/Sources/SWBTaskConstruction/ProductPlanning/ProductPlan.swift @@ -28,8 +28,7 @@ package protocol GlobalProductPlanDelegate: CoreClientTargetDiagnosticProducingD /// Information on the global build plan. /// /// This class encapsulates the information on the global product plan which is made available to each individual product to use during planning. -package final class GlobalProductPlan: GlobalTargetInfoProvider -{ +package final class GlobalProductPlan: GlobalTargetInfoProvider { /// The build plan request. package let planRequest: BuildPlanRequest @@ -171,17 +170,17 @@ package final class GlobalProductPlan: GlobalTargetInfoProvider let targetToProducingTargetForNearestEnclosingProduct: [ConfiguredTarget: ConfiguredTarget] /// A map of `MH_BUNDLE` targets to any clients of that target. - let clientsOfBundlesByTarget: [ConfiguredTarget:[ConfiguredTarget]] + let clientsOfBundlesByTarget: [ConfiguredTarget: [ConfiguredTarget]] private static let dynamicMachOTypes = ["mh_execute", "mh_dylib", "mh_bundle"] // Checks that we have either been passed a configuration override for packages or we are building Debug/Release. private static func verifyPackageConfigurationOverride(planRequest: BuildPlanRequest) { #if DEBUG - if !planRequest.workspaceContext.workspace.projects.filter({ $0.isPackage }).isEmpty { - let parameters = planRequest.buildRequest.parameters - assert(parameters.configuration == nil || parameters.packageConfigurationOverride != nil || parameters.configuration == "Debug" || parameters.configuration == "Release") - } + if !planRequest.workspaceContext.workspace.projects.filter({ $0.isPackage }).isEmpty { + let parameters = planRequest.buildRequest.parameters + assert(parameters.configuration == nil || parameters.packageConfigurationOverride != nil || parameters.configuration == "Debug" || parameters.configuration == "Release") + } #endif } @@ -255,12 +254,14 @@ package final class GlobalProductPlan: GlobalTargetInfoProvider } // Compute the dependents of targets producing bundles. This information is used later to propagate Info.plist entries from codeless bundles to their clients. 
- private static func computeBundleClients(buildGraph: TargetBuildGraph, buildRequestContext: BuildRequestContext) -> [ConfiguredTarget:[ConfiguredTarget]] { - var clientsOfBundlesByTarget = [ConfiguredTarget:[ConfiguredTarget]]() - let bundleTargets = Set(buildGraph.allTargets.filter { - let settings = buildRequestContext.getCachedSettings($0.parameters, target: $0.target) - return settings.globalScope.evaluate(BuiltinMacros.MACH_O_TYPE) == "mh_bundle" - }) + private static func computeBundleClients(buildGraph: TargetBuildGraph, buildRequestContext: BuildRequestContext) -> [ConfiguredTarget: [ConfiguredTarget]] { + var clientsOfBundlesByTarget = [ConfiguredTarget: [ConfiguredTarget]]() + let bundleTargets = Set( + buildGraph.allTargets.filter { + let settings = buildRequestContext.getCachedSettings($0.parameters, target: $0.target) + return settings.globalScope.evaluate(BuiltinMacros.MACH_O_TYPE) == "mh_bundle" + } + ) for configuredTarget in buildGraph.allTargets { for match in bundleTargets.intersection(buildGraph.dependencies(of: configuredTarget)) { clientsOfBundlesByTarget[match, default: []].append(configuredTarget) @@ -314,8 +315,7 @@ package final class GlobalProductPlan: GlobalTargetInfoProvider for target in hostedTargets { hostTargetForTargets[target] = hostTarget } - } - else { + } else { // Emit a warning for a target which defines a TEST_HOST which can't be mapped to a target. for hostedTarget in hostedTargets { delegate.warning(.overrideTarget(hostedTarget), "Unable to find a target which creates the host product for value of $(TEST_HOST) '\(hostPath.str)'", location: .buildSetting(BuiltinMacros.TEST_HOST), component: .targetIntegrity) @@ -405,7 +405,8 @@ package final class GlobalProductPlan: GlobalTargetInfoProvider targetsRequiredToBuildForIndexing.formUnion(transitiveClosure([configuredTarget], successors: buildGraph.dependencies(of:)).0) } if targetSettings.platform?.name == targetSettings.globalScope.evaluate(BuiltinMacros.HOST_PLATFORM), - targetSettings.productType?.conformsTo(identifier: "com.apple.product-type.tool") == true { + targetSettings.productType?.conformsTo(identifier: "com.apple.product-type.tool") == true + { let executablePath = targetSettings.globalScope.evaluate(BuiltinMacros.TARGET_BUILD_DIR).join(targetSettings.globalScope.evaluate(BuiltinMacros.EXECUTABLE_PATH)).normalize() targetsByCommandLineToolProductPath[executablePath] = configuredTarget } @@ -515,9 +516,9 @@ package final class GlobalProductPlan: GlobalTargetInfoProvider } /// Compute the build properties imparted on each target in the graph. 
- private static func computeImpartedBuildProperties(planRequest: BuildPlanRequest, getLinkageGraph: @Sendable () async throws -> TargetLinkageGraph, delegate: any GlobalProductPlanDelegate) async -> ([ConfiguredTarget:[SWBCore.ImpartedBuildProperties]], [ConfiguredTarget:OrderedSet]) { - var impartedBuildPropertiesByTarget = [ConfiguredTarget:[SWBCore.ImpartedBuildProperties]]() - var directlyLinkedDependenciesByTarget = [ConfiguredTarget:OrderedSet]() + private static func computeImpartedBuildProperties(planRequest: BuildPlanRequest, getLinkageGraph: @Sendable () async throws -> TargetLinkageGraph, delegate: any GlobalProductPlanDelegate) async -> ([ConfiguredTarget: [SWBCore.ImpartedBuildProperties]], [ConfiguredTarget: OrderedSet]) { + var impartedBuildPropertiesByTarget = [ConfiguredTarget: [SWBCore.ImpartedBuildProperties]]() + var directlyLinkedDependenciesByTarget = [ConfiguredTarget: OrderedSet]() // We can skip computing contributing properties entirely if no target declares any and if there are no package products in the graph. let targetsContributingProperties = planRequest.buildGraph.allTargets.filter { !$0.target.hasImpartedBuildProperties || $0.target.type == .packageProduct } @@ -531,7 +532,7 @@ package final class GlobalProductPlan: GlobalTargetInfoProvider configuredTargets: AnyCollection(configuredTargets), diagnosticDelegate: delegate ) - var bundleLoaderByTarget = [ConfiguredTarget:ConfiguredTarget]() + var bundleLoaderByTarget = [ConfiguredTarget: ConfiguredTarget]() for (bundleLoaderTarget, targetsUsingThatBundleLoader) in targetsByBundleLoader { for targetUsingBundleLoader in targetsUsingThatBundleLoader { bundleLoaderByTarget[targetUsingBundleLoader] = bundleLoaderTarget @@ -592,7 +593,7 @@ package final class GlobalProductPlan: GlobalTargetInfoProvider } /// Construct the semantic gate nodes used to order work across targets. - private static func constructTargetGateNodes(buildGraph: TargetBuildGraph, provisioningInputs: [ConfiguredTarget: ProvisioningTaskInputs], buildRequestContext: BuildRequestContext, impartedBuildPropertiesByTarget: [ConfiguredTarget:[SWBCore.ImpartedBuildProperties]], enableIndexBuildArena: Bool, nodeCreationDelegate: (any TaskPlanningNodeCreationDelegate)?) -> [ConfiguredTarget: TargetGateNodes] { + private static func constructTargetGateNodes(buildGraph: TargetBuildGraph, provisioningInputs: [ConfiguredTarget: ProvisioningTaskInputs], buildRequestContext: BuildRequestContext, impartedBuildPropertiesByTarget: [ConfiguredTarget: [SWBCore.ImpartedBuildProperties]], enableIndexBuildArena: Bool, nodeCreationDelegate: (any TaskPlanningNodeCreationDelegate)?) -> [ConfiguredTarget: TargetGateNodes] { var targetGateNodes = [ConfiguredTarget: TargetGateNodes]() for configuredTarget in buildGraph.allTargets { // If we have a delegate to do so, then create virtual nodes for the target used to order this target's tasks with respect to other target's tasks - both fundamental target ordering, and orderings for eager compilation. @@ -662,7 +663,7 @@ package final class GlobalProductPlan: GlobalTargetInfoProvider return [:] } - let targetsWithBundleLoaderMap = Dictionary(grouping: targetsWithBundleLoader, by: { $0.0 }).mapValues{ $0.compactMap{ $0.1 } } + let targetsWithBundleLoaderMap = Dictionary(grouping: targetsWithBundleLoader, by: { $0.0 }).mapValues { $0.compactMap { $0.1 } } // We only need to care about targets that actually have some imparted properties. 
let executablePathToTarget: [(Path, ConfiguredTarget)] = configuredTargets.map { @@ -670,7 +671,7 @@ package final class GlobalProductPlan: GlobalTargetInfoProvider let path = settings.globalScope.evaluate(BuiltinMacros.BUILT_PRODUCTS_DIR).join(settings.globalScope.evaluate(BuiltinMacros.EXECUTABLE_PATH)).normalize() return (path, $0) } - let executablePathToTargetsMap = Dictionary(grouping: executablePathToTarget, by: { $0.0 }).mapValues{ $0.compactMap{ $0.1 } } + let executablePathToTargetsMap = Dictionary(grouping: executablePathToTarget, by: { $0.0 }).mapValues { $0.compactMap { $0.1 } } var result: [ConfiguredTarget: [ConfiguredTarget]] = [:] for (execName, targets) in executablePathToTargetsMap { @@ -707,7 +708,7 @@ package final class GlobalProductPlan: GlobalTargetInfoProvider // // We'll probably want to allow this for root packages in the future. let dependencies = planRequest.buildGraph.dependencies(of: target) - let dependsOnAnUnsafeTarget = dependencies.first{ unsafeFlagsTargets.contains($0) } + let dependsOnAnUnsafeTarget = dependencies.first { unsafeFlagsTargets.contains($0) } if let unsafeTarget = dependsOnAnUnsafeTarget { diagnosticDelegate.error(.overrideTarget(target), "The package product '\(unsafeTarget.target.name)' cannot be used as a dependency of this target because it uses unsafe build flags.", component: .targetIntegrity) } @@ -761,7 +762,7 @@ package final class GlobalProductPlan: GlobalTargetInfoProvider } } - func resolveDiamondProblemsInPackages(dependenciesByTarget: [ConfiguredTarget:OrderedSet], diagnosticDelegate: any TargetDiagnosticProducingDelegate) { + func resolveDiamondProblemsInPackages(dependenciesByTarget: [ConfiguredTarget: OrderedSet], diagnosticDelegate: any TargetDiagnosticProducingDelegate) { // Repeatedly check for diamonds until we do not find any more. This should always converge since worst case we will stop once all static package targets have been converted to dynamic ones, but we will also terminate if we somehow end up in a stable state with more than zero remaining diamonds. var lastDiamonds = 0, currentDiamonds = 0 repeat { @@ -809,7 +810,7 @@ package final class GlobalProductPlan: GlobalTargetInfoProvider } // Checks whether we have any duplicated occurrences of the same package product in the graph. - func checkForDiamondProblemsInPackageProductLinkage(dependenciesByTarget: [ConfiguredTarget:OrderedSet], diagnosticDelegate: any TargetDiagnosticProducingDelegate) -> Int { + func checkForDiamondProblemsInPackageProductLinkage(dependenciesByTarget: [ConfiguredTarget: OrderedSet], diagnosticDelegate: any TargetDiagnosticProducingDelegate) -> Int { func emitError(for name: String, targetName: String, andOther: String, conflicts: Bool = false) { if errorComponentsList.insert(ErrorComponents(name: name, targetName: targetName, andOther: andOther, conflicts: conflicts)).inserted { if conflicts { @@ -821,8 +822,8 @@ package final class GlobalProductPlan: GlobalTargetInfoProvider } // First, we need to determine which top-level targets link a certain package product or target. - var topLevelLinkingTargetsByPackageProduct = [ConfiguredTarget:Set]() - var topLevelLinkingTargetsByPackageTarget = [ConfiguredTarget:Set]() + var topLevelLinkingTargetsByPackageProduct = [ConfiguredTarget: Set]() + var topLevelLinkingTargetsByPackageTarget = [ConfiguredTarget: Set]() for (configuredTarget, dependencies) in dependenciesByTarget { // We are only interested in targets which link dynamically. 
@@ -839,11 +840,13 @@ package final class GlobalProductPlan: GlobalTargetInfoProvider }.filter { packageProduct in // Ignore if we already converted this to a dynamic target. if dynamicallyBuildingTargets.contains(packageProduct.target.target) { - packageTargetsToSkip.append(contentsOf: packageProduct.target.target.dependencies.compactMap { - planRequest.workspaceContext.workspace.target(for: $0.guid) - }.filter { - $0.type != .packageProduct - }) + packageTargetsToSkip.append( + contentsOf: packageProduct.target.target.dependencies.compactMap { + planRequest.workspaceContext.workspace.target(for: $0.guid) + }.filter { + $0.type != .packageProduct + } + ) return false } // Find the configured targets for target dependencies. @@ -1034,9 +1037,14 @@ package final class GlobalProductPlan: GlobalTargetInfoProvider // FIXME: This is another client that would like to be uncontended on the computation-for-key case. return moduleInfo.getOrInsert(configuredTarget) { let settings = getTargetSettings(configuredTarget) - return GlobalProductPlan.computeModuleInfo(workspaceContext: planRequest.workspaceContext, target: configuredTarget.target, settings: settings, diagnosticHandler: { message, location, component, essential in - delegate.warning(.overrideTarget(configuredTarget), message, location: location, component: component) - }) + return GlobalProductPlan.computeModuleInfo( + workspaceContext: planRequest.workspaceContext, + target: configuredTarget.target, + settings: settings, + diagnosticHandler: { message, location, component, essential in + delegate.warning(.overrideTarget(configuredTarget), message, location: location, component: component) + } + ) } } @@ -1342,8 +1350,7 @@ private class WrappingDelegate: TargetDependencyResolverDelegate { } /// A ProductPlan represents the work required to figure out how to build a ConfiguredTarget within a WorkspaceContext. The work is modeled in the form of a list of TaskProducers which generate the tasks to run, and any shared immutable data which multiple task producers might need access to. -package final class ProductPlan -{ +package final class ProductPlan { /// Rule name used for the task of 'prepare-for-index' a target, before any compilation can occur. package static let preparedForIndexPreCompilationRuleName = "PrepareForIndexPreCompilation" @@ -1367,8 +1374,7 @@ package final class ProductPlan /// The task producer context for this plan. let taskProducerContext: TaskProducerContext - init(path: Path, taskProducers: [any TaskProducer], forTarget: ConfiguredTarget?, targetTaskInfo: TargetGateNodes?, taskProducerContext: TaskProducerContext) - { + init(path: Path, taskProducers: [any TaskProducer], forTarget: ConfiguredTarget?, targetTaskInfo: TargetGateNodes?, taskProducerContext: TaskProducerContext) { self.path = path self.taskProducers = taskProducers self.forTarget = forTarget diff --git a/Sources/SWBTaskConstruction/ProductPlanning/ProductPlanner.swift b/Sources/SWBTaskConstruction/ProductPlanning/ProductPlanner.swift index d6e50110..e2022e08 100644 --- a/Sources/SWBTaskConstruction/ProductPlanning/ProductPlanner.swift +++ b/Sources/SWBTaskConstruction/ProductPlanning/ProductPlanner.swift @@ -24,16 +24,14 @@ import SWBMacro } /// A ProductPlanner is responsible for taking the inputs to a build (the workspace context and build request) and generating a set of product plans containing task producers which can produce tasks which will create those products. There will be one product plan per configured target in the build request. 
-package struct ProductPlanner -{ +package struct ProductPlanner { /// The request for which the planner is constructing plans. let planRequest: BuildPlanRequest /// The delegate to use to construct planned items. let delegate: any TaskPlanningDelegate - package init(planRequest: BuildPlanRequest, taskPlanningDelegate: any TaskPlanningDelegate) - { + package init(planRequest: BuildPlanRequest, taskPlanningDelegate: any TaskPlanningDelegate) { self.planRequest = planRequest self.delegate = taskPlanningDelegate } @@ -75,14 +73,15 @@ private struct WorkspaceProductPlanBuilder { let targetContexts = targetProductPlans.map(\.taskProducerContext) - var taskProducers: [any TaskProducer] = [ - CreateBuildDirectoryTaskProducer(context: globalTaskProducerContext, targetContexts: targetContexts), - XCFrameworkTaskProducer(context: globalTaskProducerContext, targetContexts: targetContexts), - SDKStatCacheTaskProducer(context: globalTaskProducerContext, targetContexts: targetContexts), - HeadermapVFSTaskProducer(context: globalTaskProducerContext, targetContexts: targetContexts), - PCHModuleMapTaskProducer(context: globalTaskProducerContext, targetContexts: targetContexts), - BuildDependencyInfoTaskProducer(context: globalTaskProducerContext, targetContexts: targetContexts), - ] + (globalProductPlan.planRequest.buildRequest.enableIndexBuildArena ? [IndexBuildVFSDirectoryRemapTaskProducer(context: globalTaskProducerContext)] : []) + var taskProducers: [any TaskProducer] = + [ + CreateBuildDirectoryTaskProducer(context: globalTaskProducerContext, targetContexts: targetContexts), + XCFrameworkTaskProducer(context: globalTaskProducerContext, targetContexts: targetContexts), + SDKStatCacheTaskProducer(context: globalTaskProducerContext, targetContexts: targetContexts), + HeadermapVFSTaskProducer(context: globalTaskProducerContext, targetContexts: targetContexts), + PCHModuleMapTaskProducer(context: globalTaskProducerContext, targetContexts: targetContexts), + BuildDependencyInfoTaskProducer(context: globalTaskProducerContext, targetContexts: targetContexts), + ] + (globalProductPlan.planRequest.buildRequest.enableIndexBuildArena ? [IndexBuildVFSDirectoryRemapTaskProducer(context: globalTaskProducerContext)] : []) for taskProducerExtension in await taskProducerExtensions(globalTaskProducerContext.workspaceContext) { for globalTaskProducer in taskProducerExtension.globalTaskProducers { @@ -94,8 +93,7 @@ private struct WorkspaceProductPlanBuilder { } } -private struct ProductPlanBuilder -{ +private struct ProductPlanBuilder { /// The configured target for which we are creating a plan. let configuredTarget: ConfiguredTarget @@ -105,17 +103,14 @@ private struct ProductPlanBuilder /// The delegate to use to construct planned items. let delegate: any TaskPlanningDelegate - init(configuredTarget: ConfiguredTarget, workspaceContext: WorkspaceContext, delegate: any TaskPlanningDelegate) - { + init(configuredTarget: ConfiguredTarget, workspaceContext: WorkspaceContext, delegate: any TaskPlanningDelegate) { self.configuredTarget = configuredTarget self.workspaceContext = workspaceContext self.delegate = delegate } - /// Create the product plan. - func createProductPlan(_ targetTaskInfo: TargetGateNodes, _ globalProductPlan: GlobalProductPlan) async -> ProductPlan - { + func createProductPlan(_ targetTaskInfo: TargetGateNodes, _ globalProductPlan: GlobalProductPlan) async -> ProductPlan { // Create the context object for the task producers. 
// FIXME: Either each task producer should get its own file path resolver, or the path resolver's caching logic needs to be thread-safe. let taskProducerContext = TargetTaskProducerContext(configuredTarget: configuredTarget, workspaceContext: workspaceContext, targetTaskInfo: targetTaskInfo, globalProductPlan: globalProductPlan, delegate: delegate) @@ -123,8 +118,7 @@ private struct ProductPlanBuilder // Compute the path of the product. // FIXME: Figure out how to represent targets which don't have a product path (e.g, aggregate and external targets). Maybe use their PIF GUID? var path = Path("placeholder") - if let standardTarget = self.configuredTarget.target as? StandardTarget - { + if let standardTarget = self.configuredTarget.target as? StandardTarget { path = taskProducerContext.settings.filePathResolver.resolveAbsolutePath(standardTarget.productReference) } @@ -139,19 +133,14 @@ private struct ProductPlanBuilder } } - // MARK: Target extensions to create task producers. - -protocol ProductPlanBuilding -{ +protocol ProductPlanBuilding { func taskProducers(_ taskProducerContext: TargetTaskProducerContext) async -> [any TaskProducer] } -extension Target: ProductPlanBuilding -{ - func taskProducers(_ taskProducerContext: TargetTaskProducerContext) async -> [any TaskProducer] - { +extension Target: ProductPlanBuilding { + func taskProducers(_ taskProducerContext: TargetTaskProducerContext) async -> [any TaskProducer] { switch self { case let target as StandardTarget: @@ -172,8 +161,7 @@ extension Target: ProductPlanBuilding } } -extension BuildPhaseTarget -{ +extension BuildPhaseTarget { /// The base name used by phase start and end nodes set up for task producers for this target. func phaseNodeRoot(_ configuredTarget: ConfiguredTarget?) -> String { return configuredTarget?.guid.stringValue ?? "target-\(self.name)-\(self.guid)" @@ -183,31 +171,32 @@ extension BuildPhaseTarget /// /// - parameter startPhaseNode: The start phase node for tasks created by the producer of the first build phase. If passed, then all tasks created by the producer for the first build phase will run after all tasks on which this node depends (presumably as set up by our caller). If nil, then a new start node will be created, and the tasks will run independently of any tasks previously created by the caller. /// - returns: A tuple consisting of the list of task producers created, and the end phase node of the producer for the last build phase. This end phase node can be used by the caller to order further producers to make their tasks run after the tasks for the build phases. - func buildPhaseTargetTaskProducers(_ taskProducerContext: TargetTaskProducerContext, startPhaseNodes: [PlannedVirtualNode]? = nil) -> (taskProducers: [any TaskProducer], endPhaseNode: PlannedVirtualNode) - { + func buildPhaseTargetTaskProducers(_ taskProducerContext: TargetTaskProducerContext, startPhaseNodes: [PlannedVirtualNode]? = nil) -> (taskProducers: [any TaskProducer], endPhaseNode: PlannedVirtualNode) { // All headers phases should run ahead of other phases, unless they follow an unsandboxed script. let earlyHeadersPhaseGUIDs: Set if taskProducerContext.settings.globalScope.evaluate(BuiltinMacros.RESCHEDULE_INDEPENDENT_HEADERS_PHASES) { - earlyHeadersPhaseGUIDs = Set(buildPhases.prefix(while: { - if let shellScriptPhase = $0 as? 
ShellScriptBuildPhase { - if !taskProducerContext.settings.globalScope.evaluate(BuiltinMacros.ENABLE_USER_SCRIPT_SANDBOXING) { - return false - } - // FIXME: Refactor output file list parsing so we can call into it here. - if shellScriptPhase.outputFileListPaths.count > 0 { - return false - } - for outputExpr in shellScriptPhase.outputFilePaths { - let output = Path(taskProducerContext.settings.globalScope.evaluate(outputExpr)) - if taskProducerContext.lookupFileType(fileName: output.basename)?.conformsToAny(taskProducerContext.compilationRequirementOutputFileTypes) == true { + earlyHeadersPhaseGUIDs = Set( + buildPhases.prefix(while: { + if let shellScriptPhase = $0 as? ShellScriptBuildPhase { + if !taskProducerContext.settings.globalScope.evaluate(BuiltinMacros.ENABLE_USER_SCRIPT_SANDBOXING) { return false } + // FIXME: Refactor output file list parsing so we can call into it here. + if shellScriptPhase.outputFileListPaths.count > 0 { + return false + } + for outputExpr in shellScriptPhase.outputFilePaths { + let output = Path(taskProducerContext.settings.globalScope.evaluate(outputExpr)) + if taskProducerContext.lookupFileType(fileName: output.basename)?.conformsToAny(taskProducerContext.compilationRequirementOutputFileTypes) == true { + return false + } + } + return true + } else { + return true } - return true - } else { - return true - } - }).filter({ $0 is HeadersBuildPhase }).map(\.guid)) + }).filter({ $0 is HeadersBuildPhase }).map(\.guid) + ) } else { earlyHeadersPhaseGUIDs = [] } @@ -231,8 +220,7 @@ extension BuildPhaseTarget var taskProducers = [any TaskProducer]() var startPhaseNodes = startPhaseNodes ?? [taskProducerContext.createVirtualNode(phaseNodeRoot(taskProducerContext.configuredTarget) + "-start")] - for (i, fusedPhase) in fusedPhases.enumerated() - { + for (i, fusedPhase) in fusedPhases.enumerated() { let endFusedPhaseNode = taskProducerContext.createVirtualNode(phaseNodeRoot(taskProducerContext.configuredTarget) + "-fused-phase" + String(i) + "-" + fusedPhase.map { $0.name.asLegalRfc1034Identifier.lowercased() }.joined(separator: "&")) let endFusedPhaseTask = taskProducerContext.createPhaseEndTask(inputs: startPhaseNodes, output: endFusedPhaseNode, mustPrecede: [taskProducerContext.targetEndTask]) @@ -262,13 +250,15 @@ extension BuildPhaseTarget guard taskProducerContext.settings.globalScope.evaluate(BuiltinMacros.FUSE_BUILD_PHASES) else { return false } - guard (taskProducerContext.configuredTarget?.target as? StandardTarget)?.buildRules.contains(where: { - if case .shellScript = $0.actionSpecifier { - return true - } else { - return false - } - }) != true || SWBFeatureFlag.allowBuildPhaseFusionWithCustomShellScriptBuildRules.value else { + guard + (taskProducerContext.configuredTarget?.target as? StandardTarget)?.buildRules.contains(where: { + if case .shellScript = $0.actionSpecifier { + return true + } else { + return false + } + }) != true || SWBFeatureFlag.allowBuildPhaseFusionWithCustomShellScriptBuildRules.value + else { // If the target has a shell script build rule, it may not be safe to parallelize if it specifies incorrect dependencies. 
return false } @@ -314,10 +304,8 @@ extension BuildPhaseTarget } } -extension StandardTarget -{ - func standardTargetTaskProducers(_ taskProducerContext: TargetTaskProducerContext) async -> [any TaskProducer] - { +extension StandardTarget { + func standardTargetTaskProducers(_ taskProducerContext: TargetTaskProducerContext) async -> [any TaskProducer] { let taskProducerExtensions = await taskProducerExtensions(taskProducerContext.workspaceContext) var taskProducers = [any TaskProducer]() @@ -451,19 +439,15 @@ extension StandardTarget } } -extension AggregateTarget -{ - func aggregateTargetTaskProducers(_ taskProducerContext: TargetTaskProducerContext) -> [any TaskProducer] - { +extension AggregateTarget { + func aggregateTargetTaskProducers(_ taskProducerContext: TargetTaskProducerContext) -> [any TaskProducer] { // TODO: We should probably check that only build phases useful in an aggregate target are present here. return super.buildPhaseTargetTaskProducers(taskProducerContext).taskProducers } } -extension ExternalTarget -{ - func externalTargetTaskProducers(_ taskProducerContext: TaskProducerContext) -> [any TaskProducer] - { +extension ExternalTarget { + func externalTargetTaskProducers(_ taskProducerContext: TaskProducerContext) -> [any TaskProducer] { if !customTasks.isEmpty { taskProducerContext.error("custom tasks are not yet supported in external targets") } @@ -471,10 +455,8 @@ extension ExternalTarget } } -extension PackageProductTarget -{ - func packageProductTargetTaskProducers(_ taskProducerContext: TargetTaskProducerContext) -> [any TaskProducer] - { +extension PackageProductTarget { + func packageProductTargetTaskProducers(_ taskProducerContext: TargetTaskProducerContext) -> [any TaskProducer] { if !customTasks.isEmpty { taskProducerContext.error("custom tasks are not yet supported in package product targets") } diff --git a/Sources/SWBTaskConstruction/TaskProducerSandboxing.swift b/Sources/SWBTaskConstruction/TaskProducerSandboxing.swift index 802b7c98..5ecfc3f0 100644 --- a/Sources/SWBTaskConstruction/TaskProducerSandboxing.swift +++ b/Sources/SWBTaskConstruction/TaskProducerSandboxing.swift @@ -44,7 +44,7 @@ extension TaskProducerContext { // * the generic template for sandboxes sandboxProfileContents <<< "(version 1)" <<< "\n" - sandboxProfileContents <<< "(allow default)" <<< "\n" // Required; Removing this will cause testEnsureProperlyDeclaredInputOutputSucceeds to fail + sandboxProfileContents <<< "(allow default)" <<< "\n" // Required; Removing this will cause testEnsureProperlyDeclaredInputOutputSucceeds to fail // TODO: rdar://87285630 (User script sandboxing: network requests) // Ideally we'd like the build system to track HTTP artifacts and guide the users to include them as dependencies. 
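The TaskProducerSandboxing hunk above only adjusts comment spacing, but the two directives it touches are the fixed preamble of every generated user-script sandbox profile. Below is a minimal sketch, using plain string concatenation instead of SWBUtil's <<< stream operator, of assembling that preamble; the function name is illustrative and anything beyond the two quoted directives is an assumption.

func scriptSandboxProfilePreamble() -> String {
    var profile = ""
    // The generic template for script sandboxes starts with the profile language version.
    profile += "(version 1)\n"
    // "(allow default)" is required, as the diff's comment notes; the rest of the
    // profile (not shown in this hunk) presumably refines this permissive default.
    profile += "(allow default)\n"
    return profile
}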
diff --git a/Sources/SWBTaskConstruction/TaskProducers/BuildPhaseTaskProducers/BuildRuleTaskProducer.swift b/Sources/SWBTaskConstruction/TaskProducers/BuildPhaseTaskProducers/BuildRuleTaskProducer.swift index fd52d6ff..99662615 100644 --- a/Sources/SWBTaskConstruction/TaskProducers/BuildPhaseTaskProducers/BuildRuleTaskProducer.swift +++ b/Sources/SWBTaskConstruction/TaskProducers/BuildPhaseTaskProducers/BuildRuleTaskProducer.swift @@ -42,14 +42,15 @@ final class BuildRuleTaskProducer: StandardTaskProducer, TaskProducer, ShellBase } func pathForResolvedFileList(_ scope: MacroEvaluationScope, prefix: String, fileList: Path) -> Path { - let checksumRawData = [ - scope.evaluate(BuiltinMacros.EFFECTIVE_PLATFORM_NAME), - buildPhase.guid, - action.identifier, - scope.evaluate(BuiltinMacros.CURRENT_VARIANT), - scope.evaluate(BuiltinMacros.CURRENT_ARCH), - fileList.str, - ] + cbc.inputs.map { $0.absolutePath.str } + let checksumRawData = + [ + scope.evaluate(BuiltinMacros.EFFECTIVE_PLATFORM_NAME), + buildPhase.guid, + action.identifier, + scope.evaluate(BuiltinMacros.CURRENT_VARIANT), + scope.evaluate(BuiltinMacros.CURRENT_ARCH), + fileList.str, + ] + cbc.inputs.map { $0.absolutePath.str } return scope.evaluate(BuiltinMacros.TEMP_DIR).join("\(prefix)-\(checksumRawData.joined(separator: "\n").md5())-\(fileList.basenameWithoutSuffix)-resolved.xcfilelist") } @@ -95,7 +96,7 @@ final class BuildRuleTaskProducer: StandardTaskProducer, TaskProducer, ShellBase let inputPath = input.absolutePath let inputDir = inputPath.dirname let inputName = inputPath.basename - let (inputBase,inputSuffix) = Path(inputName).splitext() + let (inputBase, inputSuffix) = Path(inputName).splitext() var inputVariables: [MacroDeclaration: String] = [ BuiltinMacros.INPUT_FILE_PATH: inputPath.str, BuiltinMacros.INPUT_FILE_DIR: inputDir.str, @@ -137,7 +138,7 @@ final class BuildRuleTaskProducer: StandardTaskProducer, TaskProducer, ShellBase } // Add the input file variables. 
- for (macro,name) in inputVariables { + for (macro, name) in inputVariables { environment[macro.name] = name } @@ -196,7 +197,7 @@ final class BuildRuleTaskProducer: StandardTaskProducer, TaskProducer, ShellBase dependencyData = .dependencyInfo(path) case .makefiles(let paths): - let paths = paths.map{ context.makeAbsolute(Path(cbc.scope.evaluate($0, lookup: lookup))).normalize() } + let paths = paths.map { context.makeAbsolute(Path(cbc.scope.evaluate($0, lookup: lookup))).normalize() } outputs.append(contentsOf: paths) dependencyData = .makefiles(paths) } @@ -204,8 +205,8 @@ final class BuildRuleTaskProducer: StandardTaskProducer, TaskProducer, ShellBase dependencyData = nil } - var inputNodes = inputs.map{ createNodeForRule(delegate, cbc: cbc, context: context, path: $0) } - var outputNodes = outputs.map{ delegate.createNode($0) as (any PlannedNode) } + var inputNodes = inputs.map { createNodeForRule(delegate, cbc: cbc, context: context, path: $0) } + var outputNodes = outputs.map { delegate.createNode($0) as (any PlannedNode) } await handleFileLists(&tasks, &inputNodes, &outputNodes, &environment, cbc.scope, inputFileLists, outputFileLists, lookup: lookup) diff --git a/Sources/SWBTaskConstruction/TaskProducers/BuildPhaseTaskProducers/CopyFilesTaskProducer.swift b/Sources/SWBTaskConstruction/TaskProducers/BuildPhaseTaskProducers/CopyFilesTaskProducer.swift index 3daa8d31..5f75ec89 100644 --- a/Sources/SWBTaskConstruction/TaskProducers/BuildPhaseTaskProducers/CopyFilesTaskProducer.swift +++ b/Sources/SWBTaskConstruction/TaskProducers/BuildPhaseTaskProducers/CopyFilesTaskProducer.swift @@ -83,10 +83,12 @@ class CopyFilesTaskProducer: FilesBasedBuildPhaseTaskProducerBase, FilesBasedBui // // FIXME: The latter feature here is rarely used, and not very flexible as any target which needs other copy phases won't be able to disable them selectively. let buildComponents = scope.evaluate(BuiltinMacros.BUILD_COMPONENTS) - guard buildComponents.contains("build") + guard + buildComponents.contains("build") || buildComponents.contains("installLoc") || (buildComponents.contains("api") && scope.evaluate(BuiltinMacros.INSTALLAPI_COPY_PHASE)) - || (buildComponents.contains("headers") && scope.evaluate(BuiltinMacros.INSTALLHDRS_COPY_PHASE)) else { + || (buildComponents.contains("headers") && scope.evaluate(BuiltinMacros.INSTALLHDRS_COPY_PHASE)) + else { return [] } @@ -265,8 +267,7 @@ class CopyFilesTaskProducer: FilesBasedBuildPhaseTaskProducerBase, FilesBasedBui if strParts.count > 1, firstPart == firstPartToRemove { let subpath = strParts[1...].joined(separator: Path.pathSeparatorString) subpathsToExclude.append(subpath) - } - else if firstPart != firstPartToRemove { + } else if firstPart != firstPartToRemove { // If string is *only* firstPartToRemove then we don't add it. subpathsToExclude.append(string) } @@ -281,8 +282,7 @@ class CopyFilesTaskProducer: FilesBasedBuildPhaseTaskProducerBase, FilesBasedBui var shouldSkipCopyingBinary = false if mergingTargets.contains(configuredTarget) { shouldSkipCopyingBinary = true - } - else { + } else { for dependency in context.globalProductPlan.dependencies(of: configuredTarget) { if mergingTargets.contains(dependency) { shouldSkipCopyingBinary = true @@ -300,8 +300,7 @@ class CopyFilesTaskProducer: FilesBasedBuildPhaseTaskProducerBase, FilesBasedBui // If this is a standalone binary product then we skip copying it altogether. Otherwise PBXCp will exclude the subpaths we add to the list here. 
if productType.isWrapper { addSubpath(settings.globalScope.evaluate(BuiltinMacros.EXECUTABLE_PATH).str, removingFirstPartIfEqualTo: settings.globalScope.evaluate(BuiltinMacros.FULL_PRODUCT_NAME).str) - } - else { + } else { // Skip standalone binary altogether by returning out of this method. return } @@ -354,7 +353,6 @@ class CopyFilesTaskProducer: FilesBasedBuildPhaseTaskProducerBase, FilesBasedBui } } - // If we should skip copying it, then we set up the subpaths to exclude. if shouldSkipCopyingBinary { // If this is a standalone binary product then we skip copying it altogether. Otherwise PBXCp will exclude the subpaths we add to the list here. @@ -439,16 +437,14 @@ class CopyFilesTaskProducer: FilesBasedBuildPhaseTaskProducerBase, FilesBasedBui // With deep bundles, we also report the Versions/A folder as an output, because that's what codesign signs. additionalPresumedOutputs.append(delegate.createNode(dst.join(versionRelativePath))) additionalPresumedOutputs.append(delegate.createNode(dst.join(versionRelativePath).join(frameworkName))) - } - else { + } else { additionalPresumedOutputs.append(delegate.createNode(dst.join(frameworkName))) } case let .bundle(shallow): let bundleName = dst.basenameWithoutSuffix if !shallow { additionalPresumedOutputs.append(delegate.createNode(dst.join("Contents/MacOS").join(bundleName))) - } - else { + } else { additionalPresumedOutputs.append(delegate.createNode(dst.join(bundleName))) } case nil: diff --git a/Sources/SWBTaskConstruction/TaskProducers/BuildPhaseTaskProducers/FilesBasedBuildPhaseTaskProducer.swift b/Sources/SWBTaskConstruction/TaskProducers/BuildPhaseTaskProducers/FilesBasedBuildPhaseTaskProducer.swift index 37612eb5..6a0312e1 100644 --- a/Sources/SWBTaskConstruction/TaskProducers/BuildPhaseTaskProducers/FilesBasedBuildPhaseTaskProducer.swift +++ b/Sources/SWBTaskConstruction/TaskProducers/BuildPhaseTaskProducers/FilesBasedBuildPhaseTaskProducer.swift @@ -27,17 +27,14 @@ extension FileToBuild { } } - extension TaskProducerContext: InputFileGroupingStrategyContext { public var fs: any FSProxy { return workspaceContext.fs } } - // MARK: - /// Context for grouping files in the build phase, and processing the groups. package final class BuildFilesProcessingContext: BuildFileFilteringContext { package let excludedSourceFileNames: [String] @@ -153,8 +150,7 @@ package final class BuildFilesProcessingContext: BuildFileFilteringContext { // If we get here, then either we validated a rule action, or we decided to add the file-to-build as an ungrouped file. ruleAction = addAsUngrouped ? nil : provisionalRuleAction - } - else { + } else { // If it isn't the output of some task, then we always use the matching rule action (or lack thereof) which we found. ruleAction = provisionalRuleAction } @@ -185,8 +181,7 @@ package final class BuildFilesProcessingContext: BuildFileFilteringContext { if let identFromRuleAction = ruleAction?.inputFileGroupingStrategies.lazy.compactMap({ $0.determineGroupIdentifier(groupable: ftb) }).first { groupIdent = identFromRuleAction isCollectionGroup = true - } - else { + } else { groupIdent = path.str isCollectionGroup = false } @@ -200,12 +195,14 @@ package final class BuildFilesProcessingContext: BuildFileFilteringContext { } // Find or create the group for the identifier we got back. - let group = groupsByIdent[groupIdent] ?? { - // We didn’t already have a group for this identifier, so create one now. 
- let group = FileToBuildGroup(groupIdent, action: ruleAction) - addFileGroup(group, isCollectionGroup) - return group - }() + let group = + groupsByIdent[groupIdent] + ?? { + // We didn’t already have a group for this identifier, so create one now. + let group = FileToBuildGroup(groupIdent, action: ruleAction) + addFileGroup(group, isCollectionGroup) + return group + }() // Append the file to the group. group.files.append(ftb) @@ -215,8 +212,7 @@ package final class BuildFilesProcessingContext: BuildFileFilteringContext { // Add it to the appropriate list. if isCollectionGroup { collectionGroups.append(group) - } - else { + } else { singletonGroups.append(group) } // We skip adding the group to the index if instructed - typically because the build phase isn't resolving build rules. @@ -326,7 +322,7 @@ extension PluginManager { /// /// Asset Catalogs would be one example of this, so that they can generate symbols. func fileTypesProducingGeneratedSources() -> [String] { - var compileToSwiftFileTypes : [String] = [] + var compileToSwiftFileTypes: [String] = [] for groupingStragegyExtensions in extensions(of: InputFileGroupingStrategyExtensionPoint.self) { compileToSwiftFileTypes.append(contentsOf: groupingStragegyExtensions.fileTypesCompilingToSwiftSources()) } @@ -336,7 +332,6 @@ extension PluginManager { // MARK: - /// Protocol for build phase tasks producers which are based around processing file references. protocol FilesBasedBuildPhaseTaskProducer: AnyObject, TaskProducer { /// The type of build phase managed by this producer. @@ -478,12 +473,11 @@ package class FilesBasedBuildPhaseTaskProducerBase: PhasedTaskProducer { // If we're processing a single language for installLoc, then unwrap the variant group into a separate item for each of its child references. if installlocSpecificLanguages { try unwrapResolveAndAdd(variantGroup: asVariantGroup, for: buildFile) - } - else { + } else { // If the first item in the variant group is an IB document with Base localization, then we need to handle it specially, since it may contain a mix of IB documents and strings files. // An exception to this is when String Catalogs are in play. if let baseReference = asVariantGroup.children.first as? SWBCore.FileReference, SpecRegistry.interfaceBuilderDocumentFileTypeIdentifiers.contains(baseReference.fileTypeIdentifier), baseReference.regionVariantName == "Base" { - var ibDocRefs = [SWBCore.FileReference]() // These are specifically override nibs, not including Base. + var ibDocRefs = [SWBCore.FileReference]() // These are specifically override nibs, not including Base. var hasStringCatalog = false // Iterate over the children of the group - skipping the first reference - to put them into buckets. @@ -496,19 +490,15 @@ package class FilesBasedBuildPhaseTaskProducerBase: PhasedTaskProducer { if SpecRegistry.interfaceBuilderDocumentFileTypeIdentifiers.contains(fileRef.fileTypeIdentifier) { // If it's an IB document, then remember it. ibDocRefs.append(fileRef) - } - else if fileRef.fileTypeIdentifier == "text.plist.strings" { + } else if fileRef.fileTypeIdentifier == "text.plist.strings" { // If it's a .strings file, then we'll just include it in the variant group. - } - else if fileRef.fileTypeIdentifier == "text.json.xcstrings" { + } else if fileRef.fileTypeIdentifier == "text.json.xcstrings" { // If it has a String Catalog, remember that. 
hasStringCatalog = true - } - else { + } else { // FIXME: If there's something in the variant group that isn't an IB document or a .strings file, then what should we do about it? } - } - else { + } else { // FIXME: If this isn't a file reference, what should we do about it? } } @@ -542,14 +532,12 @@ package class FilesBasedBuildPhaseTaskProducerBase: PhasedTaskProducer { addResolvedItem(buildFile: nil, path: path, reference: reference, fileType: fileType) } } - } - else { + } else { // If the first reference is not an IB document, or does not have base localization, then we unwrap the variant group into a separate item for each of its child references. try unwrapResolveAndAdd(variantGroup: asVariantGroup, for: buildFile) } } - } - else { + } else { func isXCFrameworkWrapper(_ path: Path, fileType: FileTypeSpec) -> Bool { guard let xcframeworkFileSpec = context.lookupFileType(identifier: "wrapper.xcframework") else { return false @@ -582,15 +570,13 @@ package class FilesBasedBuildPhaseTaskProducerBase: PhasedTaskProducer { if let fileType = fileTypeSpec(for: library) { addResolvedItem(buildFile: buildFile, path: libraryTargetPath, reference: reference, fileType: fileType) - } - else { + } else { // This error should actually never be reached as we should have provided this error earlier. context.error("Unsupported library type: '\(libraryTargetPath.fileSuffix)'") } } } - } - else { + } else { // The reference is not a variant group, so add it as a single item. addResolvedItem(buildFile: buildFile, path: path, reference: reference, fileType: fileType) } @@ -618,7 +604,7 @@ package class FilesBasedBuildPhaseTaskProducerBase: PhasedTaskProducer { var compileToSwiftFiles = [ResolvedBuildFile]() var otherBuildFiles = [ResolvedBuildFile]() for resolvedBuildFile in resolvedBuildFiles { - if compileToSwiftFileTypes.contains (where: { identifier in resolvedBuildFile.fileTypeSpec.conformsTo(identifier: identifier)}) { + if compileToSwiftFileTypes.contains(where: { identifier in resolvedBuildFile.fileTypeSpec.conformsTo(identifier: identifier) }) { compileToSwiftFiles.append(resolvedBuildFile) } else { otherBuildFiles.append(resolvedBuildFile) @@ -747,21 +733,21 @@ package class FilesBasedBuildPhaseTaskProducerBase: PhasedTaskProducer { } /// Add the tasks for the given rule. - private func addTasksForRule( groupContext: inout GroupContext, _ producer: T, _ rule: any BuildRuleAction, _ group: FileToBuildGroup, _ buildFilesContext: BuildFilesProcessingContext, _ scope: MacroEvaluationScope, _ tasks: inout [any PlannedTask]) async { + private func addTasksForRule(groupContext: inout GroupContext, _ producer: T, _ rule: any BuildRuleAction, _ group: FileToBuildGroup, _ buildFilesContext: BuildFilesProcessingContext, _ scope: MacroEvaluationScope, _ tasks: inout [any PlannedTask]) async { // Do the necessary work, ending up with an array of output paths. let outputs: [FileToBuild] if let priorOutputs = outputsProducedByArchNeutralToolSpec[group] { // If we already have the output paths because the input group has previously been processed by an architecture-neutral tool spec (for a different arch or variant), then we re-use those paths. outputs = priorOutputs - } - else { + } else { // Otherwise we construct tasks for the input group. let result = await appendGeneratedTasks(&tasks) { delegate in - let scope = !rule.isArchitectureNeutral ? scope : ( - scope + let scope = + !rule.isArchitectureNeutral + ? 
scope + : (scope .subscope(binding: BuiltinMacros.variantCondition, to: "normal") - .subscope(binding: BuiltinMacros.archCondition, to: "undefined_arch") - ) + .subscope(binding: BuiltinMacros.archCondition, to: "undefined_arch")) await constructTasksForRule(rule, group, buildFilesContext, scope, delegate) } outputs = result.outputs @@ -840,8 +826,8 @@ package class FilesBasedBuildPhaseTaskProducerBase: PhasedTaskProducer { do { try context.workspaceContext.fs.traverse(path) { subPath in if let relativePath = subPath.relativeSubpath(from: path), - context.workspaceContext.fs.isDirectory(subPath) && - subPath.join(".").isValidLocalizedContent(scope) { + context.workspaceContext.fs.isDirectory(subPath) && subPath.join(".").isValidLocalizedContent(scope) + { localizedContent.append(Path(relativePath)) } } @@ -882,14 +868,14 @@ package class FilesBasedBuildPhaseTaskProducerBase: PhasedTaskProducer { // Don't process output files which are already in a product headers folder. let publicDestDirPath = TargetHeaderInfo.destDirPath(for: HeaderVisibility.public, scope: scope) - guard !publicDestDirPath.isAncestor(of: ftb.absolutePath) else { - return false - } + guard !publicDestDirPath.isAncestor(of: ftb.absolutePath) else { + return false + } let privateDestDirPath = TargetHeaderInfo.destDirPath(for: HeaderVisibility.private, scope: scope) - guard !privateDestDirPath.isAncestor(of: ftb.absolutePath) else { - return false - } + guard !privateDestDirPath.isAncestor(of: ftb.absolutePath) else { + return false + } return true } diff --git a/Sources/SWBTaskConstruction/TaskProducers/BuildPhaseTaskProducers/HeadersTaskProducer.swift b/Sources/SWBTaskConstruction/TaskProducers/BuildPhaseTaskProducers/HeadersTaskProducer.swift index 80037abe..1b1ea6d2 100644 --- a/Sources/SWBTaskConstruction/TaskProducers/BuildPhaseTaskProducers/HeadersTaskProducer.swift +++ b/Sources/SWBTaskConstruction/TaskProducers/BuildPhaseTaskProducers/HeadersTaskProducer.swift @@ -60,7 +60,8 @@ final class HeadersTaskProducer: FilesBasedBuildPhaseTaskProducerBase, FilesBase // If `-b` was passed we can map directly from the copied header to the original. If it isn't, all the // lines will be out of sync and thus we can't add the mapping. if let constructedTask = generatedTasks.tasks.only as? ConstructedTask, - constructedTask.commandLine.contains(where: { $0 == "-b" }) { + constructedTask.commandLine.contains(where: { $0 == "-b" }) + { context.addCopiedPath(src: ftb.absolutePath.str, dst: output.str) } } else { diff --git a/Sources/SWBTaskConstruction/TaskProducers/BuildPhaseTaskProducers/ResourcesTaskProducer.swift b/Sources/SWBTaskConstruction/TaskProducers/BuildPhaseTaskProducers/ResourcesTaskProducer.swift index aee06f4f..65183082 100644 --- a/Sources/SWBTaskConstruction/TaskProducers/BuildPhaseTaskProducers/ResourcesTaskProducer.swift +++ b/Sources/SWBTaskConstruction/TaskProducers/BuildPhaseTaskProducers/ResourcesTaskProducer.swift @@ -21,8 +21,7 @@ final class ResourcesTaskProducer: FilesBasedBuildPhaseTaskProducerBase, FilesBa func shouldProcessResources(_ scope: MacroEvaluationScope) -> Bool { // Resources are processed only when the "build" component is present. 
- guard scope.evaluate(BuiltinMacros.BUILD_COMPONENTS).contains("build") || - scope.evaluate(BuiltinMacros.BUILD_COMPONENTS).contains("installLoc") else { return false } + guard scope.evaluate(BuiltinMacros.BUILD_COMPONENTS).contains("build") || scope.evaluate(BuiltinMacros.BUILD_COMPONENTS).contains("installLoc") else { return false } // Resources are processed only if $(UNLOCALIZED_RESOURCES_FOLDER_PATH) is defined. guard !scope.evaluate(BuiltinMacros.UNLOCALIZED_RESOURCES_FOLDER_PATH).isEmpty else { return false } @@ -76,7 +75,7 @@ final class ResourcesTaskProducer: FilesBasedBuildPhaseTaskProducerBase, FilesBa self.createOnDemandResourcesPlistTask(assetPacks, delegate) // Emit AssetPackManifest[Template].plist (check for ) - let hasCustomAssetPackManifest = resourcesTasks.contains{ $0.outputs.contains { ["AssetPackManifest.plist", "AssetPackManifestTemplate.plist"].contains($0.path.basename) } } + let hasCustomAssetPackManifest = resourcesTasks.contains { $0.outputs.contains { ["AssetPackManifest.plist", "AssetPackManifestTemplate.plist"].contains($0.path.basename) } } if !hasCustomAssetPackManifest { let orderingInputs: [any PlannedNode] = resourcesTasks.flatMap { $0.outputs } self.context.createAssetPackManifestSpec.constructAssetPackManifestTask(self.context, assetPacks, orderingInputs: orderingInputs, scope, delegate) @@ -229,12 +228,13 @@ final class ResourcesTaskProducer: FilesBasedBuildPhaseTaskProducerBase, FilesBa // Return any files provided by the product type if let productType = context.productType { - additionalFilesToBuild += productType.buildPhaseFileRefAdditions["com.apple.buildphase.resources"]?.compactMap { file in - let path = Path(scope.evaluate(file.path)) - guard path.isAbsolute else { context.error("unexpected non-absolute path from \(productType.identifier) buildPhaseFileRefAdditions \(file.path.stringRep): \(path.str)"); return nil } + additionalFilesToBuild += + productType.buildPhaseFileRefAdditions["com.apple.buildphase.resources"]?.compactMap { file in + let path = Path(scope.evaluate(file.path)) + guard path.isAbsolute else { context.error("unexpected non-absolute path from \(productType.identifier) buildPhaseFileRefAdditions \(file.path.stringRep): \(path.str)"); return nil } - return FileToBuild(absolutePath: path, inferringTypeUsing: context, regionVariantName: scope.evaluate(file.regionVariantName).nilIfEmpty) - } ?? [] + return FileToBuild(absolutePath: path, inferringTypeUsing: context, regionVariantName: scope.evaluate(file.regionVariantName).nilIfEmpty) + } ?? 
[] } return additionalFilesToBuild diff --git a/Sources/SWBTaskConstruction/TaskProducers/BuildPhaseTaskProducers/ShellBasedTaskProducer.swift b/Sources/SWBTaskConstruction/TaskProducers/BuildPhaseTaskProducers/ShellBasedTaskProducer.swift index 0bb632e7..42446146 100644 --- a/Sources/SWBTaskConstruction/TaskProducers/BuildPhaseTaskProducers/ShellBasedTaskProducer.swift +++ b/Sources/SWBTaskConstruction/TaskProducers/BuildPhaseTaskProducers/ShellBasedTaskProducer.swift @@ -16,7 +16,7 @@ import SWBMacro import Foundation protocol ShellBasedTaskProducer { - func handleFileLists(_ tasks: inout [any PlannedTask], _ inputs: inout [any PlannedNode], _ outputs: inout [any PlannedNode], _ environment: inout [String: String] , _ scope: MacroEvaluationScope, _ inputFileLists: [any PlannedNode], _ outputFileLists: [any PlannedNode], lookup: @escaping ((MacroDeclaration) -> MacroExpression?)) async + func handleFileLists(_ tasks: inout [any PlannedTask], _ inputs: inout [any PlannedNode], _ outputs: inout [any PlannedNode], _ environment: inout [String: String], _ scope: MacroEvaluationScope, _ inputFileLists: [any PlannedNode], _ outputFileLists: [any PlannedNode], lookup: @escaping ((MacroDeclaration) -> MacroExpression?)) async func pathForResolvedFileList(_ scope: MacroEvaluationScope, prefix: String, fileList: Path) -> Path @@ -30,7 +30,8 @@ protocol ShellBasedTaskProducer { extension ShellBasedTaskProducer where Self: StandardTaskProducer { func parseXCFileList(_ path: Path, scope: MacroEvaluationScope, lookup: ((MacroDeclaration) -> MacroExpression?)? = nil, transform: (Path) -> T = { $0 }) throws -> [T] { let contents = try readFileContents(path).asString - return contents + return + contents .split(separator: "\n") .compactMap { line in let line = line.trimmingCharacters(in: .whitespaces) @@ -50,8 +51,7 @@ extension ShellBasedTaskProducer where Self: StandardTaskProducer { return try parseXCFileList(path, scope: scope, lookup: lookup) { path -> (any PlannedNode) in if isInputList && (SWBFeatureFlag.treatScriptInputsAsDirectoryNodes.value || scope.evaluate(BuiltinMacros.USE_RECURSIVE_SCRIPT_INPUTS_IN_SCRIPT_PHASES)) { return context.createDirectoryTreeNode(context.makeAbsolute(path).normalize(), excluding: []) - } - else { + } else { return context.createNode(context.makeAbsolute(path).normalize()) } } @@ -62,7 +62,7 @@ extension ShellBasedTaskProducer where Self: StandardTaskProducer { } } - func handleFileLists(_ tasks: inout [any PlannedTask], _ inputs: inout [any PlannedNode], _ outputs: inout [any PlannedNode], _ environment: inout [String: String] , _ scope: MacroEvaluationScope, _ inputFileLists: [any PlannedNode], _ outputFileLists: [any PlannedNode], lookup: @escaping ((MacroDeclaration) -> MacroExpression?) = { _ in nil }) async { + func handleFileLists(_ tasks: inout [any PlannedTask], _ inputs: inout [any PlannedNode], _ outputs: inout [any PlannedNode], _ environment: inout [String: String], _ scope: MacroEvaluationScope, _ inputFileLists: [any PlannedNode], _ outputFileLists: [any PlannedNode], lookup: @escaping ((MacroDeclaration) -> MacroExpression?) = { _ in nil }) async { // The set of both the input and the output file lists need to be tracked as inputs to the script as they are required to exist before the task can actually run. 
inputs += inputFileLists inputs += outputFileLists diff --git a/Sources/SWBTaskConstruction/TaskProducers/BuildPhaseTaskProducers/ShellScriptTaskProducer.swift b/Sources/SWBTaskConstruction/TaskProducers/BuildPhaseTaskProducers/ShellScriptTaskProducer.swift index fedc42f6..a56e2f3e 100644 --- a/Sources/SWBTaskConstruction/TaskProducers/BuildPhaseTaskProducers/ShellScriptTaskProducer.swift +++ b/Sources/SWBTaskConstruction/TaskProducers/BuildPhaseTaskProducers/ShellScriptTaskProducer.swift @@ -46,7 +46,8 @@ final class ShellScriptTaskProducer: PhasedTaskProducer, TaskProducer, ShellBase } // Get all of the paths from the xcfilelist; we'll process them later. - let groups: [[Path]] = xcfilelistPaths + let groups: [[Path]] = + xcfilelistPaths .map { xcfilelist in let outputs: [Path] do { @@ -168,7 +169,6 @@ final class ShellScriptTaskProducer: PhasedTaskProducer, TaskProducer, ShellBase // Lastly, we need to inspect the contents of the file lists and append their paths accordingly. await handleFileLists(&tasks, &inputs, &outputs, &environment, scope, inputFileLists, outputFileLists) - let enabledIndexBuildArena = scope.evaluate(BuiltinMacros.INDEX_ENABLE_BUILD_ARENA) let disableScriptExecutionForIndexBuild = scope.evaluate(BuiltinMacros.INDEX_DISABLE_SCRIPT_EXECUTION) let forceScriptExecutionForIndexBuild = scope.evaluate(BuiltinMacros.INDEX_FORCE_SCRIPT_EXECUTION) @@ -214,7 +214,7 @@ final class ShellScriptTaskProducer: PhasedTaskProducer, TaskProducer, ShellBase isSandboxingEnabled = ShellScriptTaskProducer.isSandboxingEnabled(context, shellScriptBuildPhase) } - inputs.append(scriptFileNode) // The generated script file is also an input. + inputs.append(scriptFileNode) // The generated script file is also an input. // Determine if the shell script should always be run. NOTE!! This must come before the virtual output node creation. let alwaysExecuteTask = outputs.isEmpty || self.shellScriptBuildPhase.alwaysOutOfDate @@ -263,8 +263,8 @@ final class ShellScriptTaskProducer: PhasedTaskProducer, TaskProducer, ShellBase dependencyData = .dependencyInfo(path) case .makefiles(let paths): - let paths = paths.map{ context.makeAbsolute(Path(scope.evaluate($0))).normalize() } - outputs.append(contentsOf: paths.map{ context.createNode($0) }) + let paths = paths.map { context.makeAbsolute(Path(scope.evaluate($0))).normalize() } + outputs.append(contentsOf: paths.map { context.createNode($0) }) dependencyData = .makefiles(paths) } } else { @@ -303,8 +303,6 @@ final class ShellScriptTaskProducer: PhasedTaskProducer, TaskProducer, ShellBase return tasks } - - /// Construct the tasks for an individual shell-script build rule. /// /// NOTE: External targets are basically shell scripts. It lives here because the behavior shares some significant logical pieces with the behavior of shell script build phases. diff --git a/Sources/SWBTaskConstruction/TaskProducers/BuildPhaseTaskProducers/SourcesTaskProducer.swift b/Sources/SWBTaskConstruction/TaskProducers/BuildPhaseTaskProducers/SourcesTaskProducer.swift index c8e6296f..6f80bbc4 100644 --- a/Sources/SWBTaskConstruction/TaskProducers/BuildPhaseTaskProducers/SourcesTaskProducer.swift +++ b/Sources/SWBTaskConstruction/TaskProducers/BuildPhaseTaskProducers/SourcesTaskProducer.swift @@ -308,7 +308,7 @@ package final class SourcesTaskProducer: FilesBasedBuildPhaseTaskProducerBase, F /// /// We override this to auto-attach tasks to the generated headers completion ordering gate. 
@discardableResult - package override func appendGeneratedTasks( _ tasks: inout [any PlannedTask], options: TaskOrderingOptions? = nil, body: (any TaskGenerationDelegate) async -> Void) async -> (tasks: [any PlannedTask], outputs: [FileToBuild]) { + package override func appendGeneratedTasks(_ tasks: inout [any PlannedTask], options: TaskOrderingOptions? = nil, body: (any TaskGenerationDelegate) async -> Void) async -> (tasks: [any PlannedTask], outputs: [FileToBuild]) { return await super.appendGeneratedTasks(&tasks, options: options) { delegate in await body(SourcesPhaseBasedTaskGenerationDelegate(producer: self, userPreferences: context.workspaceContext.userPreferences, delegate: delegate)) } @@ -354,7 +354,7 @@ package final class SourcesTaskProducer: FilesBasedBuildPhaseTaskProducerBase, F } case .namedReference(let name, let fileTypeIdentifier): settingsForRef = nil - absolutePath = Path(name) // This path isn't actually absolute, but `LinkerSpec.LibrarySpecifier` supports that case. + absolutePath = Path(name) // This path isn't actually absolute, but `LinkerSpec.LibrarySpecifier` supports that case. if let type = context.lookupFileType(identifier: fileTypeIdentifier) { fileType = type } else { @@ -475,8 +475,7 @@ package final class SourcesTaskProducer: FilesBasedBuildPhaseTaskProducerBase, F if fileType.isEmbeddableInProduct { return absolutePath } - } - catch {} + } catch {} default: return nil } @@ -554,8 +553,7 @@ package final class SourcesTaskProducer: FilesBasedBuildPhaseTaskProducerBase, F topLevelItemPath = absolutePath if shouldGenerateDSYM(settingsForRef.globalScope) { dsymPath = scope.evaluate(BuiltinMacros.DWARF_DSYM_FOLDER_PATH).join(scope.evaluate(BuiltinMacros.DWARF_DSYM_FILE_NAME)) - } - else { + } else { dsymPath = nil } } else { @@ -659,8 +657,8 @@ package final class SourcesTaskProducer: FilesBasedBuildPhaseTaskProducerBase, F let metadata: ArtifactBundleMetadata do { metadata = try context.globalProductPlan.artifactBundleMetadataCache.getOrInsert(absolutePath) { - try ArtifactBundleMetadata.parse(at: absolutePath, fileSystem: context.fs) - } + try ArtifactBundleMetadata.parse(at: absolutePath, fileSystem: context.fs) + } } catch { context.error("failed to parse artifact bundle metadata for '\(absolutePath)': \(error.localizedDescription)") return nil @@ -674,9 +672,11 @@ package final class SourcesTaskProducer: FilesBasedBuildPhaseTaskProducerBase, F var foundMatch = false let currentTripleString = scope.evaluate(BuiltinMacros.SWIFT_TARGET_TRIPLE) for variant in artifact.variants { - if variant.supportedTriples == nil || variant.supportedTriples?.contains(where: { - normalizedTriplesCompareDisregardingOSVersions($0, currentTripleString) - }) == true { + if variant.supportedTriples == nil + || variant.supportedTriples?.contains(where: { + normalizedTriplesCompareDisregardingOSVersions($0, currentTripleString) + }) == true + { foundMatch = true return LinkerSpec.LibrarySpecifier( kind: .static, @@ -717,7 +717,7 @@ package final class SourcesTaskProducer: FilesBasedBuildPhaseTaskProducerBase, F let newInputs = inputs.filter { !prepareTargetForIndexInputsObjectSet.contains(ObjectIdentifier($0)) } prepareTargetForIndexInputs.append(contentsOf: newInputs) - prepareTargetForIndexInputsObjectSet.formUnion(newInputs.map{ ObjectIdentifier($0) }) + prepareTargetForIndexInputsObjectSet.formUnion(newInputs.map { ObjectIdentifier($0) }) } package func prepare() { @@ -828,7 +828,7 @@ package final class SourcesTaskProducer: FilesBasedBuildPhaseTaskProducerBase, F let 
buildVariants = scope.evaluate(BuiltinMacros.BUILD_VARIANTS) var dsymBundle: Path! var dsymutilOutputs = [Path]() - var perVariantOutputPaths: [String:Set] = [:] + var perVariantOutputPaths: [String: Set] = [:] var allLinkedLibraries = [LinkerSpec.LibrarySpecifier]() for variant in buildVariants { // Enter the per-variant scope. @@ -849,7 +849,7 @@ package final class SourcesTaskProducer: FilesBasedBuildPhaseTaskProducerBase, F assert( (linkedBinaryPreviewDylibNode == nil && linkedBinaryPreviewBlankInjectionDylibNode == nil) - || (linkedBinaryPreviewDylibNode != nil && linkedBinaryPreviewBlankInjectionDylibNode != nil), + || (linkedBinaryPreviewDylibNode != nil && linkedBinaryPreviewBlankInjectionDylibNode != nil), "A debug dylib and blank injection dylib are either both present or absent." ) @@ -877,35 +877,43 @@ package final class SourcesTaskProducer: FilesBasedBuildPhaseTaskProducerBase, F // FIXME: We should do this in parallel. let buildFilesContext = BuildFilesProcessingContext(scope, belongsToPreferredArch: preferredArch == nil || preferredArch == arch, currentArchSpec: currentArchSpec) var perArchTasks: [any PlannedTask] = [] - await groupAndAddTasksForFiles(self, buildFilesContext, scope, filterToAPIRules: isForAPI, filterToHeaderRules: isForHeaders, &perArchTasks, extraResolvedBuildFiles: { - var result: [(Path, FileTypeSpec, Bool)] = [] - - if let generateVersionInfoFileTask { - result.append((generateVersionInfoFileTask.outputs.first!.path, context.lookupFileType(languageDialect: .c)!, /* shouldUsePrefixHeader */ false)) - } + await groupAndAddTasksForFiles( + self, + buildFilesContext, + scope, + filterToAPIRules: isForAPI, + filterToHeaderRules: isForHeaders, + &perArchTasks, + extraResolvedBuildFiles: { + var result: [(Path, FileTypeSpec, Bool)] = [] + + if let generateVersionInfoFileTask { + result.append((generateVersionInfoFileTask.outputs.first!.path, context.lookupFileType(languageDialect: .c)!, /* shouldUsePrefixHeader */ false)) + } - if let generateKernelExtensionModuleInfoFileTask { - result.append((generateKernelExtensionModuleInfoFileTask.outputs.first!.path, context.lookupFileType(languageDialect: .c)!, /* shouldUsePrefixHeader */ false)) - } + if let generateKernelExtensionModuleInfoFileTask { + result.append((generateKernelExtensionModuleInfoFileTask.outputs.first!.path, context.lookupFileType(languageDialect: .c)!, /* shouldUsePrefixHeader */ false)) + } - if let packageTargetBundleAccessorResult { - result.append((packageTargetBundleAccessorResult.fileToBuild, packageTargetBundleAccessorResult.fileToBuildFileType, /* shouldUsePrefixHeader */ false)) - } + if let packageTargetBundleAccessorResult { + result.append((packageTargetBundleAccessorResult.fileToBuild, packageTargetBundleAccessorResult.fileToBuildFileType, /* shouldUsePrefixHeader */ false)) + } - if let bundleLookupHelperResult { - result.append((bundleLookupHelperResult.fileToBuild, bundleLookupHelperResult.fileToBuildFileType, /* shouldUsePrefixHeader */ false)) - } + if let bundleLookupHelperResult { + result.append((bundleLookupHelperResult.fileToBuild, bundleLookupHelperResult.fileToBuildFileType, /* shouldUsePrefixHeader */ false)) + } - if let embedInCodeAccessorResult { - result.append((embedInCodeAccessorResult.fileToBuild, embedInCodeAccessorResult.fileToBuildFileType, /* shouldUsePrefixHeader */ false)) - } + if let embedInCodeAccessorResult { + result.append((embedInCodeAccessorResult.fileToBuild, embedInCodeAccessorResult.fileToBuildFileType, /* shouldUsePrefixHeader */ false)) + } - if 
scope.evaluate(BuiltinMacros.GENERATE_TEST_ENTRY_POINT) { - result.append((scope.evaluate(BuiltinMacros.GENERATED_TEST_ENTRY_POINT_PATH), context.lookupFileType(fileName: "sourcecode.swift")!, /* shouldUsePrefixHeader */ false)) - } + if scope.evaluate(BuiltinMacros.GENERATE_TEST_ENTRY_POINT) { + result.append((scope.evaluate(BuiltinMacros.GENERATED_TEST_ENTRY_POINT_PATH), context.lookupFileType(fileName: "sourcecode.swift")!, /* shouldUsePrefixHeader */ false)) + } - return result - }()) + return result + }() + ) // Collect the list of object files. var linkerInputNodes: [any PlannedNode] = [] @@ -948,8 +956,8 @@ package final class SourcesTaskProducer: FilesBasedBuildPhaseTaskProducerBase, F allLinkedLibraries.append(contentsOf: librariesToLink) // Insert the object files present in the framework build phase to the linker inputs. - let objectsInFrameworkPhase = librariesToLink.filter{ $0.kind == .object } - linkerInputNodes.append(contentsOf: objectsInFrameworkPhase.map{ $0.path }.map(context.createNode)) + let objectsInFrameworkPhase = librariesToLink.filter { $0.kind == .object } + linkerInputNodes.append(contentsOf: objectsInFrameworkPhase.map { $0.path }.map(context.createNode)) if !SWBFeatureFlag.enableLinkerInputsFromLibrarySpecifiers.value { // If this flag isn't enabled we still want the dylib to be a dependency for this task. @@ -1027,8 +1035,7 @@ package final class SourcesTaskProducer: FilesBasedBuildPhaseTaskProducerBase, F if let outputPreviewDylib, let outputPreviewBlankInjectionDylib { singleArchPreviewDylibBinaries.append(outputPreviewDylib) singleArchInjectionDylibBinaries.append(outputPreviewBlankInjectionDylib) - } - else { + } else { singleArchBinaries.append(output) } @@ -1146,32 +1153,35 @@ package final class SourcesTaskProducer: FilesBasedBuildPhaseTaskProducerBase, F // rdar://127248825 (Pre-link the debug dylib and emit a new empty dylib that Previews can load to get in front of dyld) libraries = [outputPreviewDylibLibrary] - ldflags = [ - // Create a __TEXT section with the relative path to the preview dylib (we don't want to statically link it; instead the entry point provided by libPreviewsJITStubExecutor.a will load it on demand) - "-Xlinker", "-sectcreate", - "-Xlinker", "__TEXT", - "-Xlinker", "__debug_dylib", - "-Xlinker", previewsDylibRelativePathFile.str, - ] + (entryPointFile.map { + ldflags = [ - // Create a __TEXT section with the name of the original entry point + // Create a __TEXT section with the relative path to the preview dylib (we don't want to statically link it; instead the entry point provided by libPreviewsJITStubExecutor.a will load it on demand) "-Xlinker", "-sectcreate", "-Xlinker", "__TEXT", - "-Xlinker", "__debug_entry", - "-Xlinker", $0.str, + "-Xlinker", "__debug_dylib", + "-Xlinker", previewsDylibRelativePathFile.str, ] - } ?? []) + (installNameFile.map { - [ - // Create a __TEXT section with the client name of the original binary, which - // becomes the install name of the debug/blank dylib - "-Xlinker", "-sectcreate", - "-Xlinker", "__TEXT", - "-Xlinker", "__debug_instlnm", - "-Xlinker", $0.str, + + (entryPointFile.map { + [ + // Create a __TEXT section with the name of the original entry point + "-Xlinker", "-sectcreate", + "-Xlinker", "__TEXT", + "-Xlinker", "__debug_entry", + "-Xlinker", $0.str, + ] + } ?? 
[]) + + (installNameFile.map { + [ + // Create a __TEXT section with the client name of the original binary, which + // becomes the install name of the debug/blank dylib + "-Xlinker", "-sectcreate", + "-Xlinker", "__TEXT", + "-Xlinker", "__debug_instlnm", + "-Xlinker", $0.str, + ] + } ?? []) + [ + "-Xlinker", "-filelist", "-Xlinker", executorLinkFileListPath.str, ] - } ?? []) + [ - "-Xlinker", "-filelist", "-Xlinker", executorLinkFileListPath.str, - ] } else { libraries = [outputPreviewDylibLibrary] ldflags = [] @@ -1221,19 +1231,27 @@ package final class SourcesTaskProducer: FilesBasedBuildPhaseTaskProducerBase, F // FIXME: We should do this in parallel. let buildFilesContext = BuildFilesProcessingContext(scope, belongsToPreferredArch: preferredArch == nil || preferredArch == arch, currentArchSpec: currentArchSpec) var perArchTasks: [any PlannedTask] = [] - await groupAndAddTasksForFiles(self, buildFilesContext, scope, filterToAPIRules: isForAPI, filterToHeaderRules: isForHeaders, &perArchTasks, extraResolvedBuildFiles: { - var result: [(Path, FileTypeSpec, Bool)] = [] - - if let packageTargetBundleAccessorResult { - result.append((packageTargetBundleAccessorResult.fileToBuild, packageTargetBundleAccessorResult.fileToBuildFileType, /* shouldUsePrefixHeader */ false)) - } + await groupAndAddTasksForFiles( + self, + buildFilesContext, + scope, + filterToAPIRules: isForAPI, + filterToHeaderRules: isForHeaders, + &perArchTasks, + extraResolvedBuildFiles: { + var result: [(Path, FileTypeSpec, Bool)] = [] + + if let packageTargetBundleAccessorResult { + result.append((packageTargetBundleAccessorResult.fileToBuild, packageTargetBundleAccessorResult.fileToBuildFileType, /* shouldUsePrefixHeader */ false)) + } - if let bundleLookupHelperResult { - result.append((bundleLookupHelperResult.fileToBuild, bundleLookupHelperResult.fileToBuildFileType, /* shouldUsePrefixHeader */ false)) - } + if let bundleLookupHelperResult { + result.append((bundleLookupHelperResult.fileToBuild, bundleLookupHelperResult.fileToBuildFileType, /* shouldUsePrefixHeader */ false)) + } - return result - }()) + return result + }() + ) // Add all the collected per-arch tasks. tasks.append(contentsOf: perArchTasks) @@ -1253,8 +1271,7 @@ package final class SourcesTaskProducer: FilesBasedBuildPhaseTaskProducerBase, F await appendGeneratedTasks(&tasks, options: [.linking, .linkingRequirement, .unsignedProductRequirement]) { delegate in await context.lipoSpec.constructTasks(CommandBuildContext(producer: context, scope: scope, inputs: singleArchBinaries.map { FileToBuild(context: context, absolutePath: $0) }, output: binaryOutput, commandOrderingOutputs: [linkedBinaryNode]), delegate) } - } - else if singleArchBinaries.count == 1, archs.count > 1, let singleArchBinaryPath = singleArchBinaries.first { + } else if singleArchBinaries.count == 1, archs.count > 1, let singleArchBinaryPath = singleArchBinaries.first { // If there's only one binary but multiple architectures defined for the target, then for some reason we didn't produce a binary for any of the others - probably due to a strange target configuration. If so, then we should create a copy task to copy the single-arch binary to the final location. 
let productBinaryPath = scope.evaluate(BuiltinMacros.TARGET_BUILD_DIR).join(scope.evaluate(BuiltinMacros.EXECUTABLE_PATH)) if singleArchBinaryPath != productBinaryPath { @@ -1266,7 +1283,8 @@ package final class SourcesTaskProducer: FilesBasedBuildPhaseTaskProducerBase, F if singleArchPreviewDylibBinaries.count > 1, let binaryPreviewDylibOutput, - let linkedBinaryPreviewDylibNode { + let linkedBinaryPreviewDylibNode + { await appendGeneratedTasks(&tasks, options: [.linking, .linkingRequirement, .unsignedProductRequirement]) { delegate in await context.lipoSpec.constructTasks(CommandBuildContext(producer: context, scope: scope, inputs: singleArchPreviewDylibBinaries.map { FileToBuild(context: context, absolutePath: $0) }, output: binaryPreviewDylibOutput, commandOrderingOutputs: [linkedBinaryPreviewDylibNode]), delegate) } @@ -1274,7 +1292,8 @@ package final class SourcesTaskProducer: FilesBasedBuildPhaseTaskProducerBase, F if singleArchInjectionDylibBinaries.count > 1, binaryPreviewDylibOutput != nil, - let linkedBinaryPreviewBlankInjectionDylibNode { + let linkedBinaryPreviewBlankInjectionDylibNode + { await appendGeneratedTasks(&tasks, options: [.linking, .unsignedProductRequirement]) { delegate in await context.lipoSpec.constructTasks(CommandBuildContext(producer: context, scope: scope, inputs: singleArchInjectionDylibBinaries.map { FileToBuild(context: context, absolutePath: $0) }, output: binaryPreviewBlankInjectionDylibOutput, commandOrderingOutputs: [linkedBinaryPreviewBlankInjectionDylibNode]), delegate) } @@ -1296,7 +1315,8 @@ package final class SourcesTaskProducer: FilesBasedBuildPhaseTaskProducerBase, F let binary = binaryPreviewDylibOutput ?? binaryOutput let binaryOrderingInput = linkedBinaryPreviewDylibNode ?? linkedBinaryNode - let output = dsymBundle + let output = + dsymBundle .join("Contents").join("Resources").join("DWARF") .join(binary.basename) @@ -1326,8 +1346,7 @@ package final class SourcesTaskProducer: FilesBasedBuildPhaseTaskProducerBase, F // If the library has a known dSYM file, then pass the path to the directory of that dSYM. If there isn'r one, then pass the path to the directory containing the library, since our best guess is that the dSYM will be alongside the library. if let dsymPath = library.dsymPath { dsymSearchPaths.append(dsymPath.dirname.str) - } - else { + } else { dsymSearchPaths.append(libraryPath.dirname.str) } } @@ -1341,7 +1360,8 @@ package final class SourcesTaskProducer: FilesBasedBuildPhaseTaskProducerBase, F if binaryPreviewDylibOutput != nil { let inputs = [FileToBuild(context: context, absolutePath: binaryOutput)] - let output = dsymBundle + let output = + dsymBundle .join("Contents").join("Resources").join("DWARF") .join(binaryOutput.basename) await appendGeneratedTasks(&tasks, usePhasedOrdering: false) { delegate in @@ -1470,8 +1490,7 @@ package final class SourcesTaskProducer: FilesBasedBuildPhaseTaskProducerBase, F if strParts.count > 1, firstPart == firstPartToRemove { let subpath = strParts[1...].joined(separator: Path.pathSeparatorString) subpathsToInclude.append(subpath) - } - else if firstPart != firstPartToRemove { + } else if firstPart != firstPartToRemove { // If string is *only* firstPartToRemove then we don't add it. 
subpathsToInclude.append(string) } @@ -1506,13 +1525,11 @@ package final class SourcesTaskProducer: FilesBasedBuildPhaseTaskProducerBase, F } additionalPresumedOutputs.append(pathToSign.join(copiedFileSettings.globalScope.evaluate(BuiltinMacros.EXECUTABLE_NAME))) } - } - else { + } else { // Don't add any paths - this will result in us just copying the unwrapped product. } } - } - else if let xcframeworkSourcePath = library.xcframeworkSourcePath { + } else if let xcframeworkSourcePath = library.xcframeworkSourcePath { // Copying an XCFramework component which is marked as mergeable. var xcFramework: XCFramework? = nil do { @@ -1559,7 +1576,7 @@ package final class SourcesTaskProducer: FilesBasedBuildPhaseTaskProducerBase, F addSubpath(contentsPath.join("_CodeSignature").str, removingFirstPartIfEqualTo: library.libraryPath.str) addSubpath(contentsPath.join("Info.plist").str, removingFirstPartIfEqualTo: library.libraryPath.str) if library.supportedPlatform == "macos" { - // For deep frameworks (macOS), we also need to copy the symlinks for the binary and the Versions/Current directory, since those are how dyld accesses the binary. + // For deep frameworks (macOS), we also need to copy the symlinks for the binary and the Versions/Current directory, since those are how dyld accesses the binary. addSubpath(binaryPath.basename, removingFirstPartIfEqualTo: library.libraryPath.str) addSubpath(contentsPath.dirname.join("Current").str, removingFirstPartIfEqualTo: library.libraryPath.str) } @@ -1866,51 +1883,51 @@ package final class SourcesTaskProducer: FilesBasedBuildPhaseTaskProducerBase, F let escapedBundleName = bundleName.asLegalCIdentifier let headerFileContents = """ - #import + #import - __BEGIN_DECLS + __BEGIN_DECLS - NSBundle* \(escapedBundleName)_SWIFTPM_MODULE_BUNDLE(void); + NSBundle* \(escapedBundleName)_SWIFTPM_MODULE_BUNDLE(void); - #define SWIFTPM_MODULE_BUNDLE \(escapedBundleName)_SWIFTPM_MODULE_BUNDLE() + #define SWIFTPM_MODULE_BUNDLE \(escapedBundleName)_SWIFTPM_MODULE_BUNDLE() - __END_DECLS - """ + __END_DECLS + """ let implFileContents = """ - #import + #import - NS_ASSUME_NONNULL_BEGIN + NS_ASSUME_NONNULL_BEGIN - @interface \(escapedBundleName)_SWIFTPM_MODULE_BUNDLER_FINDER : NSObject - @end + @interface \(escapedBundleName)_SWIFTPM_MODULE_BUNDLER_FINDER : NSObject + @end - @implementation \(escapedBundleName)_SWIFTPM_MODULE_BUNDLER_FINDER - @end + @implementation \(escapedBundleName)_SWIFTPM_MODULE_BUNDLER_FINDER + @end - NSBundle* \(escapedBundleName)_SWIFTPM_MODULE_BUNDLE() { - NSString *bundleName = @"\(escapedBundleName)"; + NSBundle* \(escapedBundleName)_SWIFTPM_MODULE_BUNDLE() { + NSString *bundleName = @"\(escapedBundleName)"; - NSArray *candidates = @[ - NSBundle.mainBundle.resourceURL, - [NSBundle bundleForClass:[\(escapedBundleName)_SWIFTPM_MODULE_BUNDLER_FINDER class]].resourceURL, - NSBundle.mainBundle.bundleURL - ]; + NSArray *candidates = @[ + NSBundle.mainBundle.resourceURL, + [NSBundle bundleForClass:[\(escapedBundleName)_SWIFTPM_MODULE_BUNDLER_FINDER class]].resourceURL, + NSBundle.mainBundle.bundleURL + ]; - for (NSURL* candidate in candidates) { - NSURL *bundlePath = [candidate URLByAppendingPathComponent:[NSString stringWithFormat:@"%@.bundle", bundleName]]; + for (NSURL* candidate in candidates) { + NSURL *bundlePath = [candidate URLByAppendingPathComponent:[NSString stringWithFormat:@"%@.bundle", bundleName]]; - NSBundle *bundle = [NSBundle bundleWithURL:bundlePath]; - if (bundle != nil) { - return bundle; + NSBundle *bundle = [NSBundle 
bundleWithURL:bundlePath]; + if (bundle != nil) { + return bundle; + } } - } - @throw [[NSException alloc] initWithName:@"SwiftPMResourcesAccessor" reason:[NSString stringWithFormat:@"unable to find bundle named %@", bundleName] userInfo:nil]; - } + @throw [[NSException alloc] initWithName:@"SwiftPMResourcesAccessor" reason:[NSString stringWithFormat:@"unable to find bundle named %@", bundleName] userInfo:nil]; + } - NS_ASSUME_NONNULL_END - """ + NS_ASSUME_NONNULL_END + """ var tasks = [any PlannedTask]() await appendGeneratedTasks(&tasks) { delegate in @@ -1925,7 +1942,9 @@ package final class SourcesTaskProducer: FilesBasedBuildPhaseTaskProducerBase, F private func generatePackageTargetBundleAccessorForSwift(_ scope: MacroEvaluationScope, bundleName: String) async -> GeneratedResourceAccessorResult { let filePath = scope.evaluate(BuiltinMacros.DERIVED_SOURCES_DIR).join("resource_bundle_accessor.swift") - let contents = bundleName.isEmpty ? """ + let contents = + bundleName.isEmpty + ? """ import class Foundation.Bundle extension Foundation.Bundle { @@ -1934,7 +1953,8 @@ package final class SourcesTaskProducer: FilesBasedBuildPhaseTaskProducerBase, F return Foundation.Bundle(for: BundleFinder.self) }() } - """ : """ + """ + : """ import class Foundation.Bundle import class Foundation.ProcessInfo import struct Foundation.URL @@ -2065,10 +2085,12 @@ package final class SourcesTaskProducer: FilesBasedBuildPhaseTaskProducerBase, F } private func generateKernelExtensionModuleInfoFile(_ scope: MacroEvaluationScope) -> (Path, ByteString) { - let module = (name: scope.evaluate(BuiltinMacros.MODULE_NAME), - version: scope.evaluate(BuiltinMacros.MODULE_VERSION), - start: scope.evaluate(BuiltinMacros.MODULE_START), - stop: scope.evaluate(BuiltinMacros.MODULE_STOP)) + let module = ( + name: scope.evaluate(BuiltinMacros.MODULE_NAME), + version: scope.evaluate(BuiltinMacros.MODULE_VERSION), + start: scope.evaluate(BuiltinMacros.MODULE_START), + stop: scope.evaluate(BuiltinMacros.MODULE_STOP) + ) let path = scope.evaluate(BuiltinMacros.DERIVED_FILE_DIR).join(scope.evaluate(BuiltinMacros.PRODUCT_NAME) + "_info.c") @@ -2108,16 +2130,16 @@ package final class SourcesTaskProducer: FilesBasedBuildPhaseTaskProducerBase, F /// enabled and we should return that as a library to link! private func previewsDylibForTestHost() -> [LinkerSpec.LibrarySpecifier] { guard let target = self.context.configuredTarget, - let testHost = self.context.globalProductPlan.hostTargetForTargets[target] else - { + let testHost = self.context.globalProductPlan.hostTargetForTargets[target] + else { return [] } // Only consider linking the debug dylib if it is present in the test host. 
let hostSettings = self.context.globalProductPlan.getTargetSettings(testHost) guard let targetBuildDir = hostSettings.globalScope.evaluate(BuiltinMacros.TARGET_BUILD_DIR).nilIfEmpty, - let previewsDylibPath = hostSettings.globalScope.evaluate(BuiltinMacros.EXECUTABLE_DEBUG_DYLIB_PATH).nilIfEmpty else - { + let previewsDylibPath = hostSettings.globalScope.evaluate(BuiltinMacros.EXECUTABLE_DEBUG_DYLIB_PATH).nilIfEmpty + else { return [] } @@ -2129,14 +2151,16 @@ package final class SourcesTaskProducer: FilesBasedBuildPhaseTaskProducerBase, F let fullPath = targetBuildDir.join(Path(previewsDylibPath)) - return [.init( - kind: .dynamic, - path: fullPath, - mode: .normal, - useSearchPaths: false, - swiftModulePaths: [:], - swiftModuleAdditionalLinkerArgResponseFilePaths: [:] - )] + return [ + .init( + kind: .dynamic, + path: fullPath, + mode: .normal, + useSearchPaths: false, + swiftModulePaths: [:], + swiftModuleAdditionalLinkerArgResponseFilePaths: [:] + ) + ] } } diff --git a/Sources/SWBTaskConstruction/TaskProducers/BuildPhaseTaskProducers/SwiftPackageCopyFilesTaskProducer.swift b/Sources/SWBTaskConstruction/TaskProducers/BuildPhaseTaskProducers/SwiftPackageCopyFilesTaskProducer.swift index 814cb269..e8d4665e 100644 --- a/Sources/SWBTaskConstruction/TaskProducers/BuildPhaseTaskProducers/SwiftPackageCopyFilesTaskProducer.swift +++ b/Sources/SWBTaskConstruction/TaskProducers/BuildPhaseTaskProducers/SwiftPackageCopyFilesTaskProducer.swift @@ -115,7 +115,7 @@ final class SwiftPackageCopyFilesTaskProducer: CopyFilesTaskProducer { } case .unknown(_): - return false // Embedding unknown files will likely only end up tripping up validation, so we should not do that. + return false // Embedding unknown files will likely only end up tripping up validation, so we should not do that. } } @@ -161,13 +161,13 @@ final class SwiftPackageCopyFilesTaskProducer: CopyFilesTaskProducer { let platformFilters = buildFiles.map { $0.platformFilters } let aggregatedPlatformFilters: Set if platformFilters.contains(where: { $0.isEmpty }) { - aggregatedPlatformFilters = [] // If any build file supports all platforms, we embed for all platforms. + aggregatedPlatformFilters = [] // If any build file supports all platforms, we embed for all platforms. } else { aggregatedPlatformFilters = platformFilters.reduce([]) { $0.union($1) } } let target: Target - if case .targetProduct(let guid) = firstBuildFile.buildableItem, let _target = context.workspaceContext.workspace.target(for: guid) { + if case .targetProduct(let guid) = firstBuildFile.buildableItem, let _target = context.workspaceContext.workspace.target(for: guid) { target = _target } else { // If this isn't a target product reference, it has to be a `binaryTarget` which does not support platform filters by definition, so we can return the first build file instead. @@ -187,7 +187,7 @@ final class SwiftPackageCopyFilesTaskProducer: CopyFilesTaskProducer { private let generatedBuildPhase: CopyFilesBuildPhase init(_ context: TargetTaskProducerContext, phaseStartNodes: [any PlannedNode], phaseEndNode: any PlannedNode, phaseEndTask: any PlannedTask, frameworksBuildPhase: FrameworksBuildPhase?) { - let configuredTarget = context.configuredTarget! // We assume a `TargetTaskProducerContext` always has an associated configured target. + let configuredTarget = context.configuredTarget! // We assume a `TargetTaskProducerContext` always has an associated configured target. 
let guid = "\(configuredTarget.target.guid)-package-copy-files-phase" let buildFiles = Self.buildFilesForPackages(context: context, frameworksBuildPhase: frameworksBuildPhase) diff --git a/Sources/SWBTaskConstruction/TaskProducers/OtherTaskProducers/CopySwiftPackageResourcesTaskProducer.swift b/Sources/SWBTaskConstruction/TaskProducers/OtherTaskProducers/CopySwiftPackageResourcesTaskProducer.swift index 1a7938e7..886d8b3e 100644 --- a/Sources/SWBTaskConstruction/TaskProducers/OtherTaskProducers/CopySwiftPackageResourcesTaskProducer.swift +++ b/Sources/SWBTaskConstruction/TaskProducers/OtherTaskProducers/CopySwiftPackageResourcesTaskProducer.swift @@ -29,10 +29,12 @@ final class CopySwiftPackageResourcesTaskProducer: PhasedTaskProducer, TaskProdu // // FIXME: The latter feature here is rarely used, and not very flexible as any target which needs other copy phases won't be able to disable them selectively. let buildComponents = scope.evaluate(BuiltinMacros.BUILD_COMPONENTS) - guard buildComponents.contains("build") + guard + buildComponents.contains("build") || buildComponents.contains("installLoc") || (buildComponents.contains("api") && scope.evaluate(BuiltinMacros.INSTALLAPI_COPY_PHASE)) - || (buildComponents.contains("headers") && scope.evaluate(BuiltinMacros.INSTALLHDRS_COPY_PHASE)) else { + || (buildComponents.contains("headers") && scope.evaluate(BuiltinMacros.INSTALLHDRS_COPY_PHASE)) + else { return [] } diff --git a/Sources/SWBTaskConstruction/TaskProducers/OtherTaskProducers/CustomTaskProducer.swift b/Sources/SWBTaskConstruction/TaskProducers/OtherTaskProducers/CustomTaskProducer.swift index 823ca369..42cc1949 100644 --- a/Sources/SWBTaskConstruction/TaskProducers/OtherTaskProducers/CustomTaskProducer.swift +++ b/Sources/SWBTaskConstruction/TaskProducers/OtherTaskProducers/CustomTaskProducer.swift @@ -79,7 +79,8 @@ final class CustomTaskProducer: PhasedTaskProducer, TaskProducer { execDescription: context.settings.globalScope.evaluate(customTask.executionDescription), preparesForIndexing: customTask.preparesForIndexing, enableSandboxing: customTask.enableSandboxing, - showEnvironment: true) + showEnvironment: true + ) } } diff --git a/Sources/SWBTaskConstruction/TaskProducers/OtherTaskProducers/GenerateAppPlaygroundAssetCatalogTaskProducer.swift b/Sources/SWBTaskConstruction/TaskProducers/OtherTaskProducers/GenerateAppPlaygroundAssetCatalogTaskProducer.swift index 3d6ec042..be3617cd 100644 --- a/Sources/SWBTaskConstruction/TaskProducers/OtherTaskProducers/GenerateAppPlaygroundAssetCatalogTaskProducer.swift +++ b/Sources/SWBTaskConstruction/TaskProducers/OtherTaskProducers/GenerateAppPlaygroundAssetCatalogTaskProducer.swift @@ -19,8 +19,9 @@ extension BuildPhaseTarget { let buildFilesProcessingContext = BuildFilesProcessingContext(scope) return buildFiles.compactMap { buildFile in guard let resolvedBuildFileInfo = try? context.resolveBuildFileReference(buildFile), - !buildFilesProcessingContext.isExcluded(resolvedBuildFileInfo.absolutePath, filters: buildFile.platformFilters), - resolvedBuildFileInfo.fileType.conformsTo(fileType) else { + !buildFilesProcessingContext.isExcluded(resolvedBuildFileInfo.absolutePath, filters: buildFile.platformFilters), + resolvedBuildFileInfo.fileType.conformsTo(fileType) + else { return nil } @@ -42,18 +43,18 @@ final class GenerateAppPlaygroundAssetCatalogTaskProducer: PhasedTaskProducer, T if !scope.evaluate(BuiltinMacros.APP_PLAYGROUND_GENERATE_ASSET_CATALOG) { return [] } - let assetCatalogsBeingBuilt = (context.configuredTarget?.target as? 
BuildPhaseTarget)?.assetCatalogsToBuild( - context: context, - scope: scope - ) ?? [] + let assetCatalogsBeingBuilt = + (context.configuredTarget?.target as? BuildPhaseTarget)?.assetCatalogsToBuild( + context: context, + scope: scope + ) ?? [] let assetCatalogToBeGenerated = scope.evaluate(BuiltinMacros.APP_PLAYGROUND_GENERATED_ASSET_CATALOG_FILE) let specialArgs: [String] if !assetCatalogsBeingBuilt.isEmpty { specialArgs = ["-assetCatalogResourcePaths"] + assetCatalogsBeingBuilt.map { $0.absolutePath.str } - } - else { + } else { specialArgs = [] } diff --git a/Sources/SWBTaskConstruction/TaskProducers/OtherTaskProducers/HeadermapTaskProducer.swift b/Sources/SWBTaskConstruction/TaskProducers/OtherTaskProducers/HeadermapTaskProducer.swift index 06e81ce1..1f6ce34a 100644 --- a/Sources/SWBTaskConstruction/TaskProducers/OtherTaskProducers/HeadermapTaskProducer.swift +++ b/Sources/SWBTaskConstruction/TaskProducers/OtherTaskProducers/HeadermapTaskProducer.swift @@ -19,7 +19,7 @@ final class HeadermapTaskProducer: PhasedTaskProducer, TaskProducer { return .immediate } - func generateTasks() async -> [any PlannedTask] { + func generateTasks() async -> [any PlannedTask] { let scope = context.settings.globalScope // Headermaps are generated only when the "build" or when "api" component is present. @@ -165,7 +165,6 @@ final class HeadermapTaskProducer: PhasedTaskProducer, TaskProducer { return (hmap, diagnostics) } - /// Construct the headermap for the "own target headers" separate headermap. /// /// This headermap is used to ensure that a targets own headers are always found first, and that they can be found via any angle bracket style ('' or ''). It is also used to make available the target's "project headers" via bracket style includes. The *intent* (judgement aside) was that this allowed projects to change headers from internal (Swift sense) to public without needing to update source code. @@ -373,7 +372,6 @@ final class HeadermapTaskProducer: PhasedTaskProducer, TaskProducer { } - /// Performance testing entry point to headermap construction. 
package func perfTestHeadermapProducer(planRequest: BuildPlanRequest, delegate: any TaskPlanningDelegate) async -> [String: [any PlannedTask]] { let targetTaskInfo = TargetGateNodes(startNode: MakePlannedPathNode(Path("a")), endNode: MakePlannedPathNode(Path("b")), unsignedProductReadyNode: MakePlannedPathNode(Path("c")), willSignNode: MakePlannedPathNode(Path("d"))) @@ -382,14 +380,14 @@ package func perfTestHeadermapProducer(planRequest: BuildPlanRequest, delegate: for configuredTarget in planRequest.buildGraph.allTargets { let context = TargetTaskProducerContext(configuredTarget: configuredTarget, workspaceContext: planRequest.workspaceContext, targetTaskInfo: targetTaskInfo, globalProductPlan: globalProductPlan, delegate: delegate) let headermapProducer = HeadermapTaskProducer(context, phaseStartNodes: [context.createVirtualNode("headermap-start")], phaseEndNode: context.createVirtualNode("headermap-end")) - let tasks = await headermapProducer.generateTasks().map { $0 } + context.takeDeferredProducers().flatMap{ await $0() } + let tasks = await headermapProducer.generateTasks().map { $0 } + context.takeDeferredProducers().flatMap { await $0() } result[configuredTarget.target.name] = tasks } return result } extension Sequence { - fileprivate func flatMap(_ transform: (Self.Element) async throws -> SegmentOfResult) async rethrows -> [SegmentOfResult.Element] where SegmentOfResult : Sequence { + fileprivate func flatMap(_ transform: (Self.Element) async throws -> SegmentOfResult) async rethrows -> [SegmentOfResult.Element] where SegmentOfResult: Sequence { var result: [SegmentOfResult.Element] = [] for element in self { try await result.append(contentsOf: transform(element)) diff --git a/Sources/SWBTaskConstruction/TaskProducers/OtherTaskProducers/InfoPlistTaskProducer.swift b/Sources/SWBTaskConstruction/TaskProducers/OtherTaskProducers/InfoPlistTaskProducer.swift index 4e2c8a49..b659e93b 100644 --- a/Sources/SWBTaskConstruction/TaskProducers/OtherTaskProducers/InfoPlistTaskProducer.swift +++ b/Sources/SWBTaskConstruction/TaskProducers/OtherTaskProducers/InfoPlistTaskProducer.swift @@ -34,15 +34,11 @@ final class InfoPlistTaskProducer: PhasedTaskProducer, TaskProducer { } } - // MARK: Product Type Extensions - -private extension ProductTypeSpec -{ +private extension ProductTypeSpec { /// Add the Info.plist tasks required by the product. - func addInfoPlistTasks(_ producer: StandardTaskProducer, _ scope: MacroEvaluationScope, _ tasks: inout [any PlannedTask]) async - { + func addInfoPlistTasks(_ producer: StandardTaskProducer, _ scope: MacroEvaluationScope, _ tasks: inout [any PlannedTask]) async { // FIXME: We cannot yet use inheritance based mechanisms to implement this. switch self { @@ -106,10 +102,8 @@ private extension ProductTypeSpec } } -private extension BundleProductTypeSpec -{ - func archForUIRequiredDeviceCapabilities(_ scope: MacroEvaluationScope) -> String? - { +private extension BundleProductTypeSpec { + func archForUIRequiredDeviceCapabilities(_ scope: MacroEvaluationScope) -> String? { // BUILD: Set appropriate build/Info.plist flags for apps building arm64 only // Xcode does not include UIRequiredDeviceCapabilities key in an extension of tvOS app @@ -136,7 +130,7 @@ private extension BundleProductTypeSpec case "appletvos": let validIdentifiers = Set([ "com.apple.product-type.application", - "com.apple.product-type.tv-app-extension" + "com.apple.product-type.tv-app-extension", ]) return validIdentifiers.contains(identifier) ? 
requiredArch : nil @@ -145,8 +139,7 @@ private extension BundleProductTypeSpec } } - func addBundleInfoPlistTasks(_ producer: StandardTaskProducer, _ scope: MacroEvaluationScope, _ tasks: inout [any PlannedTask]) async - { + func addBundleInfoPlistTasks(_ producer: StandardTaskProducer, _ scope: MacroEvaluationScope, _ tasks: inout [any PlannedTask]) async { let context = producer.context let buildComponents = scope.evaluate(BuiltinMacros.BUILD_COMPONENTS) @@ -173,7 +166,7 @@ private extension BundleProductTypeSpec let targetBuildDirPkginfoPath: Path? if scope.evaluate(BuiltinMacros.GENERATE_PKGINFO_FILE) && !pkginfoPath.isEmpty && !scope.evaluate(BuiltinMacros.BUILD_COMPONENTS).contains("installLoc") { targetBuildDirPkginfoPath = targetBuildDir.join(pkginfoPath) - } else { + } else { targetBuildDirPkginfoPath = nil } @@ -226,42 +219,34 @@ private extension BundleProductTypeSpec } } - -private extension ToolProductTypeSpec -{ - func addToolInfoPlistTasks(_ producer: StandardTaskProducer, _ scope: MacroEvaluationScope, _ tasks: inout [any PlannedTask]) async - { +private extension ToolProductTypeSpec { + func addToolInfoPlistTasks(_ producer: StandardTaskProducer, _ scope: MacroEvaluationScope, _ tasks: inout [any PlannedTask]) async { let context = producer.context // Only add Info.plist tasks when building. guard scope.evaluate(BuiltinMacros.BUILD_COMPONENTS).contains("build") else { return } // Check if we are creating an Info.plist section for a tool. - if scope.evaluate(BuiltinMacros.CREATE_INFOPLIST_SECTION_IN_BINARY) - { + if scope.evaluate(BuiltinMacros.CREATE_INFOPLIST_SECTION_IN_BINARY) { // Process the Info.plist file, if used. let infoplistFile = scope.effectiveInputInfoPlistPath() let infoplistPath = scope.evaluate(BuiltinMacros.INFOPLIST_PATH) - if !infoplistFile.isEmpty - { + if !infoplistFile.isEmpty { let rawPlistPath = context.makeAbsolute(infoplistFile) // Create the "empty.plist" file, if needed. await addCreateEmptyInfoPlistTaskIfNeeded(producer, scope, &tasks) // Create the processed output. This is done per-variant-per-arch since there may be content in the source which varies based on those factors. And each such slice of the final tool ends up with a separate final Info.plist embedded in it. - for variant in scope.evaluate(BuiltinMacros.BUILD_VARIANTS) - { + for variant in scope.evaluate(BuiltinMacros.BUILD_VARIANTS) { let scope = scope.subscope(binding: BuiltinMacros.variantCondition, to: variant) - for arch in scope.evaluate(BuiltinMacros.ARCHS) - { + for arch in scope.evaluate(BuiltinMacros.ARCHS) { let scope = scope.subscopeBindingArchAndTriple(arch: arch) // Preprocess the file, if requested. let preprocessedPlistPath = await addInfoPlistPreprocessTaskIfNeeded(rawPlistPath, basename: infoplistPath.basename, producer, scope, &tasks) ?? 
rawPlistPath - await producer.appendGeneratedTasks(&tasks) - { delegate in + await producer.appendGeneratedTasks(&tasks) { delegate in await context.infoPlistSpec.constructInfoPlistTasks(CommandBuildContext(producer: context, scope: scope, inputs: [FileToBuild(context: context, absolutePath: preprocessedPlistPath)], output: scope.evaluate(BuiltinMacros.PROCESSED_INFOPLIST_PATH)), delegate) } } diff --git a/Sources/SWBTaskConstruction/TaskProducers/OtherTaskProducers/ModuleMapTaskProducer.swift b/Sources/SWBTaskConstruction/TaskProducers/OtherTaskProducers/ModuleMapTaskProducer.swift index 26124e5c..ff9b2cc8 100644 --- a/Sources/SWBTaskConstruction/TaskProducers/OtherTaskProducers/ModuleMapTaskProducer.swift +++ b/Sources/SWBTaskConstruction/TaskProducers/OtherTaskProducers/ModuleMapTaskProducer.swift @@ -162,7 +162,7 @@ final class ModuleMapTaskProducer: PhasedTaskProducer, TaskProducer { } @discardableResult - override func appendGeneratedTasks( _ tasks: inout [any PlannedTask], options: TaskOrderingOptions? = nil, body: (any TaskGenerationDelegate) async -> Void) async -> (tasks: [any PlannedTask], outputs: [FileToBuild]) { + override func appendGeneratedTasks(_ tasks: inout [any PlannedTask], options: TaskOrderingOptions? = nil, body: (any TaskGenerationDelegate) async -> Void) async -> (tasks: [any PlannedTask], outputs: [FileToBuild]) { return await super.appendGeneratedTasks(&tasks, options: options) { delegate in // await body(ModuleMapPhaseBasedTaskGenerationDelegate(delegate: delegate, copyHeadersCompletionTasks: copyHeadersCompletionTasks)) @@ -202,7 +202,7 @@ final class ModuleMapTaskProducer: PhasedTaskProducer, TaskProducer { let moduleMapExtensionPath = moduleMapTmpPath.appendingFileNameSuffix("-swiftextension") let moduleMapExtensionFile = FileToBuild(context: context, absolutePath: context.makeAbsolute(moduleMapExtensionPath)) await appendGeneratedTasks(&tasks) { delegate in - context.writeFileSpec.constructFileTasks(CommandBuildContext(producer: context, scope: scope, inputs: [], output: moduleMapExtensionPath), delegate, contents: moduleMapExtension, permissions: nil, preparesForIndexing: true, additionalTaskOrderingOptions: [.immediate, .ignorePhaseOrdering]) + context.writeFileSpec.constructFileTasks(CommandBuildContext(producer: context, scope: scope, inputs: [], output: moduleMapExtensionPath), delegate, contents: moduleMapExtension, permissions: nil, preparesForIndexing: true, additionalTaskOrderingOptions: [.immediate, .ignorePhaseOrdering]) if unifdef { let originalModuleMapPath = moduleMapTmpPath.appendingFileNameSuffix("-original") await context.unifdefSpec.constructTasks(CommandBuildContext(producer: context, scope: scope, inputs: [moduleMapSourceFile], output: originalModuleMapPath), delegate) @@ -392,7 +392,7 @@ final class ModuleMapTaskProducer: PhasedTaskProducer, TaskProducer { let moduleName = scope.evaluate(BuiltinMacros.PRODUCT_MODULE_NAME) let interfaceHeaderName = scope.evaluate(BuiltinMacros.SWIFT_OBJC_INTERFACE_HEADER_NAME) - assert(!interfaceHeaderName.isEmpty) // implied by exportsSwiftObjCAPI + assert(!interfaceHeaderName.isEmpty) // implied by exportsSwiftObjCAPI // Swift only module map contents is a top level framework module. 
Swift contents // for a mixed module map is a submodule of the top level framework module (whose @@ -418,7 +418,7 @@ final class ModuleMapTaskProducer: PhasedTaskProducer, TaskProducer { let moduleName = scope.evaluate(BuiltinMacros.PRODUCT_MODULE_NAME) let interfaceHeaderName = scope.evaluate(BuiltinMacros.SWIFT_OBJC_INTERFACE_HEADER_NAME) - assert(!interfaceHeaderName.isEmpty) // implied by exportsSwiftObjCAPI + assert(!interfaceHeaderName.isEmpty) // implied by exportsSwiftObjCAPI outputStream <<< "\n" outputStream <<< "module \(try moduleName.asModuleIdentifierString()).__Swift {\n" diff --git a/Sources/SWBTaskConstruction/TaskProducers/OtherTaskProducers/ModuleVerifierTaskProducer.swift b/Sources/SWBTaskConstruction/TaskProducers/OtherTaskProducers/ModuleVerifierTaskProducer.swift index 82628e96..ce612f1e 100644 --- a/Sources/SWBTaskConstruction/TaskProducers/OtherTaskProducers/ModuleVerifierTaskProducer.swift +++ b/Sources/SWBTaskConstruction/TaskProducers/OtherTaskProducers/ModuleVerifierTaskProducer.swift @@ -129,8 +129,9 @@ final class ModuleVerifierTaskProducer: PhasedTaskProducer, TaskProducer { var fallbackToExternal = false var deferredTasks: [any PlannedTask] = [] await self.appendGeneratedTasks(&deferredTasks, usePhasedOrdering: false, options: .compilation) { delegate in - let targetDiagnostics = ModuleVerifierTargetSet.verifyTargets(targets: targets, targetVariants: targetVariants) - + ModuleVerifierTargetSet.verifyLanguages(languages: languages, standards: languageStandards) + let targetDiagnostics = + ModuleVerifierTargetSet.verifyTargets(targets: targets, targetVariants: targetVariants) + + ModuleVerifierTargetSet.verifyLanguages(languages: languages, standards: languageStandards) var failed = false for diag in targetDiagnostics { delegate.diagnosticsEngine.emit(diag) @@ -167,7 +168,8 @@ final class ModuleVerifierTaskProducer: PhasedTaskProducer, TaskProducer { main: outputPath.join("Test.\(fileExtension)"), header: outputPath.join("Test.framework/Headers/Test.h"), moduleMap: outputPath.join("Test.framework/Modules/module.modulemap"), - dir: outputPath) + dir: outputPath + ) inputsByLanguage[language] = outputs let inputContext = CommandBuildContext(producer: self.context, scope: scope, inputs: inputs, commandOrderingInputs: commandOrderingInputs) await self.context.clangModuleVerifierInputGeneratorSpec.constructTasks(inputContext, delegate, alwaysExecuteTask: alwaysExecuteTask, language: language.rawValue, mainOutput: outputs.main, headerOutput: outputs.header, moduleMapOutput: outputs.moduleMap) @@ -251,7 +253,7 @@ final class ModuleVerifierTaskProducer: PhasedTaskProducer, TaskProducer { otherCFlags += otherVerifierFlags.drop(while: { $0 != "--" }).dropFirst() var otherCPlusPlusFlags = workspaceScope.evaluate(BuiltinMacros.OTHER_CPLUSPLUSFLAGS) otherCPlusPlusFlags += [ - "-fcxx-modules", + "-fcxx-modules" ] otherCPlusPlusFlags += otherCFlags @@ -295,7 +297,7 @@ final class ModuleVerifierTaskProducer: PhasedTaskProducer, TaskProducer { passthrough(BuiltinMacros.CLANG_ENABLE_COMPILE_CACHE) passthrough(BuiltinMacros.COMPILATION_CACHE_CAS_PATH) passthrough(BuiltinMacros.SDK_STAT_CACHE_PATH) - passthrough(BuiltinMacros.INDEX_ENABLE_BUILD_ARENA) // Needed by clang explicit modules + passthrough(BuiltinMacros.INDEX_ENABLE_BUILD_ARENA) // Needed by clang explicit modules passthrough(BuiltinMacros.CLANG_EXPLICIT_MODULES_OUTPUT_PATH) passthrough(BuiltinMacros.CLANG_USE_RESPONSE_FILE) table.push(BuiltinMacros.CLANG_MODULE_LSV, literal: enableLSV) @@ -311,13 +313,14 @@ final 
class ModuleVerifierTaskProducer: PhasedTaskProducer, TaskProducer { let buildScope = MacroEvaluationScope(table: table) let buildInputs = [ - FileToBuild(context: self.context, absolutePath: inputs.main), - ] - let buildOrderingInputs = commandOrderingInputs + [ - delegate.createNode(fileNameMapPath), - delegate.createNode(inputs.header), - delegate.createNode(inputs.moduleMap), + FileToBuild(context: self.context, absolutePath: inputs.main) ] + let buildOrderingInputs = + commandOrderingInputs + [ + delegate.createNode(fileNameMapPath), + delegate.createNode(inputs.header), + delegate.createNode(inputs.moduleMap), + ] let buildContext = CommandBuildContext(producer: self.context, scope: buildScope, inputs: buildInputs, commandOrderingInputs: buildOrderingInputs) await self.context.clangModuleVerifierSpec.constructTasks(buildContext, delegate) @@ -419,9 +422,11 @@ final class ModuleVerifierTaskProducer: PhasedTaskProducer, TaskProducer { let specRegistry = context.workspaceContext.core.specRegistry let headerFileTypes = specRegistry.headerFileTypes - let moduleAffectingCombinations = [(BuiltinMacros.PUBLIC_HEADERS_FOLDER_PATH, headerFileTypes), - (BuiltinMacros.PRIVATE_HEADERS_FOLDER_PATH, headerFileTypes), - (BuiltinMacros.MODULES_FOLDER_PATH, [specRegistry.modulemapFileType])] + let moduleAffectingCombinations = [ + (BuiltinMacros.PUBLIC_HEADERS_FOLDER_PATH, headerFileTypes), + (BuiltinMacros.PRIVATE_HEADERS_FOLDER_PATH, headerFileTypes), + (BuiltinMacros.MODULES_FOLDER_PATH, [specRegistry.modulemapFileType]), + ] for (macro, fileTypes) in moduleAffectingCombinations { for rootDirectory in rootDirectories { addPaths(for: macro, joinedTo: rootDirectory, fileTypes: fileTypes) diff --git a/Sources/SWBTaskConstruction/TaskProducers/OtherTaskProducers/ProductPostprocessingTaskProducer.swift b/Sources/SWBTaskConstruction/TaskProducers/OtherTaskProducers/ProductPostprocessingTaskProducer.swift index 8fbddd3c..a95b2bf3 100644 --- a/Sources/SWBTaskConstruction/TaskProducers/OtherTaskProducers/ProductPostprocessingTaskProducer.swift +++ b/Sources/SWBTaskConstruction/TaskProducers/OtherTaskProducers/ProductPostprocessingTaskProducer.swift @@ -163,7 +163,6 @@ final class ProductPostprocessingTaskProducer: PhasedTaskProducer, TaskProducer return tasks } - // MARK: Copy aside /// Creates a task to copy aside the symboled product to the SYMROOT during an install build. @@ -191,13 +190,11 @@ final class ProductPostprocessingTaskProducer: PhasedTaskProducer, TaskProducer // Since a build might have multiple targets which generate the same product name, we want to avoid collisions when computing the destination here. So for installed products we place them at their INSTALL_PATH relative to the SYMROOT, and for uninstalled products we place them at $(SYMROOT)/UninstalledProducts/$(PROJECT_NAME)/$(TARGET_NAME). if scope.evaluate(BuiltinMacros.SKIP_INSTALL) || scope.evaluate(BuiltinMacros.INSTALL_PATH).isEmpty { output = buildDir.join("UninstalledProducts").join(context.settings.project?.name).join(context.settings.target?.name).join(fullProductName) - } - else { + } else { let installPath = scope.evaluate(Static { BuiltinMacros.namespace.parseString("$(INSTALL_PATH)$(TARGET_BUILD_SUBPATH)") }) - output = buildDir.join(installPath, preserveRoot:true).join(fullProductName) + output = buildDir.join(installPath, preserveRoot: true).join(fullProductName) } - } - else { + } else { // Right now we use the old, flat layout until we can enable the hierarchical layout by default. 
output = buildDir.join(fullProductName) } @@ -207,7 +204,6 @@ final class ProductPostprocessingTaskProducer: PhasedTaskProducer, TaskProducer } } - // MARK: Stripping private func addStripSymbolsTasks(_ scope: MacroEvaluationScope, _ tasks: inout [any PlannedTask]) { @@ -372,7 +368,6 @@ final class ProductPostprocessingTaskProducer: PhasedTaskProducer, TaskProducer } } - // MARK: Code signing /// Returns a list of files to sign for the current product. @@ -560,7 +555,6 @@ final class ProductPostprocessingTaskProducer: PhasedTaskProducer, TaskProducer } } - // MARK: Product Type Extensions extension ProductTypeSpec { @@ -724,7 +718,7 @@ private extension ProductTypeSpec { // FIXME: headerDestPaths should only be used for framework targets when determining outputPath // until rdar://81762676 (Dylib targets inconsistently writes headers to BUILD_PRODUCTS_DIR // based install vs normal build) has been resolved. - let outputPath : Path + let outputPath: Path let tapiVisibility: TAPIFileList.HeaderVisibility switch visibility { case .public?: @@ -774,17 +768,24 @@ private extension ProductTypeSpec { postProcessingProducer.uniqueAuxiliaryFilePaths.insert(jsonPath) } - return jsonPath } } /// Adds the InstallAPI tasks that are common between framework and dylib targets. /// This function will determine if Swift and TAPI-based InstallAPI actions took place during the build. -func addCommonInstallAPITasks(_ producer: PhasedTaskProducer, _ scope: MacroEvaluationScope, inputs: [FileToBuild], - headerDependencyInputs: [any PlannedNode], - tapiOutputNode: PlannedPathNode, tapiOrderingNode: PlannedVirtualNode, phaseStartNodes: [any PlannedNode], - phaseEndTask: any PlannedTask, jsonPath: Path?, destination: InstallAPIDestination) async -> [any PlannedTask] { +func addCommonInstallAPITasks( + _ producer: PhasedTaskProducer, + _ scope: MacroEvaluationScope, + inputs: [FileToBuild], + headerDependencyInputs: [any PlannedNode], + tapiOutputNode: PlannedPathNode, + tapiOrderingNode: PlannedVirtualNode, + phaseStartNodes: [any PlannedNode], + phaseEndTask: any PlannedTask, + jsonPath: Path?, + destination: InstallAPIDestination +) async -> [any PlannedTask] { let buildComponents = scope.evaluate(BuiltinMacros.BUILD_COMPONENTS) var dependencyInputs = headerDependencyInputs // Only add dSYM dependency iff this the task is installAPI verification. @@ -792,7 +793,7 @@ func addCommonInstallAPITasks(_ producer: PhasedTaskProducer, _ scope: MacroEval if buildComponents.contains("build") && !(destination == .eagerLinkingTBDDir) && tapiReadDSYM { let dsymBundle = scope.evaluate(BuiltinMacros.DWARF_DSYM_FOLDER_PATH) .join(scope.evaluate(BuiltinMacros.DWARF_DSYM_FILE_NAME)) - dependencyInputs.append(producer.context.createDirectoryTreeNode(dsymBundle, excluding:[""])) + dependencyInputs.append(producer.context.createDirectoryTreeNode(dsymBundle, excluding: [""])) } let variant = scope.evaluate(BuiltinMacros.CURRENT_VARIANT) @@ -861,7 +862,8 @@ private extension FrameworkProductTypeSpec { if !scope.evaluate(BuiltinMacros.SUPPORTS_TEXT_BASED_API) && !producer.targetContext.supportsEagerLinking(scope: scope) { // If the target has no installed headers, it is not an error for it not to support InstallAPI. 
guard let targetInfo = targetHeaderInfo, - !targetInfo.publicHeaders.isEmpty || !targetInfo.privateHeaders.isEmpty else { + !targetInfo.publicHeaders.isEmpty || !targetInfo.privateHeaders.isEmpty + else { return } @@ -898,7 +900,7 @@ private extension FrameworkProductTypeSpec { let tapiInputNode = producer.context.createNode(scope.evaluate(BuiltinMacros.TARGET_BUILD_DIR).join(scope.evaluate(BuiltinMacros.WRAPPER_NAME))) guard let jsonPath = await addFileListInstallAPITasks(targetHeaderInfo, true, producer, tapiInfo: tapiInfo, scope, &tasks, &tapiInputNodes) else { - return // we've already emitted an error + return // we've already emitted an error } // NOTE: These must be captured here; they are mutable and used to define the task order gating. @@ -908,8 +910,18 @@ private extension FrameworkProductTypeSpec { producer.context.addDeferredProducer { let inputs = [FileToBuild(context: producer.context, absolutePath: tapiInputNode.path)] - return await addCommonInstallAPITasks(producer, scope, inputs: inputs, headerDependencyInputs: tapiInputNodes, tapiOutputNode: tapiOutputNode, tapiOrderingNode: tapiOrderingNode, - phaseStartNodes: phaseStartNodes, phaseEndTask: phaseEndTask, jsonPath: jsonPath, destination: destination) + return await addCommonInstallAPITasks( + producer, + scope, + inputs: inputs, + headerDependencyInputs: tapiInputNodes, + tapiOutputNode: tapiOutputNode, + tapiOrderingNode: tapiOrderingNode, + phaseStartNodes: phaseStartNodes, + phaseEndTask: phaseEndTask, + jsonPath: jsonPath, + destination: destination + ) } } @@ -982,7 +994,8 @@ private extension LibraryProductTypeSpec { if !scope.evaluate(BuiltinMacros.SUPPORTS_TEXT_BASED_API) && !producer.targetContext.supportsEagerLinking(scope: scope) { // If the target has no installed headers, it is not an error for it not to support InstallAPI. guard let targetHeaderInfo = targetHeaderInfo, - !targetHeaderInfo.publicHeaders.isEmpty || !targetHeaderInfo.privateHeaders.isEmpty else { + !targetHeaderInfo.publicHeaders.isEmpty || !targetHeaderInfo.privateHeaders.isEmpty + else { return } @@ -1015,7 +1028,7 @@ private extension LibraryProductTypeSpec { var tapiInputNodes = [any PlannedNode]() guard let jsonPath = await addFileListInstallAPITasks(targetHeaderInfo, false, producer, tapiInfo: tapiInfo, scope, &tasks, &tapiInputNodes) else { - return // we've already emitted an error + return // we've already emitted an error } // NOTE: These must be captured here; they are mutable and used to define the task order gating. 
@@ -1023,8 +1036,18 @@ private extension LibraryProductTypeSpec { let phaseEndTask = producer.phaseEndTask producer.context.addDeferredProducer { - return await addCommonInstallAPITasks(producer, scope, inputs: [], headerDependencyInputs: tapiInputNodes, tapiOutputNode: tapiOutputNode, tapiOrderingNode: tapiOrderingNode, - phaseStartNodes: phaseStartNodes, phaseEndTask: phaseEndTask, jsonPath: jsonPath, destination: destination) + return await addCommonInstallAPITasks( + producer, + scope, + inputs: [], + headerDependencyInputs: tapiInputNodes, + tapiOutputNode: tapiOutputNode, + tapiOrderingNode: tapiOrderingNode, + phaseStartNodes: phaseStartNodes, + phaseEndTask: phaseEndTask, + jsonPath: jsonPath, + destination: destination + ) } } diff --git a/Sources/SWBTaskConstruction/TaskProducers/OtherTaskProducers/ProductStructureTaskProducer.swift b/Sources/SWBTaskConstruction/TaskProducers/OtherTaskProducers/ProductStructureTaskProducer.swift index 633ae43f..d5edc113 100644 --- a/Sources/SWBTaskConstruction/TaskProducers/OtherTaskProducers/ProductStructureTaskProducer.swift +++ b/Sources/SWBTaskConstruction/TaskProducers/OtherTaskProducers/ProductStructureTaskProducer.swift @@ -19,8 +19,7 @@ final class ProductStructureTaskProducer: PhasedTaskProducer, TaskProducer { return .immediate } - func generateTasks() async -> [any PlannedTask] - { + func generateTasks() async -> [any PlannedTask] { var tasks = [any PlannedTask]() let settings = context.settings let scope = settings.globalScope @@ -31,8 +30,7 @@ final class ProductStructureTaskProducer: PhasedTaskProducer, TaskProducer { // Generate tasks to create directories defining the product structure. let targetBuildDir = self.context.settings.globalScope.evaluate(BuiltinMacros.TARGET_BUILD_DIR) var outputPaths = Set() - for directory in PackageTypeSpec.productStructureDirectories - { + for directory in PackageTypeSpec.productStructureDirectories { let buildSetting = directory.buildSetting let subDir = context.settings.globalScope.evaluate(buildSetting) @@ -67,9 +65,9 @@ final class ProductStructureTaskProducer: PhasedTaskProducer, TaskProducer { if !scope.evaluate(BuiltinMacros.BUILD_COMPONENTS).contains("installLoc") { // Generate tasks to create symbolic links in the product structure. - for descriptor in productType.productStructureSymlinkDescriptors(scope) - { - let destinationPath = descriptor.toPath.isAbsolute + for descriptor in productType.productStructureSymlinkDescriptors(scope) { + let destinationPath = + descriptor.toPath.isAbsolute ? descriptor.toPath : descriptor.location.dirname.join(descriptor.effectiveToPath ?? descriptor.toPath).normalize() @@ -105,15 +103,11 @@ final class ProductStructureTaskProducer: PhasedTaskProducer, TaskProducer { } - // MARK: Product Type Extensions - -private extension ProductTypeSpec -{ +private extension ProductTypeSpec { /// Create the tasks to make the symlinks to the products in the `BUILT_PRODUCTS_DIR`, if appropriate. - func addBuiltProductsDirSymlinkTasks(_ producer: StandardTaskProducer, _ settings: Settings, _ tasks: inout [any PlannedTask]) async - { + func addBuiltProductsDirSymlinkTasks(_ producer: StandardTaskProducer, _ settings: Settings, _ tasks: inout [any PlannedTask]) async { let scope = settings.globalScope // Only create symlink tasks when using deployment locations. @@ -131,42 +125,33 @@ private extension ProductTypeSpec if let asXCTestBundle = self as? 
XCTestBundleProductTypeSpec { await asXCTestBundle.addXCTestBundleBuiltProductsDirSymlinkTasks(producer, scope, &tasks) } - } - else if let asDynamicLibrary = self as? DynamicLibraryProductTypeSpec { + } else if let asDynamicLibrary = self as? DynamicLibraryProductTypeSpec { await asDynamicLibrary.addDynamicLibraryBuiltProductsDirSymlinkTasks(producer, settings, &tasks) - } - else if let asStandalone = self as? StandaloneExecutableProductTypeSpec { + } else if let asStandalone = self as? StandaloneExecutableProductTypeSpec { await asStandalone.addStandaloneExecutableBuiltProductsDirSymlinkTasks(producer, scope, &tasks) - } - else { + } else { fatalError("unknown product type: \(self)") } } } -private extension BundleProductTypeSpec -{ +private extension BundleProductTypeSpec { /// Create the task to make the symlink to the product in the `BUILT_PRODUCTS_DIR`, if appropriate. - func addBundleBuiltProductsDirSymlinkTasks(_ producer: StandardTaskProducer, _ scope: MacroEvaluationScope, _ tasks: inout [any PlannedTask]) async - { + func addBundleBuiltProductsDirSymlinkTasks(_ producer: StandardTaskProducer, _ scope: MacroEvaluationScope, _ tasks: inout [any PlannedTask]) async { let context = producer.context // FIXME: This is in essence the same logic as for standalone products except for using WRAPPER_NAME, just diverged because the variants are top-level for them. We should reconcile, maybe by introducing a generic notion for "why" this is different. let targetWrapper = scope.evaluate(BuiltinMacros.TARGET_BUILD_DIR).join(scope.evaluate(BuiltinMacros.WRAPPER_NAME)) let builtWrapper = scope.evaluate(BuiltinMacros.BUILT_PRODUCTS_DIR).join(scope.evaluate(BuiltinMacros.WRAPPER_NAME)) - await producer.appendGeneratedTasks(&tasks) - { delegate in + await producer.appendGeneratedTasks(&tasks) { delegate in context.symlinkSpec.constructSymlinkTask(CommandBuildContext(producer: context, scope: scope, inputs: [], output: builtWrapper, preparesForIndexing: true), delegate, toPath: targetWrapper, makeRelative: true, repairViaOwnershipAnalysis: true) } } } - -private extension DynamicLibraryProductTypeSpec -{ +private extension DynamicLibraryProductTypeSpec { /// Create the tasks to make the symlink(s) to the dynamic library(s) in the `BUILT_PRODUCTS_DIR`, if appropriate. There will be one such symlink per build variant. - func addDynamicLibraryBuiltProductsDirSymlinkTasks(_ producer: StandardTaskProducer, _ settings: Settings, _ tasks: inout [any PlannedTask]) async - { + func addDynamicLibraryBuiltProductsDirSymlinkTasks(_ producer: StandardTaskProducer, _ settings: Settings, _ tasks: inout [any PlannedTask]) async { let scope = settings.globalScope // Only add symlink tasks when building API or just building. @@ -175,8 +160,9 @@ private extension DynamicLibraryProductTypeSpec let shouldUseInstallAPI = ProductPostprocessingTaskProducer.shouldUseInstallAPI(scope, settings) // Condensed from LibraryProductTypeSpec.addDynamicLibraryInstallAPITasks(:::::). - let willProduceTBD = (buildComponents.contains("api") || (addDynamicLibrarySymlinks && scope.evaluate(BuiltinMacros.TAPI_ENABLE_VERIFICATION_MODE))) - && (scope.evaluate(BuiltinMacros.SUPPORTS_TEXT_BASED_API) || (((producer as? PhasedTaskProducer)?.targetContext.supportsEagerLinking(scope: scope)) ?? false)) + let willProduceTBD = + (buildComponents.contains("api") || (addDynamicLibrarySymlinks && scope.evaluate(BuiltinMacros.TAPI_ENABLE_VERIFICATION_MODE))) + && (scope.evaluate(BuiltinMacros.SUPPORTS_TEXT_BASED_API) || (((producer as? 
PhasedTaskProducer)?.targetContext.supportsEagerLinking(scope: scope)) ?? false)) // Only make a symlink for targets that use the default extension/suffix. Some projects have multiple dynamic libraries // with the same product name but different executable extensions. They all end up with the same TAPI_OUTPUT_PATH, and // there's no good way to resolve that, so only make symlinks for tbds that go with dylibs. @@ -191,22 +177,18 @@ private extension DynamicLibraryProductTypeSpec } // Add a symlink per-variant. - for variant in scope.evaluate(BuiltinMacros.BUILD_VARIANTS) - { - if addTBDSymlinks - { + for variant in scope.evaluate(BuiltinMacros.BUILD_VARIANTS) { + if addTBDSymlinks { await addDynamicLibraryTBDBuiltProductsDirSymlinkTasks(producer, scope, variant, &tasks) } - if addDynamicLibrarySymlinks - { + if addDynamicLibrarySymlinks { await addStandaloneExecutableBuiltProductsDirSymlinkTasks(producer, scope, variant, &tasks) } } } /// Create the task to make the symlink to the TBD in the `BUILT_PRODUCTS_DIR` for a single build variant, if appropriate. - func addDynamicLibraryTBDBuiltProductsDirSymlinkTasks(_ producer: StandardTaskProducer, _ scope: MacroEvaluationScope, _ variant: String, _ tasks: inout [any PlannedTask]) async - { + func addDynamicLibraryTBDBuiltProductsDirSymlinkTasks(_ producer: StandardTaskProducer, _ scope: MacroEvaluationScope, _ variant: String, _ tasks: inout [any PlannedTask]) async { // Enter the per-variant scope. let scope = scope.subscope(binding: BuiltinMacros.variantCondition, to: variant) @@ -217,18 +199,15 @@ private extension DynamicLibraryProductTypeSpec let relativeTargetWrapper = targetWrapper.relativeSubpath(from: scope.evaluate(BuiltinMacros.TARGET_BUILD_DIR)) let builtWrapper = scope.evaluate(BuiltinMacros.BUILT_PRODUCTS_DIR).join(relativeTargetWrapper) - await producer.appendGeneratedTasks(&tasks) - { delegate in + await producer.appendGeneratedTasks(&tasks) { delegate in context.symlinkSpec.constructSymlinkTask(CommandBuildContext(producer: context, scope: scope, inputs: [], output: builtWrapper, preparesForIndexing: true), delegate, toPath: targetWrapper, makeRelative: true, repairViaOwnershipAnalysis: false) } } } -private extension StandaloneExecutableProductTypeSpec -{ +private extension StandaloneExecutableProductTypeSpec { /// Create the tasks to make the symlink(s) to the product(s) in the `BUILT_PRODUCTS_DIR`, if appropriate. There will be one such symlink per build variant. - func addStandaloneExecutableBuiltProductsDirSymlinkTasks(_ producer: StandardTaskProducer, _ scope: MacroEvaluationScope, _ tasks: inout [any PlannedTask]) async - { + func addStandaloneExecutableBuiltProductsDirSymlinkTasks(_ producer: StandardTaskProducer, _ scope: MacroEvaluationScope, _ tasks: inout [any PlannedTask]) async { // Only add symlink tasks when building. guard scope.evaluate(BuiltinMacros.BUILD_COMPONENTS).contains("build") else { return } @@ -238,15 +217,13 @@ private extension StandaloneExecutableProductTypeSpec } // Add a symlink per-variant. - for variant in scope.evaluate(BuiltinMacros.BUILD_VARIANTS) - { + for variant in scope.evaluate(BuiltinMacros.BUILD_VARIANTS) { await addStandaloneExecutableBuiltProductsDirSymlinkTasks(producer, scope, variant, &tasks) } } /// Create the task to make the symlink to the product in the `BUILT_PRODUCTS_DIR` for a single build variant, if appropriate. 
- func addStandaloneExecutableBuiltProductsDirSymlinkTasks(_ producer: StandardTaskProducer, _ scope: MacroEvaluationScope, _ variant: String, _ tasks: inout [any PlannedTask]) async - { + func addStandaloneExecutableBuiltProductsDirSymlinkTasks(_ producer: StandardTaskProducer, _ scope: MacroEvaluationScope, _ variant: String, _ tasks: inout [any PlannedTask]) async { // Enter the per-variant scope. let scope = scope.subscope(binding: BuiltinMacros.variantCondition, to: variant) @@ -255,15 +232,13 @@ private extension StandaloneExecutableProductTypeSpec let targetWrapper = scope.evaluate(BuiltinMacros.TARGET_BUILD_DIR).join(scope.evaluate(BuiltinMacros.EXECUTABLE_PATH)) let builtWrapper = scope.evaluate(BuiltinMacros.BUILT_PRODUCTS_DIR).join(scope.evaluate(BuiltinMacros.EXECUTABLE_PATH)) - await producer.appendGeneratedTasks(&tasks) - { delegate in + await producer.appendGeneratedTasks(&tasks) { delegate in context.symlinkSpec.constructSymlinkTask(CommandBuildContext(producer: context, scope: scope, inputs: [], output: builtWrapper, preparesForIndexing: true), delegate, toPath: targetWrapper, makeRelative: true, repairViaOwnershipAnalysis: false) } } } -private extension XCTestBundleProductTypeSpec -{ +private extension XCTestBundleProductTypeSpec { func addXCTestBundleBuiltProductsDirSymlinkTasks(_ producer: StandardTaskProducer, _ scope: MacroEvaluationScope, _ tasks: inout [any PlannedTask]) async { let buildComponents = scope.evaluate(BuiltinMacros.BUILD_COMPONENTS) @@ -276,8 +251,7 @@ private extension XCTestBundleProductTypeSpec let targetWrapper = scope.unmodifiedTargetBuildDir.join(scope.evaluate(BuiltinMacros.XCTRUNNER_PRODUCT_NAME)) let builtWrapper = scope.evaluate(BuiltinMacros.BUILT_PRODUCTS_DIR).join(scope.evaluate(BuiltinMacros.XCTRUNNER_PRODUCT_NAME)) - await producer.appendGeneratedTasks(&tasks) - { delegate in + await producer.appendGeneratedTasks(&tasks) { delegate in context.symlinkSpec.constructSymlinkTask(CommandBuildContext(producer: context, scope: scope, inputs: [], output: builtWrapper, preparesForIndexing: true), delegate, toPath: targetWrapper, makeRelative: true, repairViaOwnershipAnalysis: true) } } diff --git a/Sources/SWBTaskConstruction/TaskProducers/OtherTaskProducers/SDKStatCacheTaskProducer.swift b/Sources/SWBTaskConstruction/TaskProducers/OtherTaskProducers/SDKStatCacheTaskProducer.swift index 887b8419..0398d80d 100644 --- a/Sources/SWBTaskConstruction/TaskProducers/OtherTaskProducers/SDKStatCacheTaskProducer.swift +++ b/Sources/SWBTaskConstruction/TaskProducers/OtherTaskProducers/SDKStatCacheTaskProducer.swift @@ -31,16 +31,23 @@ final class SDKStatCacheTaskProducer: StandardTaskProducer, TaskProducer { var toolPath: Path } - let cacheDescriptors = await Dictionary(targetContexts.asyncFilter { - await $0.shouldUseSDKStatCache() - }.map { - (CacheDescriptor(sdkRoot: $0.settings.globalScope.evaluate(BuiltinMacros.SDKROOT).str, - cachePath: Path($0.settings.globalScope.evaluate(BuiltinMacros.SDK_STAT_CACHE_PATH)).normalize(), - toolPath: $0.executableSearchPaths.lookup(Path("clang-stat-cache"))?.normalize() ?? 
Path("clang-stat-cache")), - $0.settings.globalScope.evaluate(BuiltinMacros.SDK_STAT_CACHE_VERBOSE_LOGGING)) - }, uniquingKeysWith: { - $0 || $1 - }) + let cacheDescriptors = await Dictionary( + targetContexts.asyncFilter { + await $0.shouldUseSDKStatCache() + }.map { + ( + CacheDescriptor( + sdkRoot: $0.settings.globalScope.evaluate(BuiltinMacros.SDKROOT).str, + cachePath: Path($0.settings.globalScope.evaluate(BuiltinMacros.SDK_STAT_CACHE_PATH)).normalize(), + toolPath: $0.executableSearchPaths.lookup(Path("clang-stat-cache"))?.normalize() ?? Path("clang-stat-cache") + ), + $0.settings.globalScope.evaluate(BuiltinMacros.SDK_STAT_CACHE_VERBOSE_LOGGING) + ) + }, + uniquingKeysWith: { + $0 || $1 + } + ) await appendGeneratedTasks(&tasks) { delegate in for (cacheDescriptor, verbose) in cacheDescriptors { diff --git a/Sources/SWBTaskConstruction/TaskProducers/OtherTaskProducers/SanitizerTaskProducer.swift b/Sources/SWBTaskConstruction/TaskProducers/OtherTaskProducers/SanitizerTaskProducer.swift index 0783e5de..8ee12993 100644 --- a/Sources/SWBTaskConstruction/TaskProducers/OtherTaskProducers/SanitizerTaskProducer.swift +++ b/Sources/SWBTaskConstruction/TaskProducers/OtherTaskProducers/SanitizerTaskProducer.swift @@ -121,7 +121,7 @@ final class SanitizerTaskProducer: PhasedTaskProducer, TaskProducer { guard context.fs.exists(libraryPath) else { if sanitizer.errorForMissingLibrary(on: context.settings.platform) { - context.error("Unable to copy \(sanitizerName) Sanitizer library: Could not determine where it lives." ) + context.error("Unable to copy \(sanitizerName) Sanitizer library: Could not determine where it lives.") } return } diff --git a/Sources/SWBTaskConstruction/TaskProducers/OtherTaskProducers/SwiftFrameworkABICheckerTaskProducer.swift b/Sources/SWBTaskConstruction/TaskProducers/OtherTaskProducers/SwiftFrameworkABICheckerTaskProducer.swift index 6b52df82..82e6fc0e 100644 --- a/Sources/SWBTaskConstruction/TaskProducers/OtherTaskProducers/SwiftFrameworkABICheckerTaskProducer.swift +++ b/Sources/SWBTaskConstruction/TaskProducers/OtherTaskProducers/SwiftFrameworkABICheckerTaskProducer.swift @@ -20,8 +20,7 @@ fileprivate func supportSwiftABIChecking(_ context: TaskProducerContext) -> Bool // swift-api-digester is run only when the "build" component is present. guard scope.evaluate(BuiltinMacros.BUILD_COMPONENTS).contains("build") else { return false } - guard scope.evaluate(BuiltinMacros.SWIFT_API_DIGESTER_MODE) == .api || - (scope.evaluate(BuiltinMacros.SWIFT_EMIT_MODULE_INTERFACE) && scope.evaluate(BuiltinMacros.SWIFT_ENABLE_LIBRARY_EVOLUTION)) else { + guard scope.evaluate(BuiltinMacros.SWIFT_API_DIGESTER_MODE) == .api || (scope.evaluate(BuiltinMacros.SWIFT_EMIT_MODULE_INTERFACE) && scope.evaluate(BuiltinMacros.SWIFT_ENABLE_LIBRARY_EVOLUTION)) else { // BUILD_LIBRARY_FOR_DISTRIBUTION is the option clients should use (it's also what is exposed in the // Build Settings editor) and is what SWIFT_EMIT_MODULE_INTERFACE uses by default, but they are // configurable independently. @@ -60,8 +59,7 @@ final class SwiftFrameworkABICheckerTaskProducer: PhasedTaskProducer, TaskProduc return .immediate } - func generateTasks() async -> [any PlannedTask] - { + func generateTasks() async -> [any PlannedTask] { var tasks = [any PlannedTask]() let scope = context.settings.globalScope // If running this tool is disabled via build setting, then we can abort this task provider. 
diff --git a/Sources/SWBTaskConstruction/TaskProducers/OtherTaskProducers/SwiftStandardLibrariesTaskProducer.swift b/Sources/SWBTaskConstruction/TaskProducers/OtherTaskProducers/SwiftStandardLibrariesTaskProducer.swift index 1735c324..d9303f9d 100644 --- a/Sources/SWBTaskConstruction/TaskProducers/OtherTaskProducers/SwiftStandardLibrariesTaskProducer.swift +++ b/Sources/SWBTaskConstruction/TaskProducers/OtherTaskProducers/SwiftStandardLibrariesTaskProducer.swift @@ -19,8 +19,7 @@ final class SwiftStandardLibrariesTaskProducer: PhasedTaskProducer, TaskProducer return [.immediate, .unsignedProductRequirement] } - func generateTasks() async -> [any PlannedTask] - { + func generateTasks() async -> [any PlannedTask] { var tasks = [any PlannedTask]() let scope = context.settings.globalScope @@ -41,13 +40,11 @@ final class SwiftStandardLibrariesTaskProducer: PhasedTaskProducer, TaskProducer let buildingAnySwiftSourceFiles = (context.configuredTarget?.target as? BuildPhaseTarget)?.sourcesBuildPhase?.containsSwiftSources(context.workspaceContext.workspace, context, scope, context.filePathResolver) ?? false // Determine whether we want to embed swift libraries. - var shouldEmbedSwiftLibraries = (buildingAnySwiftSourceFiles && productType.supportsEmbeddingSwiftStandardLibraries(producer: context)) + var shouldEmbedSwiftLibraries = (buildingAnySwiftSourceFiles && productType.supportsEmbeddingSwiftStandardLibraries(producer: context)) // If ALWAYS_EMBED_SWIFT_STANDARD_LIBRARIES then we will override our earlier reasoning if the product is a wrapper. - if !shouldEmbedSwiftLibraries && scope.evaluate(BuiltinMacros.ALWAYS_EMBED_SWIFT_STANDARD_LIBRARIES) - { + if !shouldEmbedSwiftLibraries && scope.evaluate(BuiltinMacros.ALWAYS_EMBED_SWIFT_STANDARD_LIBRARIES) { // If the product is not a wrapper, then we emit a warning that we won't run the tool, and return. - guard productType.isWrapper else - { + guard productType.isWrapper else { context.warning("Not running swift-stdlib-tool: ALWAYS_EMBED_SWIFT_STANDARD_LIBRARIES is enabled, but the product type '\(productType.identifier)' is not a wrapper type.") return [] } @@ -55,8 +52,7 @@ final class SwiftStandardLibrariesTaskProducer: PhasedTaskProducer, TaskProducer } // Run the swift-stdlib-tool if we've determined that we should do so. - if shouldEmbedSwiftLibraries - { + if shouldEmbedSwiftLibraries { // Cache fo evaluated build settings we'll need multiple times. let targetBuildDir = scope.evaluate(BuiltinMacros.TARGET_BUILD_DIR) @@ -71,22 +67,22 @@ final class SwiftStandardLibrariesTaskProducer: PhasedTaskProducer, TaskProducer BuiltinMacros.SYSTEM_EXTENSIONS_FOLDER_PATH, BuiltinMacros.EXTENSIONS_FOLDER_PATH, ] - foldersToScan.append(contentsOf: embeddedContentDirectoryMacros.compactMap { macro in - guard let subpath = scope.evaluate(macro).nilIfEmpty else { - return nil + foldersToScan.append( + contentsOf: embeddedContentDirectoryMacros.compactMap { macro in + guard let subpath = scope.evaluate(macro).nilIfEmpty else { + return nil + } + return targetBuildDir.join(subpath).str } - return targetBuildDir.join(subpath).str - }) + ) // Add explicit paths to any linked frameworks, even if they are not copied in to the product. // // This allows EMBEDDED_CONTENT_CONTAINS_SWIFT to be used for things like Objective-C unit test bundles which are testing Swift frameworks. - if let frameworksBuildPhase = (context.configuredTarget!.target as? BuildPhaseTarget)?.frameworksBuildPhase - { + if let frameworksBuildPhase = (context.configuredTarget!.target as? 
BuildPhaseTarget)?.frameworksBuildPhase { for buildFile in frameworksBuildPhase.buildFiles where currentPlatformFilter.matches(buildFile.platformFilters) { guard let (_, refPath, fileType) = try? context.resolveBuildFileReference(buildFile) else { continue } - if refPath.isAbsolute && fileType.conformsTo(context.getSpec("wrapper.framework") as! FileTypeSpec) - { + if refPath.isAbsolute && fileType.conformsTo(context.getSpec("wrapper.framework") as! FileTypeSpec) { foldersToScan.append(refPath.str) } } @@ -107,8 +103,8 @@ final class SwiftStandardLibrariesTaskProducer: PhasedTaskProducer, TaskProducer let supportsSpanNatively = context.platform?.supportsSwiftSpanNatively(scope, forceNextMajorVersion: false, considerTargetDeviceOSVersion: true) let backDeploySwiftSpan = supportsSpanNatively != nil && supportsSpanNatively != true - let cbc = CommandBuildContext(producer: context, scope: scope, inputs: [ input ]) - let foldersToScanExpr: MacroStringListExpression? = foldersToScan.count > 0 ? scope.namespace.parseLiteralStringList(foldersToScan): nil + let cbc = CommandBuildContext(producer: context, scope: scope, inputs: [input]) + let foldersToScanExpr: MacroStringListExpression? = foldersToScan.count > 0 ? scope.namespace.parseLiteralStringList(foldersToScan) : nil await appendGeneratedTasks(&tasks) { delegate in await context.swiftStdlibToolSpec.constructSwiftStdLibraryToolTask(cbc, delegate, foldersToScan: foldersToScanExpr, filterForSwiftOS: filterForSwiftOS, backDeploySwiftConcurrency: backDeploySwiftConcurrency, backDeploySwiftSpan: backDeploySwiftSpan) } diff --git a/Sources/SWBTaskConstruction/TaskProducers/OtherTaskProducers/TAPISymbolExtractorTaskProducer.swift b/Sources/SWBTaskConstruction/TaskProducers/OtherTaskProducers/TAPISymbolExtractorTaskProducer.swift index d823806e..c604e1f5 100644 --- a/Sources/SWBTaskConstruction/TaskProducers/OtherTaskProducers/TAPISymbolExtractorTaskProducer.swift +++ b/Sources/SWBTaskConstruction/TaskProducers/OtherTaskProducers/TAPISymbolExtractorTaskProducer.swift @@ -177,12 +177,13 @@ final class TAPISymbolExtractorTaskProducer: PhasedTaskProducer, TaskProducer { // If this header is among the target's header build files we check if it has any platform filters. // If this header is generated then it won't have any platform filters. - let platformFilters = headerBuildFiles.filter({ - if let resolvedFile = try? producer.context.resolveBuildFileReference($0) { - return resolvedFile.absolutePath == path - } - return false - }).only?.platformFilters ?? [] + let platformFilters = + headerBuildFiles.filter({ + if let resolvedFile = try? producer.context.resolveBuildFileReference($0) { + return resolvedFile.absolutePath == path + } + return false + }).only?.platformFilters ?? [] // Skip the header if it is excluded or filtered out. guard !buildFilesContext.isExcluded(path, filters: platformFilters) else { @@ -192,7 +193,7 @@ final class TAPISymbolExtractorTaskProducer: PhasedTaskProducer, TaskProducer { // FIXME: headerDestPaths should only be used for framework targets when determining outputPath // until rdar://81762676 (Dylib targets inconsistently writes headers to BUILD_PRODUCTS_DIR // based install vs normal build) has been resolved. - let outputPath : Path + let outputPath: Path switch visibility { case .public: outputPath = isFramework ? 
headerDestPaths.publicPath.join(path.basename) : TargetHeaderInfo.outputPath(for: path, visibility: .public, scope: scope) @@ -211,7 +212,7 @@ final class TAPISymbolExtractorTaskProducer: PhasedTaskProducer, TaskProducer { // without the truncation for the command ordering input nodes. let inputNodePath: Path if isFramework { - let outputPathVisibility: HeaderVisibility = visibility == .public ? .public : .private // convert between two different visibility enums + let outputPathVisibility: HeaderVisibility = visibility == .public ? .public : .private // convert between two different visibility enums inputNodePath = TargetHeaderInfo.outputPath(for: path, visibility: outputPathVisibility, scope: scope) } else { inputNodePath = outputPath @@ -239,7 +240,7 @@ final class TAPISymbolExtractorTaskProducer: PhasedTaskProducer, TaskProducer { } } } - if headerVisibilityToProcess.contains(nil) { // project visible headers + if headerVisibilityToProcess.contains(nil) { // project visible headers for fileRef in documentationHeaderInfo.projectHeaders { if let header = computeProductHeader(for: fileRef, isFramework: isFramework, visibility: .project, inputNodes: &inputNodes) { headers.append(header) @@ -265,7 +266,7 @@ final class TAPISymbolExtractorTaskProducer: PhasedTaskProducer, TaskProducer { // If the build system generates a module map, the umbrella header can be accessed via the module info. If there's an existing // module map file we need to parse the potential umbrella header information from that file instead. if let umbrellaHeaderName = findUmbrellaHeaderFromGeneratedModuleMap(moduleInfo) ?? findUmbrellaHeaderFromExistingModuleMap(moduleInfo), - let umbrellaHeaderIndex = headerList.firstIndex(where: { $0.visibility == .public && $0.path.basename == umbrellaHeaderName }) + let umbrellaHeaderIndex = headerList.firstIndex(where: { $0.visibility == .public && $0.path.basename == umbrellaHeaderName }) { let umbrellaHeader = headerList.remove(at: umbrellaHeaderIndex) headerList.insert(umbrellaHeader, at: 0) diff --git a/Sources/SWBTaskConstruction/TaskProducers/OtherTaskProducers/TargetOrderTaskProducer.swift b/Sources/SWBTaskConstruction/TaskProducers/OtherTaskProducers/TargetOrderTaskProducer.swift index eba021d2..ecd15820 100644 --- a/Sources/SWBTaskConstruction/TaskProducers/OtherTaskProducers/TargetOrderTaskProducer.swift +++ b/Sources/SWBTaskConstruction/TaskProducers/OtherTaskProducers/TargetOrderTaskProducer.swift @@ -207,11 +207,15 @@ final class TargetOrderTaskProducer: StandardTaskProducer, TaskProducer { inputs.append(contentsOf: context.globalProductPlan.xcframeworkContext.outputFiles(for: configuredTarget).map(context.createNode)) } - return context.createGateTask(inputs, output: output, taskConfiguration: { - $0.forTarget = context.configuredTarget - $0.makeGate() - $0.targetDependencies = resolvedTargetDependencies - }) + return context.createGateTask( + inputs, + output: output, + taskConfiguration: { + $0.forTarget = context.configuredTarget + $0.makeGate() + $0.targetDependencies = resolvedTargetDependencies + } + ) } private var _allowEagerCompilation: Bool? @@ -287,7 +291,7 @@ final class TargetOrderTaskProducer: StandardTaskProducer, TaskProducer { // The inputs are (the appropriate gate nodes from the lookup closure of) all of the targets the configuredTarget immediately depends on. This is a superset of the dependencies, which are all of the immediate (explicit + implicit) targets the configuredTarget is declared to depend on. 
var inputs = [any PlannedNode]() let dependencies = context.globalProductPlan.resolvedDependencies(of: configuredTarget) - for dependency in dependencies { + for dependency in dependencies { if dependency.target !== configuredTarget { // FIXME: If lookup() returns nil here, doesn't this mean the two lists we return will be out-of-sync? Is that bad? if let input = lookup(dependency.target, configuredTarget) { diff --git a/Sources/SWBTaskConstruction/TaskProducers/OtherTaskProducers/XCTestProductTypeTaskProducer.swift b/Sources/SWBTaskConstruction/TaskProducers/OtherTaskProducers/XCTestProductTypeTaskProducer.swift index 02ad1ea6..2dfcd116 100644 --- a/Sources/SWBTaskConstruction/TaskProducers/OtherTaskProducers/XCTestProductTypeTaskProducer.swift +++ b/Sources/SWBTaskConstruction/TaskProducers/OtherTaskProducers/XCTestProductTypeTaskProducer.swift @@ -58,10 +58,8 @@ final class XCTestProductTaskProducer: PhasedTaskProducer, TaskProducer { } } - // MARK: - /// The `XCTestProductPostprocessingTaskProducer` generates postprocessing tasks specific the XCTest product type. This producer's tasks involve assembling or modifying content outside of the test target's own product, often in an enclosing wrapper (the application being tested, or the test runner), so this producer is ordered after the test target's own postprocessing task producer. final class XCTestProductPostprocessingTaskProducer: PhasedTaskProducer, TaskProducer { func generateTasks() async -> [any PlannedTask] { @@ -149,34 +147,40 @@ final class XCTestProductPostprocessingTaskProducer: PhasedTaskProducer, TaskPro outputPath = testRunnerDstPath.join(subpath) let orderingOutput = context.createVirtualNode("Preprocess \(outputPath.str)") let cbc = CommandBuildContext(producer: context, scope: scope, inputs: [FileToBuild(absolutePath: inputPath, inferringTypeUsing: context)] + (infoPlistPath.map { [FileToBuild(context: context, absolutePath: $0)] } ?? []), output: outputPath, commandOrderingOutputs: [orderingOutput]) - await context.copyPlistSpec.constructTasks(cbc, delegate, specialArgs: [ - "--macro-expansion", - "WRAPPEDPRODUCTNAME", - scope.evaluate(BuiltinMacros.PRODUCT_NAME) + "-Runner", - - "--macro-expansion", - "WRAPPEDPRODUCTBUNDLEIDENTIFIER", - wrappedBundleIdentifier(for: scope.evaluate(BuiltinMacros.PRODUCT_BUNDLE_IDENTIFIER)), - - "--macro-expansion", - "TESTPRODUCTNAME", - scope.evaluate(BuiltinMacros.PRODUCT_NAME), - - "--macro-expansion", - "TESTPRODUCTBUNDLEIDENTIFIER", - scope.evaluate(BuiltinMacros.PRODUCT_BUNDLE_IDENTIFIER), - ] + (infoPlistPath.map { - ["--copy-value", "UIDeviceFamily", $0.str] - } ?? []), toolLookup: { decl in - switch decl { - case BuiltinMacros.VALIDATE_PLIST_FILES_WHILE_COPYING: - return scope.namespace.parseLiteralString("YES") - case BuiltinMacros.PLIST_FILE_OUTPUT_FORMAT: - return scope.namespace.parseLiteralString("XML") - default: - return nil + await context.copyPlistSpec.constructTasks( + cbc, + delegate, + specialArgs: [ + "--macro-expansion", + "WRAPPEDPRODUCTNAME", + scope.evaluate(BuiltinMacros.PRODUCT_NAME) + "-Runner", + + "--macro-expansion", + "WRAPPEDPRODUCTBUNDLEIDENTIFIER", + wrappedBundleIdentifier(for: scope.evaluate(BuiltinMacros.PRODUCT_BUNDLE_IDENTIFIER)), + + "--macro-expansion", + "TESTPRODUCTNAME", + scope.evaluate(BuiltinMacros.PRODUCT_NAME), + + "--macro-expansion", + "TESTPRODUCTBUNDLEIDENTIFIER", + scope.evaluate(BuiltinMacros.PRODUCT_BUNDLE_IDENTIFIER), + ] + + (infoPlistPath.map { + ["--copy-value", "UIDeviceFamily", $0.str] + } ?? 
[]), + toolLookup: { decl in + switch decl { + case BuiltinMacros.VALIDATE_PLIST_FILES_WHILE_COPYING: + return scope.namespace.parseLiteralString("YES") + case BuiltinMacros.PLIST_FILE_OUTPUT_FORMAT: + return scope.namespace.parseLiteralString("XML") + default: + return nil + } } - }) + ) runnerSigningInputNodes.append(orderingOutput) continue @@ -224,8 +228,7 @@ final class XCTestProductPostprocessingTaskProducer: PhasedTaskProducer, TaskPro runnerSigningInputNodes.append(orderingOutput) } - } - catch { + } catch { // FIXME: For some reason using error.localizedDescription here results in a link error... delegate.error("unable to create tasks to copy XCTRunner.app: unknown error") return @@ -233,9 +236,10 @@ final class XCTestProductPostprocessingTaskProducer: PhasedTaskProducer, TaskPro if !scope.evaluate(BuiltinMacros.SKIP_COPYING_TEST_FRAMEWORKS) { // Copy the testing frameworks into the runner's Frameworks directory and re-sign them with the developer's identity. We treat these tasks as peers to the tasks above which copy the runner. - var frameworkPaths = Self.xctestLibraryAndFrameworkPaths(scope, context.platform, context.workspaceContext.fs) + [ - Self.swiftTestingFrameworkPath(scope, context.platform, context.workspaceContext.fs) - ] + var frameworkPaths = + Self.xctestLibraryAndFrameworkPaths(scope, context.platform, context.workspaceContext.fs) + [ + Self.swiftTestingFrameworkPath(scope, context.platform, context.workspaceContext.fs) + ] for platformExtension in context.workspaceContext.core.pluginManager.extensions(of: PlatformInfoExtensionPoint.self) { frameworkPaths.append(contentsOf: platformExtension.additionalTestLibraryPaths(scope: scope, platform: context.platform, fs: context.workspaceContext.fs)) @@ -263,7 +267,8 @@ final class XCTestProductPostprocessingTaskProducer: PhasedTaskProducer, TaskPro runnerSigningInputNodes.append(context.createVirtualNode("CodeSign \(testBundlePath.normalize().str)")) // Pass the runner's binary as an input and an output since it is being mutated. - let runnerBinarySubpathExpr = !scope.evaluate(BuiltinMacros._WRAPPER_CONTENTS_DIR).isEmpty + let runnerBinarySubpathExpr = + !scope.evaluate(BuiltinMacros._WRAPPER_CONTENTS_DIR).isEmpty ? scope.namespace.parseString("$(_WRAPPER_CONTENTS_DIR)/MacOS/$(PRODUCT_NAME)-Runner") : scope.namespace.parseString("/$(PRODUCT_NAME)-Runner") let runnerBinaryPath = testRunnerDstPath.join(scope.evaluate(runnerBinarySubpathExpr), preserveRoot: true) @@ -297,10 +302,8 @@ final class XCTestProductPostprocessingTaskProducer: PhasedTaskProducer, TaskPro } } - // MARK: Utility methods - static fileprivate func copyAndReSignTestFramework(from srcPath: Path, to dstPath: Path, _ producer: StandardTaskProducer, _ scope: MacroEvaluationScope, _ delegate: any TaskGenerationDelegate, commandOrderingInputs: [any PlannedNode] = [], commandOrderingOutput: inout (any PlannedNode)?) async { // Copy the test framework. let context = producer.context @@ -322,98 +325,96 @@ final class XCTestProductPostprocessingTaskProducer: PhasedTaskProducer, TaskPro /// The path to the copy of `Testing.framework` which should be used by clients. static fileprivate func swiftTestingFrameworkPath(_ scope: MacroEvaluationScope, _ platform: Platform?, _ fs: any FSProxy) -> Path { - let testingFrameworkPath = Path(scope.evaluate(BuiltinMacros.PLATFORM_DIR)).join("Developer/Library/Frameworks/Testing.framework") - return fs.exists(testingFrameworkPath) ? 
testingFrameworkPath : Path("") - } - - /// The paths to libraries and frameworks produced by the XCTest project, including `XCTest.framework` and some - /// of its dependencies, which should be used by clients. - /// - /// - Note: This does not include `Testing.framework`, since it is not produced as part of the XCTest project, - /// despite the fact that some of the XCTest libraries depend on it. - /// - /// The directory where the libraries and frameworks whose paths this function returns are located may be - /// overridden if `scope` sets `INTERNAL_TEST_LIBRARIES_OVERRIDE_PATH`. - static fileprivate func xctestLibraryAndFrameworkPaths( - includingBundleInject includeBundleInject: Bool = false, - _ scope: MacroEvaluationScope, - _ platform: Platform?, - _ fs: any FSProxy - ) -> [Path] { - func testLibrariesOverridePath() -> Path? { - let path = Path(scope.evaluate(BuiltinMacros.INTERNAL_TEST_LIBRARIES_OVERRIDE_PATH)) - if !path.isEmpty, path.isAbsolute { - return path - } else { - return nil - } - } - let testLibrariesOverridePath = testLibrariesOverridePath() - - let frameworksDir = testLibrariesOverridePath ?? XCTestBundleProductTypeSpec.getPlatformDeveloperVariantLibraryPath(scope, platform).join("Frameworks") - let privateFrameworksDir = testLibrariesOverridePath ?? XCTestBundleProductTypeSpec.getPlatformDeveloperVariantLibraryPath(scope, platform).join("PrivateFrameworks") - let usrLibDir = testLibrariesOverridePath ?? XCTestBundleProductTypeSpec.getPlatformDeveloperVariantLibraryPath(scope, platform).dirname.join("usr/lib") - - var result: [Path] = [] - - let publicFrameworkNames = [ - "XCTest.framework", - ] - for publicFrameworkName in publicFrameworkNames { - result.append(frameworksDir.join(publicFrameworkName)) - } - - let subFrameworkNames = [ - "XCUnit.framework", - "XCUIAutomation.framework", - ] - for subFrameworkName in subFrameworkNames { - let pathInPublicFrameworksDir = frameworksDir.join(subFrameworkName) - lazy var pathInPrivateFrameworksDir = privateFrameworksDir.join(subFrameworkName) - if fs.exists(pathInPublicFrameworksDir) { - result.append(pathInPublicFrameworksDir) - } else if fs.exists(pathInPrivateFrameworksDir) { - result.append(pathInPrivateFrameworksDir) - } - } - - let privateFrameworkNames = [ - "XCTestCore.framework", - "XCTestSupport.framework", - "XCTAutomationSupport.framework", - ] - for privateFrameworkName in privateFrameworkNames { - result.append(privateFrameworksDir.join(privateFrameworkName)) - } - - var libraryNames = [ - "libXCTestSwiftSupport.dylib", - ] - if includeBundleInject { - libraryNames.append("libXCTestBundleInject.dylib") - } - for libraryName in libraryNames { - result.append(usrLibDir.join(libraryName)) - } - - return result - } -} + let testingFrameworkPath = Path(scope.evaluate(BuiltinMacros.PLATFORM_DIR)).join("Developer/Library/Frameworks/Testing.framework") + return fs.exists(testingFrameworkPath) ? testingFrameworkPath : Path("") + } + /// The paths to libraries and frameworks produced by the XCTest project, including `XCTest.framework` and some + /// of its dependencies, which should be used by clients. + /// + /// - Note: This does not include `Testing.framework`, since it is not produced as part of the XCTest project, + /// despite the fact that some of the XCTest libraries depend on it. + /// + /// The directory where the libraries and frameworks whose paths this function returns are located may be + /// overridden if `scope` sets `INTERNAL_TEST_LIBRARIES_OVERRIDE_PATH`. 
+ static fileprivate func xctestLibraryAndFrameworkPaths( + includingBundleInject includeBundleInject: Bool = false, + _ scope: MacroEvaluationScope, + _ platform: Platform?, + _ fs: any FSProxy + ) -> [Path] { + func testLibrariesOverridePath() -> Path? { + let path = Path(scope.evaluate(BuiltinMacros.INTERNAL_TEST_LIBRARIES_OVERRIDE_PATH)) + if !path.isEmpty, path.isAbsolute { + return path + } else { + return nil + } + } + let testLibrariesOverridePath = testLibrariesOverridePath() -// MARK: + let frameworksDir = testLibrariesOverridePath ?? XCTestBundleProductTypeSpec.getPlatformDeveloperVariantLibraryPath(scope, platform).join("Frameworks") + let privateFrameworksDir = testLibrariesOverridePath ?? XCTestBundleProductTypeSpec.getPlatformDeveloperVariantLibraryPath(scope, platform).join("PrivateFrameworks") + let usrLibDir = testLibrariesOverridePath ?? XCTestBundleProductTypeSpec.getPlatformDeveloperVariantLibraryPath(scope, platform).dirname.join("usr/lib") + var result: [Path] = [] + + let publicFrameworkNames = [ + "XCTest.framework" + ] + for publicFrameworkName in publicFrameworkNames { + result.append(frameworksDir.join(publicFrameworkName)) + } + + let subFrameworkNames = [ + "XCUnit.framework", + "XCUIAutomation.framework", + ] + for subFrameworkName in subFrameworkNames { + let pathInPublicFrameworksDir = frameworksDir.join(subFrameworkName) + lazy var pathInPrivateFrameworksDir = privateFrameworksDir.join(subFrameworkName) + if fs.exists(pathInPublicFrameworksDir) { + result.append(pathInPublicFrameworksDir) + } else if fs.exists(pathInPrivateFrameworksDir) { + result.append(pathInPrivateFrameworksDir) + } + } + + let privateFrameworkNames = [ + "XCTestCore.framework", + "XCTestSupport.framework", + "XCTAutomationSupport.framework", + ] + for privateFrameworkName in privateFrameworkNames { + result.append(privateFrameworksDir.join(privateFrameworkName)) + } + + var libraryNames = [ + "libXCTestSwiftSupport.dylib" + ] + if includeBundleInject { + libraryNames.append("libXCTestBundleInject.dylib") + } + for libraryName in libraryNames { + result.append(usrLibDir.join(libraryName)) + } + + return result + } +} + +// MARK: /// The `XCTestHostTaskProducer` generates tasks relevant to a target whose product is the host for products of XCTest targets. For example, embedding the XCTest-related frameworks and libraries in the host product. final class XCTestHostTaskProducer: PhasedTaskProducer, TaskProducer { func generateTasks() async -> [any PlannedTask] { let scope = context.settings.globalScope var tasks: [any PlannedTask] = [] - if isTestHostTarget() { - await appendGeneratedTasks(&tasks) { delegate in - await generateTestHostTasks(scope, delegate) - } + if isTestHostTarget() { + await appendGeneratedTasks(&tasks) { delegate in + await generateTestHostTasks(scope, delegate) } + } return tasks } @@ -434,9 +435,10 @@ final class XCTestHostTaskProducer: PhasedTaskProducer, TaskProducer { let frameworksPath = scope.evaluate(BuiltinMacros.TARGET_BUILD_DIR).join(scope.evaluate(BuiltinMacros.FRAMEWORKS_FOLDER_PATH)) // NOTE: If any new paths are added here, they may also need to be added to the list of those not to be scanned by the CopySwiftLibs task, in executableIsXCTestSupportLibrary() in EmbedSwiftStdLibTaskAction.swift. // NOTE: If any new paths are added here, they may also need to be added to the list of those not to be scanned by the CopySwiftLibs task, in executableIsTestSupportLibrary() in EmbedSwiftStdLibTaskAction.swift. 
- var srcPaths = XCTestProductPostprocessingTaskProducer.xctestLibraryAndFrameworkPaths(includingBundleInject: true, scope, context.platform, context.workspaceContext.fs) + [ - XCTestProductPostprocessingTaskProducer.swiftTestingFrameworkPath(scope, context.platform, context.workspaceContext.fs) - ] + var srcPaths = + XCTestProductPostprocessingTaskProducer.xctestLibraryAndFrameworkPaths(includingBundleInject: true, scope, context.platform, context.workspaceContext.fs) + [ + XCTestProductPostprocessingTaskProducer.swiftTestingFrameworkPath(scope, context.platform, context.workspaceContext.fs) + ] for platformExtension in context.workspaceContext.core.pluginManager.extensions(of: PlatformInfoExtensionPoint.self) { srcPaths.append(contentsOf: platformExtension.additionalTestLibraryPaths(scope: scope, platform: context.platform, fs: context.workspaceContext.fs)) diff --git a/Sources/SWBTaskConstruction/TaskProducers/StandardTaskProducer.swift b/Sources/SWBTaskConstruction/TaskProducers/StandardTaskProducer.swift index f7825797..db3f7924 100644 --- a/Sources/SWBTaskConstruction/TaskProducers/StandardTaskProducer.swift +++ b/Sources/SWBTaskConstruction/TaskProducers/StandardTaskProducer.swift @@ -66,7 +66,7 @@ open class StandardTaskProducer { let delegate = ProducerBasedTaskGenerationDelegate(producer: self, context: context, taskOptions: options) await body(delegate) tasks.append(contentsOf: delegate.tasks) - return (tasks: delegate.tasks, outputs: delegate.outputs ) + return (tasks: delegate.tasks, outputs: delegate.outputs) } func diagnosticFilenameMapContents() async throws -> ByteString { @@ -85,7 +85,6 @@ open class StandardTaskProducer { } } - /// Abstract base class for phased task producers. All tasks generated by such a producer will be grouped between two nodes so they will all run after tasks before the start node, and before tasks after the end node. It is the responsibility of the creator of these producers to create them with nodes which reflect the desired ordering. open class PhasedTaskProducer: StandardTaskProducer { /// The phase start nodes. diff --git a/Sources/SWBTaskConstruction/TaskProducers/TaskProducer.swift b/Sources/SWBTaskConstruction/TaskProducers/TaskProducer.swift index 7a2e024e..4d857c33 100644 --- a/Sources/SWBTaskConstruction/TaskProducers/TaskProducer.swift +++ b/Sources/SWBTaskConstruction/TaskProducers/TaskProducer.swift @@ -30,8 +30,7 @@ enum TaskProducerPhase { } /// A TaskProducer embodies a set of work which needs to be done to create the tasks which, when run, will generate all or part of the product of a ProductPlan. -package protocol TaskProducer -{ +package protocol TaskProducer { /// Immutable data available to the task producer. var context: TaskProducerContext { get } @@ -45,15 +44,13 @@ package protocol TaskProducer func prepare() async } -extension TaskProducer -{ +extension TaskProducer { /// By default, most task producers should not need to make use of this functionality. package func prepare() async {} } /// Context of immutable data available to a task producer. -public class TaskProducerContext: StaleFileRemovalContext, BuildFileResolution -{ +public class TaskProducerContext: StaleFileRemovalContext, BuildFileResolution { /// The workspace context. public let workspaceContext: WorkspaceContext @@ -294,8 +291,7 @@ public class TaskProducerContext: StaleFileRemovalContext, BuildFileResolution /// - parameter workspaceContext: The containing workspace and context. /// - parameter globalProductPlan: The high-level global build information. 
/// - parameter delegate: The delegate to use for task construction. - init(configuredTarget: ConfiguredTarget? = nil, workspaceContext: WorkspaceContext, globalProductPlan: GlobalProductPlan, delegate: any TaskPlanningDelegate) - { + init(configuredTarget: ConfiguredTarget? = nil, workspaceContext: WorkspaceContext, globalProductPlan: GlobalProductPlan, delegate: any TaskPlanningDelegate) { self.workspaceContext = workspaceContext self.configuredTarget = configuredTarget self.globalProductPlan = globalProductPlan @@ -329,7 +325,7 @@ public class TaskProducerContext: StaleFileRemovalContext, BuildFileResolution } self.buildRuleSet = LeveledBuildRuleSet(ruleSets: [ BasicBuildRuleSet(rules: projectBuildRules), - DisambiguatingBuildRuleSet(rules: settings.systemBuildRules, enableDebugActivityLogs: workspaceContext.userPreferences.enableDebugActivityLogs) + DisambiguatingBuildRuleSet(rules: settings.systemBuildRules, enableDebugActivityLogs: workspaceContext.userPreferences.enableDebugActivityLogs), ]) self.project = configuredTarget.map { workspaceContext.workspace.project(for: $0.target) } @@ -760,24 +756,26 @@ public class TaskProducerContext: StaleFileRemovalContext, BuildFileResolution // Otherwise, we are producing a binary if we have a Sources build phase and either that phase is not empty or we're generating a versioning stub file. guard let target = configuredTarget?.target as? SWBCore.BuildPhaseTarget, - let sourcesBuildPhase = target.sourcesBuildPhase else { + let sourcesBuildPhase = target.sourcesBuildPhase + else { return false } let context = BuildFilesProcessingContext(scope) - let hasObjectProducingSources = sourcesBuildPhase.buildFiles.filter { - guard let buildFile = try? resolveBuildFileReference($0), !context.isExcluded(buildFile.absolutePath, filters: $0.platformFilters) else { - return false - } + let hasObjectProducingSources = + sourcesBuildPhase.buildFiles.filter { + guard let buildFile = try? resolveBuildFileReference($0), !context.isExcluded(buildFile.absolutePath, filters: $0.platformFilters) else { + return false + } - // AppleScript files don't produce object files either directly or transitively, so they cannot (for most definitions of "cannot") contribute to a linked Mach-O being produced. - if buildFile.fileType.identifier == "sourcecode.applescript" { - return false - } + // AppleScript files don't produce object files either directly or transitively, so they cannot (for most definitions of "cannot") contribute to a linked Mach-O being produced. + if buildFile.fileType.identifier == "sourcecode.applescript" { + return false + } - return true - }.count > 0 || scope.generatesAppleGenericVersioningFile(context) || scope.generatesKernelExtensionModuleInfoFile(context, settings, sourcesBuildPhase) + return true + }.count > 0 || scope.generatesAppleGenericVersioningFile(context) || scope.generatesKernelExtensionModuleInfoFile(context, settings, sourcesBuildPhase) // We will produce a binary if we have sources. if hasObjectProducingSources { @@ -944,17 +942,21 @@ public class TaskProducerContext: StaleFileRemovalContext, BuildFileResolution /// Report an error that is caused by a missing package product. func missingPackageProduct(_ packageName: String, _ buildFile: BuildFile, _ buildPhase: BuildPhase) { - error("Missing package product '\(packageName)'", + error( + "Missing package product '\(packageName)'", location: .buildFile(buildFileGUID: buildFile.guid, buildPhaseGUID: buildPhase.guid, targetGUID: configuredTarget?.target.guid ?? 
""), - component: .packageResolution) + component: .packageResolution + ) } func missingNamedReference(_ name: String, _ buildFile: BuildFile, _ buildPhase: BuildPhase) { // TODO: Semantic build file locations end up going to the General tab in Xcode, but here we have our first use case where we ALWAYS want it to go to the Build Phases tab. // Will need to figure out a way to abstract this into the diagnostics model which doesn't directly map to Xcode's UI. Perhaps an `exact` flag? - error("This \(buildPhase.name) build phase contains a reference to a missing file '\(name)'.", + error( + "This \(buildPhase.name) build phase contains a reference to a missing file '\(name)'.", location: .buildFile(buildFileGUID: buildFile.guid, buildPhaseGUID: buildPhase.guid, targetGUID: configuredTarget?.target.guid ?? ""), - component: .targetIntegrity) + component: .targetIntegrity + ) } func emitFileExclusionDiagnostic(_ exclusionReason: BuildFileExclusionReason, _ context: any BuildFileFilteringContext, _ path: Path, _ filters: Set, _ buildFileLocation: Diagnostic.Location?) { @@ -970,10 +972,18 @@ public class TaskProducerContext: StaleFileRemovalContext, BuildFileResolution case let .patternLists(excludePattern): let excl = context.excludedSourceFileNames.joined(separator: " ") let incl = context.includedSourceFileNames.joined(separator: " ") - self.delegate.emit(configuredTarget.map { .overrideTarget($0) } ?? .default, Diagnostic(behavior: .note, location: buildFileLocation ?? .unknown, data: DiagnosticData("Skipping '\(path.str)' because it is excluded by EXCLUDED_SOURCE_FILE_NAMES pattern: \(excludePattern)"), childDiagnostics: [ - Diagnostic(behavior: .note, location: .buildSetting(name: BuiltinMacros.EXCLUDED_SOURCE_FILE_NAMES.name), data: DiagnosticData("EXCLUDED_SOURCE_FILE_NAMES: \(excl)")), - Diagnostic(behavior: .note, location: .buildSetting(name: BuiltinMacros.INCLUDED_SOURCE_FILE_NAMES.name), data: DiagnosticData("INCLUDED_SOURCE_FILE_NAMES: \(incl)")), - ])) + self.delegate.emit( + configuredTarget.map { .overrideTarget($0) } ?? .default, + Diagnostic( + behavior: .note, + location: buildFileLocation ?? .unknown, + data: DiagnosticData("Skipping '\(path.str)' because it is excluded by EXCLUDED_SOURCE_FILE_NAMES pattern: \(excludePattern)"), + childDiagnostics: [ + Diagnostic(behavior: .note, location: .buildSetting(name: BuiltinMacros.EXCLUDED_SOURCE_FILE_NAMES.name), data: DiagnosticData("EXCLUDED_SOURCE_FILE_NAMES: \(excl)")), + Diagnostic(behavior: .note, location: .buildSetting(name: BuiltinMacros.INCLUDED_SOURCE_FILE_NAMES.name), data: DiagnosticData("INCLUDED_SOURCE_FILE_NAMES: \(incl)")), + ] + ) + ) } } @@ -1015,13 +1025,11 @@ public class TaskProducerContext: StaleFileRemovalContext, BuildFileResolution private func onDemandResourcesAssetTagPriority(tag: String) -> Double? 
{ if onDemandResourcesInitialInstallTags.contains(tag) { return 1.0 - } - else if let index = onDemandResourcesPrefetchOrder.firstIndex(of: tag) { + } else if let index = onDemandResourcesPrefetchOrder.firstIndex(of: tag) { let count = onDemandResourcesPrefetchOrder.count if count == 1 { return 0.5 - } - else { + } else { return (Double(count - 1 - index) / Double(count - 1)) * 0.9 + 0.05 } } @@ -1208,7 +1216,7 @@ extension TaskProducerContext: Hashable { hasher.combine(ObjectIdentifier(self)) } - public static func ==(lhs: TaskProducerContext, rhs: TaskProducerContext) -> Bool { + public static func == (lhs: TaskProducerContext, rhs: TaskProducerContext) -> Bool { return lhs === rhs } } @@ -1319,8 +1327,8 @@ extension TaskProducerContext: CommandProducer { // a facility for doing that, we approximate by checking if the target contains // any Swift sources. guard let standardTarget = targetDependency.target as? SWBCore.StandardTarget, - let sourcesBuildPhase = standardTarget.sourcesBuildPhase, - sourcesBuildPhase.containsSwiftSources(self.workspaceContext.workspace, self, dependencyScope, self.filePathResolver) + let sourcesBuildPhase = standardTarget.sourcesBuildPhase, + sourcesBuildPhase.containsSwiftSources(self.workspaceContext.workspace, self, dependencyScope, self.filePathResolver) else { return nil } @@ -1337,16 +1345,18 @@ extension TaskProducerContext: CommandProducer { return nil } // Currently there is no more than one compatibility arch; first might not be correct otherwise - guard let compatibleArch = archSpec.compatibilityArchs.first(where: { compatibleArch in - dependencyArchs.contains(compatibleArch) - }) else { + guard + let compatibleArch = archSpec.compatibilityArchs.first(where: { compatibleArch in + dependencyArchs.contains(compatibleArch) + }) + else { return nil } arch = compatibleArch } - - return dependencyScope + return + dependencyScope .subscope(binding: BuiltinMacros.variantCondition, to: variant) .subscopeBindingArchAndTriple(arch: arch) } @@ -1360,14 +1370,12 @@ extension TaskProducerContext: CommandProducer { public func supportsEagerLinking(scope: MacroEvaluationScope) -> Bool { let buildComponents = scope.evaluate(BuiltinMacros.BUILD_COMPONENTS) // Currently, eager linking (using TBDs to unblock linking early within a build invocation) and building with installapi (using TBDs to unblock linking between build invocations) are mutually exclusive. 
- return buildComponents.contains("build") && - !scope.evaluate(BuiltinMacros.SUPPORTS_TEXT_BASED_API) && - scope.evaluate(BuiltinMacros.SWIFT_USE_INTEGRATED_DRIVER) && // Prerequisite for eager linking - !SwiftCompilerSpec.shouldUseWholeModuleOptimization(for: scope).result && // off for WMO - scope.evaluate(BuiltinMacros.EAGER_LINKING) && // Optimization is currently opt-in via this build setting - settings.productType?.supportsEagerLinking == true && // The optimization is only valid for supported product types - compileSourcesExportOnlySwiftSymbols(scope: scope) && // All exported symbols from compile sources must be from Swift sources - !linkedLibrariesMayIntroduceExportedSymbols(scope: scope) // We must not be linking anything that introduces exported symbols + return buildComponents.contains("build") && !scope.evaluate(BuiltinMacros.SUPPORTS_TEXT_BASED_API) && scope.evaluate(BuiltinMacros.SWIFT_USE_INTEGRATED_DRIVER) // Prerequisite for eager linking + && !SwiftCompilerSpec.shouldUseWholeModuleOptimization(for: scope).result // off for WMO + && scope.evaluate(BuiltinMacros.EAGER_LINKING) // Optimization is currently opt-in via this build setting + && settings.productType?.supportsEagerLinking == true // The optimization is only valid for supported product types + && compileSourcesExportOnlySwiftSymbols(scope: scope) // All exported symbols from compile sources must be from Swift sources + && !linkedLibrariesMayIntroduceExportedSymbols(scope: scope) // We must not be linking anything that introduces exported symbols } public func projectHeaderInfo(for target: Target) async -> ProjectHeaderInfo? { @@ -1388,7 +1396,8 @@ extension TaskProducerContext: CommandProducer { let isBundleProductType = productType?.conformsTo(identifier: "com.apple.product-type.bundle") ?? false let isStaticLibrary = scope.evaluate(BuiltinMacros.MACH_O_TYPE) == "staticlib" let isObject = scope.evaluate(BuiltinMacros.MACH_O_TYPE) == "mh_object" - let result = (isBuild || isLocExport) + let result = + (isBuild || isLocExport) && !indexEnableBuildArena && (isBundleProductType || isStaticLibrary || isObject) && isApplePlatform @@ -1405,10 +1414,7 @@ extension TaskProducerContext: CommandProducer { let indexEnableBuildArena = scope.evaluate(BuiltinMacros.INDEX_ENABLE_BUILD_ARENA) let machOType = scope.evaluate(BuiltinMacros.MACH_O_TYPE) let isBundleProductType = productType?.conformsTo(identifier: "com.apple.product-type.bundle") ?? false - return ((isBuild || isLocInstall) && - !indexEnableBuildArena && - machOType != "staticlib" && - isBundleProductType) + return ((isBuild || isLocInstall) && !indexEnableBuildArena && machOType != "staticlib" && isBundleProductType) } public var targetRequiredToBuildForIndexing: Bool { @@ -1433,7 +1439,8 @@ extension TaskProducerContext: CommandProducer { extension TaskProducerContext { private func compileSourcesExportOnlySwiftSymbols(scope: MacroEvaluationScope) -> Bool { guard let buildPhaseTarget = configuredTarget?.target as? BuildPhaseTarget, - let sourcesBuildPhase = buildPhaseTarget.sourcesBuildPhase else { return false } + let sourcesBuildPhase = buildPhaseTarget.sourcesBuildPhase + else { return false } // Ensure that the sources build phase only includes swift source files or source files which won't contribute exported symbols. // FIXME: Various types of sources which generate code that doesn't export any symbols could probably be added here. 
@@ -1444,25 +1451,26 @@ extension TaskProducerContext { scope.subscopeBindingArchAndTriple(arch: arch) }) { let context = BuildFilesProcessingContext(archSpecificSubscope) - guard !sourcesBuildPhase.buildFiles.contains(where: { buildFile in - guard let resolvedBuildFileInfo = try? resolveBuildFileReference(buildFile), - !context.isExcluded(resolvedBuildFileInfo.absolutePath, filters: buildFile.platformFilters) else { return false } - - var fileIsOfOtherType = true - for type in [swiftFileType, applescriptFileType, doccFileType] { - if resolvedBuildFileInfo.fileType.conformsTo(type) == true { - fileIsOfOtherType = false - break + guard + !sourcesBuildPhase.buildFiles.contains(where: { buildFile in + guard let resolvedBuildFileInfo = try? resolveBuildFileReference(buildFile), + !context.isExcluded(resolvedBuildFileInfo.absolutePath, filters: buildFile.platformFilters) + else { return false } + + var fileIsOfOtherType = true + for type in [swiftFileType, applescriptFileType, doccFileType] { + if resolvedBuildFileInfo.fileType.conformsTo(type) == true { + fileIsOfOtherType = false + break + } } - } - return fileIsOfOtherType - }) else { return false } + return fileIsOfOtherType + }) + else { return false } // Check that we're not generating any C sources with exported symbols based on build settings. guard !archSpecificSubscope.generatesKernelExtensionModuleInfoFile(context, settings, sourcesBuildPhase) else { return false } - guard !archSpecificSubscope.generatesAppleGenericVersioningFile(context) || - archSpecificSubscope.evaluate(BuiltinMacros.VERSION_INFO_EXPORT_DECL).split(separator: " ").contains("static") || - ["", "apple-generic-hidden"].contains(scope.evaluate(BuiltinMacros.VERSIONING_SYSTEM)) else { + guard !archSpecificSubscope.generatesAppleGenericVersioningFile(context) || archSpecificSubscope.evaluate(BuiltinMacros.VERSION_INFO_EXPORT_DECL).split(separator: " ").contains("static") || ["", "apple-generic-hidden"].contains(scope.evaluate(BuiltinMacros.VERSIONING_SYSTEM)) else { return false } } @@ -1484,7 +1492,8 @@ extension TaskProducerContext { /// Compute the flattened list of build files from the frameworks build phase after expanding package product targets. func computeFlattenedFrameworksPhaseBuildFiles(_ buildFilesContext: BuildFilesProcessingContext) -> [BuildFile] { guard let buildPhaseTarget = configuredTarget?.target as? BuildPhaseTarget, - let frameworksPhase = buildPhaseTarget.frameworksBuildPhase else { return [] } + let frameworksPhase = buildPhaseTarget.frameworksBuildPhase + else { return [] } // The ordered list of output files. var result = [BuildFile]() @@ -1505,9 +1514,10 @@ extension TaskProducerContext { for buildFile in phase.buildFiles { // If this is a package producer reference, visit it recursively. 
if case .targetProduct(let guid) = buildFile.buildableItem, - case let target as PackageProductTarget = workspaceContext.workspace.target(for: guid), - let frameworksBuildPhase = target.frameworksBuildPhase, - buildFilesContext.currentPlatformFilter.matches(buildFile.platformFilters) { + case let target as PackageProductTarget = workspaceContext.workspace.target(for: guid), + let frameworksBuildPhase = target.frameworksBuildPhase, + buildFilesContext.currentPlatformFilter.matches(buildFile.platformFilters) + { if globalProductPlan.dynamicallyBuildingTargets.contains(target) { result.append(buildFile) } else { @@ -1713,7 +1723,6 @@ class ProducerBasedTaskGenerationDelegate: TaskGenerationDelegate { } } - /// This class adapts the TaskGenerationDelegate protocol used by the Core to that provided by the producer delegate API, to provide phase ordering among tasks created by different `PhasedTaskProducers`. /// /// This delegate auto-attaches constructed tasks to the phase ordering gates. diff --git a/Sources/SWBTaskConstruction/TaskProducers/WorkspaceTaskProducers/CreateBuildDirectoryTaskProducer.swift b/Sources/SWBTaskConstruction/TaskProducers/WorkspaceTaskProducers/CreateBuildDirectoryTaskProducer.swift index 12cd9961..b437f015 100644 --- a/Sources/SWBTaskConstruction/TaskProducers/WorkspaceTaskProducers/CreateBuildDirectoryTaskProducer.swift +++ b/Sources/SWBTaskConstruction/TaskProducers/WorkspaceTaskProducers/CreateBuildDirectoryTaskProducer.swift @@ -26,33 +26,35 @@ final class CreateBuildDirectoryTaskProducer: StandardTaskProducer, TaskProducer func prepare() { let containsSwiftPackages = context.globalProductPlan.planRequest.buildGraph.containsSwiftPackages let buildDirectoryContext = context.globalProductPlan.buildDirectories - buildDirectoryContext.add(targetContexts.flatMap { (targetContext: TaskProducerContext) -> [Path] in - // Package products only group dependent targets or carry imparted settings, but never build any content of their own, so we should not create any build directories for them. - if targetContext.configuredTarget?.target.type == .packageProduct { - return [] - } - return targetContext.workspaceContext.buildDirectoryMacros.flatMap { macro -> [Path] in - let scope = targetContext.settings.globalScope - let path = scope.evaluate(macro) + buildDirectoryContext.add( + targetContexts.flatMap { (targetContext: TaskProducerContext) -> [Path] in + // Package products only group dependent targets or carry imparted settings, but never build any content of their own, so we should not create any build directories for them. + if targetContext.configuredTarget?.target.type == .packageProduct { + return [] + } + return targetContext.workspaceContext.buildDirectoryMacros.flatMap { macro -> [Path] in + let scope = targetContext.settings.globalScope + let path = scope.evaluate(macro) - switch macro { - case BuiltinMacros.BUILT_PRODUCTS_DIR: - // If the workspace contains any packages, eagerly create the "PackageFrameworks" directory. As part of "rdar://72205262 (Explore moving away from two target approach for dynamic targets to changing linkage directly in Swift Build)", we should instead compute any search paths to "PackageFrameworks" dynamically to avoid this. - if containsSwiftPackages { - return [path, path.join("PackageFrameworks")] - } - case BuiltinMacros.DSTROOT: - // Skip generating DSTROOT if DEPLOYMENT_LOCATION is not enabled. 
- if !scope.evaluate(BuiltinMacros.DEPLOYMENT_LOCATION) { - return [] + switch macro { + case BuiltinMacros.BUILT_PRODUCTS_DIR: + // If the workspace contains any packages, eagerly create the "PackageFrameworks" directory. As part of "rdar://72205262 (Explore moving away from two target approach for dynamic targets to changing linkage directly in Swift Build)", we should instead compute any search paths to "PackageFrameworks" dynamically to avoid this. + if containsSwiftPackages { + return [path, path.join("PackageFrameworks")] + } + case BuiltinMacros.DSTROOT: + // Skip generating DSTROOT if DEPLOYMENT_LOCATION is not enabled. + if !scope.evaluate(BuiltinMacros.DEPLOYMENT_LOCATION) { + return [] + } + default: + break } - default: - break - } - return [path] + return [path] + } } - }) + ) buildDirectoryContext.freeze() } diff --git a/Sources/SWBTaskConstruction/TaskProducers/WorkspaceTaskProducers/HeadermapVFSTaskProducer.swift b/Sources/SWBTaskConstruction/TaskProducers/WorkspaceTaskProducers/HeadermapVFSTaskProducer.swift index f719ff12..ee79dd4a 100644 --- a/Sources/SWBTaskConstruction/TaskProducers/WorkspaceTaskProducers/HeadermapVFSTaskProducer.swift +++ b/Sources/SWBTaskConstruction/TaskProducers/WorkspaceTaskProducers/HeadermapVFSTaskProducer.swift @@ -33,22 +33,25 @@ final class HeadermapVFSTaskProducer: StandardTaskProducer, TaskProducer { var tasks = [any PlannedTask]() do { - let vfsContentsByPath = try Dictionary(try await targetContexts.concurrentMap(maximumParallelism: 100) { (targetContext: TaskProducerContext) async throws -> (Path, ByteString)? in - let targetScope = targetContext.settings.globalScope - let vfsSetting = targetScope.evaluate(BuiltinMacros.CPP_HEADERMAP_PRODUCT_HEADERS_VFS_FILE) - guard targetContext.configuredTarget?.target is SWBCore.StandardTarget, targetScope.evaluate(BuiltinMacros.USE_HEADERMAP), !vfsSetting.isEmpty else { - return nil - } - let vfsPath = self.context.makeAbsolute(vfsSetting) - - let contents = try await targetContext.constructVFSContents() - return (vfsPath, contents) - }.compactMap { $0 }, uniquingKeysWith: { first, second in - guard first == second else { - throw StubError.error("Unexpected difference in VFS content.\nFirst: \(first.asString)\nSecond:\(second.asString)") + let vfsContentsByPath = try Dictionary( + try await targetContexts.concurrentMap(maximumParallelism: 100) { (targetContext: TaskProducerContext) async throws -> (Path, ByteString)? 
in + let targetScope = targetContext.settings.globalScope + let vfsSetting = targetScope.evaluate(BuiltinMacros.CPP_HEADERMAP_PRODUCT_HEADERS_VFS_FILE) + guard targetContext.configuredTarget?.target is SWBCore.StandardTarget, targetScope.evaluate(BuiltinMacros.USE_HEADERMAP), !vfsSetting.isEmpty else { + return nil + } + let vfsPath = self.context.makeAbsolute(vfsSetting) + + let contents = try await targetContext.constructVFSContents() + return (vfsPath, contents) + }.compactMap { $0 }, + uniquingKeysWith: { first, second in + guard first == second else { + throw StubError.error("Unexpected difference in VFS content.\nFirst: \(first.asString)\nSecond:\(second.asString)") + } + return first } - return first - }) + ) for (vfsPath, contents) in vfsContentsByPath { await appendGeneratedTasks(&tasks) { delegate in @@ -175,11 +178,16 @@ extension TaskProducerContext { let contentsFolderString = scope.evaluate(BuiltinMacros.CONTENTS_FOLDER_PATH).str let wrapperNameString = scope.evaluate(BuiltinMacros.WRAPPER_NAME).str - let moduleInfo = GlobalProductPlan.computeModuleInfo(workspaceContext: globalProductPlan.planRequest.workspaceContext, target: target, settings: settings, diagnosticHandler: { message, location, component, essential in - if essential { - error(message, location: location, component: component) + let moduleInfo = GlobalProductPlan.computeModuleInfo( + workspaceContext: globalProductPlan.planRequest.workspaceContext, + target: target, + settings: settings, + diagnosticHandler: { message, location, component, essential in + if essential { + error(message, location: location, component: component) + } } - }) + ) if let moduleInfo = moduleInfo { let moduleMapSourcePath = mapToTempFilesOnly ? moduleInfo.moduleMapPaths.tmpPath : moduleInfo.moduleMapPaths.sourcePath if !moduleMapSourcePath.isEmpty { @@ -200,7 +208,7 @@ extension TaskProducerContext { // Only invoke the closure for this file if the file is really going to be generated. if moduleInfo.exportsSwiftObjCAPI { let generatedSwiftHeaderPath = SwiftCompilerSpec.generatedObjectiveCHeaderOutputPath(scope) - let swiftHeaderName = scope.evaluate(BuiltinMacros.SWIFT_OBJC_INTERFACE_HEADER_NAME) // if exportsSwiftObjCAPI, then swiftHeaderName is not empty + let swiftHeaderName = scope.evaluate(BuiltinMacros.SWIFT_OBJC_INTERFACE_HEADER_NAME) // if exportsSwiftObjCAPI, then swiftHeaderName is not empty // FIXME: The calculation of swiftHeaderPath below seems like an unfortunate hard-coding of an algorithm that should be elsewhere; it comes from commit ce4151bf9e in HeadermapTaskProducer.swift originally. // FIXME: It's also not clear under what conditions this calculation will be different from SwiftCompilerSpec.generatedObjectiveCHeaderOutputPath(), but if it is (now or someday) we invoke the closure. 
let swiftHeaderPath = builtProductsDir.join(publicHeadersFolderPseudoPath).join(swiftHeaderName) diff --git a/Sources/SWBTaskConstruction/TaskProducers/WorkspaceTaskProducers/PCHModuleMapTaskProducer.swift b/Sources/SWBTaskConstruction/TaskProducers/WorkspaceTaskProducers/PCHModuleMapTaskProducer.swift index 0a608fe6..47b041ed 100644 --- a/Sources/SWBTaskConstruction/TaskProducers/WorkspaceTaskProducers/PCHModuleMapTaskProducer.swift +++ b/Sources/SWBTaskConstruction/TaskProducers/WorkspaceTaskProducers/PCHModuleMapTaskProducer.swift @@ -28,36 +28,41 @@ final class PCHModuleMapTaskProducer: StandardTaskProducer, TaskProducer { var tasks = [any PlannedTask]() do { - let prefixHeadersToPrecompile = try Dictionary(try await targetContexts.concurrentMap(maximumParallelism: 100) { (targetContext: TaskProducerContext) async throws -> (Path, ByteString)? in - let scope = targetContext.settings.globalScope + let prefixHeadersToPrecompile = try Dictionary( + try await targetContexts.concurrentMap(maximumParallelism: 100) { (targetContext: TaskProducerContext) async throws -> (Path, ByteString)? in + let scope = targetContext.settings.globalScope - // If there is no prefix header, we are done. - var prefixHeader = scope.evaluate(BuiltinMacros.GCC_PREFIX_HEADER) - guard !prefixHeader.isEmpty else { - return nil - } + // If there is no prefix header, we are done. + var prefixHeader = scope.evaluate(BuiltinMacros.GCC_PREFIX_HEADER) + guard !prefixHeader.isEmpty else { + return nil + } - // Make the path absolute. - prefixHeader = targetContext.createNode(prefixHeader).path + // Make the path absolute. + prefixHeader = targetContext.createNode(prefixHeader).path - let prefixModuleMapFile = ClangCompilerSpec.getPrefixHeaderModuleMap(prefixHeader, scope) + let prefixModuleMapFile = ClangCompilerSpec.getPrefixHeaderModuleMap(prefixHeader, scope) - if let prefixModuleMapFile { - let moduleMapContents = ByteString(encodingAsUTF8: """ - module __PCH { - header "\(prefixHeader.str)" - export * + if let prefixModuleMapFile { + let moduleMapContents = ByteString( + encodingAsUTF8: """ + module __PCH { + header "\(prefixHeader.str)" + export * + } + """ + ) + return (prefixModuleMapFile, moduleMapContents) } - """) - return (prefixModuleMapFile, moduleMapContents) - } - return nil - }.compactMap{ $0 }, uniquingKeysWith: { first, second in - guard first == second else { - throw StubError.error("Unexpected difference in PCH module map content.\nFirst: \(first.asString)\nSecond:\(second.asString)") + return nil + }.compactMap { $0 }, + uniquingKeysWith: { first, second in + guard first == second else { + throw StubError.error("Unexpected difference in PCH module map content.\nFirst: \(first.asString)\nSecond:\(second.asString)") + } + return first } - return first - }) + ) for (prefixModuleMapFilePath, moduleMapContents) in prefixHeadersToPrecompile { await appendGeneratedTasks(&tasks) { delegate in diff --git a/Sources/SWBTaskConstruction/TaskProducers/WorkspaceTaskProducers/XCFrameworkTaskProducer.swift b/Sources/SWBTaskConstruction/TaskProducers/WorkspaceTaskProducers/XCFrameworkTaskProducer.swift index a855ddae..738d6896 100644 --- a/Sources/SWBTaskConstruction/TaskProducers/WorkspaceTaskProducers/XCFrameworkTaskProducer.swift +++ b/Sources/SWBTaskConstruction/TaskProducers/WorkspaceTaskProducers/XCFrameworkTaskProducer.swift @@ -104,8 +104,7 @@ final class XCFrameworkTaskProducer: StandardTaskProducer, TaskProducer { return (library, outputDirectory) } - } - catch { + } catch { 
context.error(error.localizedDescription) } } @@ -130,8 +129,7 @@ final class XCFrameworkTaskProducer: StandardTaskProducer, TaskProducer { let expectedSignatures: [String]? if let expectedSignature = config.expectedSignature { expectedSignatures = [expectedSignature] - } - else { + } else { expectedSignatures = nil } diff --git a/Sources/SWBTaskExecution/BuildDescription.swift b/Sources/SWBTaskExecution/BuildDescription.swift index f2279da3..b0e67737 100644 --- a/Sources/SWBTaskExecution/BuildDescription.swift +++ b/Sources/SWBTaskExecution/BuildDescription.swift @@ -246,10 +246,12 @@ package final class BuildDescription: Serializable, Sendable, Encodable, Cacheab if case let .prepareForIndexing(targetsToPrepare?, _) = buildRequest.buildCommand { var nodesToBuild: [BuildNodeToPrepareForIndex] = [] for targetToPrepare in targetsToPrepare { - let foundTargets = self.allConfiguredTargets.filter{ $0.target.guid == targetToPrepare.guid } - guard let selectedTarget = foundTargets.one(by: { - buildRequestContext.selectConfiguredTargetForIndex($0, $1, hasEnabledIndexBuildArena: buildRequest.enableIndexBuildArena, runDestination: buildRequest.parameters.activeRunDestination) - }) else { + let foundTargets = self.allConfiguredTargets.filter { $0.target.guid == targetToPrepare.guid } + guard + let selectedTarget = foundTargets.one(by: { + buildRequestContext.selectConfiguredTargetForIndex($0, $1, hasEnabledIndexBuildArena: buildRequest.enableIndexBuildArena, runDestination: buildRequest.parameters.activeRunDestination) + }) + else { continue } for task in taskStore.tasksForTarget(selectedTarget) { @@ -605,21 +607,21 @@ package final class BuildDescriptionBuilder { allOutputs.insert(Ref(MakePlannedVirtualNode(staleFileRemovalIdentifier))) // It's a matter of convenience (I think - mhr) that the SFR task key here is the same as the SFR node added to "outputs" - they could be different, if we want them to be. - commandDefinitions["<\(staleFileRemovalIdentifier)>"] = OutputByteStream().writingJSONObject({ - $0["tool"] = "stale-file-removal" - $0["expectedOutputs"] = outputPaths.map { $0.str } - - // Unwrap the configured target, as target-independent tasks don't have a set of root paths. - if let configuredTarget = configuredTarget, let rootPaths = rootPathsPerTarget[configuredTarget] { - $0["roots"] = rootPaths.map { $0.str } - } + commandDefinitions["<\(staleFileRemovalIdentifier)>"] = + OutputByteStream().writingJSONObject({ + $0["tool"] = "stale-file-removal" + $0["expectedOutputs"] = outputPaths.map { $0.str } + + // Unwrap the configured target, as target-independent tasks don't have a set of root paths. + if let configuredTarget = configuredTarget, let rootPaths = rootPathsPerTarget[configuredTarget] { + $0["roots"] = rootPaths.map { $0.str } + } - $0["outputs"] = ["<\(staleFileRemovalIdentifier)>"] - }).bytes + $0["outputs"] = ["<\(staleFileRemovalIdentifier)>"] + }).bytes } } - // Create the root target node. do { // The roots are any outputs which are never declared as an input. 
@@ -628,11 +630,12 @@ package final class BuildDescriptionBuilder { let allRootIdentifiers = allOutputs.subtracting(allInputs).map({ $0.instance.identifier }).sorted() let stableIdentifier = "" - commandDefinitions[stableIdentifier] = OutputByteStream().writingJSONObject({ - $0["tool"] = "phony" - $0["inputs"] = allRootIdentifiers - $0["outputs"] = [""] - }).bytes + commandDefinitions[stableIdentifier] = + OutputByteStream().writingJSONObject({ + $0["tool"] = "phony" + $0["inputs"] = allRootIdentifiers + $0["outputs"] = [""] + }).bytes } func encodeIfNeeded(_ value: ByteString) -> ByteString { @@ -653,7 +656,7 @@ package final class BuildDescriptionBuilder { "name": encodeIfNeeded("basic"), "version": ByteString(encodingAsUTF8: String(BuildDescription.manifestClientVersion)), "file-system": encodeIfNeeded(ByteString(encodingAsUTF8: fs.fileSystemMode.manifestLabel)), - "perform-ownership-analysis": SWBFeatureFlag.performOwnershipAnalysis.value ? encodeIfNeeded("yes") : encodeIfNeeded("no") + "perform-ownership-analysis": SWBFeatureFlag.performOwnershipAnalysis.value ? encodeIfNeeded("yes") : encodeIfNeeded("no"), ] let sections = [ @@ -673,8 +676,7 @@ package final class BuildDescriptionBuilder { // Pass the manifest data to the delegate. do { try delegate.recordManifest(targetDefinitions: targetDefinitions, toolDefinitions: toolDefinitions, nodeDefinitions: nodeDefinitions, commandDefinitions: commandDefinitions) - } - catch { + } catch { throw StubError.error("unable to record manifest to build description delegate: \(error)") } @@ -694,8 +696,7 @@ package final class BuildDescriptionBuilder { let buildDescription: BuildDescription do { buildDescription = try BuildDescription(inDir: path, signature: signature, taskStore: frozenTaskStore, allOutputPaths: allOutputPaths, rootPathsPerTarget: rootPathsPerTarget, moduleCachePathsPerTarget: moduleCachePathsPerTarget, artifactInfoPerTarget: artifactInfoPerTarget, casValidationInfos: casValidationInfos, settingsPerTarget: settingsPerTarget, taskActionMap: taskActionMap, targetTaskCounts: targetTaskCounts, moduleSessionFilePath: moduleSessionFilePath, diagnostics: diagnosticsEngines.mapValues { engine in engine.diagnostics }, fs: fs, invalidationPaths: invalidationPaths, recursiveSearchPathResults: recursiveSearchPathResults, copiedPathMap: copiedPathMap, targetDependencies: targetDependencies, definingTargetsByModuleName: definingTargetsByModuleName, bypassActualTasks: bypassActualTasks, targetsBuildInParallel: targetsBuildInParallel, emitFrontendCommandLines: emitFrontendCommandLines) - } - catch { + } catch { throw StubError.error("unable to create build description: \(error)") } @@ -703,8 +704,7 @@ package final class BuildDescriptionBuilder { do { try fs.createDirectory(buildDescription.manifestPath.dirname, recursive: true) try fs.write(buildDescription.manifestPath, contents: manifest.bytes, atomically: true) - } - catch { + } catch { throw StubError.error("unable to write manifest to '\(buildDescription.manifestPath.str)': \(error)") } @@ -739,7 +739,7 @@ package final class BuildDescriptionBuilder { } if let info = mutatingTasks[Ref(task)], !info.commandDependencies.isEmpty { // If this is a mutating command, we must rewrite out any mutated input nodes... - inputs = inputs.filter{ !mutatedNodes.contains(Ref($0)) } + inputs = inputs.filter { !mutatedNodes.contains(Ref($0)) } // ... and append all the necessary command dependencies. 
// @@ -767,7 +767,7 @@ package final class BuildDescriptionBuilder { // If this is a mutating command, we must rewrite out any mutated actual output nodes (downstream edges must be either themselves mutating, or depend on some other gate to introduce an ordering between them). if !info.commandDependencies.isEmpty { - outputs = outputs.filter{ !mutatedNodes.contains(Ref($0)) } + outputs = outputs.filter { !mutatedNodes.contains(Ref($0)) } // We currently require producers to have defined an extra virtual node to use for the purposes of forcing the ordering of this command. if outputs.isEmpty { @@ -791,11 +791,11 @@ package final class BuildDescriptionBuilder { commandDefinitions[identifier.rawValue] = definition.bytes // Use this map later to diagnose the attempts to define multiple producers for an output - taskOutputMap[Ref(task), default:[]].append(contentsOf: outputs) + taskOutputMap[Ref(task), default: []].append(contentsOf: outputs) // Update the global input and output list. - allInputs.formUnion(inputs.map{ Ref($0) }) - allOutputs.formUnion(outputs.map{ Ref($0) }) + allInputs.formUnion(inputs.map { Ref($0) }) + allOutputs.formUnion(outputs.map { Ref($0) }) } } @@ -809,14 +809,13 @@ package final class BuildDescriptionBuilder { // Add it to the definitions map. let identifier = node.identifier let newDefinition = definition.bytes - assert(!nodeDefinitions.contains(identifier) || nodeDefinitions[identifier] == newDefinition, "non-unique node identifier '\(identifier)'") - nodeDefinitions[identifier] = newDefinition - } + assert(!nodeDefinitions.contains(identifier) || nodeDefinitions[identifier] == newDefinition, "non-unique node identifier '\(identifier)'") + nodeDefinitions[identifier] = newDefinition + } } // MARK: Adding commands for PlannedTasks. - /// Add a phony command definition for a task. /// /// - parameter task: The task to associate with the command. @@ -1027,13 +1026,13 @@ extension BuildDescription { // FIXME: Bypass actual tasks should go away, eventually. // // FIXME: This layering isn't working well, we are plumbing a bunch of stuff through here just because we don't want to talk to TaskConstruction. - static package func construct(workspace: Workspace, tasks: [any PlannedTask], path: Path, signature: BuildDescriptionSignature, buildCommand: BuildCommand, diagnostics: [ConfiguredTarget?: [Diagnostic]] = [:], indexingInfo: [(forTarget: ConfiguredTarget?, path: Path, indexingInfo: any SourceFileIndexingInfo)] = [], fs: any FSProxy = localFS, bypassActualTasks: Bool = false, targetsBuildInParallel: Bool = true, emitFrontendCommandLines: Bool = false, moduleSessionFilePath: Path? = nil, invalidationPaths: [Path] = [], recursiveSearchPathResults: [RecursiveSearchPathResolver.CachedResult] = [], copiedPathMap: [String: String] = [:], rootPathsPerTarget: [ConfiguredTarget:[Path]] = [:], moduleCachePathsPerTarget: [ConfiguredTarget: [Path]] = [:], artifactInfoPerTarget: [ConfiguredTarget: ArtifactInfo] = [:], casValidationInfos: [BuildDescription.CASValidationInfo] = [], staleFileRemovalIdentifierPerTarget: [ConfiguredTarget?: String] = [:], settingsPerTarget: [ConfiguredTarget: Settings] = [:], delegate: any BuildDescriptionConstructionDelegate, targetDependencies: [TargetDependencyRelationship] = [], definingTargetsByModuleName: [String: OrderedSet], userPreferences: UserPreferences) async throws -> BuildDescription? 
{ + static package func construct(workspace: Workspace, tasks: [any PlannedTask], path: Path, signature: BuildDescriptionSignature, buildCommand: BuildCommand, diagnostics: [ConfiguredTarget?: [Diagnostic]] = [:], indexingInfo: [(forTarget: ConfiguredTarget?, path: Path, indexingInfo: any SourceFileIndexingInfo)] = [], fs: any FSProxy = localFS, bypassActualTasks: Bool = false, targetsBuildInParallel: Bool = true, emitFrontendCommandLines: Bool = false, moduleSessionFilePath: Path? = nil, invalidationPaths: [Path] = [], recursiveSearchPathResults: [RecursiveSearchPathResolver.CachedResult] = [], copiedPathMap: [String: String] = [:], rootPathsPerTarget: [ConfiguredTarget: [Path]] = [:], moduleCachePathsPerTarget: [ConfiguredTarget: [Path]] = [:], artifactInfoPerTarget: [ConfiguredTarget: ArtifactInfo] = [:], casValidationInfos: [BuildDescription.CASValidationInfo] = [], staleFileRemovalIdentifierPerTarget: [ConfiguredTarget?: String] = [:], settingsPerTarget: [ConfiguredTarget: Settings] = [:], delegate: any BuildDescriptionConstructionDelegate, targetDependencies: [TargetDependencyRelationship] = [], definingTargetsByModuleName: [String: OrderedSet], userPreferences: UserPreferences) async throws -> BuildDescription? { var diagnostics = diagnostics // We operate on the sorted tasks here to ensure that the list of task additional inputs is deterministic. // // We need to sort on the stable identifiers in order to ensure the uniqueness of the sort. - let sortedTasks = tasks.sorted{ $0.identifier < $1.identifier } + let sortedTasks = tasks.sorted { $0.identifier < $1.identifier } let messageShortening = userPreferences.activityTextShorteningLevel @@ -1127,7 +1126,7 @@ extension BuildDescription { if delegate.cancelled { return nil } // FIXME: Make this more efficient. - let mutatedNodes = Set>(task.inputs.map{ Ref($0) }).intersection(Set>(task.outputs.map{ Ref($0) })) + let mutatedNodes = Set>(task.inputs.map { Ref($0) }).intersection(Set>(task.outputs.map { Ref($0) })) for node in mutatedNodes { let info = mutableNodes.getOrInsert(node) { MutableNodeInfo() } info.mutatingTasks.append(task) @@ -1171,10 +1170,13 @@ extension BuildDescription { let childDiagnostics = creators.map({ .task($0.execTask) }).richFormattedRuleInfo(workspace: workspace) diagnostics[nil, default: []].append( - Diagnostic(behavior: .error, - location: .unknown, - data: DiagnosticData("Multiple commands produce '\(node.instance.path.str)'"), - childDiagnostics: childDiagnostics)) + Diagnostic( + behavior: .error, + location: .unknown, + data: DiagnosticData("Multiple commands produce '\(node.instance.path.str)'"), + childDiagnostics: childDiagnostics + ) + ) } } @@ -1196,24 +1198,28 @@ extension BuildDescription { // This works because we enforce that every mutating command *must* be strongly ordered w.r.t. the creator and the other mutators, so that we know the order they should run in. func distance(from origin: any PlannedTask, to predecessor: any PlannedTask) -> Int? { let ignoring = node - return minimumDistance(from: Ref(origin), to: Ref(predecessor), successors: { taskRef in + return minimumDistance( + from: Ref(origin), + to: Ref(predecessor), + successors: { taskRef in let task = taskRef.instance var inputNodes = task.inputs if let extraInputs = taskAdditionalInputs[Ref(task)] { inputNodes += extraInputs.nodes } - let inputs = inputNodes.flatMap { input -> [Ref] in + let inputs = inputNodes.flatMap { input -> [Ref] in if input === ignoring { return [] } - return producers[Ref(input)]?.map{ Ref($0) } ?? 
[] + return producers[Ref(input)]?.map { Ref($0) } ?? [] } return inputs - }) + } + ) } let orderedMutatingTasks = info.mutatingTasks.sorted(by: { - // A task precedes another iff there exists some path to it. - return distance(from: $1, to: $0) != nil - }) + // A task precedes another iff there exists some path to it. + return distance(from: $1, to: $0) != nil + }) // Starting with the creator, create a command trigger for the next command in the mutating task chain. var producer = creator @@ -1253,9 +1259,16 @@ extension BuildDescription { var path = !isDirectory ? inputPath.dirname : inputPath while !path.isEmpty && !path.isRoot { if let tasks = paths[path] { - diagnostics[nil, default: []].append(Diagnostic(behavior: .error, location: .unknown, data: DiagnosticData("Multiple commands produce conflicting outputs"), childDiagnostics: ([(pathString(inputPath, isDirectory: isDirectory), inputTask)] + tasks.map { task in (pathString(path, isDirectory: true), task) }).map { (path, task) in - Diagnostic(behavior: .note, location: .unknown, data: DiagnosticData("\(path) (for task: \(task.ruleInfo))")) - })) + diagnostics[nil, default: []].append( + Diagnostic( + behavior: .error, + location: .unknown, + data: DiagnosticData("Multiple commands produce conflicting outputs"), + childDiagnostics: ([(pathString(inputPath, isDirectory: isDirectory), inputTask)] + tasks.map { task in (pathString(path, isDirectory: true), task) }).map { (path, task) in + Diagnostic(behavior: .note, location: .unknown, data: DiagnosticData("\(path) (for task: \(task.ruleInfo))")) + } + ) + ) } path = path.dirname } @@ -1269,7 +1282,8 @@ extension BuildDescription { var ownedDirectories = DirectoryOutputs() // Create a list of tuples of all output nodes in the graph, and their creator tasks, sorted by depth (which is the same as lexicographic order in this case). The list may contain multiple entries for the same output path, if multiple tasks in the graph (erroneously) produce the same output. Assumes paths are normalized. - let outputNodesAndTasks = sortedTasks + let outputNodesAndTasks = + sortedTasks .flatMap { task in task is GateTask ? [] : task.outputs.map { output in (output, task) } } .sorted(by: { $0.0.path < $1.0.path }) @@ -1365,15 +1379,15 @@ extension BuildDescription { try await group.waitForAll() } - } catch is CancellationError { + } catch is CancellationError { return nil } } // Diagnose attempts to define multiple producers (tasks) for an output. - var outputsSet = Set>() // for identifying duplicate output nodes across tasks + var outputsSet = Set>() // for identifying duplicate output nodes across tasks for (_, task) in sortedTasks.enumerated() { - let amendedOutputs = builder.taskOutputMap[Ref(task)] ?? [] // get the amended outputs of the task + let amendedOutputs = builder.taskOutputMap[Ref(task)] ?? [] // get the amended outputs of the task for output in amendedOutputs { if outputsSet.contains(Ref(output)) { // This condition should almost never appear in user projects, but we surface it as an error versus an assert in case there are valid situations where the user can author a project that would hit it. 
@@ -1498,7 +1512,7 @@ package extension PlannedNode { } extension BuildDescription.CASValidationInfo: Serializable { - package func serialize(to serializer: T) where T : Serializer { + package func serialize(to serializer: T) where T: Serializer { serializer.serializeAggregate(2) { serializer.serialize(options) serializer.serialize(llvmCasExec) @@ -1520,7 +1534,7 @@ extension BuildDescription.CASValidationInfo: Hashable { hasher.combine(options.casPath) hasher.combine(llvmCasExec) } - static package func ==(lhs: Self, rhs: Self) -> Bool { + static package func == (lhs: Self, rhs: Self) -> Bool { return lhs.options.casPath == rhs.options.casPath && lhs.llvmCasExec == rhs.llvmCasExec } } diff --git a/Sources/SWBTaskExecution/BuildDescriptionManager.swift b/Sources/SWBTaskExecution/BuildDescriptionManager.swift index e119cd92..525ee9eb 100644 --- a/Sources/SWBTaskExecution/BuildDescriptionManager.swift +++ b/Sources/SWBTaskExecution/BuildDescriptionManager.swift @@ -70,12 +70,18 @@ package enum BuildDescriptionMemoryCacheEvictionPolicy: Sendable, Hashable { /// /// It is intended to manage the construction of descriptions for incoming build requests (or other operations requiring a complete build description), and to work with the build descriptions to efficiently cache the results. package final class BuildDescriptionManager: Sendable { - static let descriptionsRequested = Statistic("BuildDescriptionManager.descriptionRequests", - "The number of build descriptions which were requested.") - static let descriptionsComputed = Statistic("BuildDescriptionManager.descriptionsComputed", - "The number of build descriptions which were computed.") - static let descriptionsLoaded = Statistic("BuildDescriptionManager.descriptionsLoaded", - "The number of build descriptions which were loaded from disk.") + static let descriptionsRequested = Statistic( + "BuildDescriptionManager.descriptionRequests", + "The number of build descriptions which were requested." + ) + static let descriptionsComputed = Statistic( + "BuildDescriptionManager.descriptionsComputed", + "The number of build descriptions which were computed." + ) + static let descriptionsLoaded = Statistic( + "BuildDescriptionManager.descriptionsLoaded", + "The number of build descriptions which were loaded from disk." + ) /// The queue used to serialize access to the on-disk description cache. /// Right now this is only used to write the serialized cached descriptions to disk on a background thread and to remove them from disk, but not to read them or to access the index. In order for this to be a problem, this description would need to be evicted from the in-memory cache and a new request for this description would need to come in before the description has been written. A further refinement of this could involve ensuring that the in-memory copy never gets evicted until the on-disk copy has been written, or providing more sophisticated read/write synchronization of the individual on-disk cache items. 
@@ -109,26 +115,32 @@ package final class BuildDescriptionManager: Sendable { package init(fs: any FSProxy, buildDescriptionMemoryCacheEvictionPolicy: BuildDescriptionMemoryCacheEvictionPolicy, maxCacheSize: (inMemory: Int, onDisk: Int) = (4, 4)) { self.fs = fs self.inMemoryCachedBuildDescriptions = withHeavyCacheGlobalState(isolated: buildDescriptionMemoryCacheEvictionPolicy == .never) { - HeavyCache(maximumSize: maxCacheSize.inMemory, evictionPolicy: { - switch buildDescriptionMemoryCacheEvictionPolicy { - case .never: - .never - case .default(let totalCostLimit): - .default(totalCostLimit: totalCostLimit, willEvictCallback: { buildDescription in - // Capture the path to a local variable so that the buildDescription instance isn't retained by OSLog's autoclosure message parameter. - let packagePath = buildDescription.packagePath - #if canImport(os) - OSLog.log("Evicted cached build description at '\(packagePath.str)'") - #endif - }) - } - }()) + HeavyCache( + maximumSize: maxCacheSize.inMemory, + evictionPolicy: { + switch buildDescriptionMemoryCacheEvictionPolicy { + case .never: + .never + case .default(let totalCostLimit): + .default( + totalCostLimit: totalCostLimit, + willEvictCallback: { buildDescription in + // Capture the path to a local variable so that the buildDescription instance isn't retained by OSLog's autoclosure message parameter. + let packagePath = buildDescription.packagePath + #if canImport(os) + OSLog.log("Evicted cached build description at '\(packagePath.str)'") + #endif + } + ) + } + }() + ) } self.maxCacheSize = maxCacheSize } package func waitForBuildDescriptionSerialization() async { - await onDiskCacheAccessQueue.sync { } + await onDiskCacheAccessQueue.sync {} } /// Construct the appropriate build plan for a plan request. @@ -140,7 +152,6 @@ package final class BuildDescriptionManager: Sendable { return await BuildPlan(planRequest: planRequest, taskPlanningDelegate: BuildSystemTaskPlanningDelegate(buildDescriptionPath: descriptionPath, clientDelegate, constructionDelegate: constructionDelegate, qos: planRequest.buildRequest.qos, fileSystem: localFS)) } - /// Construct the build description to use for a particular workspace and request. /// /// NOTE: This is primarily accessible for performance testing purposes, actual clients should prefer to access via the cached methods. 
@@ -180,8 +191,8 @@ package final class BuildDescriptionManager: Sendable { // Compute the default configuration name, platform and root paths per target var staleFileRemovalIdentifierPerTarget = [ConfiguredTarget?: String]() - var settingsPerTarget = [ConfiguredTarget:Settings]() - var rootPathsPerTarget = [ConfiguredTarget:[Path]]() + var settingsPerTarget = [ConfiguredTarget: Settings]() + var rootPathsPerTarget = [ConfiguredTarget: [Path]]() var moduleCachePathsPerTarget = [ConfiguredTarget: [Path]]() var artifactInfoPerTarget = [ConfiguredTarget: ArtifactInfo]() @@ -312,7 +323,7 @@ package final class BuildDescriptionManager: Sendable { func signature(cacheDir: Path) throws -> BuildDescriptionSignature { switch self { case .newOrCached(let planRequest, _, _, _): return try BuildDescriptionSignature.buildDescriptionSignature(planRequest, cacheDir: cacheDir) - case .cachedOnly(let buildDescriptionID, _, _, _, _): return BuildDescriptionSignature.buildDescriptionSignature(buildDescriptionID) + case .cachedOnly(let buildDescriptionID, _, _, _, _): return BuildDescriptionSignature.buildDescriptionSignature(buildDescriptionID) } } } @@ -422,16 +433,20 @@ package final class BuildDescriptionManager: Sendable { } package func releaseBuildDescription(id: BuildDescriptionID) { - self.retainedBuildDescriptions.update(BuildDescriptionSignature.buildDescriptionSignature(id), update: { - let newCount = $0.1 - 1 - if newCount == 0 { - return nil - } else { - return ($0.0, newCount) + self.retainedBuildDescriptions.update( + BuildDescriptionSignature.buildDescriptionSignature(id), + update: { + let newCount = $0.1 - 1 + if newCount == 0 { + return nil + } else { + return ($0.0, newCount) + } + }, + default: { + nil } - }, default: { - nil - }) + ) } /// Returns the path in which the`XCBuildData` directory will live. That location is uses to cache build descriptions for a particular workspace and request, the manifest, and the `build.db` database for llbuild. @@ -490,7 +505,6 @@ package final class BuildDescriptionManager: Sendable { let userPreferences = request.workspaceContext.userPreferences let messageShortening = userPreferences.activityTextShorteningLevel - if messageShortening != .full || userPreferences.enableDebugActivityLogs { constructionDelegate.updateProgress(statusMessage: "Attempting to load build description from disk", showInLog: request.workspaceContext.userPreferences.enableDebugActivityLogs) } @@ -512,15 +526,23 @@ package final class BuildDescriptionManager: Sendable { // Output the difference in signatures for debugging if we already had a build plan if request.workspaceContext.userPreferences.enableDebugActivityLogs, - !request.isForIndex || request.isIndexWorkspaceDescription { + !request.isForIndex || request.isIndexWorkspaceDescription + { let lastBuildPlanRequest = request.isForIndex ? 
lastIndexBuildPlanRequest.withLock({ $0 }) : lastBuildPlanRequest.withLock({ $0 }) if let planRequest = request.planRequest, let lastBuildPlanRequest = lastBuildPlanRequest { do { if let diff = try BuildDescriptionSignature.compareBuildDescriptionSignatures(planRequest, lastBuildPlanRequest, onDiskPath) { - constructionDelegate.emit(Diagnostic(behavior: .note, location: .unknown, data: DiagnosticData("New build description required because the signature changed"), childDiagnostics: [ - Diagnostic(behavior: .note, location: .path(diff.previousSignaturePath), data: DiagnosticData("Previous signature: \(diff.previousSignaturePath.str)")), - Diagnostic(behavior: .note, location: .path(diff.currentSignaturePath), data: DiagnosticData("Current signature: \(diff.currentSignaturePath.str)")), - ])) + constructionDelegate.emit( + Diagnostic( + behavior: .note, + location: .unknown, + data: DiagnosticData("New build description required because the signature changed"), + childDiagnostics: [ + Diagnostic(behavior: .note, location: .path(diff.previousSignaturePath), data: DiagnosticData("Previous signature: \(diff.previousSignaturePath.str)")), + Diagnostic(behavior: .note, location: .path(diff.currentSignaturePath), data: DiagnosticData("Current signature: \(diff.currentSignaturePath.str)")), + ] + ) + ) } } catch { constructionDelegate.emit(Diagnostic(behavior: .error, location: .unknown, data: DiagnosticData("\(error)"))) @@ -600,8 +622,7 @@ package final class BuildDescriptionManager: Sendable { if let buildRequestJSON = request.buildRequest.jsonRepresentation { try self.fs.write(buildDescription.buildRequestPath, contents: ByteString(buildRequestJSON)) } - } - catch { + } catch { // Ignore errors - the failure case is that the description will be recreated for a later build if it's not still in memory. This is a performance hit, but should only occur in strange cases (e.g., the cache directory is not writable, the disk is full, etc.) and is not (I hope) worth nagging the user about. } } @@ -665,10 +686,8 @@ package final class BuildDescriptionManager: Sendable { } } - // MARK: - /// The delegate for planning BuildSystem compatible tasks. 
private final class BuildSystemTaskPlanningDelegate: TaskPlanningDelegate { private let diagnosticsEngines = LockedValue<[ConfiguredTarget?: DiagnosticsEngine]>([:]) diff --git a/Sources/SWBTaskExecution/BuildDescriptionSignature.swift b/Sources/SWBTaskExecution/BuildDescriptionSignature.swift index c711afae..b9f9147f 100644 --- a/Sources/SWBTaskExecution/BuildDescriptionSignature.swift +++ b/Sources/SWBTaskExecution/BuildDescriptionSignature.swift @@ -85,12 +85,14 @@ package struct BuildDescriptionSignatureComponents: Codable, Hashable, Sendable buildParameters: $0.parameters, provisioningInputs: request.provisioningInputs(for: $0), macroConfigSignature: request.buildRequestContext.getCachedSettings($0.parameters, target: $0.target).macroConfigSignature, - specializeGuidForActiveRunDestination: $0.specializeGuidForActiveRunDestination) + specializeGuidForActiveRunDestination: $0.specializeGuidForActiveRunDestination + ) } projects = request.workspaceContext.workspace.projects.map { ProjectMetadata( name: $0.name, - macroConfigSignature: request.buildRequestContext.getCachedSettings(request.buildRequest.parameters, project: $0).macroConfigSignature) + macroConfigSignature: request.buildRequestContext.getCachedSettings(request.buildRequest.parameters, project: $0).macroConfigSignature + ) } systemInfo = request.workspaceContext.systemInfo userInfo = request.workspaceContext.userInfo diff --git a/Sources/SWBTaskExecution/BuiltinTaskActionsExtension.swift b/Sources/SWBTaskExecution/BuiltinTaskActionsExtension.swift index 299a2962..6f589277 100644 --- a/Sources/SWBTaskExecution/BuiltinTaskActionsExtension.swift +++ b/Sources/SWBTaskExecution/BuiltinTaskActionsExtension.swift @@ -16,7 +16,7 @@ public struct BuiltinTaskActionsExtension: TaskActionExtension { public init() { } - public var taskActionImplementations: [SerializableTypeCode : any PolymorphicSerializable.Type] { + public var taskActionImplementations: [SerializableTypeCode: any PolymorphicSerializable.Type] { [ 1: AuxiliaryFileTaskAction.self, 2: CopyPlistTaskAction.self, diff --git a/Sources/SWBTaskExecution/DynamicTaskSpecs/ClangCachingOutputMaterializerDynamicTaskSpec.swift b/Sources/SWBTaskExecution/DynamicTaskSpecs/ClangCachingOutputMaterializerDynamicTaskSpec.swift index d5e22064..f1c93242 100644 --- a/Sources/SWBTaskExecution/DynamicTaskSpecs/ClangCachingOutputMaterializerDynamicTaskSpec.swift +++ b/Sources/SWBTaskExecution/DynamicTaskSpecs/ClangCachingOutputMaterializerDynamicTaskSpec.swift @@ -31,7 +31,7 @@ public struct ClangCachingOutputMaterializerTaskKey: Serializable, CustomDebugSt self.outputName = outputName } - public func serialize(to serializer: T) where T : Serializer { + public func serialize(to serializer: T) where T: Serializer { serializer.serializeAggregate(4) { serializer.serialize(libclangPath) serializer.serialize(casOptions) diff --git a/Sources/SWBTaskExecution/DynamicTaskSpecs/ClangCachingTaskCacheKey.swift b/Sources/SWBTaskExecution/DynamicTaskSpecs/ClangCachingTaskCacheKey.swift index d128d5d9..beed2b9c 100644 --- a/Sources/SWBTaskExecution/DynamicTaskSpecs/ClangCachingTaskCacheKey.swift +++ b/Sources/SWBTaskExecution/DynamicTaskSpecs/ClangCachingTaskCacheKey.swift @@ -24,7 +24,7 @@ public struct ClangCachingTaskCacheKey: Serializable, CustomDebugStringConvertib self.cacheKey = cacheKey } - public func serialize(to serializer: T) where T : Serializer { + public func serialize(to serializer: T) where T: Serializer { serializer.serializeAggregate(3) { serializer.serialize(libclangPath) 
serializer.serialize(casOptions) diff --git a/Sources/SWBTaskExecution/DynamicTaskSpecs/ClangModuleDependencyGraph.swift b/Sources/SWBTaskExecution/DynamicTaskSpecs/ClangModuleDependencyGraph.swift index 2f787667..13b31bcf 100644 --- a/Sources/SWBTaskExecution/DynamicTaskSpecs/ClangModuleDependencyGraph.swift +++ b/Sources/SWBTaskExecution/DynamicTaskSpecs/ClangModuleDependencyGraph.swift @@ -33,7 +33,7 @@ package final class ClangModuleDependencyGraph { /// Dependency info for an explicitly-built module. case module(pcmOutputPath: Path) - package func serialize(to serializer: T) where T : SWBUtil.Serializer { + package func serialize(to serializer: T) where T: SWBUtil.Serializer { serializer.serializeAggregate(2) { switch self { case .command: @@ -88,7 +88,7 @@ package final class ClangModuleDependencyGraph { self.arguments = arguments } - package func serialize(to serializer: T) where T : Serializer { + package func serialize(to serializer: T) where T: Serializer { serializer.serializeAggregate(2) { serializer.serialize(cacheKey) serializer.serialize(arguments) @@ -164,7 +164,7 @@ package final class ClangModuleDependencyGraph { self.usesSerializedDiagnostics = usesSerializedDiagnostics } - package func serialize(to serializer: T) where T : Serializer { + package func serialize(to serializer: T) where T: Serializer { serializer.serializeAggregate(10) { serializer.serialize(kind) serializer.serialize(files) @@ -206,7 +206,7 @@ package final class ClangModuleDependencyGraph { private var scannerRegistry: [LibclangRegistryKey: LibclangWithScanner] = [:] package func waitForCompletion() async { - await registryQueue.sync(flags: .barrier) { } + await registryQueue.sync(flags: .barrier) {} } private struct LibclangWithScanner { @@ -316,7 +316,8 @@ package final class ClangModuleDependencyGraph { ) throws -> ScanResult { let clangWithScanner = try libclangWithScanner(forPath: libclangPath, casOptions: casOptions, cacheFallbackIfNotAvailable: cacheFallbackIfNotAvailable, core: core) - let (compilerLauncher, compiler, originalFileArgs) = usesCompilerLauncher + let (compilerLauncher, compiler, originalFileArgs) = + usesCompilerLauncher ? 
(fileCommandLine[0], fileCommandLine[1], fileCommandLine[2...]) : (nil, fileCommandLine[0], fileCommandLine[1...]) @@ -370,7 +371,7 @@ package final class ClangModuleDependencyGraph { return depModules } - let nameToIdx: [String: Int] = depModules.map{ $0.name + ":" + $0.context_hash }.enumerated().reduce(into: [:]) { (dict, indexAndKey) in + let nameToIdx: [String: Int] = depModules.map { $0.name + ":" + $0.context_hash }.enumerated().reduce(into: [:]) { (dict, indexAndKey) in let (index, key) = indexAndKey dict[key] = index } @@ -444,7 +445,8 @@ package final class ClangModuleDependencyGraph { scanningCommandLine: scanningCommandLine, transitiveIncludeTreeIDs: transitiveIncludeTreeIDs, transitiveCompileCommandCacheKeys: transitiveCommandCacheKeys, - usesSerializedDiagnostics: usesSerializedDiagnostics) + usesSerializedDiagnostics: usesSerializedDiagnostics + ) if reportRequiredTargetDependencies != .no, let targetDependencies = definingTargetsByModuleName[module.name] { requiredTargetDependencies.withLock { for targetDependency in targetDependencies { @@ -526,7 +528,8 @@ package final class ClangModuleDependencyGraph { scanningCommandLine: scanningCommandLine, transitiveIncludeTreeIDs: transitiveIncludeTreeIDs, transitiveCompileCommandCacheKeys: transitiveCommandCacheKeys, - usesSerializedDiagnostics: usesSerializedDiagnostics) + usesSerializedDiagnostics: usesSerializedDiagnostics + ) try recordedDependencyInfoRegistry.getOrInsert(scanningOutputPath, isValid: { _ in true }) { try register(path: scanningOutputPath, dependencyInfo: dependencyInfo, fileSystem: fileSystem) } @@ -571,8 +574,12 @@ package final class ClangModuleDependencyGraph { } } - package func generateReproducer(forFailedDependency dependency: DependencyInfo, - libclangPath: Path, casOptions: CASOptions?, location: String?) throws -> String? { + package func generateReproducer( + forFailedDependency dependency: DependencyInfo, + libclangPath: Path, + casOptions: CASOptions?, + location: String? + ) throws -> String? { let clangWithScanner = try libclangWithScanner( forPath: libclangPath, casOptions: casOptions, @@ -583,7 +590,10 @@ package final class ClangModuleDependencyGraph { return nil } return try clangWithScanner.scanner.generateReproducer( - commandLine: dependency.scanningCommandLine, workingDirectory: dependency.workingDirectory.str, location: location) + commandLine: dependency.scanningCommandLine, + workingDirectory: dependency.workingDirectory.str, + location: location + ) } package var isEmpty: Bool { @@ -621,19 +631,21 @@ package final class ClangModuleDependencyGraph { summaryCSV.writeRow([moduleName, "\(variants.count)"]) summaryMessage += "\(moduleName): \(variants.count == 1 ? "1 variant" : "\(variants.count) variants")\n" - let mergeResult = nWayMerge(variants.map { - (["CWD: \($0.workingDirectory.str)"] + ($0.commands.only?.arguments ?? [])).filter { - if ["pcm", "dia", "d"].contains(Path($0).fileExtension) { - // Filter differences in module paths, they are a function of the other args - return false - } else if $0.hasPrefix("llvmcas://") { - // Filter differences in CAS URLs, they are a function of the other args - return false - } else { - return true + let mergeResult = nWayMerge( + variants.map { + (["CWD: \($0.workingDirectory.str)"] + ($0.commands.only?.arguments ?? 
[])).filter { + if ["pcm", "dia", "d"].contains(Path($0).fileExtension) { + // Filter differences in module paths, they are a function of the other args + return false + } else if $0.hasPrefix("llvmcas://") { + // Filter differences in CAS URLs, they are a function of the other args + return false + } else { + return true + } } } - }).filter { + ).filter { if $0.elementOf.count == variants.count { // Don't report args common to all variants return false diff --git a/Sources/SWBTaskExecution/DynamicTaskSpecs/CompilationCachingDataPruner.swift b/Sources/SWBTaskExecution/DynamicTaskSpecs/CompilationCachingDataPruner.swift index 6b9a2949..6b6ba197 100644 --- a/Sources/SWBTaskExecution/DynamicTaskSpecs/CompilationCachingDataPruner.swift +++ b/Sources/SWBTaskExecution/DynamicTaskSpecs/CompilationCachingDataPruner.swift @@ -25,7 +25,7 @@ package struct ClangCachingPruneDataTaskKey: Hashable, Serializable, CustomDebug self.casOptions = casOptions } - package func serialize(to serializer: T) where T : Serializer { + package func serialize(to serializer: T) where T: Serializer { serializer.serializeAggregate(2) { serializer.serialize(path) serializer.serialize(casOptions) @@ -81,11 +81,11 @@ package final class CompilationCachingDataPruner: Sendable { ) { let casOpts = key.casOptions guard casOpts.limitingStrategy != .discarded else { - return // No need to prune, CAS directory is getting deleted. + return // No need to prune, CAS directory is getting deleted. } let inserted = state.withLock { $0.prunedCASes.insert(key).inserted } guard inserted else { - return // already pruned + return // already pruned } startedAction() @@ -108,8 +108,8 @@ package final class CompilationCachingDataPruner: Sendable { executionDescription: "Clang caching pruning \(casPath) using \(libclangPath)", signature: signature, target: nil, - parentActivity: nil) - { activityID in + parentActivity: nil + ) { activityID in let status: BuildOperationTaskEnded.Status do { let dbSize = try ByteCount(casDBs.getOndiskSize()) @@ -150,11 +150,11 @@ package final class CompilationCachingDataPruner: Sendable { ) { let casOpts = key.casOptions guard casOpts.limitingStrategy != .discarded else { - return // No need to prune, CAS directory is getting deleted. + return // No need to prune, CAS directory is getting deleted. } let inserted = state.withLock { $0.prunedCASes.insert(key).inserted } guard inserted else { - return // already pruned + return // already pruned } startedAction() @@ -177,8 +177,8 @@ package final class CompilationCachingDataPruner: Sendable { executionDescription: "Swift caching pruning \(casPath) using \(swiftscanPath)", signature: signature, target: nil, - parentActivity: nil) - { activityID in + parentActivity: nil + ) { activityID in let status: BuildOperationTaskEnded.Status do { let dbSize = try ByteCount(casDBs.getStorageSize()) @@ -219,11 +219,11 @@ package final class CompilationCachingDataPruner: Sendable { ) { let casOpts = key.casOptions guard casOpts.limitingStrategy != .discarded else { - return // No need to prune, CAS directory is getting deleted. + return // No need to prune, CAS directory is getting deleted. 
} let inserted = state.withLock { $0.prunedCASes.insert(key).inserted } guard inserted else { - return // already pruned + return // already pruned } startedAction() @@ -246,8 +246,8 @@ package final class CompilationCachingDataPruner: Sendable { executionDescription: "Pruning \(casPath) using \(path)", signature: signature, target: nil, - parentActivity: nil) - { activityID in + parentActivity: nil + ) { activityID in let status: BuildOperationTaskEnded.Status do { let dbSize = try? ByteCount(toolchainCAS.getOnDiskSize()) diff --git a/Sources/SWBTaskExecution/DynamicTaskSpecs/CompilationCachingUploader.swift b/Sources/SWBTaskExecution/DynamicTaskSpecs/CompilationCachingUploader.swift index b159d23c..82e19ea6 100644 --- a/Sources/SWBTaskExecution/DynamicTaskSpecs/CompilationCachingUploader.swift +++ b/Sources/SWBTaskExecution/DynamicTaskSpecs/CompilationCachingUploader.swift @@ -16,7 +16,7 @@ import SWBUtil import Foundation #if canImport(os) -import os + import os #endif /// Manages uploading compilation caching outputs in the background, when a remote cache is enabled. @@ -63,7 +63,7 @@ package final class CompilationCachingUploader { ) { let inserted = lock.withLock { uploadedKeys.insert(cacheKey).inserted } guard inserted else { - return // already uploaded + return // already uploaded } startedUpload() @@ -123,7 +123,7 @@ package final class CompilationCachingUploader { ) { let inserted = lock.withLock { uploadedKeys.insert(cacheKey).inserted } guard inserted else { - return // already uploaded + return // already uploaded } startedUpload() @@ -144,9 +144,11 @@ package final class CompilationCachingUploader { if enableDiagnosticRemarks { for output in try swiftCompilation.getOutputs() { activityReporter.emit( - diagnostic: Diagnostic(behavior: .note, - location: .unknown, - data: DiagnosticData("uploaded CAS output \(output.kindName): \(output.casID)")), + diagnostic: Diagnostic( + behavior: .note, + location: .unknown, + data: DiagnosticData("uploaded CAS output \(output.kindName): \(output.casID)") + ), for: activityID, signature: signature ) diff --git a/Sources/SWBTaskExecution/DynamicTaskSpecs/DynamicTaskOperationContext.swift b/Sources/SWBTaskExecution/DynamicTaskSpecs/DynamicTaskOperationContext.swift index c9545df7..fc2a56b9 100644 --- a/Sources/SWBTaskExecution/DynamicTaskSpecs/DynamicTaskOperationContext.swift +++ b/Sources/SWBTaskExecution/DynamicTaskSpecs/DynamicTaskOperationContext.swift @@ -54,7 +54,7 @@ public final class DynamicTaskOperationContext { /// Opaque "token" used to enforce that ``DynamicTaskOperationContext/waitForCompletion()`` is always called immediately prior to invoking ``DynamicTaskOperationContext/reset(completionToken:)``. 
package struct DynamicTaskOperationContextCompletionToken: Sendable { - fileprivate init() { } + fileprivate init() {} consuming fileprivate func run(body: () -> Void) { body() } diff --git a/Sources/SWBTaskExecution/DynamicTaskSpecs/PrecompileClangModuleDynamicTaskSpec.swift b/Sources/SWBTaskExecution/DynamicTaskSpecs/PrecompileClangModuleDynamicTaskSpec.swift index 6bc16e1f..e0ab80aa 100644 --- a/Sources/SWBTaskExecution/DynamicTaskSpecs/PrecompileClangModuleDynamicTaskSpec.swift +++ b/Sources/SWBTaskExecution/DynamicTaskSpecs/PrecompileClangModuleDynamicTaskSpec.swift @@ -91,7 +91,7 @@ final class PrecompileClangModuleDynamicTaskSpec: DynamicTaskSpec { self.fileNameMapPath = fileNameMapPath } - func serialize(to serializer: T) where T : SWBUtil.Serializer { + func serialize(to serializer: T) where T: SWBUtil.Serializer { serializer.serializeAggregate(3) { serializer.serialize(serializedDiagnosticsPath) serializer.serialize(fileNameMapPath) diff --git a/Sources/SWBTaskExecution/DynamicTaskSpecs/SwiftCachingTaskKeys.swift b/Sources/SWBTaskExecution/DynamicTaskSpecs/SwiftCachingTaskKeys.swift index dd7c4981..089ee8b8 100644 --- a/Sources/SWBTaskExecution/DynamicTaskSpecs/SwiftCachingTaskKeys.swift +++ b/Sources/SWBTaskExecution/DynamicTaskSpecs/SwiftCachingTaskKeys.swift @@ -24,7 +24,7 @@ public struct SwiftCachingKeyQueryTaskKey: Serializable, CustomDebugStringConver self.compilerLocation = compilerLocation } - public func serialize(to serializer: T) where T : Serializer { + public func serialize(to serializer: T) where T: Serializer { serializer.serializeAggregate(3) { serializer.serialize(casOptions) serializer.serialize(cacheKeys) @@ -62,7 +62,7 @@ public struct SwiftCachingOutputMaterializerTaskKey: Serializable, CustomDebugSt self.compilerLocation = compilerLocation } - public func serialize(to serializer: T) where T : Serializer { + public func serialize(to serializer: T) where T: Serializer { serializer.serializeAggregate(4) { serializer.serialize(casOptions) serializer.serialize(casID) diff --git a/Sources/SWBTaskExecution/DynamicTaskSpecs/SwiftDriverJobDynamicTaskSpec.swift b/Sources/SWBTaskExecution/DynamicTaskSpecs/SwiftDriverJobDynamicTaskSpec.swift index 127e3047..035662a7 100644 --- a/Sources/SWBTaskExecution/DynamicTaskSpecs/SwiftDriverJobDynamicTaskSpec.swift +++ b/Sources/SWBTaskExecution/DynamicTaskSpecs/SwiftDriverJobDynamicTaskSpec.swift @@ -35,7 +35,7 @@ public struct SwiftDriverJobTaskKey: Serializable, CustomDebugStringConvertible self.casOptions = casOptions } - public func serialize(to serializer: T) where T : Serializer { + public func serialize(to serializer: T) where T: Serializer { serializer.serializeAggregate(8) { serializer.serialize(identifier) serializer.serialize(variant) @@ -80,7 +80,7 @@ public struct SwiftDriverExplicitDependencyJobTaskKey: Serializable, CustomDebug self.casOptions = casOptions } - public func serialize(to serializer: T) where T : Serializer { + public func serialize(to serializer: T) where T: Serializer { serializer.serializeAggregate(5) { serializer.serialize(arch) serializer.serialize(driverJobKey) @@ -125,7 +125,7 @@ struct SwiftDriverJobDynamicTaskPayload: TaskPayload { self.casOptions = try deserializer.deserialize() } - func serialize(to serializer: T) where T : Serializer { + func serialize(to serializer: T) where T: Serializer { serializer.serializeAggregate(4) { serializer.serialize(expectedOutputs) serializer.serialize(isUsingWholeModuleOptimization) @@ -139,7 +139,7 @@ final class SwiftDriverJobDynamicTaskSpec: 
DynamicTaskSpec { func buildExecutableTask(dynamicTask: DynamicTask, context: DynamicTaskOperationContext) throws -> any ExecutableTask { let commandLinePrefix: [ByteString] = [ "builtin-swiftTaskExecution", - "--" + "--", ] var commandLine: [ByteString] let expectedOutputs: [Path] @@ -150,59 +150,60 @@ final class SwiftDriverJobDynamicTaskSpec: DynamicTaskSpec { let compilerLocation: LibSwiftDriver.CompilerLocation let casOpts: CASOptions? switch dynamicTask.taskKey { - case .swiftDriverJob(let key): - guard let job = try context.swiftModuleDependencyGraph.queryPlannedBuild(for: key.identifier).plannedTargetJob(for: key.driverJobKey)?.driverJob else { - throw StubError.error("Failed to lookup Swift driver job \(key.driverJobKey) in build plan \(key.identifier)") - } - commandLine = commandLinePrefix + job.commandLine - expectedOutputs = job.outputs - ruleInfo = ["Swift\(job.ruleInfoType)", key.variant, key.arch, job.descriptionForLifecycle] + job.displayInputs.map(\.str) - forTarget = dynamicTask.target - descriptionForLifecycle = job.descriptionForLifecycle - isUsingWholeModuleOptimization = key.isUsingWholeModuleOptimization - compilerLocation = key.compilerLocation - casOpts = key.casOptions - case .swiftDriverExplicitDependencyJob(let key): - guard let job = context.swiftModuleDependencyGraph.plannedExplicitDependencyBuildJob(for: key.driverJobKey)?.driverJob else { - throw StubError.error("Failed to lookup explicit modules Swift driver job \(key.driverJobKey)") - } - commandLine = commandLinePrefix + job.commandLine - expectedOutputs = job.outputs - assert(expectedOutputs.count > 0, "Explicit modules job was expected to have at least one primary output") - ruleInfo = ["SwiftExplicitDependency\(job.ruleInfoType)", key.arch, expectedOutputs.first?.str ?? ""] - forTarget = nil - descriptionForLifecycle = job.descriptionForLifecycle - // WMO doesn't apply to explicit module builds - isUsingWholeModuleOptimization = false - compilerLocation = key.compilerLocation - casOpts = key.casOptions - default: - fatalError("Unexpected dynamic task: \(dynamicTask)") + case .swiftDriverJob(let key): + guard let job = try context.swiftModuleDependencyGraph.queryPlannedBuild(for: key.identifier).plannedTargetJob(for: key.driverJobKey)?.driverJob else { + throw StubError.error("Failed to lookup Swift driver job \(key.driverJobKey) in build plan \(key.identifier)") + } + commandLine = commandLinePrefix + job.commandLine + expectedOutputs = job.outputs + ruleInfo = ["Swift\(job.ruleInfoType)", key.variant, key.arch, job.descriptionForLifecycle] + job.displayInputs.map(\.str) + forTarget = dynamicTask.target + descriptionForLifecycle = job.descriptionForLifecycle + isUsingWholeModuleOptimization = key.isUsingWholeModuleOptimization + compilerLocation = key.compilerLocation + casOpts = key.casOptions + case .swiftDriverExplicitDependencyJob(let key): + guard let job = context.swiftModuleDependencyGraph.plannedExplicitDependencyBuildJob(for: key.driverJobKey)?.driverJob else { + throw StubError.error("Failed to lookup explicit modules Swift driver job \(key.driverJobKey)") + } + commandLine = commandLinePrefix + job.commandLine + expectedOutputs = job.outputs + assert(expectedOutputs.count > 0, "Explicit modules job was expected to have at least one primary output") + ruleInfo = ["SwiftExplicitDependency\(job.ruleInfoType)", key.arch, expectedOutputs.first?.str ?? 
""] + forTarget = nil + descriptionForLifecycle = job.descriptionForLifecycle + // WMO doesn't apply to explicit module builds + isUsingWholeModuleOptimization = false + compilerLocation = key.compilerLocation + casOpts = key.casOptions + default: + fatalError("Unexpected dynamic task: \(dynamicTask)") } if !supportsParseableOutput(for: ruleInfo) { commandLine = commandLine.filter({ $0 != "-frontend-parseable-output" }) } - return Task(type: self, - payload: - SwiftDriverJobDynamicTaskPayload( - expectedOutputs: expectedOutputs, - isUsingWholeModuleOptimization: isUsingWholeModuleOptimization, - compilerLocation: compilerLocation, - casOptions: casOpts - ), - forTarget: forTarget, - ruleInfo: ruleInfo, - commandLine: commandLine.map { .literal($0) }, - environment: dynamicTask.environment, - workingDirectory: dynamicTask.workingDirectory, - showEnvironment: dynamicTask.showEnvironment, - execDescription: descriptionForLifecycle, - preparesForIndexing: true, - showCommandLineInLog: false, - isDynamic: true - ) + return Task( + type: self, + payload: + SwiftDriverJobDynamicTaskPayload( + expectedOutputs: expectedOutputs, + isUsingWholeModuleOptimization: isUsingWholeModuleOptimization, + compilerLocation: compilerLocation, + casOptions: casOpts + ), + forTarget: forTarget, + ruleInfo: ruleInfo, + commandLine: commandLine.map { .literal($0) }, + environment: dynamicTask.environment, + workingDirectory: dynamicTask.workingDirectory, + showEnvironment: dynamicTask.showEnvironment, + execDescription: descriptionForLifecycle, + preparesForIndexing: true, + showCommandLineInLog: false, + isDynamic: true + ) } var payloadType: (any TaskPayload.Type)? { @@ -217,13 +218,10 @@ final class SwiftDriverJobDynamicTaskSpec: DynamicTaskSpec { if supportsParseableOutput(for: task.ruleInfo) { return SwiftCommandOutputParser.self } else { - return serializedDiagnosticsPaths(task).isEmpty ? - GenericOutputParser.self : - SerializedDiagnosticsOutputParser.self + return serializedDiagnosticsPaths(task).isEmpty ? 
GenericOutputParser.self : SerializedDiagnosticsOutputParser.self } } - func serializedDiagnosticsPaths(_ task: any ExecutableTask) -> [Path] { if supportsParseableOutput(for: task.ruleInfo) { return [] @@ -252,19 +250,19 @@ final class SwiftDriverJobDynamicTaskSpec: DynamicTaskSpec { func buildTaskAction(dynamicTaskKey: DynamicTaskKey, context: DynamicTaskOperationContext) throws -> TaskAction { switch dynamicTaskKey { - case .swiftDriverJob(let key): + case .swiftDriverJob(let key): guard let job = try context.swiftModuleDependencyGraph.queryPlannedBuild(for: key.identifier).plannedTargetJob(for: key.driverJobKey) else { throw StubError.error("Failed to lookup Swift driver job \(key.driverJobKey) in build plan \(key.identifier)") } return SwiftDriverJobTaskAction(job, variant: key.variant, arch: key.arch, identifier: .targetCompile(key.identifier), isUsingWholeModuleOptimization: key.isUsingWholeModuleOptimization) - case .swiftDriverExplicitDependencyJob(let key): - // WMO doesn't apply to explicit module builds - guard let job = context.swiftModuleDependencyGraph.plannedExplicitDependencyBuildJob(for: key.driverJobKey) else { - throw StubError.error("Failed to lookup explicit module Swift driver job \(key.driverJobKey)") - } + case .swiftDriverExplicitDependencyJob(let key): + // WMO doesn't apply to explicit module builds + guard let job = context.swiftModuleDependencyGraph.plannedExplicitDependencyBuildJob(for: key.driverJobKey) else { + throw StubError.error("Failed to lookup explicit module Swift driver job \(key.driverJobKey)") + } return SwiftDriverJobTaskAction(job, variant: nil, arch: key.arch, identifier: .explicitDependency, isUsingWholeModuleOptimization: false) - default: - fatalError("Unexpected dynamic task key: \(dynamicTaskKey)") + default: + fatalError("Unexpected dynamic task key: \(dynamicTaskKey)") } } } diff --git a/Sources/SWBTaskExecution/DynamicTaskSpecs/SwiftDriverPlanningDynamicTaskSpec.swift b/Sources/SWBTaskExecution/DynamicTaskSpecs/SwiftDriverPlanningDynamicTaskSpec.swift index ef3b5711..64556dae 100644 --- a/Sources/SWBTaskExecution/DynamicTaskSpecs/SwiftDriverPlanningDynamicTaskSpec.swift +++ b/Sources/SWBTaskExecution/DynamicTaskSpecs/SwiftDriverPlanningDynamicTaskSpec.swift @@ -21,7 +21,7 @@ public struct SwiftDriverPlanningTaskKey: Serializable, CustomDebugStringConvert self.swiftPayload = swiftPayload } - public func serialize(to serializer: T) where T : Serializer { + public func serialize(to serializer: T) where T: Serializer { serializer.serializeAggregate(1) { serializer.serialize(swiftPayload) } @@ -50,19 +50,20 @@ final class SwiftDriverPlanningDynamicTaskSpec: DynamicTaskSpec { fatalError("Attempted to request a driver planning operation with no driver payload") } - return Task(type: self, - payload: key.swiftPayload, - forTarget: dynamicTask.target, - ruleInfo: driverPayload.ruleInfo, - commandLine: driverPayload.commandLine.map { .literal(ByteString(encodingAsUTF8: $0)) }, - environment: dynamicTask.environment, - workingDirectory: dynamicTask.workingDirectory, - showEnvironment: dynamicTask.showEnvironment, - execDescription: "Planning Swift module \(driverPayload.moduleName) (\(driverPayload.architecture))", - preparesForIndexing: true, - priority: .unblocksDownstreamTasks, - isDynamic: true - ) + return Task( + type: self, + payload: key.swiftPayload, + forTarget: dynamicTask.target, + ruleInfo: driverPayload.ruleInfo, + commandLine: driverPayload.commandLine.map { .literal(ByteString(encodingAsUTF8: $0)) }, + environment: 
dynamicTask.environment, + workingDirectory: dynamicTask.workingDirectory, + showEnvironment: dynamicTask.showEnvironment, + execDescription: "Planning Swift module \(driverPayload.moduleName) (\(driverPayload.architecture))", + preparesForIndexing: true, + priority: .unblocksDownstreamTasks, + isDynamic: true + ) } func buildTaskAction(dynamicTaskKey: DynamicTaskKey, context: DynamicTaskOperationContext) -> TaskAction { diff --git a/Sources/SWBTaskExecution/ProjectPlanner.swift b/Sources/SWBTaskExecution/ProjectPlanner.swift index 287c35b7..2b3cd98b 100644 --- a/Sources/SWBTaskExecution/ProjectPlanner.swift +++ b/Sources/SWBTaskExecution/ProjectPlanner.swift @@ -25,48 +25,53 @@ package struct ProjectPlanner { } package func describeArchivableProducts(input: [SchemeInput]) -> [ProductTupleDescription] { - return Array(Set(input.flatMap { scheme in - return [scheme.analyze, scheme.archive, scheme.profile, scheme.run, scheme.test].flatMap { (input: ActionInput) -> [ProductTupleDescription] in - let parameters = BuildParameters(action: input) - return input.targetIdentifiers.flatMap { (targetIdentifier: String) -> [ProductTupleDescription] in - guard let target = workspaceContext.workspace.target(for: targetIdentifier) else { return [] } - let settings = buildRequestContext.getCachedSettings(parameters, target: target) - return settings.globalScope.evaluate(BuiltinMacros.SUPPORTED_PLATFORMS).compactMap { supportedPlatform in - guard let platform = workspaceContext.core.platformRegistry.lookup(name: supportedPlatform) else { return nil } - let settings = buildRequestContext.getCachedSettings(BuildParameters(action: input, platform: platform), target: target) - let productName = settings.globalScope.evaluate(BuiltinMacros.PRODUCT_NAME) - let team = settings.globalScope.evaluate(BuiltinMacros.DEVELOPMENT_TEAM).nilIfEmpty - return ProductTupleDescription( - displayName: productName, - productName: productName, - productType: { - switch settings.globalScope.evaluate(BuiltinMacros.PRODUCT_TYPE) { - case "com.apple.product-type.application", "com.apple.product-type.application.watchapp2", "com.apple.product-type.application.on-demand-install-capable": - return .app - case "com.apple.product-type.tool": - return .tool - case "com.apple.product-type.framework", "com.apple.product-type.framework.static", "com.apple.product-type.library.dynamic", "com.apple.product-type.library.static": - return .library - case "com.apple.product-type.watchkit2-extension": - return .appex - case "com.apple.product-type.bundle.unit-test", - "com.apple.product-type.bundle.ui-testing", - "com.apple.product-type.bundle.multi-device-ui-testing": - return .tests - default: - return .none - } - }(), - identifier: target.guid, - team: team, - bundleIdentifier: settings.globalScope.evaluate(BuiltinMacros.PRODUCT_BUNDLE_IDENTIFIER).nilIfEmpty, - destination: DestinationInfo(platformName: settings.globalScope.evaluate(BuiltinMacros.PLATFORM_DISPLAY_NAME), isSimulator: settings.globalScope.evaluate(BuiltinMacros.PLATFORM_NAME).hasSuffix("simulator")), - containingSchemes: [scheme.name], - iconPath: nil) + return Array( + Set( + input.flatMap { scheme in + return [scheme.analyze, scheme.archive, scheme.profile, scheme.run, scheme.test].flatMap { (input: ActionInput) -> [ProductTupleDescription] in + let parameters = BuildParameters(action: input) + return input.targetIdentifiers.flatMap { (targetIdentifier: String) -> [ProductTupleDescription] in + guard let target = workspaceContext.workspace.target(for: targetIdentifier) else { 
return [] } + let settings = buildRequestContext.getCachedSettings(parameters, target: target) + return settings.globalScope.evaluate(BuiltinMacros.SUPPORTED_PLATFORMS).compactMap { supportedPlatform in + guard let platform = workspaceContext.core.platformRegistry.lookup(name: supportedPlatform) else { return nil } + let settings = buildRequestContext.getCachedSettings(BuildParameters(action: input, platform: platform), target: target) + let productName = settings.globalScope.evaluate(BuiltinMacros.PRODUCT_NAME) + let team = settings.globalScope.evaluate(BuiltinMacros.DEVELOPMENT_TEAM).nilIfEmpty + return ProductTupleDescription( + displayName: productName, + productName: productName, + productType: { + switch settings.globalScope.evaluate(BuiltinMacros.PRODUCT_TYPE) { + case "com.apple.product-type.application", "com.apple.product-type.application.watchapp2", "com.apple.product-type.application.on-demand-install-capable": + return .app + case "com.apple.product-type.tool": + return .tool + case "com.apple.product-type.framework", "com.apple.product-type.framework.static", "com.apple.product-type.library.dynamic", "com.apple.product-type.library.static": + return .library + case "com.apple.product-type.watchkit2-extension": + return .appex + case "com.apple.product-type.bundle.unit-test", + "com.apple.product-type.bundle.ui-testing", + "com.apple.product-type.bundle.multi-device-ui-testing": + return .tests + default: + return .none + } + }(), + identifier: target.guid, + team: team, + bundleIdentifier: settings.globalScope.evaluate(BuiltinMacros.PRODUCT_BUNDLE_IDENTIFIER).nilIfEmpty, + destination: DestinationInfo(platformName: settings.globalScope.evaluate(BuiltinMacros.PLATFORM_DISPLAY_NAME), isSimulator: settings.globalScope.evaluate(BuiltinMacros.PLATFORM_NAME).hasSuffix("simulator")), + containingSchemes: [scheme.name], + iconPath: nil + ) + } + } } } - } - })) + ) + ) } // TODO: This is a stub for testing, real implementation will be done in rdar://problem/56446029 @@ -83,11 +88,13 @@ package struct ProjectPlanner { // TODO: This is a stub for testing, real implementation will be done in rdar://problem/56446029 package func describeSchemes(input: [SchemeInput]) -> [SchemeDescription] { return input.map { scheme in - let actions = ActionsInfo(analyze: actionInfo(scheme.analyze), - archive: actionInfo(scheme.archive), - profile: actionInfo(scheme.profile), - run: actionInfo(scheme.run), - test: actionInfo(scheme.test)) + let actions = ActionsInfo( + analyze: actionInfo(scheme.analyze), + archive: actionInfo(scheme.archive), + profile: actionInfo(scheme.profile), + run: actionInfo(scheme.run), + test: actionInfo(scheme.test) + ) return SchemeDescription(name: scheme.name, disambiguatedName: scheme.name, isShared: scheme.isShared, isAutogenerated: scheme.isAutogenerated, actions: actions) } } diff --git a/Sources/SWBTaskExecution/Task.swift b/Sources/SWBTaskExecution/Task.swift index 5f332737..87df487c 100644 --- a/Sources/SWBTaskExecution/Task.swift +++ b/Sources/SWBTaskExecution/Task.swift @@ -126,9 +126,11 @@ package final class Task: ExecutableTask, Serializable, Encodable { case .direct(let directStorage): return EnvironmentBindings(directStorage.environmentBindings) case .interned(let internedStorage): - return EnvironmentBindings(internedStorage.handles.environmentBindings.map { - (internedStorage.stringArena.lookup(handle: $0), internedStorage.stringArena.lookup(handle: $1)) - }) + return EnvironmentBindings( + internedStorage.handles.environmentBindings.map { + 
(internedStorage.stringArena.lookup(handle: $0), internedStorage.stringArena.lookup(handle: $1)) + } + ) } } @@ -233,7 +235,7 @@ package final class Task: ExecutableTask, Serializable, Encodable { showEnvironment: builder.showEnvironment, execDescription: builder.execDescription, // FIXME: This cast is unfortunate. - action: builder.action.map{ $0 as! TaskAction }, + action: builder.action.map { $0 as! TaskAction }, preparesForIndexing: builder.preparesForIndexing, llbuildControlDisabled: builder.llbuildControlDisabled, targetDependencies: builder.targetDependencies, @@ -313,18 +315,17 @@ package final class Task: ExecutableTask, Serializable, Encodable { // Make sure each configured target object is serialized only once. if let index = delegate.configuredTargetIndexes[configuredTarget] { // We already have an index into the configured target list, so serialize it. - serializer.serialize(1) // Placeholder indicating the next element is an index + serializer.serialize(1) // Placeholder indicating the next element is an index serializer.serialize(index) } else { // This configured target has not been serialized before, so serialize it and add it to our delegate's index map. - serializer.serialize(0) // Placeholder indicating the next element is a serialized ConfiguredTarget + serializer.serialize(0) // Placeholder indicating the next element is a serialized ConfiguredTarget serializer.serialize(configuredTarget) delegate.configuredTargetIndexes[configuredTarget] = delegate.currentConfiguredTargetIndex delegate.currentConfiguredTargetIndex += 1 } serializer.endAggregate() - } - else { + } else { serializer.serializeNil() } serializer.serialize(ruleInfo) @@ -384,8 +385,7 @@ package final class Task: ExecutableTask, Serializable, Encodable { if deserializer.deserializeNil() { self.forTarget = nil - } - else { + } else { guard let delegate = deserializer.delegate as? (any ConfiguredTargetDeserializerDelegate) else { throw DeserializerError.invalidDelegate("delegate must be a ConfiguredTargetDeserializerDelegate") } // Deserialize the configured target by deserializing it if we haven't seen it before, or by looking it up via the delegate if we have. 
@@ -446,12 +446,14 @@ package final class Task: ExecutableTask, Serializable, Encodable { return .parentPath(stringArena.intern(path.str)) } } - return .init(ruleInfo: ruleInfo.map { stringArena.intern($0) }, - commandLine: internedCommandLine, - additionalSignatureData: stringArena.intern(additionalSignatureData), - inputPathStrings: inputPaths.map { stringArena.intern($0.str) }, - outputPathStrings: outputPaths.map { stringArena.intern($0.str) }, - environmentBindings: environment.bindings.map { (stringArena.intern($0.0), stringArena.intern($0.1)) }) + return .init( + ruleInfo: ruleInfo.map { stringArena.intern($0) }, + commandLine: internedCommandLine, + additionalSignatureData: stringArena.intern(additionalSignatureData), + inputPathStrings: inputPaths.map { stringArena.intern($0.str) }, + outputPathStrings: outputPaths.map { stringArena.intern($0.str) }, + environmentBindings: environment.bindings.map { (stringArena.intern($0.0), stringArena.intern($0.1)) } + ) } internal init(task: Task, internedStorageHandles: Task.Storage.InternedStorage.Handles, frozenByteStringArena: FrozenByteStringArena, frozenStringArena: FrozenStringArena) { @@ -486,12 +488,11 @@ extension Task: Hashable { hasher.combine(ObjectIdentifier(self)) } - package static func ==(lhs: Task, rhs: Task) -> Bool { + package static func == (lhs: Task, rhs: Task) -> Bool { return lhs === rhs } } - /// A delegate which must be used to deserialize a `Task`. package protocol TaskDeserializerDelegate: DeserializerDelegate { /// The specification registry to use to look up `CommandLineToolSpec`s for deserializing Task.type properties. @@ -508,7 +509,8 @@ extension GateTask { extension ConstructedTask { /// Add the task to the given build description builder. func addToDescription(_ builder: BuildDescriptionBuilder) throws { - let allowMissingInputs = (ruleInfo.first == "PhaseScriptExecution" ? !SWBFeatureFlag.disableShellScriptAllowsMissingInputs.value : false) + let allowMissingInputs = + (ruleInfo.first == "PhaseScriptExecution" ? !SWBFeatureFlag.disableShellScriptAllowsMissingInputs.value : false) || ruleInfo.first == "ValidateDevelopmentAssets" // Handle custom tasks. @@ -552,8 +554,7 @@ public enum RequiredTargetDependencyReason: CustomStringConvertible { /// The interface used for interactions between running tasks and the controlling execution environment. /// A `TaskExecutionDelegate` performs operations commonly needed by a task, such as file I/O. /// This protocol enables task behavior to be more easily tested. -public protocol TaskExecutionDelegate -{ +public protocol TaskExecutionDelegate { /// The proxy to use for file system access. var fs: any FSProxy { get } @@ -594,8 +595,7 @@ package protocol BuildOutputDelegate: TargetDiagnosticProducingDelegate { } /// A `TaskOutputDelegate` handles output emitted by a task. -public protocol TaskOutputDelegate: DiagnosticProducingDelegate -{ +public protocol TaskOutputDelegate: DiagnosticProducingDelegate { var startTime: Date { get } /// Emit output log data. @@ -620,8 +620,7 @@ public protocol TaskOutputDelegate: DiagnosticProducingDelegate var result: TaskResult? { get } } -package extension TaskOutputDelegate -{ +package extension TaskOutputDelegate { /// Emit an error message. func emitError(_ message: String) { error(message) @@ -649,8 +648,7 @@ package extension TaskOutputDelegate } /// Convenience function for writing inline text output. 
-extension TaskOutputDelegate -{ +extension TaskOutputDelegate { func emitOutput(_ body: (OutputByteStream) -> Void) { let stream = OutputByteStream() body(stream) diff --git a/Sources/SWBTaskExecution/TaskActionExtensionPoint.swift b/Sources/SWBTaskExecution/TaskActionExtensionPoint.swift index 04e3ca33..697f1f1d 100644 --- a/Sources/SWBTaskExecution/TaskActionExtensionPoint.swift +++ b/Sources/SWBTaskExecution/TaskActionExtensionPoint.swift @@ -22,12 +22,15 @@ public struct TaskActionExtensionPoint: ExtensionPoint { // MARK: - actual extension point package static func taskActionImplementations(pluginManager: any PluginManager) throws -> [SerializableTypeCode: any PolymorphicSerializable.Type] { - return try pluginManager.extensions(of: Self.self).reduce([:], { implementations, ext in - for (code, _) in ext.taskActionImplementations where implementations[code] != nil { - throw StubError.error("Multiple implementations for task action implementation type code: \(code)") + return try pluginManager.extensions(of: Self.self).reduce( + [:], + { implementations, ext in + for (code, _) in ext.taskActionImplementations where implementations[code] != nil { + throw StubError.error("Multiple implementations for task action implementation type code: \(code)") + } + return implementations.addingContents(of: ext.taskActionImplementations) } - return implementations.addingContents(of: ext.taskActionImplementations) - }) + ) } } diff --git a/Sources/SWBTaskExecution/TaskActions/ClangCachingKeyQueryTaskAction.swift b/Sources/SWBTaskExecution/TaskActions/ClangCachingKeyQueryTaskAction.swift index 40d5ecd5..57cbef9d 100644 --- a/Sources/SWBTaskExecution/TaskActions/ClangCachingKeyQueryTaskAction.swift +++ b/Sources/SWBTaskExecution/TaskActions/ClangCachingKeyQueryTaskAction.swift @@ -41,11 +41,13 @@ public final class ClangCachingKeyQueryTaskAction: TaskAction { outputDelegate: any TaskOutputDelegate ) async -> CommandResult { let clangModuleDependencyGraph = dynamicExecutionDelegate.operationContext.clangModuleDependencyGraph - do { - guard let casDBs = try clangModuleDependencyGraph.getCASDatabases( - libclangPath: key.libclangPath, - casOptions: key.casOptions - ) else { + do { + guard + let casDBs = try clangModuleDependencyGraph.getCASDatabases( + libclangPath: key.libclangPath, + casOptions: key.casOptions + ) + else { throw StubError.error("unable to use CAS databases") } diff --git a/Sources/SWBTaskExecution/TaskActions/ClangCachingMaterializeKeyTaskAction.swift b/Sources/SWBTaskExecution/TaskActions/ClangCachingMaterializeKeyTaskAction.swift index 8192aa71..148874ae 100644 --- a/Sources/SWBTaskExecution/TaskActions/ClangCachingMaterializeKeyTaskAction.swift +++ b/Sources/SWBTaskExecution/TaskActions/ClangCachingMaterializeKeyTaskAction.swift @@ -66,11 +66,13 @@ public final class ClangCachingMaterializeKeyTaskAction: TaskAction { state.reset() let clangModuleDependencyGraph = dynamicExecutionDelegate.operationContext.clangModuleDependencyGraph - do { - guard let casDBs = try clangModuleDependencyGraph.getCASDatabases( - libclangPath: taskKey.libclangPath, - casOptions: taskKey.casOptions - ) else { + do { + guard + let casDBs = try clangModuleDependencyGraph.getCASDatabases( + libclangPath: taskKey.libclangPath, + casOptions: taskKey.casOptions + ) + else { throw StubError.error("unable to use CAS databases") } @@ -117,7 +119,7 @@ public final class ClangCachingMaterializeKeyTaskAction: TaskAction { do { guard let cachedComp = try casDBs.getLocalCachedCompilation(cacheKey: taskKey.cacheKey) else { 
state = .done - return // compilation key not found. + return // compilation key not found. } let numOutputsToDownload = requestCompilationOutputs(cachedComp, dynamicExecutionDelegate: dynamicExecutionDelegate, jobTaskIDBase: jobTaskIDBase) if numOutputsToDownload == 0 { diff --git a/Sources/SWBTaskExecution/TaskActions/ClangCachingOutputMaterializerTaskAction.swift b/Sources/SWBTaskExecution/TaskActions/ClangCachingOutputMaterializerTaskAction.swift index b7388e0d..a84acade 100644 --- a/Sources/SWBTaskExecution/TaskActions/ClangCachingOutputMaterializerTaskAction.swift +++ b/Sources/SWBTaskExecution/TaskActions/ClangCachingOutputMaterializerTaskAction.swift @@ -41,11 +41,13 @@ public final class ClangCachingOutputMaterializerTaskAction: TaskAction { outputDelegate: any TaskOutputDelegate ) async -> CommandResult { let clangModuleDependencyGraph = dynamicExecutionDelegate.operationContext.clangModuleDependencyGraph - do { - guard let casDBs = try clangModuleDependencyGraph.getCASDatabases( - libclangPath: key.libclangPath, - casOptions: key.casOptions - ) else { + do { + guard + let casDBs = try clangModuleDependencyGraph.getCASDatabases( + libclangPath: key.libclangPath, + casOptions: key.casOptions + ) + else { throw StubError.error("unable to use CAS databases") } diff --git a/Sources/SWBTaskExecution/TaskActions/ClangCompileTaskAction.swift b/Sources/SWBTaskExecution/TaskActions/ClangCompileTaskAction.swift index b81d59f3..03ef49a8 100644 --- a/Sources/SWBTaskExecution/TaskActions/ClangCompileTaskAction.swift +++ b/Sources/SWBTaskExecution/TaskActions/ClangCompileTaskAction.swift @@ -171,12 +171,12 @@ public final class ClangCompileTaskAction: TaskAction, BuildValueValidatingTaskA } } - override public func performTaskAction( - _ task: any ExecutableTask, - dynamicExecutionDelegate: any DynamicTaskExecutionDelegate, + override public func performTaskAction( + _ task: any ExecutableTask, + dynamicExecutionDelegate: any DynamicTaskExecutionDelegate, executionDelegate: any TaskExecutionDelegate, - clientDelegate: any TaskExecutionClientDelegate, - outputDelegate: any TaskOutputDelegate + clientDelegate: any TaskExecutionClientDelegate, + outputDelegate: any TaskOutputDelegate ) async -> CommandResult { defer { if let error = state.executionError { @@ -211,7 +211,7 @@ public final class ClangCompileTaskAction: TaskAction, BuildValueValidatingTaskA return .failed } - let commandLines = dependencyInfo.commands.map{$0.arguments} + let commandLines = dependencyInfo.commands.map { $0.arguments } // By default, don't print the frontend command lines, to avoid introducing too much noise in the log. if executionDelegate.userPreferences.enableDebugActivityLogs || executionDelegate.emitFrontendCommandLines { @@ -320,16 +320,15 @@ public final class ClangCompileTaskAction: TaskAction, BuildValueValidatingTaskA outputDelegate.emitOutput(ByteString(encodingAsUTF8: commandString) + "\n") } - let shouldGenerateReproducer = (lastResult == .failed) && - (explicitModulesPayload.shouldGenerateReproducerForErrors || - (outputDelegate.result?.isCrashed ?? false)) + let shouldGenerateReproducer = (lastResult == .failed) && (explicitModulesPayload.shouldGenerateReproducerForErrors || (outputDelegate.result?.isCrashed ?? 
false)) if shouldGenerateReproducer { do { if let reproducerMessage = try clangModuleDependencyGraph.generateReproducer( - forFailedDependency: dependencyInfo, - libclangPath: explicitModulesPayload.libclangPath, - casOptions: explicitModulesPayload.casOptions, - location: explicitModulesPayload.reproducerOutputPath?.str) { + forFailedDependency: dependencyInfo, + libclangPath: explicitModulesPayload.libclangPath, + casOptions: explicitModulesPayload.casOptions, + location: explicitModulesPayload.reproducerOutputPath?.str + ) { outputDelegate.emitOutput(ByteString(encodingAsUTF8: reproducerMessage) + "\n") } } catch { @@ -369,8 +368,9 @@ public final class ClangCompileTaskAction: TaskAction, BuildValueValidatingTaskA taskID: inout UInt ) throws { guard let casOptions, - casOptions.enableIntegratedCacheQueries, - casOptions.hasRemoteCache else { + casOptions.enableIntegratedCacheQueries, + casOptions.hasRemoteCache + else { return } @@ -489,7 +489,8 @@ public final class ClangCompileTaskAction: TaskAction, BuildValueValidatingTaskA ) throws { let payload: DependencyValidationInfo.Payload if let traceFilePath, - let traceData = try parseTraceData(Data(fileSystem.read(traceFilePath))) { + let traceData = try parseTraceData(Data(fileSystem.read(traceFilePath))) + { outputDelegate.incrementTaskCounter(.headerDependenciesValidatedTasks) if isModular { diff --git a/Sources/SWBTaskExecution/TaskActions/ClangModuleVerifierInputGeneratorTaskAction.swift b/Sources/SWBTaskExecution/TaskActions/ClangModuleVerifierInputGeneratorTaskAction.swift index 12b8e497..4c09cc39 100644 --- a/Sources/SWBTaskExecution/TaskActions/ClangModuleVerifierInputGeneratorTaskAction.swift +++ b/Sources/SWBTaskExecution/TaskActions/ClangModuleVerifierInputGeneratorTaskAction.swift @@ -28,7 +28,7 @@ public final class ClangModuleVerifierInputGeneratorTaskAction: TaskAction { init?(commandLine: some Sequence, outputDelegate: any TaskOutputDelegate) { var iterator = commandLine.makeIterator() - _ = iterator.next() // Skip argv[0] + _ = iterator.next() // Skip argv[0] guard let inputFramework = iterator.next().map(Path.init) else { outputDelegate.emitError("no input framework specified") return nil @@ -138,9 +138,14 @@ public final class ClangModuleVerifierInputGeneratorTaskAction: TaskAction { } do { - try fs.write(options.mainOutput, contents: ByteString(encodingAsUTF8: """ - \(options.language.includeStatement) - """)) + try fs.write( + options.mainOutput, + contents: ByteString( + encodingAsUTF8: """ + \(options.language.includeStatement) + """ + ) + ) } catch { outputDelegate.emitError("failed to write \(options.mainOutput): \(error)") return .failed @@ -154,10 +159,12 @@ public final class ClangModuleVerifierInputGeneratorTaskAction: TaskAction { if !output.isEmpty { output += "\n"; } - output += ByteString(encodingAsUTF8: """ - // Private - \(framework.allModularPrivateHeaderIncludes(language: options.language)) - """) + output += ByteString( + encodingAsUTF8: """ + // Private + \(framework.allModularPrivateHeaderIncludes(language: options.language)) + """ + ) } try fs.write(options.headerOutput, contents: output) @@ -166,14 +173,17 @@ public final class ClangModuleVerifierInputGeneratorTaskAction: TaskAction { return .failed } do { - try fs.write(options.moduleMapOutput, contents: """ - framework module Test { - umbrella header "Test.h" - - export * - module * { export * } - } - """) + try fs.write( + options.moduleMapOutput, + contents: """ + framework module Test { + umbrella header "Test.h" + + export * + module * { 
export * } + } + """ + ) } catch { outputDelegate.emitError("failed to write \(options.moduleMapOutput): \(error)") return .failed diff --git a/Sources/SWBTaskExecution/TaskActions/ClangScanTaskAction.swift b/Sources/SWBTaskExecution/TaskActions/ClangScanTaskAction.swift index 8ba55c67..5693a265 100644 --- a/Sources/SWBTaskExecution/TaskActions/ClangScanTaskAction.swift +++ b/Sources/SWBTaskExecution/TaskActions/ClangScanTaskAction.swift @@ -40,8 +40,7 @@ public final class ClangScanTaskAction: TaskAction, BuildValueValidatingTaskActi // Skip the executable. let programName = generator.next() ?? "<>" - argumentIteration: - while let arg = generator.next() { + argumentIteration: while let arg = generator.next() { switch arg { case "-o": if let outputPath = generator.next() { diff --git a/Sources/SWBTaskExecution/TaskActions/CodeSignTaskAction.swift b/Sources/SWBTaskExecution/TaskActions/CodeSignTaskAction.swift index c059f6a8..4f9de770 100644 --- a/Sources/SWBTaskExecution/TaskActions/CodeSignTaskAction.swift +++ b/Sources/SWBTaskExecution/TaskActions/CodeSignTaskAction.swift @@ -14,14 +14,12 @@ public import SWBCore public import SWBUtil import Foundation - public final class CodeSignTaskAction: TaskAction { public override class var toolIdentifier: String { "code-sign-task" } - public override init() - { + public override init() { super.init() } @@ -51,14 +49,11 @@ public final class CodeSignTaskAction: TaskAction { // Serialization - - public override func serialize(to serializer: T) - { + public override func serialize(to serializer: T) { super.serialize(to: serializer) } - public required init(from deserializer: any Deserializer) throws - { + public required init(from deserializer: any Deserializer) throws { try super.init(from: deserializer) } } diff --git a/Sources/SWBTaskExecution/TaskActions/ConstructStubExecutorInputFileListTaskAction.swift b/Sources/SWBTaskExecution/TaskActions/ConstructStubExecutorInputFileListTaskAction.swift index ebb79347..74f80e08 100644 --- a/Sources/SWBTaskExecution/TaskActions/ConstructStubExecutorInputFileListTaskAction.swift +++ b/Sources/SWBTaskExecution/TaskActions/ConstructStubExecutorInputFileListTaskAction.swift @@ -81,8 +81,7 @@ public final class ConstructStubExecutorInputFileListTaskAction: TaskAction { outputDelegate.emitWarning( "Only \(slicesWithEntryPointSection.count) out of \(slices.count) slices in the debug dylib MachO contained Swift entry point sections. Using stub executor library with Swift entry point." ) - } - else { + } else { outputDelegate.emitNote("Using stub executor library with Swift entry point.") } chosenLibraryPath = swiftEntryLibraryVariantPath @@ -94,8 +93,7 @@ public final class ConstructStubExecutorInputFileListTaskAction: TaskAction { try fs.write(fileListPath, contents: ByteString(encodingAsUTF8: "\(chosenLibraryPath.str)\n")) return .succeeded - } - catch { + } catch { outputDelegate.emitError("Unable to process debug dylib: \(error.localizedDescription)") return .failed } diff --git a/Sources/SWBTaskExecution/TaskActions/CopyPlistTaskAction.swift b/Sources/SWBTaskExecution/TaskActions/CopyPlistTaskAction.swift index 3e8f06ea..a7dc001b 100644 --- a/Sources/SWBTaskExecution/TaskActions/CopyPlistTaskAction.swift +++ b/Sources/SWBTaskExecution/TaskActions/CopyPlistTaskAction.swift @@ -105,8 +105,7 @@ public final class CopyPlistTaskAction: TaskAction { let generator = commandLine.makeIterator() // Skip the executable. let programName = generator.next() ?? 
"<>" - argumentParsing: - while let arg = generator.next() { + argumentParsing: while let arg = generator.next() { switch arg { case "--": break argumentParsing @@ -168,7 +167,7 @@ public final class CopyPlistTaskAction: TaskAction { } // All remaining arguments are input paths. - let inputs = generator.map { Path($0 )} + let inputs = generator.map { Path($0) } // Diagnose missing inputs. if inputs.isEmpty { @@ -234,8 +233,7 @@ public final class CopyPlistTaskAction: TaskAction { let contents: ByteString do { contents = try executionDelegate.fs.read(input) - } - catch { + } catch { outputDelegate.emitError("unable to read input file '\(input.str)': \(error.localizedDescription)") return .failed } @@ -247,8 +245,7 @@ public final class CopyPlistTaskAction: TaskAction { // FIXME: The native build system would remove the old file if it existed, perform the copy, then update the mod time on the new file. do { try executionDelegate.fs.write(output, contents: contents) - } - catch { + } catch { outputDelegate.emitError("unable to write file '\(output.str)': \(error.localizedDescription)") return .failed } @@ -258,11 +255,15 @@ public final class CopyPlistTaskAction: TaskAction { do { // Rev-lock hack var contents = try ByteString(PropertyList.fromBytes(contents.bytes).asBytes(.xml)) - contents = ByteString(Array(contents.unsafeStringValue - .replacingOccurrences(of: ">WRAPPEDPRODUCTNAME<", with: ">$(WRAPPEDPRODUCTNAME)<") - .replacingOccurrences(of: ">WRAPPEDPRODUCTBUNDLEIDENTIFIER<", with: ">$(WRAPPEDPRODUCTBUNDLEIDENTIFIER)<") - .replacingOccurrences(of: ">TESTPRODUCTNAME<", with: ">$(TESTPRODUCTNAME)<") - .replacingOccurrences(of: ">TESTPRODUCTBUNDLEIDENTIFIER<", with: ">$(TESTPRODUCTBUNDLEIDENTIFIER)<").utf8)) + contents = ByteString( + Array( + contents.unsafeStringValue + .replacingOccurrences(of: ">WRAPPEDPRODUCTNAME<", with: ">$(WRAPPEDPRODUCTNAME)<") + .replacingOccurrences(of: ">WRAPPEDPRODUCTBUNDLEIDENTIFIER<", with: ">$(WRAPPEDPRODUCTBUNDLEIDENTIFIER)<") + .replacingOccurrences(of: ">TESTPRODUCTNAME<", with: ">$(TESTPRODUCTNAME)<") + .replacingOccurrences(of: ">TESTPRODUCTBUNDLEIDENTIFIER<", with: ">$(TESTPRODUCTBUNDLEIDENTIFIER)<").utf8 + ) + ) let p = try PropertyList.fromBytes(contents.bytes) if !options.macroExpansions.isEmpty { @@ -307,8 +308,7 @@ public final class CopyPlistTaskAction: TaskAction { // FIXME: The native build system would remove the old file if it existed, then perform the copy. do { try executionDelegate.fs.write(output, contents: ByteString(outputData)) - } - catch { + } catch { outputDelegate.emitError("unable to write file '\(output.str)': \(error.localizedDescription)") return .failed } @@ -319,17 +319,13 @@ public final class CopyPlistTaskAction: TaskAction { return .succeeded } - // Serialization - - public override func serialize(to serializer: T) - { + public override func serialize(to serializer: T) { super.serialize(to: serializer) } - public required init(from deserializer: any Deserializer) throws - { + public required init(from deserializer: any Deserializer) throws { try super.init(from: deserializer) } } diff --git a/Sources/SWBTaskExecution/TaskActions/CopyTiffTaskAction.swift b/Sources/SWBTaskExecution/TaskActions/CopyTiffTaskAction.swift index df1675af..ff569109 100644 --- a/Sources/SWBTaskExecution/TaskActions/CopyTiffTaskAction.swift +++ b/Sources/SWBTaskExecution/TaskActions/CopyTiffTaskAction.swift @@ -55,8 +55,7 @@ public final class CopyTiffTaskAction: TaskAction { let generator = commandLine.makeIterator() // Skip the executable. 
let programName = generator.next() ?? "<>" - argumentParsing: - while let arg = generator.next() { + argumentParsing: while let arg = generator.next() { switch arg { case "--": break argumentParsing @@ -86,7 +85,7 @@ public final class CopyTiffTaskAction: TaskAction { } // All remaining arguments are input paths. - let inputs = generator.map { Path($0 )} + let inputs = generator.map { Path($0) } // Diagnose missing inputs. if inputs.isEmpty { @@ -167,8 +166,7 @@ public final class CopyTiffTaskAction: TaskAction { let contents: ByteString do { contents = try executionDelegate.fs.read(input) - } - catch { + } catch { outputDelegate.emitError("unable to read input file '\(input.str)': \(error.localizedDescription)") return .failed } @@ -176,8 +174,7 @@ public final class CopyTiffTaskAction: TaskAction { // FIXME: The native build system would remove the old file if it existed, perform the copy, then update the mod time on the new file. do { try executionDelegate.fs.write(output, contents: contents) - } - catch { + } catch { outputDelegate.emitError("unable to write file '\(output.str)': \(error.localizedDescription)") return .failed } @@ -187,17 +184,13 @@ public final class CopyTiffTaskAction: TaskAction { return .succeeded } - // Serialization - - public override func serialize(to serializer: T) - { + public override func serialize(to serializer: T) { super.serialize(to: serializer) } - public required init(from deserializer: any Deserializer) throws - { + public required init(from deserializer: any Deserializer) throws { try super.init(from: deserializer) } } diff --git a/Sources/SWBTaskExecution/TaskActions/CreateBuildDirectoryTaskAction.swift b/Sources/SWBTaskExecution/TaskActions/CreateBuildDirectoryTaskAction.swift index 664383ab..88d7b9a6 100644 --- a/Sources/SWBTaskExecution/TaskActions/CreateBuildDirectoryTaskAction.swift +++ b/Sources/SWBTaskExecution/TaskActions/CreateBuildDirectoryTaskAction.swift @@ -39,7 +39,7 @@ public final class CreateBuildDirectoryTaskAction: TaskAction { try fs.setCreatedByBuildSystemAttribute(directoryPath) } catch { #if canImport(Darwin) - outputDelegate?.emitWarning("Failed to set build system attribute on \(directoryPath.str): \(error.localizedDescription)") + outputDelegate?.emitWarning("Failed to set build system attribute on \(directoryPath.str): \(error.localizedDescription)") #endif } @@ -54,7 +54,7 @@ public final class CreateBuildDirectoryTaskAction: TaskAction { outputDelegate: any TaskOutputDelegate ) async -> CommandResult { let generator = task.commandLineAsStrings.makeIterator() - _ = generator.next() // consume program name + _ = generator.next() // consume program name guard let directory = generator.next() else { outputDelegate.emitError("wrong number of arguments") diff --git a/Sources/SWBTaskExecution/TaskActions/EmbedSwiftStdLibTaskAction.swift b/Sources/SWBTaskExecution/TaskActions/EmbedSwiftStdLibTaskAction.swift index 38501d22..84b0ad11 100644 --- a/Sources/SWBTaskExecution/TaskActions/EmbedSwiftStdLibTaskAction.swift +++ b/Sources/SWBTaskExecution/TaskActions/EmbedSwiftStdLibTaskAction.swift @@ -23,17 +23,18 @@ fileprivate func executableFileNameMatchesSwiftRuntimeLibPattern(_ fileName: Str fileprivate extension MachO { func allSwiftLibNames() throws -> Set { - return try Set(self.slices() - .flatMap { try $0.linkedLibraryPaths() } - .compactMap { (lib: String) -> String? 
in - let pcs = Path(lib) - if (pcs.dirname.str == "@rpath" || pcs.dirname == Path("/usr/lib/swift")) && executableFileNameMatchesSwiftRuntimeLibPattern(pcs.basename) { - return pcs.basename - } - else { - return nil + return try Set( + self.slices() + .flatMap { try $0.linkedLibraryPaths() } + .compactMap { (lib: String) -> String? in + let pcs = Path(lib) + if (pcs.dirname.str == "@rpath" || pcs.dirname == Path("/usr/lib/swift")) && executableFileNameMatchesSwiftRuntimeLibPattern(pcs.basename) { + return pcs.basename + } else { + return nil + } } - }) + ) } } @@ -50,22 +51,20 @@ fileprivate struct Executable: Hashable { init(path: Path, object: MachO) throws { self.path = path - let swiftABIVersions = try object.slices().compactMap{ try $0.swiftABIVersion() } + let swiftABIVersions = try object.slices().compactMap { try $0.swiftABIVersion() } if swiftABIVersions.isEmpty { self.swiftABIVersion = nil - } - else { + } else { let uniqueVersions = Set(swiftABIVersions) if uniqueVersions.count > 1 { throw StubError.error("Expected a single Swift ABI version in \(path.str) but found \(swiftABIVersions)") - } - else { + } else { self.swiftABIVersion = swiftABIVersions.first! } } self.linkedSwiftLibNames = try object.allSwiftLibNames() - self.uuids = try object.slices().compactMap{ try $0.uuid() } + self.uuids = try object.slices().compactMap { try $0.uuid() } } public func hash(into hasher: inout Hasher) { @@ -88,7 +87,7 @@ public final class EmbedSwiftStdLibTaskAction: TaskAction { /// Combined parent + task environment let effectiveEnvironment: [String: String] - var fs : any FSProxy { return executionDelegate.fs } + var fs: any FSProxy { return executionDelegate.fs } var verbose: Int = 0 @@ -165,7 +164,7 @@ public final class EmbedSwiftStdLibTaskAction: TaskAction { var backDeploySwiftSpan = false // The allowed list of libraries that should *not* be filtered when `filterForSwiftOS=true`. - let allowedLibsForSwiftOS = ["libswiftXCTest" ] + let allowedLibsForSwiftOS = ["libswiftXCTest"] // The allowed list of libraries that should *not* be filtered when `backDeploySwiftConcurrency=true`. let allowedLibsForSwiftConcurrency = ["libswift_Concurrency"] @@ -177,7 +176,7 @@ public final class EmbedSwiftStdLibTaskAction: TaskAction { return path.isAbsolute ? 
path : task.workingDirectory.join(path) } - func logV(_ msg : @autoclosure () -> String) { + func logV(_ msg: @autoclosure () -> String) { if verbose > 0 { outputController.emitOutputLock.withLock { outputDelegate.emitOutput { $0 <<< msg() <<< "\n" } @@ -185,7 +184,7 @@ public final class EmbedSwiftStdLibTaskAction: TaskAction { } } - func logVV(_ msg : @autoclosure () -> String) { + func logVV(_ msg: @autoclosure () -> String) { if verbose > 1 { outputController.emitOutputLock.withLock { outputDelegate.emitOutput { $0 <<< msg() <<< "\n" } @@ -253,7 +252,7 @@ public final class EmbedSwiftStdLibTaskAction: TaskAction { return p } - func setSingleOccurrence(_ result: inout T?, _ getValue : @autoclosure () throws -> T) throws { + func setSingleOccurrence(_ result: inout T?, _ getValue: @autoclosure () throws -> T) throws { guard result == nil else { throw StubError.error("Failed to parse arguments: expected a single \(arg) argument") } result = try getValue() } @@ -275,8 +274,7 @@ public final class EmbedSwiftStdLibTaskAction: TaskAction { let path = absolutePath(Path(try argParam())) if let exe = try executableIfValid(path: path) { scanExecutables.insert(exe) - } - else { + } else { logV("Failed to scan executable: \(path.str)") } @@ -395,14 +393,14 @@ public final class EmbedSwiftStdLibTaskAction: TaskAction { // Fix up srcDir and platform values. if srcDir == nil && platform == nil { throw StubError.error("at least one of --source-libraries and --platform must be set") - } - else if let srcDir = srcDir, platform == nil { + } else if let srcDir = srcDir, platform == nil { // src_dir is set but platform is not. // Pick platform from src_dir's name. platform = srcDir.basename } - srcDirs = srcDir != nil + srcDirs = + srcDir != nil ? OrderedSet([srcDir!]) : OrderedSet(effectiveSourceDirectories(toolchainsDirs, platform: platform!)) logVV("Effective srcDirs:\n\(srcDirs.elements.map{$0.str}.joined(separator: "\n"))") @@ -475,8 +473,8 @@ public final class EmbedSwiftStdLibTaskAction: TaskAction { func incrementCounter(_ counter: BuildOperationMetrics.Counter, by amount: Int) {} func incrementTaskCounter(_ counter: BuildOperationMetrics.TaskCounter, by amount: Int) {} - var counters: [BuildOperationMetrics.Counter : Int] = [:] - var taskCounters: [BuildOperationMetrics.TaskCounter : Int] = [:] + var counters: [BuildOperationMetrics.Counter: Int] = [:] + var taskCounters: [BuildOperationMetrics.TaskCounter: Int] = [:] let underlyingDelegate: any TaskOutputDelegate var output = ByteString() @@ -527,12 +525,18 @@ public final class EmbedSwiftStdLibTaskAction: TaskAction { } guard !failed else { - throw RunProcessNonZeroExitError(args: args, workingDirectory: task.workingDirectory, environment: .init(effectiveEnvironment), status: { - if case let .exit(exitStatus, _) = processDelegate.outputDelegate.result { - return exitStatus - } - return .uncaughtSignal(0) - }(), mergedOutput: output) + throw RunProcessNonZeroExitError( + args: args, + workingDirectory: task.workingDirectory, + environment: .init(effectiveEnvironment), + status: { + if case let .exit(exitStatus, _) = processDelegate.outputDelegate.result { + return exitStatus + } + return .uncaughtSignal(0) + }(), + mergedOutput: output + ) } return output @@ -545,8 +549,7 @@ public final class EmbedSwiftStdLibTaskAction: TaskAction { func copyFile(src: Path, dst: Path, stripBitcode: Bool) async throws { if stripBitcode { try await copyAndStripBitcode(src: src, dst: dst) - } - else { + } else { try fs.copy(src, to: dst) } } @@ -590,7 +593,6 @@ 
public final class EmbedSwiftStdLibTaskAction: TaskAction { } } - func queryCodeSignature(codesign: Path, _ file: Path) async throws -> ByteString { logV("Probing signature of \(file.str)") return try await runProcess([codesign.str, "-r-", "--display", file.str]) @@ -602,8 +604,7 @@ public final class EmbedSwiftStdLibTaskAction: TaskAction { if fs.exists(tmpFilePath) { do { try fs.remove(tmpFilePath) - } - catch { + } catch { logV("Failed to remove: '\(tmpFilePath.str)': \(error)") } } @@ -672,8 +673,7 @@ public final class EmbedSwiftStdLibTaskAction: TaskAction { // Some of the Swift runtime libs don't use Swift, so we allow either an empty swiftVersion or a swiftVersion which satisfies the requested swiftVersion. if !exe.usesSwift || exe.swiftABIVersion == swiftVersion { return exe - } - else { + } else { foundNameMatches.append(exe) } } @@ -688,14 +688,12 @@ public final class EmbedSwiftStdLibTaskAction: TaskAction { if !isOptional { let versionStr = ignoreABIVersion ? "" : " for Swift ABI version \(swiftVersion)" throw StubError.error("Could not find \(name)\(versionStr)") - } - else { + } else { return nil } } - func collectTransitiveDependencies(srcDirs: OrderedSet, executables: Set, swiftVersion: SwiftABIVersion, requireAllDependencies: Bool, discoveredDependencyInfo: inout DependencyInfo) throws -> Set - { + func collectTransitiveDependencies(srcDirs: OrderedSet, executables: Set, swiftVersion: SwiftABIVersion, requireAllDependencies: Bool, discoveredDependencyInfo: inout DependencyInfo) throws -> Set { var worklist = Array(executables) var result = Set() var consideredLibNames = Set() @@ -729,31 +727,30 @@ public final class EmbedSwiftStdLibTaskAction: TaskAction { // Pick a Swift version that all executables have to agree on. let swiftVersionOpt = try scanExecutables.reduce(nil) { (memo: (SwiftABIVersion, Executable)?, newExe: Executable) throws -> (SwiftABIVersion, Executable)? in - switch (memo?.0, newExe.swiftABIVersion) { - case (_, nil): + switch (memo?.0, newExe.swiftABIVersion) { + case (_, nil): return memo - case (nil, .some(let newVersion)): + case (nil, .some(let newVersion)): return (newVersion, newExe) - case (.some(let prevVersion), .some(let newVersion)): + case (.some(let prevVersion), .some(let newVersion)): guard prevVersion != newVersion else { return memo } let mismatch = scanExecutables.first { object -> Bool in return object.swiftABIVersion != nil && object.swiftABIVersion! != newVersion - }! + }! let paths = [mismatch.path.str, newExe.path.str].sorted() let message = "The following binaries use incompatible versions of Swift:\n\(paths.joined(separator: "\n"))" if ignoreABIVersion { self.outputDelegate.emitWarning(message) return memo - } - else { + } else { throw StubError.error(message) } } - }?.0 + }?.0 // Discovered dependency paths, collected during processing, emitted at the end. var discoveredDependencyInfo = DependencyInfo(version: "swift-stdlib-tool") @@ -771,14 +768,17 @@ public final class EmbedSwiftStdLibTaskAction: TaskAction { } // Collect Swift library names from the input files and follow dependencies recursively. - let dependencies = try collectTransitiveDependencies(srcDirs: srcDirs, - executables: scanExecutables, - swiftVersion: swiftVersion, - requireAllDependencies: false, // If the library does not exist in srcDirs then assume the user wrote their own library named libswift* and is handling it elsewhere. 
- discoveredDependencyInfo: &discoveredDependencyInfo) + let dependencies = try collectTransitiveDependencies( + srcDirs: srcDirs, + executables: scanExecutables, + swiftVersion: swiftVersion, + requireAllDependencies: false, // If the library does not exist in srcDirs then assume the user wrote their own library named libswift* and is handling it elsewhere. + discoveredDependencyInfo: &discoveredDependencyInfo + ) // The list of dependencies needs to be pruned based on the filtering mechanism. Under normal circumstances, no libraries are expected to be allowed. - let swiftLibs = dependencies + let swiftLibs = + dependencies .filter { let item = $0.path.basenameWithoutSuffix @@ -798,15 +798,17 @@ public final class EmbedSwiftStdLibTaskAction: TaskAction { // Collect all the Swift libraries that the user requested with --resource-library. - let resourceLibrariesExecutables = try Set(resourceLibraries.map{ obj throws -> Executable in - return try findSwiftLib(srcDirs: srcDirs, name: obj, swiftVersion: swiftVersion, isOptional: false)! - }) + let resourceLibrariesExecutables = try Set( + resourceLibraries.map { obj throws -> Executable in + return try findSwiftLib(srcDirs: srcDirs, name: obj, swiftVersion: swiftVersion, isOptional: false)! + } + ) let swiftLibsForResources: Set = try collectTransitiveDependencies( srcDirs: srcDirs, executables: resourceLibrariesExecutables, swiftVersion: swiftVersion, - requireAllDependencies: true, // These are system libraries and they should be complete. + requireAllDependencies: true, // These are system libraries and they should be complete. discoveredDependencyInfo: &discoveredDependencyInfo ).union(resourceLibrariesExecutables) @@ -888,8 +890,7 @@ public final class EmbedSwiftStdLibTaskAction: TaskAction { } return .succeeded - } - catch { + } catch { outputDelegate.emitError("\(error)") return .failed } diff --git a/Sources/SWBTaskExecution/TaskActions/FileCopyTaskAction.swift b/Sources/SWBTaskExecution/TaskActions/FileCopyTaskAction.swift index f3bbb3f6..197c6bdf 100644 --- a/Sources/SWBTaskExecution/TaskActions/FileCopyTaskAction.swift +++ b/Sources/SWBTaskExecution/TaskActions/FileCopyTaskAction.swift @@ -16,10 +16,8 @@ import SWBLibc public import SWBCore /// Concrete implementation of the in-process file-copying task. 
-public final class FileCopyTaskAction: TaskAction -{ - public override class var toolIdentifier: String - { +public final class FileCopyTaskAction: TaskAction { + public override class var toolIdentifier: String { return "file-copy" } @@ -122,9 +120,16 @@ public final class FileCopyTaskAction: TaskAction try await withTemporaryDirectory { tempDir in let commandLine = try context.stubCommandLine(frameworkPath: frameworkPath, isDeepBundle: isDeepBundle, platformRegistry: executionDelegate.platformRegistry, sdkRegistry: executionDelegate.sdkRegistry, tempDir: tempDir) - outputDelegate.emit(Diagnostic(behavior: .note, location: .unknown, data: DiagnosticData("Injecting stub binary into codeless framework"), childDiagnostics: (commandLine.compileAndLink.flatMap { [$0.compile, $0.link] } + [commandLine.lipo]).map { commandLine in - Diagnostic(behavior: .note, location: .unknown, data: DiagnosticData(UNIXShellCommandCodec(encodingStrategy: .singleQuotes, encodingBehavior: .fullCommandLine).encode(commandLine))) - })) + outputDelegate.emit( + Diagnostic( + behavior: .note, + location: .unknown, + data: DiagnosticData("Injecting stub binary into codeless framework"), + childDiagnostics: (commandLine.compileAndLink.flatMap { [$0.compile, $0.link] } + [commandLine.lipo]).map { commandLine in + Diagnostic(behavior: .note, location: .unknown, data: DiagnosticData(UNIXShellCommandCodec(encodingStrategy: .singleQuotes, encodingBehavior: .fullCommandLine).encode(commandLine))) + } + ) + ) var exists: Bool = false if isDeepBundle && !executionDelegate.fs.isSymlink(frameworkPath.join(frameworkPath.basenameWithoutSuffix), &exists) { diff --git a/Sources/SWBTaskExecution/TaskActions/GenericCachingTaskAction.swift b/Sources/SWBTaskExecution/TaskActions/GenericCachingTaskAction.swift index e46de075..73c4e82a 100644 --- a/Sources/SWBTaskExecution/TaskActions/GenericCachingTaskAction.swift +++ b/Sources/SWBTaskExecution/TaskActions/GenericCachingTaskAction.swift @@ -52,7 +52,7 @@ public final class GenericCachingTaskAction: TaskAction { try super.init(from: deserializer) } - public override func serialize(to serializer: T) where T : Serializer { + public override func serialize(to serializer: T) where T: Serializer { serializer.beginAggregate(7) serializer.serialize(enableCacheDebuggingRemarks) serializer.serialize(enableTaskSandboxEnforcement) @@ -77,10 +77,12 @@ public final class GenericCachingTaskAction: TaskAction { defer { if cas.supportsPruning { - dynamicExecutionDelegate.operationContext.compilationCachingDataPruner.pruneCAS(cas, - key: .init(path: casOptions.casPath, casOptions: casOptions), - activityReporter: dynamicExecutionDelegate, - fileSystem: executionDelegate.fs) + dynamicExecutionDelegate.operationContext.compilationCachingDataPruner.pruneCAS( + cas, + key: .init(path: casOptions.casPath, casOptions: casOptions), + activityReporter: dynamicExecutionDelegate, + fileSystem: executionDelegate.fs + ) } } @@ -172,12 +174,14 @@ public final class GenericCachingTaskAction: TaskAction { emitCacheDebuggingRemark("remapped command line: \(remappedCommandLine.joined(separator: " "))") let remappedTempPath = sandboxDirectory.join("temp") try executionDelegate.fs.createDirectory(remappedTempPath) - let remappedEnvironment = EnvironmentBindings(cacheKey.environmentBindings.bindings + [ - ("TMPDIR", remappedTempPath.str), - ("TEMP", remappedTempPath.str), - ("TEMPDIR", remappedTempPath.str), - ("TMP", remappedTempPath.str) - ]) + let remappedEnvironment = EnvironmentBindings( + 
cacheKey.environmentBindings.bindings + [ + ("TMPDIR", remappedTempPath.str), + ("TEMP", remappedTempPath.str), + ("TEMPDIR", remappedTempPath.str), + ("TMP", remappedTempPath.str), + ] + ) emitCacheDebuggingRemark("remapped environment: \(remappedEnvironment.bindingsDictionary)") let sandboxArgs = enableTaskSandboxEnforcement ? try Self.prepareSandboxEnforcementArgs(sandboxDirectory: sandboxDirectory, developerDirectory: developerDirectory, executionDelegate: executionDelegate) : [] @@ -243,54 +247,54 @@ public final class GenericCachingTaskAction: TaskAction { static func prepareSandboxEnforcementArgs(sandboxDirectory: Path, developerDirectory: Path, executionDelegate: any TaskExecutionDelegate) throws -> [String] { // We only support enforcement of the task sandbox on a macOS host #if os(macOS) - let sandboxPath = sandboxDirectory.join("sandbox.sb") - let sandboxProfile: ByteString = """ - (version 1) - (allow default) - - (deny network*) - - (deny file-write*) - (allow file-write* (subpath "/dev/")) ; Allow writes to locations such as /dev/null - - (deny file-read* (subpath "/Users/")) ; Block access to most locations under user control, while allowing reads to parts of the system required by frameworks and tools - (allow file-read* (subpath (param "XCODE"))) ; Allow reads into Xcode.app - - ; Allow reads and writes to cache and logs directories - (allow file-read* file-write* (subpath (param "USER_CACHE_DIR"))) - (allow file-read* file-write* (subpath (param "DARWIN_USER_CACHE_DIR"))) - (allow file-read* file-write* (subpath (param "DARWIN_USER_TEMP_DIR"))) - (allow file-read* file-write* (subpath (param "USER_LOGS_DIR"))) - (allow file-read* file-write* (subpath (param "USER_DEVELOPER_DIR"))) - - ; Allow reads and writes to the task sandbox location - (allow file-read* file-write* (subpath (param "TASK_SANDBOX"))) - - ; Allow reads and writes to specific files required by system frameworks and libraries - (allow file-read-data file-write-data - (regex - #"/\\.CFUserTextEncoding$" - #"^/usr/share/nls/" - #"^/usr/share/zoneinfo /var/db/timezone/zoneinfo/" - )) - - (allow file-read-metadata) - """ - try executionDelegate.fs.write(sandboxPath, contents: sandboxProfile) - - return [ - "/usr/bin/sandbox-exec", - "-D", "TASK_SANDBOX=\(sandboxDirectory.str)", - "-D", "XCODE=\(developerDirectory.dirname.dirname.str)", - "-D", "USER_CACHE_DIR=\(Path.homeDirectory.join("Library/Caches").str)", - "-D", "DARWIN_USER_CACHE_DIR=\(userCacheDir().str)", - "-D", "DARWIN_USER_TEMP_DIR=\(try executionDelegate.fs.realpath(Path.temporaryDirectory).str)", - "-D", "USER_LOGS_DIR=\(Path.homeDirectory.join("Library/Logs").str)", - "-D", "USER_DEVELOPER_DIR=\(Path.homeDirectory.join("Library/Developer").str)", - "-f", sandboxPath.str - ] + let sandboxPath = sandboxDirectory.join("sandbox.sb") + let sandboxProfile: ByteString = """ + (version 1) + (allow default) + + (deny network*) + + (deny file-write*) + (allow file-write* (subpath "/dev/")) ; Allow writes to locations such as /dev/null + + (deny file-read* (subpath "/Users/")) ; Block access to most locations under user control, while allowing reads to parts of the system required by frameworks and tools + (allow file-read* (subpath (param "XCODE"))) ; Allow reads into Xcode.app + + ; Allow reads and writes to cache and logs directories + (allow file-read* file-write* (subpath (param "USER_CACHE_DIR"))) + (allow file-read* file-write* (subpath (param "DARWIN_USER_CACHE_DIR"))) + (allow file-read* file-write* (subpath (param "DARWIN_USER_TEMP_DIR"))) + 
(allow file-read* file-write* (subpath (param "USER_LOGS_DIR"))) + (allow file-read* file-write* (subpath (param "USER_DEVELOPER_DIR"))) + + ; Allow reads and writes to the task sandbox location + (allow file-read* file-write* (subpath (param "TASK_SANDBOX"))) + + ; Allow reads and writes to specific files required by system frameworks and libraries + (allow file-read-data file-write-data + (regex + #"/\\.CFUserTextEncoding$" + #"^/usr/share/nls/" + #"^/usr/share/zoneinfo /var/db/timezone/zoneinfo/" + )) + + (allow file-read-metadata) + """ + try executionDelegate.fs.write(sandboxPath, contents: sandboxProfile) + + return [ + "/usr/bin/sandbox-exec", + "-D", "TASK_SANDBOX=\(sandboxDirectory.str)", + "-D", "XCODE=\(developerDirectory.dirname.dirname.str)", + "-D", "USER_CACHE_DIR=\(Path.homeDirectory.join("Library/Caches").str)", + "-D", "DARWIN_USER_CACHE_DIR=\(userCacheDir().str)", + "-D", "DARWIN_USER_TEMP_DIR=\(try executionDelegate.fs.realpath(Path.temporaryDirectory).str)", + "-D", "USER_LOGS_DIR=\(Path.homeDirectory.join("Library/Logs").str)", + "-D", "USER_DEVELOPER_DIR=\(Path.homeDirectory.join("Library/Developer").str)", + "-f", sandboxPath.str, + ] #else - return [] + return [] #endif } } @@ -412,11 +416,11 @@ fileprivate final class CapturingTaskOutputDelegate: TaskOutputDelegate { underlyingTaskOutputDelegate.incrementTaskCounter(counter, by: amount) } - var counters: [BuildOperationMetrics.Counter : Int] { + var counters: [BuildOperationMetrics.Counter: Int] { underlyingTaskOutputDelegate.counters } - var taskCounters: [BuildOperationMetrics.TaskCounter : Int] { + var taskCounters: [BuildOperationMetrics.TaskCounter: Int] { underlyingTaskOutputDelegate.taskCounters } diff --git a/Sources/SWBTaskExecution/TaskActions/InfoPlistProcessorTaskAction.swift b/Sources/SWBTaskExecution/TaskActions/InfoPlistProcessorTaskAction.swift index 225cc2c8..e381b9f0 100644 --- a/Sources/SWBTaskExecution/TaskActions/InfoPlistProcessorTaskAction.swift +++ b/Sources/SWBTaskExecution/TaskActions/InfoPlistProcessorTaskAction.swift @@ -20,10 +20,8 @@ import class Foundation.PropertyListSerialization import class Foundation.NSError /// Concrete implementation of the Info.plist processor in-process task. -public final class InfoPlistProcessorTaskAction: TaskAction -{ - public override class var toolIdentifier: String - { +public final class InfoPlistProcessorTaskAction: TaskAction { + public override class var toolIdentifier: String { return "info-plist-processor" } @@ -45,8 +43,7 @@ public final class InfoPlistProcessorTaskAction: TaskAction private var requiredArchitecture: String? = nil private var outputFormat: PropertyListSerialization.PropertyListFormat? = nil - public init(_ contextPath: Path) - { + public init(_ contextPath: Path) { self.contextPath = contextPath super.init() } @@ -77,8 +74,7 @@ public final class InfoPlistProcessorTaskAction: TaskAction let productType = context.productType let platform = context.platform // Parse the arguments and cache them. We only have to do this the first time the task is run, since the task won't change from run to run, and it cannot be run multiple times in parallel in the same process. - if configParsingResult == nil - { + if configParsingResult == nil { // The compiler won't let us just do a tuple assignment of the method return value here. 
c.f let (result, messages) = parseConfiguration(task, context: context) configParsingResult = result @@ -86,8 +82,7 @@ public final class InfoPlistProcessorTaskAction: TaskAction } // We do, however, have to emit the messages and handle a bad exit code on each run. configParsingMessages?.emitMessages(outputDelegate) - if configParsingResult! != .succeeded - { + if configParsingResult! != .succeeded { return configParsingResult! } @@ -110,32 +105,23 @@ public final class InfoPlistProcessorTaskAction: TaskAction // Read the property list from the file. We assume this method will emit any appropriate messages during reading. // We will modify the plist in place throughout the course of the tool until we finally have the contents we want to write out. let contentsData: ByteString - do - { + do { contentsData = try executionDelegate.fs.read(inputPath) - } - catch let e - { + } catch let e { outputDelegate.emitError("unable to read input file '\(inputPath.str)': \(e.localizedDescription)") return .failed } var (plist, inputFormat): (PropertyListItem, PropertyListSerialization.PropertyListFormat) - do - { + do { (plist, inputFormat) = try PropertyList.fromBytesWithFormat(contentsData.bytes) - } - catch let error as NSError - { + } catch let error as NSError { outputDelegate.emitError("unable to read property list from file: \(inputPath.str): \(error.localizedDescription)") return .failed - } - catch - { + } catch { outputDelegate.emitError("unable to read property list from file: \(inputPath.str): unknown error") return .failed } - // Confirm that the property list is a dictionary. guard case let .plDict(plistDict) = plist else { outputDelegate.emitError("contents of file is not a dictionary: \(inputPath.str)"); return .failed } @@ -148,31 +134,25 @@ public final class InfoPlistProcessorTaskAction: TaskAction return .failed } plist = .plDict(plistDict.addingContents(of: infoPlistFileContentsDict)) - } - catch { + } catch { outputDelegate.emitError("unable to create property list from string '\(infoPlistFileContents)': \(error.localizedDescription)") return .failed } } - // Load the additional content files, if any, and merge their content into the input plist. - for path in additionalContentFilePaths - { + for path in additionalContentFilePaths { let additionalContentResult = addAdditionalContent(from: path, &plist, context: context, executionDelegate, outputDelegate) - guard additionalContentResult == .succeeded else - { + guard additionalContentResult == .succeeded else { // addAdditionalContent() will have emitted any issues as it executed. return additionalContentResult } } - for path in privacyFileContentFilePaths - { + for path in privacyFileContentFilePaths { if let privacyFile = scanForPrivacyFile(at: path, fs: executionDelegate.fs) { let additionalPrivacyContentResult = addAppPrivacyContent(from: privacyFile, &plist, executionDelegate, outputDelegate) - guard additionalPrivacyContentResult == .succeeded else - { + guard additionalPrivacyContentResult == .succeeded else { // addAppPrivacyContent() will have emitted any issues as it executed. return additionalPrivacyContentResult } @@ -180,19 +160,16 @@ public final class InfoPlistProcessorTaskAction: TaskAction } // If we were passed any required architecture, then we apply it to the value of the 'UIRequiredDeviceCapabilities' key, by setting it and removing any others. 
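// A minimal sketch of the read-and-require-a-dictionary step above, using Foundation's
// PropertyListSerialization in place of the internal PropertyList helpers; `inputURL`
// is a hypothetical file URL.
import Foundation

func readInfoPlistDictionary(at inputURL: URL) throws -> [String: Any] {
    let data = try Data(contentsOf: inputURL)
    var format = PropertyListSerialization.PropertyListFormat.xml
    let object = try PropertyListSerialization.propertyList(from: data, options: [], format: &format)
    guard let dictionary = object as? [String: Any] else {
        throw NSError(
            domain: "InfoPlistProcessor",
            code: 1,
            userInfo: [NSLocalizedDescriptionKey: "contents of file is not a dictionary: \(inputURL.path)"]
        )
    }
    return dictionary
}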
- if requiredArchitecture != nil - { + if requiredArchitecture != nil { let result = setRequiredDeviceCapabilities(&plist, context: context, outputDelegate: outputDelegate) - if result != .succeeded - { + if result != .succeeded { return result } } // Add the data from the additional info property in the platform to the plist. let additionalInfoResult = addAdditionalEntriesFromPlatform(&plist, context: context, outputDelegate: outputDelegate) - if additionalInfoResult != .succeeded - { + if additionalInfoResult != .succeeded { // addAdditionalEntriesFromPlatform() will have emitted any issues as it executed. return additionalInfoResult } @@ -230,34 +207,36 @@ public final class InfoPlistProcessorTaskAction: TaskAction } // Elide any empty string values for a specific set of keys (bad things happen in the system if an Info.plist has an empty value for any of these keys). - plist = plist.byElidingRecursivelyEmptyStringValuesInDictionaries(Set([ - "CFBundleDevelopmentRegion", - "CFBundleExecutable", - "CFBundleGetInfoString", - "CFBundleIconFile", - "CFBundleIdentifier", - "CFBundleName", - "CFBundlePackageType", - "CFBundleResourceSpecification", - "CFBundleShortVersionString", - "CFBundleSignature", - "CFBundleTypeIconFile", - "CFBundleTypeRole", - "CFBundleVersion", - "NSDocumentClass", - "NSHelpFile", - "NSHumanReadableCopyright", - "NSMainNibFile", - "NSPrincipalClass", - "NSStickerSharingLevel", - - // These aren't harmful to the system if they're empty, but will cause App Store submission to fail, so elide them as well. - "BuildMachineOSBuild", - "DTSwiftPlaygroundsBuild", - "DTSwiftPlaygroundsVersion", - "DTXcode", - "DTXcodeBuild", - ])) + plist = plist.byElidingRecursivelyEmptyStringValuesInDictionaries( + Set([ + "CFBundleDevelopmentRegion", + "CFBundleExecutable", + "CFBundleGetInfoString", + "CFBundleIconFile", + "CFBundleIdentifier", + "CFBundleName", + "CFBundlePackageType", + "CFBundleResourceSpecification", + "CFBundleShortVersionString", + "CFBundleSignature", + "CFBundleTypeIconFile", + "CFBundleTypeRole", + "CFBundleVersion", + "NSDocumentClass", + "NSHelpFile", + "NSHumanReadableCopyright", + "NSMainNibFile", + "NSPrincipalClass", + "NSStickerSharingLevel", + + // These aren't harmful to the system if they're empty, but will cause App Store submission to fail, so elide them as well. + "BuildMachineOSBuild", + "DTSwiftPlaygroundsBuild", + "DTSwiftPlaygroundsVersion", + "DTXcode", + "DTXcodeBuild", + ]) + ) // Convert the PropertyListItem to a Dictionary so we can inject content guard case .plDict(var plistDict) = plist else { @@ -273,8 +252,7 @@ public final class InfoPlistProcessorTaskAction: TaskAction let deploymentTarget: Version? if let deploymentTargetMacro = platform?.deploymentTargetMacro { deploymentTarget = try? Version(scope.evaluate(deploymentTargetMacro)) - } - else { + } else { deploymentTarget = nil } @@ -325,8 +303,7 @@ public final class InfoPlistProcessorTaskAction: TaskAction if numDocumentTypes > 0, plistDict["LSSupportsOpeningDocumentsInPlace"] == nil, plistDict["UISupportsDocumentBrowser"] == nil { outputDelegate.emitWarning("The application supports opening files, but doesn't declare whether it supports opening them in place. 
You can add an LSSupportsOpeningDocumentsInPlace entry or an UISupportsDocumentBrowser entry to your Info.plist to declare support.") } - } - else if platform?.familyName == "macOS", !disableInfoPlistPlatformEditing { + } else if platform?.familyName == "macOS", !disableInfoPlistPlatformEditing { if numDocumentTypes > 0, let sodip = plistDict["LSSupportsOpeningDocumentsInPlace"], sodip.boolValue == false { outputDelegate.emitError("'LSSupportsOpeningDocumentsInPlace = NO' is not supported on macOS. Either remove the entry or set it to YES, and also ensure that the application does open documents in place on macOS.") return .failed @@ -371,49 +348,35 @@ public final class InfoPlistProcessorTaskAction: TaskAction return .succeeded } - // MARK: Parsing the configuration - - private func parseConfiguration(_ task: any ExecutableTask, context: InfoPlistProcessorTaskActionContext) -> (CommandResult, TaskActionMessageCollection) - { + private func parseConfiguration(_ task: any ExecutableTask, context: InfoPlistProcessorTaskActionContext) -> (CommandResult, TaskActionMessageCollection) { let messages = TaskActionMessageCollection() let commandLine = [String](task.commandLineAsStrings) - var i = 1 // Skip over the tool name - while i < commandLine.count - { + var i = 1 // Skip over the tool name + while i < commandLine.count { let option = commandLine[i] switch option { case "-format": // The '-format' option takes a single argument: 'openstep', 'xml', or 'binary'. - if commandLine.count > i+1 - { + if commandLine.count > i + 1 { // The argument is the format in which to write the output. i += 1 let arg = commandLine[i] - if arg == "openstep" - { + if arg == "openstep" { outputFormat = .openStep - } - else if arg == "xml" - { + } else if arg == "xml" { outputFormat = .xml - } - else if arg == "binary" - { + } else if arg == "binary" { outputFormat = .binary - } - else - { + } else { // Unrecognized argument to -format. messages.addMessage(.error("unrecognized argument to \(option): '\(arg)' (use 'openstep', 'xml', or 'binary')")) return (.failed, messages) } - } - else - { + } else { // No argument to -format. messages.addMessage(.error("missing argument for \(option) (use 'openstep', 'xml', or 'binary')")) return (.failed, messages) @@ -421,25 +384,19 @@ public final class InfoPlistProcessorTaskAction: TaskAction case "-genpkginfo": // The '-pkginfo' option takes a single argument: the path of the pkginfo file to generate. - if commandLine.count > i+1 - { + if commandLine.count > i + 1 { // The argument is the output path. i += 1 let arg = commandLine[i] - if pkgInfoPath == nil - { + if pkgInfoPath == nil { // We don't already have a pkginfo path; we do now. pkgInfoPath = Path(arg) - } - else - { + } else { // We already have a pkginfo path. messages.addMessage(.error("multiple pkginfo paths specified")) return (.failed, messages) } - } - else - { + } else { // No argument to -genpkginfo. messages.addMessage(.error("missing argument for \(option)")) return (.failed, messages) @@ -466,32 +423,25 @@ public final class InfoPlistProcessorTaskAction: TaskAction case "-platform": // The '-platform' option takes a single argument: the name of the platform for which the Info.plist is being generated. - if commandLine.count > i+1 - { + if commandLine.count > i + 1 { // The argument is the platform name. i += 1 let arg = commandLine[i] - if platformName == nil - { + if platformName == nil { // We don't already have a platform name; we do now. 
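// A minimal sketch of the "-format" value handling above, collapsed into a helper
// that maps the accepted spellings onto PropertyListSerialization.PropertyListFormat;
// nil stands in for the "unrecognized argument" error path.
import Foundation

func outputFormat(for argument: String) -> PropertyListSerialization.PropertyListFormat? {
    switch argument {
    case "openstep": return .openStep
    case "xml": return .xml
    case "binary": return .binary
    default: return nil
    }
}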
platformName = arg // If the platform name we were passed differs from the platform we were configured with, then emit an error. - if platformName! != context.platform?.name - { + if platformName! != context.platform?.name { messages.addMessage(.error("argument to -platform '\(platformName!)' differs from platform being targeted '\(context.platform?.name ?? "")'")) return (.failed, messages) } - } - else - { + } else { // We already have a pkginfo path. messages.addMessage(.error("multiple platform names specified")) return (.failed, messages) } - } - else - { + } else { // No argument to -platform. messages.addMessage(.error("missing argument for \(option)")) return (.failed, messages) @@ -507,25 +457,19 @@ public final class InfoPlistProcessorTaskAction: TaskAction case "-resourcerulesfile": // The '-resourcerulesfile' option takes a single argument: the name (or path) of the resource rules file. - if commandLine.count > i+1 - { + if commandLine.count > i + 1 { // The argument is the resource rules file. i += 1 let arg = commandLine[i] - if resourceRulesFileName == nil - { + if resourceRulesFileName == nil { // We don't already have a resource rules file; we do now. resourceRulesFileName = Path(arg) - } - else - { + } else { // We already have a pkginfo path. messages.addMessage(.error("multiple resource rules files specified")) return (.failed, messages) } - } - else - { + } else { // No argument to -resourcerulesfile. messages.addMessage(.error("missing argument for \(option)")) return (.failed, messages) @@ -533,15 +477,12 @@ public final class InfoPlistProcessorTaskAction: TaskAction case "-additionalcontentfile": // The '-additionalcontentfile' option takes a single argument: the name of the additional content file. There may be more than one such file passed. - if commandLine.count > i+1 - { + if commandLine.count > i + 1 { // The argument is the additional content file. i += 1 let arg = commandLine[i] additionalContentFilePaths.append(Path(arg)) - } - else - { + } else { // No argument to -additionalcontentfile. messages.addMessage(.error("missing argument for \(option)")) return (.failed, messages) @@ -549,14 +490,11 @@ public final class InfoPlistProcessorTaskAction: TaskAction case "-scanforprivacyfile": // The '-scanforprivacyfile' option takes a single argument: the path of the path to scan for an `PrivacyInfo.xcprivacy` file. There may be more than one such path passed. - if commandLine.count > i+1 - { + if commandLine.count > i + 1 { i += 1 let arg = commandLine[i] privacyFileContentFilePaths.append(Path(arg)) - } - else - { + } else { // No argument to -scanforprivacyfile. messages.addMessage(.error("missing argument for \(option)")) return (.failed, messages) @@ -570,7 +508,7 @@ public final class InfoPlistProcessorTaskAction: TaskAction return (.failed, messages) } - guard commandLine.count > i+1 else { + guard commandLine.count > i + 1 else { messages.addMessage(.error("missing argument for \(option)")) return (.failed, messages) } @@ -582,44 +520,33 @@ public final class InfoPlistProcessorTaskAction: TaskAction case "-o": // The '-o' option takes a single argument: the output path. - if commandLine.count > i+1 - { + if commandLine.count > i + 1 { // The argument is the resource rules file. i += 1 let arg = commandLine[i] - if outputPath == nil - { + if outputPath == nil { // We don't already have a resource rules file; we do now. outputPath = Path(arg) - } - else - { + } else { // We already have a pkginfo path. 
messages.addMessage(.error("multiple output files specified")) return (.failed, messages) } - } - else - { + } else { // No argument to -o. messages.addMessage(.error("missing argument for \(option)")) return (.failed, messages) } default: - if option.hasPrefix("-") - { + if option.hasPrefix("-") { // Unknown option. messages.addMessage(.error("unrecognized option: \(option)")) return (.failed, messages) - } - else if inputPath == nil - { + } else if inputPath == nil { // If we don't already have an input path, then we use this as the input path. inputPath = Path(option) - } - else - { + } else { // It's an input path and we already have one; that's not allowed. messages.addMessage(.error("multiple input files specified")) return (.failed, messages) @@ -692,8 +619,7 @@ public final class InfoPlistProcessorTaskAction: TaskAction /// Produce a collection of default Info.plist keys based on content in the project, taking platform and product type into account. /// Could be done through InfoPlistAdditions properties in product types, but that would result in more duplication at this point - private func defaultInfoPlistContent(scope: MacroEvaluationScope, platform: Platform?, productType: ProductTypeSpec?) -> [String: PropertyListItem] - { + private func defaultInfoPlistContent(scope: MacroEvaluationScope, platform: Platform?, productType: ProductTypeSpec?) -> [String: PropertyListItem] { var content: [String: PropertyListItem] = [:] // The general theory of this method is that we now have a bunch of INFOPLIST_KEY-prefixed build settings that are used to define sufficiently simple Info.plist content, and we go to those (which will either have appropriate backstops set as necessary, or be empty) to determine the content to generate. @@ -701,8 +627,10 @@ public final class InfoPlistProcessorTaskAction: TaskAction /// Get the macro name corresponding to the given Info.plist key name. /// This accommodates platform-specific override names like UISomething~ipad, which may use proper capitalization in the build setting. func macroNameForInfoPlistKey(key: String, prefix: String) -> String { - let replacements = [ "~iphone": "_iPhone", - "~ipad": "_iPad" ] + let replacements = [ + "~iphone": "_iPhone", + "~ipad": "_iPad", + ] var adjustedKey = key for (infoPlistPart, macroNamePart) in replacements { adjustedKey = adjustedKey.replacingOccurrences(of: infoPlistPart, with: macroNamePart) @@ -738,69 +666,70 @@ public final class InfoPlistProcessorTaskAction: TaskAction // Note that for flexibility, instead of rigidly assigning only some values for some platforms, we (mostly) rely on the templates to specify the right values for the platforms involved, and to leave values they don't care about unspecified. 
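// A minimal standalone sketch of the macroNameForInfoPlistKey mapping shown above;
// the "INFOPLIST_KEY_" prefix value and the simple prefix-plus-key concatenation are
// assumptions for illustration, not taken from the diff.
import Foundation

func infoPlistKeyMacroName(for key: String, prefix: String = "INFOPLIST_KEY_") -> String {
    let replacements = [
        "~iphone": "_iPhone",
        "~ipad": "_iPad",
    ]
    var adjustedKey = key
    for (infoPlistPart, macroNamePart) in replacements {
        adjustedKey = adjustedKey.replacingOccurrences(of: infoPlistPart, with: macroNamePart)
    }
    return prefix + adjustedKey
}

// e.g. "UISupportedInterfaceOrientations~ipad" -> "INFOPLIST_KEY_UISupportedInterfaceOrientations_iPad"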
- let generatedInfoPlistKeys: [String] = [ - // General + let generatedInfoPlistKeys: [String] = + [ + // General - "CFBundleDisplayName", - "LSApplicationCategoryType", - "NSHumanReadableCopyright", - "NSPrincipalClass", - "ITSAppUsesNonExemptEncryption", - "ITSEncryptionExportComplianceCode", - "NSLocationTemporaryUsageDescriptionDictionary", + "CFBundleDisplayName", + "LSApplicationCategoryType", + "NSHumanReadableCopyright", + "NSPrincipalClass", + "ITSAppUsesNonExemptEncryption", + "ITSEncryptionExportComplianceCode", + "NSLocationTemporaryUsageDescriptionDictionary", - // macOS + // macOS - "LSBackgroundOnly", - "LSUIElement", - "NSMainNibFile", - "NSMainStoryboardFile", + "LSBackgroundOnly", + "LSUIElement", + "NSMainNibFile", + "NSMainStoryboardFile", - // iOS and Derived + // iOS and Derived - "UILaunchStoryboardName", - "UIMainStoryboardFile", - "UIRequiredDeviceCapabilities", - "UISupportedInterfaceOrientations", - "UIUserInterfaceStyle", + "UILaunchStoryboardName", + "UIMainStoryboardFile", + "UIRequiredDeviceCapabilities", + "UISupportedInterfaceOrientations", + "UIUserInterfaceStyle", - // iOS + // iOS - "LSSupportsOpeningDocumentsInPlace", - "NSSensorKitPrivacyPolicyURL", - "NSSupportsLiveActivities", - "NSSupportsLiveActivitiesFrequentUpdates", - "UIApplicationSupportsIndirectInputEvents", - "UIRequiresFullScreen", - "UIStatusBarHidden", - "UIStatusBarStyle", - "UISupportedInterfaceOrientations~ipad", - "UISupportedInterfaceOrientations~iphone", - "UISupportsDocumentBrowser", + "LSSupportsOpeningDocumentsInPlace", + "NSSensorKitPrivacyPolicyURL", + "NSSupportsLiveActivities", + "NSSupportsLiveActivitiesFrequentUpdates", + "UIApplicationSupportsIndirectInputEvents", + "UIRequiresFullScreen", + "UIStatusBarHidden", + "UIStatusBarStyle", + "UISupportedInterfaceOrientations~ipad", + "UISupportedInterfaceOrientations~iphone", + "UISupportsDocumentBrowser", - // watchOS + // watchOS - "CLKComplicationPrincipalClass", - "WKApplication", - "WKCompanionAppBundleIdentifier", - "WKExtensionDelegateClassName", - "WKRunsIndependentlyOfCompanionApp", - "WKWatchOnly", - "WKSupportsLiveActivityLaunchAttributeTypes", + "CLKComplicationPrincipalClass", + "WKApplication", + "WKCompanionAppBundleIdentifier", + "WKExtensionDelegateClassName", + "WKRunsIndependentlyOfCompanionApp", + "WKWatchOnly", + "WKSupportsLiveActivityLaunchAttributeTypes", - // Metal + // Metal - "MetalCaptureEnabled", + "MetalCaptureEnabled", - // Game Controller and Game Mode + // Game Controller and Game Mode - "GCSupportsControllerUserInteraction", - "GCSupportsGameMode", + "GCSupportsControllerUserInteraction", + "GCSupportsGameMode", - // Sticker Packs + // Sticker Packs - "NSStickerSharingLevel", - ] + usageDescriptionStringKeys + "NSStickerSharingLevel", + ] + usageDescriptionStringKeys for key in generatedInfoPlistKeys { updateContentWithInfoPlistKeyMacroValue(&content, key) @@ -824,7 +753,7 @@ public final class InfoPlistProcessorTaskAction: TaskAction updateContent(&content, key: "CFBundlePackageType", buildSetting: BuiltinMacros.PRODUCT_BUNDLE_PACKAGE_TYPE.name) updateContent(&content, key: "CFBundleShortVersionString", buildSetting: BuiltinMacros.MARKETING_VERSION.name) - content["CFBundleInfoDictionaryVersion"] = .plString("6.0") // TODO: consider removing this since it likely isn't needed + content["CFBundleInfoDictionaryVersion"] = .plString("6.0") // TODO: consider removing this since it likely isn't needed // iOS and Derived Platforms - Special Cases @@ -869,36 +798,26 @@ public final class 
InfoPlistProcessorTaskAction: TaskAction } /// Add the keys from the `AdditionalInfo` dictionary in the platform to the Info.plist. Only keys which do not exist in the input plist will be added. This will modify the input `plist` in place. - private func addAdditionalEntriesFromPlatform(_ plist: inout PropertyListItem, context: InfoPlistProcessorTaskActionContext, outputDelegate: any TaskOutputDelegate) -> CommandResult - { + private func addAdditionalEntriesFromPlatform(_ plist: inout PropertyListItem, context: InfoPlistProcessorTaskActionContext, outputDelegate: any TaskOutputDelegate) -> CommandResult { // If we were passed a platform with -platform, then we use its additional info. - if platformName != nil - { + if platformName != nil { // Only do anything if the platform's additional info is not empty. - if context.platform?.additionalInfoPlistEntries.count > 0 - { + if context.platform?.additionalInfoPlistEntries.count > 0 { guard case .plDict(let dict) = plist else { outputDelegate.emitError("Info.plist contents are not a dictionary prior to adding additional entries from platform"); return .failed } var plistDict = dict // Go through the additional entries from the platform and add them to the plist dictionary. But only if they're not already present in the plist. - for (key, plValue) in context.platform?.additionalInfoPlistEntries ?? [:] where plistDict[key] == nil - { - if case .plString(let value) = plValue - { + for (key, plValue) in context.platform?.additionalInfoPlistEntries ?? [:] where plistDict[key] == nil { + if case .plString(let value) = plValue { // In the past, the convention was used to indicate that the value for this key should be the value in the platform's properties. It looks like no platform circa Xcode 9.0 uses this convention anymore, so we no longer support it and we warn if we find such a key - we should encourage platforms to use actual build setting evaluation instead. - if value.hasPrefix("<") && value.hasSuffix(">") && value.count > 2 - { + if value.hasPrefix("<") && value.hasSuffix(">") && value.count > 2 { outputDelegate.emitWarning("key '\(key)' in 'AdditionalInfo' dictionary for platform \(context.platform?.displayName ?? "") uses unsupported bracket evaluation convention for its value '\(value)'") plistDict[key] = plValue - } - else - { + } else { // String values which aren't surrounded by <>. plistDict[key] = plValue } - } - else - { + } else { // Non-string values. 
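// A minimal sketch of the merge policy used above for the platform's AdditionalInfo
// entries, reduced to plain dictionaries: keys already present in the Info.plist win,
// keys that are absent are copied over.
func addingPlatformEntries(_ additional: [String: Any], to plist: [String: Any]) -> [String: Any] {
    var merged = plist
    for (key, value) in additional where merged[key] == nil {
        merged[key] = value
    }
    return merged
}

// Equivalent shorthand: plist.merging(additional) { existing, _ in existing }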
plistDict[key] = plValue } @@ -917,20 +836,17 @@ public final class InfoPlistProcessorTaskAction: TaskAction let contentsData: ByteString do { contentsData = try executionDelegate.fs.read(path) - } - catch { + } catch { outputDelegate.emitError("unable to read additional content file '\(path.str)': \(error.localizedDescription)") return .failed } let additionalPlist: PropertyListItem do { (additionalPlist, _) = try PropertyList.fromBytesWithFormat(contentsData.bytes) - } - catch let error as NSError { + } catch let error as NSError { outputDelegate.emitError("unable to read property list from additional content file: \(path.str): \(error.localizedDescription)") return .failed - } - catch { + } catch { outputDelegate.emitError("unable to read property list from additional content file: \(path.str): unknown error") return .failed } @@ -953,8 +869,7 @@ public final class InfoPlistProcessorTaskAction: TaskAction if key == "UIRequiredDeviceCapabilities" { if let mergedCapabilitiesResult = mergeUIRequiredDeviceCapabilities(valueToMerge, from: path, into: workingValue, from: inputPath!, outputDelegate) { plistDict[key] = mergedCapabilitiesResult - } - else { + } else { return .failed } } else if (key == "CFBundleIcons" || key.hasPrefix("CFBundleIcons~")) && context.scope.evaluate(BuiltinMacros.INFOPLIST_ENABLE_CFBUNDLEICONS_MERGE) { @@ -990,8 +905,7 @@ public final class InfoPlistProcessorTaskAction: TaskAction outputDelegate.emitError("tried to merge \(valueToMerge.typeDisplayString) value for key '\(key)' onto \(workingValue.typeDisplayString) value") return .failed } - } - else { + } else { // If the key isn't defined, then we can just set it. plistDict[key] = valueToMerge } @@ -1011,20 +925,17 @@ public final class InfoPlistProcessorTaskAction: TaskAction let contentsData: ByteString do { contentsData = try executionDelegate.fs.read(path) - } - catch { + } catch { outputDelegate.emitError("unable to read privacy file '\(path.str)': \(error.localizedDescription)") return .failed } let privacyPlist: PropertyListItem do { (privacyPlist, _) = try PropertyList.fromBytesWithFormat(contentsData.bytes) - } - catch let error as NSError { + } catch let error as NSError { outputDelegate.emitError("unable to read property list from privacy file: \(path.str): \(error.localizedDescription)") return .failed - } - catch { + } catch { outputDelegate.emitError("unable to read property list from privacy file: \(path.str): unknown error") return .failed } @@ -1042,8 +953,8 @@ public final class InfoPlistProcessorTaskAction: TaskAction var trackedDomains = plistDict[trackedDomainsKey]?.arrayValue ?? [] if let additionalTrackedDomains = privacyDict[trackedDomainsKey]?.arrayValue { // It's import to both remove duplicates and sort the items to ensure stable file contents. - var set = Set(trackedDomains.compactMap( { $0.stringValue })) - for domain in additionalTrackedDomains.compactMap( { $0.stringValue }) { + var set = Set(trackedDomains.compactMap({ $0.stringValue })) + for domain in additionalTrackedDomains.compactMap({ $0.stringValue }) { set.insert(domain) } trackedDomains = set.sorted().map { PropertyListItem.plString($0) } @@ -1099,7 +1010,7 @@ public final class InfoPlistProcessorTaskAction: TaskAction if plistDict["UIDeviceFamily"] != nil { outputDelegate.emitWarning("User supplied UIDeviceFamily key in the Info.plist will be overwritten. 
Please use the build setting TARGETED_DEVICE_FAMILY and remove UIDeviceFamily from your Info.plist.") } - plistDict["UIDeviceFamily"] = .plArray(effectiveDevices.sorted().map{ .plInt($0) }) + plistDict["UIDeviceFamily"] = .plArray(effectiveDevices.sorted().map { .plInt($0) }) } } @@ -1141,22 +1052,22 @@ public final class InfoPlistProcessorTaskAction: TaskAction /// Mapping of all keys known to not be supported on some platforms to the info about which platforms they are or aren't supported on. let keyPlatformSupport: [String: PlatformFamilySupportInfo] = [ // Keys not supported on macOS, watchOS. - "LSRequiresIPhoneOS": .notSupportedOn(Set(["macOS", "watchOS"])), + "LSRequiresIPhoneOS": .notSupportedOn(Set(["macOS", "watchOS"])), // Keys only supported on macOS. - "LSBackgroundOnly": .supportedOnlyOn(Set(["macOS"])), - "LSUIElement": .supportedOnlyOn(Set(["macOS"])), - "NSDocumentClass": .supportedOnlyOn(Set(["macOS"])), - "NSServices": .supportedOnlyOn(Set(["macOS"])), - "NSSupportsAutomaticTermination": .supportedOnlyOn(Set(["macOS"])), - "NSSupportsSuddenTermination": .supportedOnlyOn(Set(["macOS"])), + "LSBackgroundOnly": .supportedOnlyOn(Set(["macOS"])), + "LSUIElement": .supportedOnlyOn(Set(["macOS"])), + "NSDocumentClass": .supportedOnlyOn(Set(["macOS"])), + "NSServices": .supportedOnlyOn(Set(["macOS"])), + "NSSupportsAutomaticTermination": .supportedOnlyOn(Set(["macOS"])), + "NSSupportsSuddenTermination": .supportedOnlyOn(Set(["macOS"])), // Keys only supported on macOS, iOS, tvOS. - "LSApplicationCategoryType": .supportedOnlyOn(Set(["macOS", "iOS", "tvOS"])), + "LSApplicationCategoryType": .supportedOnlyOn(Set(["macOS", "iOS", "tvOS"])), ] /// Mapping of all the values for keys known to not be supported on some platforms to the info about which platforms they are or aren't supported on. - let keyValuesPlatformSupport: [String:[String:PlatformFamilySupportInfo]] = [ + let keyValuesPlatformSupport: [String: [String: PlatformFamilySupportInfo]] = [ "UIBackgroundModes": [ "audio": .supportedOnlyOn(["iOS", "tvOS", "watchOS", "xrOS"]), "location": .supportedOnlyOn(["iOS", "watchOS"]), @@ -1188,11 +1099,10 @@ public final class InfoPlistProcessorTaskAction: TaskAction let reason: String if let platform = platforms.only { reason = "only supported on \(platform)" - } - else { + } else { reason = "not supported on \(context.platform?.familyDisplayName ?? "")" } - remove(unsupportedKey: key, reason) + remove(unsupportedKey: key, reason) } } } @@ -1215,11 +1125,10 @@ public final class InfoPlistProcessorTaskAction: TaskAction let reason: String if let platform = platforms.only { reason = "only supported on \(platform)" - } - else { + } else { reason = "not supported on \(context.platform?.familyDisplayName ?? "")" } - remove(unsupportedValue: value, from: key, reason) + remove(unsupportedValue: value, from: key, reason) } } } @@ -1266,7 +1175,7 @@ public final class InfoPlistProcessorTaskAction: TaskAction if let oldValue { switch (oldValue, newValue) { case (.plArray(let oldArray), .plArray(let newArray)): - // Both values are arrays - we append the contents of newValue to oldValue, making sure not to add the same value twice. + // Both values are arrays - we append the contents of newValue to oldValue, making sure not to add the same value twice. // (This would be easier if PropertyListItem were hashable and we could create an OrderedSet here.) 
var resultArray = oldArray for item in newArray { @@ -1282,8 +1191,7 @@ public final class InfoPlistProcessorTaskAction: TaskAction for item in oldArray { if let key = item.stringValue { oldDict[key] = .plBool(true) - } - else { + } else { outputDelegate.emitError("all values in 'UIRequiredDeviceCapabilities' must be strings (file \(oldValuePath)") return nil } @@ -1296,8 +1204,7 @@ public final class InfoPlistProcessorTaskAction: TaskAction for item in newArray { if let key = item.stringValue { newDict[key] = .plBool(true) - } - else { + } else { outputDelegate.emitError("all values in 'UIRequiredDeviceCapabilities' must be strings (file \(newValuePath)") return nil } @@ -1319,21 +1226,18 @@ public final class InfoPlistProcessorTaskAction: TaskAction } return nil } - } - else { + } else { // If oldValue is empty then we can just set it to newValue, as long as it is an array or a dictionary. if newValue.arrayValue != nil || newValue.dictValue != nil { return newValue - } - else { + } else { outputDelegate.emitError("'UIRequiredDeviceCapabilities' must be an array or dictionary but it is \(newValue.typeDisplayString.withIndefiniteArticle) (file \(oldValuePath.str))") return nil } } } - private func setRequiredDeviceCapabilities(_ plist: inout PropertyListItem, context: InfoPlistProcessorTaskActionContext, outputDelegate: any TaskOutputDelegate) -> CommandResult - { + private func setRequiredDeviceCapabilities(_ plist: inout PropertyListItem, context: InfoPlistProcessorTaskActionContext, outputDelegate: any TaskOutputDelegate) -> CommandResult { guard case .plDict(var plistDict) = plist else { outputDelegate.emitError("Info.plist contents are not a dictionary prior to setting required device capabilities") return .failed @@ -1406,8 +1310,7 @@ public final class InfoPlistProcessorTaskAction: TaskAction outputDelegate.warning("User-supplied CFBundleIdentifier value '\(identifierString)' in the Info.plist must be the same as the PRODUCT_BUNDLE_IDENTIFIER build setting value '\(buildSettingIdentifierString)'.", location: .buildSetting(BuiltinMacros.PRODUCT_BUNDLE_IDENTIFIER)) } } - } - else { + } else { outputDelegate.emitWarning("the value for Bundle Identifier must be of type string, but is \(identifier.typeDisplayString.withIndefiniteArticle)") } } @@ -1430,14 +1333,12 @@ public final class InfoPlistProcessorTaskAction: TaskAction if minimumSystemVersionStr.isEmpty { outputDelegate.emitWarning("\(minimumSystemVersionKey) is explicitly set to empty - setting to value of \(deploymentTargetName) '\(deploymentTarget.description)'.") plistDict[minimumSystemVersionKey] = .plString(deploymentTarget.description) - } - else if let minimumSystemVersion = try? Version(minimumSystemVersionStr) { + } else if let minimumSystemVersion = try? 
Version(minimumSystemVersionStr) { if minimumSystemVersion < deploymentTarget { outputDelegate.emitWarning("\(minimumSystemVersionKey) of '\(minimumSystemVersionStr)' is less than the value of \(deploymentTargetName) '\(deploymentTarget.description)' - setting to '\(deploymentTarget.description)'.") plistDict[minimumSystemVersionKey] = .plString(deploymentTarget.description) } - } - else { + } else { outputDelegate.emitError("\(minimumSystemVersionKey) '\(minimumSystemVersionStr)' is not a valid version.") return false } @@ -1460,7 +1361,7 @@ public final class InfoPlistProcessorTaskAction: TaskAction case iOS case tvOS case watchOS - case visionOS = "xrOS" // must be "xrOS" as it's compared against Platform.familyName + case visionOS = "xrOS" // must be "xrOS" as it's compared against Platform.familyName } /// The values of the key that are deprecated, if only specific values are deprecated. `nil` indicates the key as a whole is deprecated. @@ -1491,19 +1392,26 @@ public final class InfoPlistProcessorTaskAction: TaskAction let plistKeyDeprecationInfo: [PropertyListKeyPath: DeprecationInfo] = [ "UILaunchImages": .init(moreInfo: .alternate("launch storyboards"), deprecationVersions: [.iOS: Version(), .tvOS: Version(13)]), "CLKComplicationSupportedFamilies": .init(moreInfo: .alternate("the ClockKit complications API"), deprecationVersions: [.watchOS: Version(7)]), - PropertyListKeyPath(.dict(.equal("NSAppTransportSecurity")), .dict(.equal("NSExceptionDomains")), .dict(.any), .any(.equal("NSExceptionMinimumTLSVersion"))): .init(values: [.plString("TLSv1.0"), .plString("TLSv1.1")], moreInfo: .alternate("TLSv1.2 or TLSv1.3"), deprecationVersions: [ - .macOS: Version(12), - .iOS: Version(15), - .tvOS: Version(15), - .watchOS: Version(8) - ]), - "UIRequiresFullScreen": .init(moreInfo: .ignored("See the UIRequiresFullScreen documentation for more details."), deprecationVersions: [ - .macOS: Version(26), - .iOS: Version(26), - .tvOS: Version(26), - .watchOS: Version(26), - .visionOS: Version(26), - ]) + PropertyListKeyPath(.dict(.equal("NSAppTransportSecurity")), .dict(.equal("NSExceptionDomains")), .dict(.any), .any(.equal("NSExceptionMinimumTLSVersion"))): .init( + values: [.plString("TLSv1.0"), .plString("TLSv1.1")], + moreInfo: .alternate("TLSv1.2 or TLSv1.3"), + deprecationVersions: [ + .macOS: Version(12), + .iOS: Version(15), + .tvOS: Version(15), + .watchOS: Version(8), + ] + ), + "UIRequiresFullScreen": .init( + moreInfo: .ignored("See the UIRequiresFullScreen documentation for more details."), + deprecationVersions: [ + .macOS: Version(26), + .iOS: Version(26), + .tvOS: Version(26), + .watchOS: Version(26), + .visionOS: Version(26), + ] + ), ] for (key, info) in plistKeyDeprecationInfo.sorted(byKey: <) { @@ -1567,8 +1475,7 @@ public final class InfoPlistProcessorTaskAction: TaskAction private func scanForPrivacyFile(at path: Path, fs: any FSProxy) -> Path? { do { return try fs.traverse(path) { $0.basename == "PrivacyInfo.xcprivacy" ? $0 : nil }.first - } - catch {} + } catch {} return nil } @@ -1595,7 +1502,8 @@ public final class InfoPlistProcessorTaskAction: TaskAction } // Get the package type code and signature (a.k.a. creator) code from the Info.plist. We do various correctness checks. One of the more interesting restrictions is that both the type code and the signature code have to be convertible to Mac OS Roman encoding. The reason for this is that both four-character codes are really OSTypes, which were implicitly encoded in Mac OS Roman back in historical times. 
- let pkgInfoBytes = (OutputByteStream() + let pkgInfoBytes = + (OutputByteStream() <<< getFourCharCode(forKey: "CFBundlePackageType") <<< getFourCharCode(forKey: "CFBundleSignature")).bytes do { @@ -1605,32 +1513,25 @@ public final class InfoPlistProcessorTaskAction: TaskAction } } - // Serialization - - public override func serialize(to serializer: T) - { + public override func serialize(to serializer: T) { serializer.beginAggregate(2) serializer.serialize(contextPath) super.serialize(to: serializer) serializer.endAggregate() } - public required init(from deserializer: any Deserializer) throws - { + public required init(from deserializer: any Deserializer) throws { try deserializer.beginAggregate(2) self.contextPath = try deserializer.deserialize() try super.init(from: deserializer) } } - -private extension PropertyListItem -{ +private extension PropertyListItem { /// Specialized private method to transform a property list by removing any keys which are in the set `keys` from any dictionary in the plist where the value for that key is empty. - func byElidingRecursivelyEmptyStringValuesInDictionaries(_ keysToElide: Set) -> PropertyListItem - { + func byElidingRecursivelyEmptyStringValuesInDictionaries(_ keysToElide: Set) -> PropertyListItem { switch self { case .plArray(let value): @@ -1640,14 +1541,12 @@ private extension PropertyListItem case .plDict(let value): // Handle this dictionary. var result = [String: PropertyListItem]() - for (key, item) in value - { + for (key, item) in value { switch item { case .plString(let value): // For strings, we elide it only if the key is in keysToElide and the value is empty. - if !value.isEmpty || !keysToElide.contains(key) - { + if !value.isEmpty || !keysToElide.contains(key) { result[key] = item } diff --git a/Sources/SWBTaskExecution/TaskActions/LinkAssetCatalogTaskAction.swift b/Sources/SWBTaskExecution/TaskActions/LinkAssetCatalogTaskAction.swift index 3a30eaa6..fb417304 100644 --- a/Sources/SWBTaskExecution/TaskActions/LinkAssetCatalogTaskAction.swift +++ b/Sources/SWBTaskExecution/TaskActions/LinkAssetCatalogTaskAction.swift @@ -147,7 +147,7 @@ public final class LinkAssetCatalogTaskAction: TaskAction { try executionDelegate.fs.remove(path) } try executionDelegate.fs.copy(sourceDependencies, to: path) - case .makefile, .makefiles, .makefileIgnoringSubsequentOutputs, nil: + case .makefile, .makefiles, .makefileIgnoringSubsequentOutputs, nil: throw StubError.error("Unexpected dependency data style") } @@ -203,14 +203,16 @@ extension FSProxy { } func relativeFilePaths(directory: Path) throws -> Set { - try Set(traverse(directory) { path -> Path? in - guard !isDirectory(path) else { - return nil - } - guard let subpath = path.relativeSubpath(from: directory) else { - throw StubError.error("Could not compute path of \(path.str) relative to \(directory.str)") + try Set( + traverse(directory) { path -> Path? 
in + guard !isDirectory(path) else { + return nil + } + guard let subpath = path.relativeSubpath(from: directory) else { + throw StubError.error("Could not compute path of \(path.str) relative to \(directory.str)") + } + return Path(subpath) } - return Path(subpath) - }) + ) } } diff --git a/Sources/SWBTaskExecution/TaskActions/MergeInfoPlistTaskAction.swift b/Sources/SWBTaskExecution/TaskActions/MergeInfoPlistTaskAction.swift index 73f0a0cd..2c25a930 100644 --- a/Sources/SWBTaskExecution/TaskActions/MergeInfoPlistTaskAction.swift +++ b/Sources/SWBTaskExecution/TaskActions/MergeInfoPlistTaskAction.swift @@ -35,9 +35,16 @@ public final class MergeInfoPlistTaskAction: TaskAction { do { guard let byteString = try Set(inputs.map { try executionDelegate.fs.read($0) }).only else { - outputDelegate.emit(Diagnostic(behavior: .error, location: .unknown, data: DiagnosticData("Info.plist preprocessing produced variable content across multiple architectures and/or build variants, which is not allowed for bundle targets."), childDiagnostics: inputs.map { input in - Diagnostic(behavior: .note, location: .path(input), data: DiagnosticData("Using preprocessed file: \(input.str)")) - })) + outputDelegate.emit( + Diagnostic( + behavior: .error, + location: .unknown, + data: DiagnosticData("Info.plist preprocessing produced variable content across multiple architectures and/or build variants, which is not allowed for bundle targets."), + childDiagnostics: inputs.map { input in + Diagnostic(behavior: .note, location: .path(input), data: DiagnosticData("Using preprocessed file: \(input.str)")) + } + ) + ) return .failed } diff --git a/Sources/SWBTaskExecution/TaskActions/ODRAssetPackManifestTaskAction.swift b/Sources/SWBTaskExecution/TaskActions/ODRAssetPackManifestTaskAction.swift index 24142a60..52e6e633 100644 --- a/Sources/SWBTaskExecution/TaskActions/ODRAssetPackManifestTaskAction.swift +++ b/Sources/SWBTaskExecution/TaskActions/ODRAssetPackManifestTaskAction.swift @@ -43,13 +43,11 @@ public final class ODRAssetPackManifestTaskAction: TaskAction { // a) if ASSET_PACK_MANIFEST_URL_PREFIX is set to anything other than the empty string, we use it, and append the name of the asset pack to it (note that we do -not- insert a path separator, to allow prefixes like "http://myserver/script?pack="). 
guard let suffix = assetPackPath.basename.addingPercentEncoding(withAllowedCharacters: allowedChars) else { throw StubError.error("could not percent-encode \(assetPackPath.basename)") } return assetPackManifestURLPrefix.appending(suffix) - } - else if embedAssetPacksInProductBundle { + } else if embedAssetPacksInProductBundle { // b) otherwise, if the ASSET_PACK_FOLDER_PATH path is inside the Resources directory of the main product bundle, we use the relative path to that Resources directory as the URL, guard let subPath = assetPackPath.relativeSubpath(from: unlocalizedProductResourcesDir) else { throw StubError.error("expected path \(assetPackPath.str) to be a subpath of \(unlocalizedProductResourcesDir.str)") } return subPath - } - else { + } else { // c) otherwise, we use a full http://127.0.0.1/full/path.assetpack path with an absolute path to the asset pack guard let suffix = assetPackPath.str.addingPercentEncoding(withAllowedCharacters: allowedChars) else { throw StubError.error("could not percent-encode \(assetPackPath.str)") } return "http://127.0.0.1".appending(suffix) @@ -64,8 +62,7 @@ public final class ODRAssetPackManifestTaskAction: TaskAction { let infoPlist: PropertyListItem do { infoPlist = try PropertyList.fromPath(infoPlistPath, fs: executionDelegate.fs) - } - catch { + } catch { throw StubError.error("failed to load \(infoPlistPath.str): \(error)") } @@ -76,8 +73,7 @@ public final class ODRAssetPackManifestTaskAction: TaskAction { if let priorityItem = dict["Priority"] { guard case .plDouble(let p) = priorityItem else { throw StubError.error("expected number in \(infoPlistPath.str) : Priority") } priority = p - } - else { + } else { priority = nil } @@ -91,8 +87,7 @@ public final class ODRAssetPackManifestTaskAction: TaskAction { let plistData = try AssetPackManifestPlist(resources: Set(resources)).propertyListItem.asBytes(.binary) try executionDelegate.fs.write(outputPath, contents: ByteString(plistData)) - } - catch { + } catch { outputDelegate.emitError("\(error)") return .failed } diff --git a/Sources/SWBTaskExecution/TaskActions/PrecompileClangModuleTaskAction.swift b/Sources/SWBTaskExecution/TaskActions/PrecompileClangModuleTaskAction.swift index 598c1a54..9f0c38e2 100644 --- a/Sources/SWBTaskExecution/TaskActions/PrecompileClangModuleTaskAction.swift +++ b/Sources/SWBTaskExecution/TaskActions/PrecompileClangModuleTaskAction.swift @@ -147,9 +147,11 @@ final public class PrecompileClangModuleTaskAction: TaskAction, BuildValueValida return .failed } - for node in (dependencyInfo.files.map { - ExecutionNode(identifier: $0.str) - }) { + for node + in (dependencyInfo.files.map { + ExecutionNode(identifier: $0.str) + }) + { dynamicExecutionDelegate.discoveredDependencyNode(node) } @@ -231,10 +233,11 @@ final public class PrecompileClangModuleTaskAction: TaskAction, BuildValueValida if case .some(.exit(.uncaughtSignal, _)) = outputDelegate.result { do { if let reproducerMessage = try clangModuleDependencyGraph.generateReproducer( - forFailedDependency: dependencyInfo, - libclangPath: key.libclangPath, - casOptions: key.casOptions, - location: key.reproducerOutputPath?.str) { + forFailedDependency: dependencyInfo, + libclangPath: key.libclangPath, + casOptions: key.casOptions, + location: key.reproducerOutputPath?.str + ) { outputDelegate.emitOutput(ByteString(encodingAsUTF8: reproducerMessage) + "\n") } } catch { diff --git a/Sources/SWBTaskExecution/TaskActions/ProcessProductEntitlementsTaskAction.swift 
b/Sources/SWBTaskExecution/TaskActions/ProcessProductEntitlementsTaskAction.swift index 02c45f8e..3d781b73 100644 --- a/Sources/SWBTaskExecution/TaskActions/ProcessProductEntitlementsTaskAction.swift +++ b/Sources/SWBTaskExecution/TaskActions/ProcessProductEntitlementsTaskAction.swift @@ -19,8 +19,7 @@ public import SWBUtil public import SWBMacro /// Concrete implementation of task for processing product entitlements. -public final class ProcessProductEntitlementsTaskAction: TaskAction -{ +public final class ProcessProductEntitlementsTaskAction: TaskAction { /// The merged entitlements. let entitlements: PropertyListItem @@ -45,8 +44,7 @@ public final class ProcessProductEntitlementsTaskAction: TaskAction /// The timestamp of the latest modification of the entitlements on `init` let entitlementsModificationTimestamp: Result? - public init(fs: any FSProxy, entitlements: PropertyListItem, entitlementsVariant: EntitlementsVariant, allowEntitlementsModification: Bool, entitlementsDestination: EntitlementsDestination, destinationPlatformName: String, entitlementsFilePath: Path?) - { + public init(fs: any FSProxy, entitlements: PropertyListItem, entitlementsVariant: EntitlementsVariant, allowEntitlementsModification: Bool, entitlementsDestination: EntitlementsDestination, destinationPlatformName: String, entitlementsFilePath: Path?) { self.entitlements = entitlements self.entitlementsVariant = entitlementsVariant self.allowEntitlementsModification = allowEntitlementsModification @@ -72,11 +70,9 @@ public final class ProcessProductEntitlementsTaskAction: TaskAction } /// The parsed command line options. - private struct Options - { + private struct Options { /// The output format of the task. - enum FormatKind: String - { + enum FormatKind: String { /// Preserve the format of the input file. case sameAsInput = "none" /// Convert to binary. @@ -86,8 +82,7 @@ public final class ProcessProductEntitlementsTaskAction: TaskAction // Note that Foundation no longer supports writing the OpenStep plist format. - init?(name: String) - { + init?(name: String) { switch name { case "openstep": @@ -96,23 +91,18 @@ public final class ProcessProductEntitlementsTaskAction: TaskAction default: // Otherwise we initialize from the raw value, if possible. - if let value = FormatKind(rawValue: name) - { + if let value = FormatKind(rawValue: name) { self = value return - } - else - { + } else { return nil } } } } - static func emitUsage(_ name: String, _ outputDelegate: any TaskOutputDelegate) - { - outputDelegate.emitOutput - { stream in + static func emitUsage(_ name: String, _ outputDelegate: any TaskOutputDelegate) { + outputDelegate.emitOutput { stream in stream <<< "usage: \(name) -entitlements [-format ] -o \n" stream <<< " -entitlements\n" stream <<< " Handle entitlements in the input file. This option is required.\n" @@ -135,16 +125,14 @@ public final class ProcessProductEntitlementsTaskAction: TaskAction /// The path to the output file. let outputPath: Path - init?(_ commandLine: AnySequence, _ outputDelegate: any TaskOutputDelegate) - { + init?(_ commandLine: AnySequence, _ outputDelegate: any TaskOutputDelegate) { var format = FormatKind.sameAsInput var processEntitlements = false var inputPath: Path? = nil var foundOutputPathOption = false var outputPath: Path? 
= nil var hadErrors = false - func error(_ message: String) - { + func error(_ message: String) { outputDelegate.emitError(message) hadErrors = true } @@ -153,9 +141,7 @@ public final class ProcessProductEntitlementsTaskAction: TaskAction let generator = commandLine.makeIterator() // Skip the executable. let programName = generator.next() ?? "<>" - argumentParsing: - while let arg = generator.next() - { + argumentParsing: while let arg = generator.next() { switch arg { case "-entitlements": @@ -164,13 +150,11 @@ public final class ProcessProductEntitlementsTaskAction: TaskAction case "-format": // The '-format' option takes a single argument: 'xml', 'binary', 'openstep' or 'none'. - guard let name = generator.next() else - { + guard let name = generator.next() else { error("missing argument for option: \(arg)") continue } - guard let kind = FormatKind(name: name) else - { + guard let kind = FormatKind(name: name) else { error("failed to parse option: \(arg) \(name)") continue } @@ -179,8 +163,7 @@ public final class ProcessProductEntitlementsTaskAction: TaskAction case "-o": // The '-o' argument take a single parameter: the output path. foundOutputPathOption = true - guard let value = generator.next() else - { + guard let value = generator.next() else { error("missing argument for option: \(arg)") continue } @@ -202,21 +185,18 @@ public final class ProcessProductEntitlementsTaskAction: TaskAction } // Diagnose missing -entitlements option. - if !processEntitlements - { + if !processEntitlements { error("missing required option: -entitlements") } // Diagnose missing output path option. - if outputPath == nil && !foundOutputPathOption - { + if outputPath == nil && !foundOutputPathOption { error("missing required option: -o") outputPath = Path("<>") } // If there were errors, emit the usage and return an error. - if hadErrors - { + if hadErrors { outputDelegate.emitOutput("\n") Options.emitUsage(programName, outputDelegate) return nil @@ -230,13 +210,11 @@ public final class ProcessProductEntitlementsTaskAction: TaskAction } } - public override class var toolIdentifier: String - { + public override class var toolIdentifier: String { return "process-product-entitlements" } - public override func getSignature(_ task: any ExecutableTask, executionDelegate: any TaskExecutionDelegate) -> ByteString - { + public override func getSignature(_ task: any ExecutableTask, executionDelegate: any TaskExecutionDelegate) -> ByteString { // If the scheme command changes then our signature changes so we have to re-run. return super.getSignature(task, executionDelegate: executionDelegate) + ByteString(encodingAsUTF8: executionDelegate.schemeCommand?.description ?? "") } @@ -254,7 +232,7 @@ public final class ProcessProductEntitlementsTaskAction: TaskAction } // Make paths absolute. -// let input = task.workingDirectory.join(options.inputPath) // Not presently used + // let input = task.workingDirectory.join(options.inputPath) // Not presently used let output = task.workingDirectory.join(options.outputPath) // Updating entitlements is not something that is actively encouraged or supported, however, this is a compatibility pain point for certain projects that we need to maintain some ability to do this. A better approach is to plumb this through the system so that we can track this as a proper dependency mechanism, potentially through our virtual task producers... however, until then, we enable this functionality for those existing clients. 
@@ -285,8 +263,7 @@ public final class ProcessProductEntitlementsTaskAction: TaskAction if !self.allowEntitlementsModification { outputDelegate.emitError("Entitlements file \"\(entitlementsFilePath.basename)\" was modified during the build, which is not supported. You can disable this error by setting 'CODE_SIGN_ALLOW_ENTITLEMENTS_MODIFICATION' to 'YES', however this may cause the built product's code signature or provisioning profile to contain incorrect entitlements.") return .failed - } - else { + } else { let plist: PropertyListItem do { plist = try PropertyList.fromBytes(executionDelegate.fs.read(entitlementsFilePath).bytes) @@ -309,8 +286,7 @@ public final class ProcessProductEntitlementsTaskAction: TaskAction do { try executionDelegate.fs.write(output, contents: ByteString(effectiveEntitlements.asBytes(.xml))) - } - catch let error as NSError { + } catch let error as NSError { outputDelegate.emitError("could not write entitlements file '\(output.str)': \(error.localizedDescription)") return .failed } @@ -344,9 +320,11 @@ public final class ProcessProductEntitlementsTaskAction: TaskAction } static var allEntitlements: [String: PropertyListItem] { - return Dictionary(uniqueKeysWithValues: allCases.map { entitlement in - return (entitlement.key, entitlement.value) - }) + return Dictionary( + uniqueKeysWithValues: allCases.map { entitlement in + return (entitlement.key, entitlement.value) + } + ) } } @@ -388,14 +366,10 @@ public final class ProcessProductEntitlementsTaskAction: TaskAction return .plDict(augmentedDict) } - // Serialization - - public override func serialize(to serializer: T) - { - serializer.serializeAggregate(8) - { + public override func serialize(to serializer: T) { + serializer.serializeAggregate(8) { // FIXME: We have no way to handle any errors in PropertyListItem.asBytes() here. serializer.serialize(try? entitlements.asBytes(.binary)) serializer.serialize(entitlementsVariant) @@ -408,8 +382,7 @@ public final class ProcessProductEntitlementsTaskAction: TaskAction } } - public required init(from deserializer: any Deserializer) throws - { + public required init(from deserializer: any Deserializer) throws { try deserializer.beginAggregate(8) self.entitlements = try PropertyList.fromBytes(try deserializer.deserialize()) self.entitlementsVariant = try deserializer.deserialize() diff --git a/Sources/SWBTaskExecution/TaskActions/ProcessProductProvisioningProfileTaskAction.swift b/Sources/SWBTaskExecution/TaskActions/ProcessProductProvisioningProfileTaskAction.swift index e78375a2..03652a62 100644 --- a/Sources/SWBTaskExecution/TaskActions/ProcessProductProvisioningProfileTaskAction.swift +++ b/Sources/SWBTaskExecution/TaskActions/ProcessProductProvisioningProfileTaskAction.swift @@ -17,22 +17,17 @@ public import SWBCore import SWBLibc public import SWBUtil - /// Concrete implementation of task for processing the provisioning profile for a product. -public final class ProcessProductProvisioningProfileTaskAction: TaskAction -{ - public override init() - { +public final class ProcessProductProvisioningProfileTaskAction: TaskAction { + public override init() { // Presently this class' initializer doesn't do anything extra - all parameters are passed in as command-line options. super.init() } /// The parsed command line options. - private struct Options - { + private struct Options { /// The output format of the task. - enum FormatKind: String - { + enum FormatKind: String { /// Preserve the format of the input file. case sameAsInput = "none" /// Convert to binary. 
@@ -42,8 +37,7 @@ public final class ProcessProductProvisioningProfileTaskAction: TaskAction // Note that Foundation no longer supports writing the OpenStep plist format. - init?(name: String) - { + init?(name: String) { switch name { case "openstep": @@ -52,23 +46,18 @@ public final class ProcessProductProvisioningProfileTaskAction: TaskAction default: // Otherwise we initialize from the raw value, if possible. - if let value = FormatKind(rawValue: name) - { + if let value = FormatKind(rawValue: name) { self = value return - } - else - { + } else { return nil } } } } - static func emitUsage(_ name: String, _ outputDelegate: any TaskOutputDelegate) - { - outputDelegate.emitOutput - { stream in + static func emitUsage(_ name: String, _ outputDelegate: any TaskOutputDelegate) { + outputDelegate.emitOutput { stream in stream <<< "usage: \(name) [-format ] -o \n" stream <<< " --format {none|binary|xml}\n" stream <<< " The output format of the entitlements.\n" @@ -86,15 +75,13 @@ public final class ProcessProductProvisioningProfileTaskAction: TaskAction /// The path to the output file. let outputPath: Path - init?(_ commandLine: AnySequence, _ outputDelegate: any TaskOutputDelegate) - { + init?(_ commandLine: AnySequence, _ outputDelegate: any TaskOutputDelegate) { var format = FormatKind.sameAsInput var inputPath: Path? = nil var foundOutputPathOption = false var outputPath: Path? = nil var hadErrors = false - func error(_ message: String) - { + func error(_ message: String) { outputDelegate.emitError(message) hadErrors = true } @@ -103,20 +90,16 @@ public final class ProcessProductProvisioningProfileTaskAction: TaskAction let generator = commandLine.makeIterator() // Skip the executable. let programName = generator.next() ?? "<>" - argumentParsing: - while let arg = generator.next() - { + argumentParsing: while let arg = generator.next() { switch arg { case "-format": // The '-format' option takes a single argument: 'xml', 'binary', 'openstep' or 'none'. - guard let name = generator.next() else - { + guard let name = generator.next() else { error("missing argument for option: \(arg)") continue } - guard let kind = FormatKind(name: name) else - { + guard let kind = FormatKind(name: name) else { error("failed to parse option: \(arg) \(name)") continue } @@ -125,8 +108,7 @@ public final class ProcessProductProvisioningProfileTaskAction: TaskAction case "-o": // The '-o' argument take a single parameter: the output path. foundOutputPathOption = true - guard let value = generator.next() else - { + guard let value = generator.next() else { error("missing argument for option: \(arg)") continue } @@ -148,22 +130,19 @@ public final class ProcessProductProvisioningProfileTaskAction: TaskAction } // Diagnose missing input path. - if inputPath == nil - { + if inputPath == nil { error("no input file specified") inputPath = Path("<>") } // Diagnose missing output path option. - if outputPath == nil && !foundOutputPathOption - { + if outputPath == nil && !foundOutputPathOption { error("missing required option: -o") outputPath = Path("<>") } // If there were errors, emit the usage and return an error. 
- if hadErrors - { + if hadErrors { outputDelegate.emitOutput("\n") Options.emitUsage(programName, outputDelegate) return nil @@ -176,8 +155,7 @@ public final class ProcessProductProvisioningProfileTaskAction: TaskAction } } - public override class var toolIdentifier: String - { + public override class var toolIdentifier: String { return "process-product-provisioning-profile" } @@ -189,23 +167,19 @@ public final class ProcessProductProvisioningProfileTaskAction: TaskAction outputDelegate: any TaskOutputDelegate ) async -> CommandResult { // Parse the arguments. - guard let options = Options(task.commandLineAsStrings, outputDelegate) else - { + guard let options = Options(task.commandLineAsStrings, outputDelegate) else { return .failed } // Make paths absolute. - let input = task.workingDirectory.join(options.inputPath) // Not presently used + let input = task.workingDirectory.join(options.inputPath) // Not presently used let output = task.workingDirectory.join(options.outputPath) // Read the input file. let contents: ByteString - do - { + do { contents = try executionDelegate.fs.read(input) - } - catch let e - { + } catch let e { outputDelegate.emitError("unable to read input file '\(input.str)': \(e.localizedDescription)") return .failed } @@ -230,12 +204,9 @@ public final class ProcessProductProvisioningProfileTaskAction: TaskAction guard let (plist, _) = validateAsPropertyList(contents, outputDelegate) else { return .failed } let outBytes: [UInt8] - do - { + do { try outBytes = plist.asBytes(outputFormat!) - } - catch - { + } catch { let errorDescription = (error as NSError).localizedDescription outputDelegate.emitError("unable to create \(options.format.rawValue) property list data: \(errorDescription)") return .failed @@ -244,12 +215,9 @@ public final class ProcessProductProvisioningProfileTaskAction: TaskAction } // Finally we can write the output file. - do - { + do { try executionDelegate.fs.write(output, contents: outContents) - } - catch let error as NSError - { + } catch let error as NSError { outputDelegate.emitError("could not write profile file: \(output.str): \(error.localizedDescription)") return .failed } @@ -260,8 +228,7 @@ public final class ProcessProductProvisioningProfileTaskAction: TaskAction /// Validate that the given `contents` are a property list, with the root item being a dictionary, and return the parsed property list. /// /// If validation fails, then messages will be emitted to the `outputDelegate`, and nil will be returned. - private func validateAsPropertyList(_ contents: ByteString, _ outputDelegate: any TaskOutputDelegate) -> (PropertyListItem, PropertyListSerialization.PropertyListFormat)? - { + private func validateAsPropertyList(_ contents: ByteString, _ outputDelegate: any TaskOutputDelegate) -> (PropertyListItem, PropertyListSerialization.PropertyListFormat)? { // Validate it as a property list. The top-level item must be a dictionary. 
let plist: PropertyListItem let format: PropertyListSerialization.PropertyListFormat @@ -272,8 +239,7 @@ public final class ProcessProductProvisioningProfileTaskAction: TaskAction outputDelegate.emitError("unable to read input file as a property list: \(errorDescription)") return nil } - guard case .plDict = plist else - { + guard case .plDict = plist else { outputDelegate.emitError("input file is not a dictionary") return nil } @@ -281,20 +247,15 @@ public final class ProcessProductProvisioningProfileTaskAction: TaskAction return (plist, format) } - // Serialization - - public override func serialize(to serializer: T) - { - serializer.serializeAggregate(1) - { + public override func serialize(to serializer: T) { + serializer.serializeAggregate(1) { super.serialize(to: serializer) } } - public required init(from deserializer: any Deserializer) throws - { + public required init(from deserializer: any Deserializer) throws { try deserializer.beginAggregate(1) try super.init(from: deserializer) } diff --git a/Sources/SWBTaskExecution/TaskActions/ProcessXCFrameworkTaskAction.swift b/Sources/SWBTaskExecution/TaskActions/ProcessXCFrameworkTaskAction.swift index aacfd3fb..01be8587 100644 --- a/Sources/SWBTaskExecution/TaskActions/ProcessXCFrameworkTaskAction.swift +++ b/Sources/SWBTaskExecution/TaskActions/ProcessXCFrameworkTaskAction.swift @@ -24,7 +24,7 @@ public final class ProcessXCFrameworkTaskAction: TaskAction { public override func performTaskAction(_ task: any ExecutableTask, dynamicExecutionDelegate: any DynamicTaskExecutionDelegate, executionDelegate: any TaskExecutionDelegate, clientDelegate: any TaskExecutionClientDelegate, outputDelegate: any TaskOutputDelegate) async -> CommandResult { let generator = task.commandLineAsStrings.makeIterator() - _ = generator.next() // consume program name + _ = generator.next() // consume program name var xcframeworkPath: Path? var platform: String? @@ -119,15 +119,13 @@ public final class ProcessXCFrameworkTaskAction: TaskAction { if skipSignatureValidation { // NOTE: Always emit the warning when enabled as this can cause issues in other environments, such as CI. outputDelegate.emitWarning("XCFramework signature validation is being skipped. Remove `DISABLE_XCFRAMEWORK_SIGNATURE_VALIDATION` to disable this warning.") - } - else if await !validateExpectedSignature(path, expectedSignatures: expectedSignatures, outputDelegate: outputDelegate) { + } else if await !validateExpectedSignature(path, expectedSignatures: expectedSignatures, outputDelegate: outputDelegate) { return .failed } try xcframework.copy(library: library, from: path, to: target, fs: fs) return .succeeded - } - catch { + } catch { outputDelegate.emitError(error.localizedDescription) return .failed } @@ -156,10 +154,15 @@ public final class ProcessXCFrameworkTaskAction: TaskAction { guard !signatures.isEmpty else { // NOTE: This is likely an internal tooling error or adoption bring-up issue, so soft-error here. 
- let diagnostic = Diagnostic(behavior: .error, location: location, data: DiagnosticData("Expected signatures are malformed"), childDiagnostics: [ - Diagnostic(behavior: .note, location: .unknown, data: DiagnosticData("Expected signatures: \(expectedSignatures.joined(separator: ","))")), - Diagnostic(behavior: .note, location: .unknown, data: DiagnosticData("Replace or remove the expected signature data.")), - ]) + let diagnostic = Diagnostic( + behavior: .error, + location: location, + data: DiagnosticData("Expected signatures are malformed"), + childDiagnostics: [ + Diagnostic(behavior: .note, location: .unknown, data: DiagnosticData("Expected signatures: \(expectedSignatures.joined(separator: ","))")), + Diagnostic(behavior: .note, location: .unknown, data: DiagnosticData("Replace or remove the expected signature data.")), + ] + ) outputDelegate.emit(diagnostic) return false } @@ -185,8 +188,7 @@ public final class ProcessXCFrameworkTaskAction: TaskAction { outputDelegate.emit(diagnostic) return false - } - catch let CodeSignatureInfo.Error.codesignVerificationFailed(description, output) { + } catch let CodeSignatureInfo.Error.codesignVerificationFailed(description, output) { let childDiagnostics = [ Diagnostic(behavior: .note, location: .unknown, data: DiagnosticData(description)), Diagnostic(behavior: .note, location: .unknown, data: DiagnosticData(output)), @@ -196,8 +198,7 @@ public final class ProcessXCFrameworkTaskAction: TaskAction { outputDelegate.emit(diagnostic) return false - } - catch { + } catch { let childDiagnostics = [Diagnostic(behavior: .note, location: .unknown, data: DiagnosticData("Unable to load signature information for '\(path.str)'. error=\(error.localizedDescription)"))] let message = "The signature of “\(path.basename)” cannot be validated and may have been compromised." let diagnostic = Diagnostic(behavior: .error, location: location, data: DiagnosticData(message), childDiagnostics: childDiagnostics) @@ -218,7 +219,7 @@ public final class ProcessXCFrameworkTaskAction: TaskAction { let parts = s.split(separator: ":", maxSplits: 3).map { String($0) } guard let s = parts.first else { return nil } - guard let type = CodeSignatureInfo.SignatureType(rawValue: s) else { return nil } + guard let type = CodeSignatureInfo.SignatureType(rawValue: s) else { return nil } signatureType = type guard parts.count >= 2 else { return nil } diff --git a/Sources/SWBTaskExecution/TaskActions/RegisterExecutionPolicyExceptionTaskAction.swift b/Sources/SWBTaskExecution/TaskActions/RegisterExecutionPolicyExceptionTaskAction.swift index 4d2725e2..dc3a1fec 100644 --- a/Sources/SWBTaskExecution/TaskActions/RegisterExecutionPolicyExceptionTaskAction.swift +++ b/Sources/SWBTaskExecution/TaskActions/RegisterExecutionPolicyExceptionTaskAction.swift @@ -18,7 +18,7 @@ public import SWBCore // Weak-linked because the framework isn't available in all contexts #if canImport(ExecutionPolicy) -@_weakLinked import ExecutionPolicy + @_weakLinked import ExecutionPolicy #endif /// Concrete implementation of task for registering a built bundle or binary with the system to speed up execution policy checks. @@ -89,20 +89,20 @@ public final class RegisterExecutionPolicyExceptionTaskAction: TaskAction { } #if canImport(ExecutionPolicy) - if #_hasSymbol(EPExecutionPolicy.self) { - do { - // EPExecutionPolicy manages an XPC connection to syspolicyd, and we only need one per process. 
- // Further, concurrent deallocations of EPExecutionPolicy can trigger an ASan issue in libxpc - enum Static { - static let shared = EPExecutionPolicy() + if #_hasSymbol(EPExecutionPolicy.self) { + do { + // EPExecutionPolicy manages an XPC connection to syspolicyd, and we only need one per process. + // Further, concurrent deallocations of EPExecutionPolicy can trigger an ASan issue in libxpc + enum Static { + static let shared = EPExecutionPolicy() + } + + // Register the file. + try Static.shared.addException(for: URL(fileURLWithPath: options.input.str)) + } catch { + // We don't ever fail if blessing fails, and the failure information is not particularly interesting to users. } - - // Register the file. - try Static.shared.addException(for: URL(fileURLWithPath: options.input.str)) - } catch { - // We don't ever fail if blessing fails, and the failure information is not particularly interesting to users. } - } #endif return .succeeded diff --git a/Sources/SWBTaskExecution/TaskActions/SignatureCollectionTaskAction.swift b/Sources/SWBTaskExecution/TaskActions/SignatureCollectionTaskAction.swift index 229198e0..62a33985 100644 --- a/Sources/SWBTaskExecution/TaskActions/SignatureCollectionTaskAction.swift +++ b/Sources/SWBTaskExecution/TaskActions/SignatureCollectionTaskAction.swift @@ -39,13 +39,13 @@ public final class SignatureCollectionTaskAction: TaskAction { let input: Path let output: Path - let additionalInfo: [String:String] + let additionalInfo: [String: String] let skipValidation: Bool init?(_ commandLine: AnySequence, _ outputDelegate: any TaskOutputDelegate) { var inputArg: String? = nil var outputArg: String? = nil - var additionalInfo: [String:String] = [:] + var additionalInfo: [String: String] = [:] var skipSignatureValidation: Bool = false var hadErrors: Bool = false @@ -139,8 +139,7 @@ public final class SignatureCollectionTaskAction: TaskAction { let base = options.output.dirname try executionDelegate.fs.createDirectory(base, recursive: true) try executionDelegate.fs.write(options.output, contents: ByteString(data)) - } - catch { + } catch { outputDelegate.emitError("signature-collection failed: \(error.localizedDescription)") return .failed } diff --git a/Sources/SWBTaskExecution/TaskActions/SwiftCachingKeyQueryTaskAction.swift b/Sources/SWBTaskExecution/TaskActions/SwiftCachingKeyQueryTaskAction.swift index abe3c70f..ad16f423 100644 --- a/Sources/SWBTaskExecution/TaskActions/SwiftCachingKeyQueryTaskAction.swift +++ b/Sources/SWBTaskExecution/TaskActions/SwiftCachingKeyQueryTaskAction.swift @@ -39,7 +39,7 @@ public final class SwiftCachingKeyQueryTaskAction: TaskAction { outputDelegate: any TaskOutputDelegate ) async -> CommandResult { let swiftModuleDependencyGraph = dynamicExecutionDelegate.operationContext.swiftModuleDependencyGraph - do { + do { guard let cas = try swiftModuleDependencyGraph.getCASDatabases(casOptions: key.casOptions, compilerLocation: key.compilerLocation) else { throw StubError.error("unable to use CAS databases") } @@ -86,4 +86,3 @@ public final class SwiftCachingKeyQueryTaskAction: TaskAction { try super.init(from: deserializer) } } - diff --git a/Sources/SWBTaskExecution/TaskActions/SwiftCachingMaterializeKeyTaskAction.swift b/Sources/SWBTaskExecution/TaskActions/SwiftCachingMaterializeKeyTaskAction.swift index 0883b6f4..e63555a6 100644 --- a/Sources/SWBTaskExecution/TaskActions/SwiftCachingMaterializeKeyTaskAction.swift +++ b/Sources/SWBTaskExecution/TaskActions/SwiftCachingMaterializeKeyTaskAction.swift @@ -66,7 +66,7 @@ public final class 
SwiftCachingMaterializeKeyTaskAction: TaskAction { state.reset() let swiftModuleDependencyGraph = dynamicExecutionDelegate.operationContext.swiftModuleDependencyGraph - do { + do { guard let cas = try swiftModuleDependencyGraph.getCASDatabases(casOptions: taskKey.casOptions, compilerLocation: taskKey.compilerLocation) else { throw StubError.error("unable to use CAS databases") } @@ -96,9 +96,11 @@ public final class SwiftCachingMaterializeKeyTaskAction: TaskAction { ) state = .waitingForKeyQuery(jobTaskIDBase: 1, cas: cas, keys: taskKey.cacheKeys) } else { - if try requestCompilationOutputs(compOutputs, - dynamicExecutionDelegate: dynamicExecutionDelegate, - jobTaskIDBase: 1) != 0 { + if try requestCompilationOutputs( + compOutputs, + dynamicExecutionDelegate: dynamicExecutionDelegate, + jobTaskIDBase: 1 + ) != 0 { state = .waitingForOutputDownloads } else { state = .done @@ -127,11 +129,13 @@ public final class SwiftCachingMaterializeKeyTaskAction: TaskAction { } guard cachedComps.count == keys.count else { state = .done - return // compilation key not found. + return // compilation key not found. } - if try requestCompilationOutputs(cachedComps, - dynamicExecutionDelegate: dynamicExecutionDelegate, - jobTaskIDBase: jobTaskIDBase) != 0 { + if try requestCompilationOutputs( + cachedComps, + dynamicExecutionDelegate: dynamicExecutionDelegate, + jobTaskIDBase: jobTaskIDBase + ) != 0 { state = .waitingForOutputDownloads } else { state = .done @@ -157,10 +161,12 @@ public final class SwiftCachingMaterializeKeyTaskAction: TaskAction { let numRequested = try cachedComps.reduce(into: UInt(0)) { (numRequested, cachedComp) in try cachedComp.getOutputs().forEach { output in if output.isMaterialized { return } - let outputMaterializeKey = SwiftCachingOutputMaterializerTaskKey(casOptions: taskKey.casOptions, - casID: output.casID, - outputKind: output.kindName, - compilerLocation: taskKey.compilerLocation) + let outputMaterializeKey = SwiftCachingOutputMaterializerTaskKey( + casOptions: taskKey.casOptions, + casID: output.casID, + outputKind: output.kindName, + compilerLocation: taskKey.compilerLocation + ) dynamicExecutionDelegate.requestDynamicTask( toolIdentifier: SwiftCachingOutputMaterializerTaskAction.toolIdentifier, taskKey: .swiftCachingOutputMaterializer(outputMaterializeKey), @@ -210,4 +216,3 @@ public final class SwiftCachingMaterializeKeyTaskAction: TaskAction { try super.init(from: deserializer) } } - diff --git a/Sources/SWBTaskExecution/TaskActions/SwiftCachingOutputMaterializerTaskAction.swift b/Sources/SWBTaskExecution/TaskActions/SwiftCachingOutputMaterializerTaskAction.swift index fd917d34..54677c1e 100644 --- a/Sources/SWBTaskExecution/TaskActions/SwiftCachingOutputMaterializerTaskAction.swift +++ b/Sources/SWBTaskExecution/TaskActions/SwiftCachingOutputMaterializerTaskAction.swift @@ -41,7 +41,7 @@ public final class SwiftCachingOutputMaterializerTaskAction: TaskAction { outputDelegate: any TaskOutputDelegate ) async -> CommandResult { let swiftModuleDependencyGraph = dynamicExecutionDelegate.operationContext.swiftModuleDependencyGraph - do { + do { guard let cas = try swiftModuleDependencyGraph.getCASDatabases(casOptions: key.casOptions, compilerLocation: key.compilerLocation) else { throw StubError.error("unable to use CAS databases") } @@ -81,4 +81,3 @@ public final class SwiftCachingOutputMaterializerTaskAction: TaskAction { try super.init(from: deserializer) } } - diff --git a/Sources/SWBTaskExecution/TaskActions/SwiftDriverJobSchedulingTaskAction.swift 
b/Sources/SWBTaskExecution/TaskActions/SwiftDriverJobSchedulingTaskAction.swift index def85286..2bad7f0e 100644 --- a/Sources/SWBTaskExecution/TaskActions/SwiftDriverJobSchedulingTaskAction.swift +++ b/Sources/SWBTaskExecution/TaskActions/SwiftDriverJobSchedulingTaskAction.swift @@ -23,26 +23,25 @@ open class SwiftDriverJobSchedulingTaskAction: TaskAction { return "" } - - private enum State { - /// The action is in it's initial state, and has not yet performed any work - case initial - /// The action is waiting for execution inputs to be produced. Execution inputs include everything that would be considered an input to a standalone invocation of the driver (source files, output file map, gate tasks, etc.). - case waitingForExecutionInputs(openExecutionInputIDs: Set, jobTaskIDBase: UInt) - /// The action has requested a planning task for a target, and is waiting for the resulting planned build. - case planning(jobTaskIDBase: UInt) - /// A planned build is available, and the action is requesting and running tasks for individual jobs. - case requestingDriverJobs(primaryJobsTaskIDs: Set, secondaryJobTaskIDs: Set, discoveredJobTaskIDs: Set, jobTaskIDBase: UInt) - - /// A dependency of the action failed. - case failedDependencies - /// The action failed internally - case executionError(any Error) - - mutating func reset() { - self = .initial - } - } + private enum State { + /// The action is in it's initial state, and has not yet performed any work + case initial + /// The action is waiting for execution inputs to be produced. Execution inputs include everything that would be considered an input to a standalone invocation of the driver (source files, output file map, gate tasks, etc.). + case waitingForExecutionInputs(openExecutionInputIDs: Set, jobTaskIDBase: UInt) + /// The action has requested a planning task for a target, and is waiting for the resulting planned build. + case planning(jobTaskIDBase: UInt) + /// A planned build is available, and the action is requesting and running tasks for individual jobs. + case requestingDriverJobs(primaryJobsTaskIDs: Set, secondaryJobTaskIDs: Set, discoveredJobTaskIDs: Set, jobTaskIDBase: UInt) + + /// A dependency of the action failed. + case failedDependencies + /// The action failed internally + case executionError(any Error) + + mutating func reset() { + self = .initial + } + } private var state = State.initial @@ -92,16 +91,18 @@ open class SwiftDriverJobSchedulingTaskAction: TaskAction { } // If all execution inputs are now available, request the driver planning task and move to the `planning` state. 
if openExecutionInputIDs.isEmpty { - dynamicExecutionDelegate.requestDynamicTask(toolIdentifier: SwiftDriverTaskAction.toolIdentifier, - taskKey: .swiftDriverPlanning(.init(swiftPayload: payload)), - taskID: jobTaskIDBase - 1, - singleUse: true, - workingDirectory: task.workingDirectory, - environment: task.environment, - forTarget: task.forTarget, - priority: .unblocksDownstreamTasks, - showEnvironment: task.showEnvironment, - reason: .wasScheduledBySwiftDriver) + dynamicExecutionDelegate.requestDynamicTask( + toolIdentifier: SwiftDriverTaskAction.toolIdentifier, + taskKey: .swiftDriverPlanning(.init(swiftPayload: payload)), + taskID: jobTaskIDBase - 1, + singleUse: true, + workingDirectory: task.workingDirectory, + environment: task.environment, + forTarget: task.forTarget, + priority: .unblocksDownstreamTasks, + showEnvironment: task.showEnvironment, + reason: .wasScheduledBySwiftDriver + ) state = .planning(jobTaskIDBase: jobTaskIDBase) } return @@ -260,7 +261,6 @@ open class SwiftDriverJobSchedulingTaskAction: TaskAction { return .succeeded } - open func primaryJobs(for plannedBuild: LibSwiftDriver.PlannedBuild, driverPayload: SwiftDriverPayload) -> ArraySlice { assertionFailure("Subclass responsibility") return [] @@ -299,26 +299,34 @@ open class SwiftDriverJobSchedulingTaskAction: TaskAction { } } - internal func constructDriverJobTaskKey(driverPayload: SwiftDriverPayload, - plannedJob: LibSwiftDriver.PlannedBuild.PlannedSwiftDriverJob) -> DynamicTaskKey { + internal func constructDriverJobTaskKey( + driverPayload: SwiftDriverPayload, + plannedJob: LibSwiftDriver.PlannedBuild.PlannedSwiftDriverJob + ) -> DynamicTaskKey { let key: DynamicTaskKey if plannedJob.driverJob.categorizer.isExplicitDependencyBuild { - key = .swiftDriverExplicitDependencyJob(SwiftDriverExplicitDependencyJobTaskKey( - arch: driverPayload.architecture, - driverJobKey: plannedJob.key, - driverJobSignature: plannedJob.signature, - compilerLocation: driverPayload.compilerLocation, - casOptions: driverPayload.casOptions)) + key = .swiftDriverExplicitDependencyJob( + SwiftDriverExplicitDependencyJobTaskKey( + arch: driverPayload.architecture, + driverJobKey: plannedJob.key, + driverJobSignature: plannedJob.signature, + compilerLocation: driverPayload.compilerLocation, + casOptions: driverPayload.casOptions + ) + ) } else { - key = .swiftDriverJob(SwiftDriverJobTaskKey( - identifier: driverPayload.uniqueID, - variant: driverPayload.variant, - arch: driverPayload.architecture, - driverJobKey: plannedJob.key, - driverJobSignature: plannedJob.signature, - isUsingWholeModuleOptimization: driverPayload.isUsingWholeModuleOptimization, - compilerLocation: driverPayload.compilerLocation, - casOptions: driverPayload.casOptions)) + key = .swiftDriverJob( + SwiftDriverJobTaskKey( + identifier: driverPayload.uniqueID, + variant: driverPayload.variant, + arch: driverPayload.architecture, + driverJobKey: plannedJob.key, + driverJobSignature: plannedJob.signature, + isUsingWholeModuleOptimization: driverPayload.isUsingWholeModuleOptimization, + compilerLocation: driverPayload.compilerLocation, + casOptions: driverPayload.casOptions + ) + ) } return key } @@ -328,10 +336,10 @@ open class SwiftDriverJobSchedulingTaskAction: TaskAction { let isExplicitDependencyBuildJob = plannedJob.driverJob.categorizer.isExplicitDependencyBuild let taskID: UInt switch plannedJob.key { - case .targetJob(let index): - taskID = UInt(index) + jobTaskIDBase - case .explicitDependencyJob(let index): - taskID = UInt(index) + jobTaskIDBase + 
UInt(plannedBuild.targetBuildJobCount) + case .targetJob(let index): + taskID = UInt(index) + jobTaskIDBase + case .explicitDependencyJob(let index): + taskID = UInt(index) + jobTaskIDBase + UInt(plannedBuild.targetBuildJobCount) } cacheTaskID?(taskID) let taskKey = constructDriverJobTaskKey(driverPayload: driverPayload, plannedJob: plannedJob) diff --git a/Sources/SWBTaskExecution/TaskActions/SwiftDriverJobTaskAction.swift b/Sources/SWBTaskExecution/TaskActions/SwiftDriverJobTaskAction.swift index 3af00355..1c6c76fb 100644 --- a/Sources/SWBTaskExecution/TaskActions/SwiftDriverJobTaskAction.swift +++ b/Sources/SWBTaskExecution/TaskActions/SwiftDriverJobTaskAction.swift @@ -83,15 +83,15 @@ public final class SwiftDriverJobTaskAction: TaskAction, BuildValueValidatingTas case explicitDependency case targetCompile(_ identifier: String) - public func serialize(to serializer: T) where T : Serializer { + public func serialize(to serializer: T) where T: Serializer { serializer.beginAggregate(2) switch self { - case .explicitDependency: - serializer.serialize(0) - serializer.serializeNil() - case .targetCompile(let identifier): - serializer.serialize(1) - serializer.serialize(identifier) + case .explicitDependency: + serializer.serialize(0) + serializer.serializeNil() + case .targetCompile(let identifier): + serializer.serialize(1) + serializer.serialize(identifier) } serializer.endAggregate() } @@ -100,14 +100,14 @@ public final class SwiftDriverJobTaskAction: TaskAction, BuildValueValidatingTas try deserializer.beginAggregate(2) let code: Int = try deserializer.deserialize() switch code { - case 0: - guard deserializer.deserializeNil() else { throw DeserializerError.deserializationFailed("Unexpected associated value for SwiftDriverJobIdentifier.") } - self = .explicitDependency - case 1: - let string: String = try deserializer.deserialize() - self = .targetCompile(string) - default: - throw DeserializerError.incorrectType("Unexpected type code for SwiftDriverJobIdentifier: \(code)") + case 0: + guard deserializer.deserializeNil() else { throw DeserializerError.deserializationFailed("Unexpected associated value for SwiftDriverJobIdentifier.") } + self = .explicitDependency + case 1: + let string: String = try deserializer.deserialize() + self = .targetCompile(string) + default: + throw DeserializerError.incorrectType("Unexpected type code for SwiftDriverJobIdentifier: \(code)") } } } @@ -127,7 +127,7 @@ public final class SwiftDriverJobTaskAction: TaskAction, BuildValueValidatingTas super.init() } - public override func serialize(to serializer: T) where T : Serializer { + public override func serialize(to serializer: T) where T: Serializer { serializer.serializeAggregate(6) { serializer.serialize(driverJob) serializer.serialize(variant) @@ -178,11 +178,13 @@ public final class SwiftDriverJobTaskAction: TaskAction, BuildValueValidatingTas fatalError("Unexpected payload type: \(type(of: task.payload)).") } let taskID = state.jobTaskIDBase - if try Self.maybeRequestCachingKeyMaterialization(plannedJob: driverJob, - dynamicExecutionDelegate: dynamicExecutionDelegate, - casOptions: payload.casOptions, - compilerLocation: payload.compilerLocation, - taskID: taskID) { + if try Self.maybeRequestCachingKeyMaterialization( + plannedJob: driverJob, + dynamicExecutionDelegate: dynamicExecutionDelegate, + casOptions: payload.casOptions, + compilerLocation: payload.compilerLocation, + taskID: taskID + ) { state.openDependencies.insert(taskID) } state.cacheJobRequested = true @@ -200,20 +202,25 @@ public final 
class SwiftDriverJobTaskAction: TaskAction, BuildValueValidatingTas } } - internal func constructDriverJobTaskKey(variant: String?, - arch: String, - plannedJob: LibSwiftDriver.PlannedBuild.PlannedSwiftDriverJob, - identifier: String?, - compilerLocation: LibSwiftDriver.CompilerLocation, - casOptions: CASOptions?) -> DynamicTaskKey { + internal func constructDriverJobTaskKey( + variant: String?, + arch: String, + plannedJob: LibSwiftDriver.PlannedBuild.PlannedSwiftDriverJob, + identifier: String?, + compilerLocation: LibSwiftDriver.CompilerLocation, + casOptions: CASOptions? + ) -> DynamicTaskKey { let key: DynamicTaskKey if plannedJob.driverJob.categorizer.isExplicitDependencyBuild { - key = .swiftDriverExplicitDependencyJob(SwiftDriverExplicitDependencyJobTaskKey( - arch: arch, - driverJobKey: plannedJob.key, - driverJobSignature: plannedJob.signature, - compilerLocation: compilerLocation, - casOptions: casOptions)) + key = .swiftDriverExplicitDependencyJob( + SwiftDriverExplicitDependencyJobTaskKey( + arch: arch, + driverJobKey: plannedJob.key, + driverJobSignature: plannedJob.signature, + compilerLocation: compilerLocation, + casOptions: casOptions + ) + ) } else { guard let variant else { fatalError("Expected variant for non-explicit-module job: \(plannedJob.driverJob.descriptionForLifecycle)") @@ -221,15 +228,18 @@ public final class SwiftDriverJobTaskAction: TaskAction, BuildValueValidatingTas guard let jobID = identifier else { fatalError("Expected job identifier for target compile: \(plannedJob.driverJob.descriptionForLifecycle)") } - key = .swiftDriverJob(SwiftDriverJobTaskKey( - identifier: jobID, - variant: variant, - arch: arch, - driverJobKey: plannedJob.key, - driverJobSignature: plannedJob.signature, - isUsingWholeModuleOptimization: isUsingWholeModuleOptimization, - compilerLocation: compilerLocation, - casOptions: casOptions)) + key = .swiftDriverJob( + SwiftDriverJobTaskKey( + identifier: jobID, + variant: variant, + arch: arch, + driverJobKey: plannedJob.key, + driverJobSignature: plannedJob.signature, + isUsingWholeModuleOptimization: isUsingWholeModuleOptimization, + compilerLocation: compilerLocation, + casOptions: casOptions + ) + ) } return key } @@ -246,28 +256,30 @@ public final class SwiftDriverJobTaskAction: TaskAction, BuildValueValidatingTas let jobDependencies: [LibSwiftDriver.PlannedBuild.PlannedSwiftDriverJob] var jobID: String? 
= nil switch self.identifier { - case .targetCompile(let identifierStr): - let plannedBuild = try graph.queryPlannedBuild(for: identifierStr) - jobDependencies = plannedBuild.dependencies(for: driverJob) - jobID = identifierStr - case .explicitDependency: - guard let explicitBuildJob = graph.plannedExplicitDependencyBuildJob(for: self.driverJob.key) else { - state.executionError = "Could not query build containing explicit dependency build job: \(self.driverJob.driverJob.descriptionForLifecycle)" - return - } - jobDependencies = graph.explicitDependencies(for: explicitBuildJob) + case .targetCompile(let identifierStr): + let plannedBuild = try graph.queryPlannedBuild(for: identifierStr) + jobDependencies = plannedBuild.dependencies(for: driverJob) + jobID = identifierStr + case .explicitDependency: + guard let explicitBuildJob = graph.plannedExplicitDependencyBuildJob(for: self.driverJob.key) else { + state.executionError = "Could not query build containing explicit dependency build job: \(self.driverJob.driverJob.descriptionForLifecycle)" + return + } + jobDependencies = graph.explicitDependencies(for: explicitBuildJob) } let jobTaskIDBase = UInt((task.executionInputs ?? []).count) // For each depended-upon job, request a dynamic task. for (index, dependency) in jobDependencies.enumerated() { let isExplicitDependencyBuildJob = dependency.driverJob.categorizer.isExplicitDependencyBuild - let taskKey = constructDriverJobTaskKey(variant: variant, - arch: arch, - plannedJob: dependency, - identifier: jobID, - compilerLocation: payload.compilerLocation, - casOptions: payload.casOptions) + let taskKey = constructDriverJobTaskKey( + variant: variant, + arch: arch, + plannedJob: dependency, + identifier: jobID, + compilerLocation: payload.compilerLocation, + casOptions: payload.casOptions + ) let taskID = jobTaskIDBase + UInt(index) state.openDependencies.insert(taskID) dynamicExecutionDelegate.requestDynamicTask( @@ -304,11 +316,11 @@ public final class SwiftDriverJobTaskAction: TaskAction, BuildValueValidatingTas let graph = dynamicExecutionDelegate.operationContext.swiftModuleDependencyGraph let jobDependencies: [LibSwiftDriver.PlannedBuild.PlannedSwiftDriverJob] switch self.identifier { - case .targetCompile(let identifier): - let plannedBuild = try graph.queryPlannedBuild(for: identifier) - jobDependencies = plannedBuild.dependencies(for: driverJob) - case .explicitDependency: - jobDependencies = graph.explicitDependencies(for: driverJob) + case .targetCompile(let identifier): + let plannedBuild = try graph.queryPlannedBuild(for: identifier) + jobDependencies = plannedBuild.dependencies(for: driverJob) + case .explicitDependency: + jobDependencies = graph.explicitDependencies(for: driverJob) } let dependencyIdentifier = Int(dependencyID) @@ -346,15 +358,15 @@ public final class SwiftDriverJobTaskAction: TaskAction, BuildValueValidatingTas // Explicit dependency build jobs do not update the delegate's (driver's) // state (incl. incremental), so we do not require access to their planned build. 
switch self.identifier { - case .targetCompile(let identifier): - do { - let graph = dynamicExecutionDelegate.operationContext.swiftModuleDependencyGraph - plannedBuild = try graph.queryPlannedBuild(for: identifier) - } catch { - state.executionError = "Unable to get planned build for identifier \(identifier): \(error.localizedDescription)" - } - case .explicitDependency: - break + case .targetCompile(let identifier): + do { + let graph = dynamicExecutionDelegate.operationContext.swiftModuleDependencyGraph + plannedBuild = try graph.queryPlannedBuild(for: identifier) + } catch { + state.executionError = "Unable to get planned build for identifier \(identifier): \(error.localizedDescription)" + } + case .explicitDependency: + break } defer { @@ -396,9 +408,9 @@ public final class SwiftDriverJobTaskAction: TaskAction, BuildValueValidatingTas // FIXME: rdar://134664046 (Add an EnvironmentBlock type to represent environment variables) #if os(Windows) - if let value = environment.removeValue(forKey: "PATH") { - environment["Path"] = value - } + if let value = environment.removeValue(forKey: "PATH") { + environment["Path"] = value + } #endif } else { environment = task.environment.bindingsDictionary @@ -413,7 +425,7 @@ public final class SwiftDriverJobTaskAction: TaskAction, BuildValueValidatingTas let plannedBuild: LibSwiftDriver.PlannedBuild? let driverJob: LibSwiftDriver.PlannedBuild.PlannedSwiftDriverJob let arguments: [String] - let environment: [String : String] + let environment: [String: String] let outputDelegate: any TaskOutputDelegate private(set) var output: ByteString = "" @@ -430,7 +442,7 @@ public final class SwiftDriverJobTaskAction: TaskAction, BuildValueValidatingTas return _commandResult } - init(plannedBuild: LibSwiftDriver.PlannedBuild?, driverJob: LibSwiftDriver.PlannedBuild.PlannedSwiftDriverJob, arguments: [String], environment: [String : String], outputDelegate: any TaskOutputDelegate) { + init(plannedBuild: LibSwiftDriver.PlannedBuild?, driverJob: LibSwiftDriver.PlannedBuild.PlannedSwiftDriverJob, arguments: [String], environment: [String: String], outputDelegate: any TaskOutputDelegate) { self.plannedBuild = plannedBuild self.driverJob = driverJob self.arguments = arguments @@ -520,14 +532,17 @@ public final class SwiftDriverJobTaskAction: TaskAction, BuildValueValidatingTas } if let db = cas, - let casOpts = payload.casOptions, - try await Self.replayCachedCommand(cas: db, - plannedJob: driverJob, - commandLine: options.commandLine, - dynamicExecutionDelegate: dynamicExecutionDelegate, - outputDelegate: outputDelegate, - enableDiagnosticRemarks: casOpts.enableDiagnosticRemarks) { - return .succeeded + let casOpts = payload.casOptions, + try await Self.replayCachedCommand( + cas: db, + plannedJob: driverJob, + commandLine: options.commandLine, + dynamicExecutionDelegate: dynamicExecutionDelegate, + outputDelegate: outputDelegate, + enableDiagnosticRemarks: casOpts.enableDiagnosticRemarks + ) + { + return .succeeded } try await spawn(commandLine: options.commandLine, environment: environment, workingDirectory: task.workingDirectory, dynamicExecutionDelegate: dynamicExecutionDelegate, clientDelegate: clientDelegate, processDelegate: delegate) @@ -554,12 +569,14 @@ public final class SwiftDriverJobTaskAction: TaskAction, BuildValueValidatingTas // If has remote cache, start uploading task. 
if let db = cas, let casOpts = payload.casOptions, casOpts.hasRemoteCache, delegate.commandResult == .succeeded { // upload only if succeed - try Self.upload(cas: db, - plannedJob: driverJob, - dynamicExecutionDelegate: dynamicExecutionDelegate, - outputDelegate: outputDelegate, - enableDiagnosticRemarks: casOpts.enableDiagnosticRemarks, - enableStrictCASErrors: casOpts.enableStrictCASErrors) + try Self.upload( + cas: db, + plannedJob: driverJob, + dynamicExecutionDelegate: dynamicExecutionDelegate, + outputDelegate: outputDelegate, + enableDiagnosticRemarks: casOpts.enableDiagnosticRemarks, + enableStrictCASErrors: casOpts.enableStrictCASErrors + ) } if delegate.commandResult == .failed && !executionDelegate.userPreferences.enableDebugActivityLogs && !executionDelegate.emitFrontendCommandLines { @@ -585,8 +602,9 @@ public final class SwiftDriverJobTaskAction: TaskAction, BuildValueValidatingTas taskID: UInt ) throws -> Bool { guard let casOptions, - casOptions.enableIntegratedCacheQueries, - casOptions.hasRemoteCache else { + casOptions.enableIntegratedCacheQueries, + casOptions.hasRemoteCache + else { return false } let cacheQueryKey = SwiftCachingKeyQueryTaskKey(casOptions: casOptions, cacheKeys: plannedJob.driverJob.cacheKeys, compilerLocation: compilerLocation) @@ -600,19 +618,21 @@ public final class SwiftDriverJobTaskAction: TaskAction, BuildValueValidatingTas forTarget: nil, priority: .network, showEnvironment: false, - reason: .wasCompilationCachingQuery) + reason: .wasCompilationCachingQuery + ) return true } /// Attempts to replay a previously cached compilation, using data from the local CAS. /// /// - Returns: `true` if the the cached compilation outputs were found and replayed, `false` otherwise. - static func replayCachedCommand(cas: SwiftCASDatabases, - plannedJob: LibSwiftDriver.PlannedBuild.PlannedSwiftDriverJob, - commandLine: [String], - dynamicExecutionDelegate: any DynamicTaskExecutionDelegate, - outputDelegate: any TaskOutputDelegate, - enableDiagnosticRemarks: Bool + static func replayCachedCommand( + cas: SwiftCASDatabases, + plannedJob: LibSwiftDriver.PlannedBuild.PlannedSwiftDriverJob, + commandLine: [String], + dynamicExecutionDelegate: any DynamicTaskExecutionDelegate, + outputDelegate: any TaskOutputDelegate, + enableDiagnosticRemarks: Bool ) async throws -> Bool { let cacheKeys = plannedJob.driverJob.cacheKeys guard !cacheKeys.isEmpty else { return false } @@ -648,7 +668,7 @@ public final class SwiftDriverJobTaskAction: TaskAction, BuildValueValidatingTas } // Replay after all checks are done. 
- let instance = try cas.createReplayInstance(cmd: Array(commandLine.dropFirst(1))) // drop executable name + let instance = try cas.createReplayInstance(cmd: Array(commandLine.dropFirst(1))) // drop executable name let replayResults: [Result] = await comps.concurrentMap(maximumParallelism: 10) { comp in do { return .success(try cas.replayCompilation(instance: instance, compilation: comp)) @@ -681,12 +701,13 @@ public final class SwiftDriverJobTaskAction: TaskAction, BuildValueValidatingTas return result } - static func upload(cas: SwiftCASDatabases, - plannedJob: LibSwiftDriver.PlannedBuild.PlannedSwiftDriverJob, - dynamicExecutionDelegate: any DynamicTaskExecutionDelegate, - outputDelegate: any TaskOutputDelegate, - enableDiagnosticRemarks: Bool, - enableStrictCASErrors: Bool + static func upload( + cas: SwiftCASDatabases, + plannedJob: LibSwiftDriver.PlannedBuild.PlannedSwiftDriverJob, + dynamicExecutionDelegate: any DynamicTaskExecutionDelegate, + outputDelegate: any TaskOutputDelegate, + enableDiagnosticRemarks: Bool, + enableStrictCASErrors: Bool ) throws { let cacheKeys = plannedJob.driverJob.cacheKeys guard !cacheKeys.isEmpty else { return } @@ -705,7 +726,8 @@ public final class SwiftDriverJobTaskAction: TaskAction, BuildValueValidatingTas cacheKey: cacheKey, enableDiagnosticRemarks: enableDiagnosticRemarks, enableStrictCASErrors: enableStrictCASErrors, - activityReporter: dynamicExecutionDelegate) + activityReporter: dynamicExecutionDelegate + ) } } } diff --git a/Sources/SWBTaskExecution/TaskActions/SwiftDriverTaskAction.swift b/Sources/SWBTaskExecution/TaskActions/SwiftDriverTaskAction.swift index a8b21ba1..7262eae0 100644 --- a/Sources/SWBTaskExecution/TaskActions/SwiftDriverTaskAction.swift +++ b/Sources/SWBTaskExecution/TaskActions/SwiftDriverTaskAction.swift @@ -61,17 +61,19 @@ final public class SwiftDriverTaskAction: TaskAction, BuildValueValidatingTaskAc } let commandLine = task.commandLineAsStrings.split(separator: "--", maxSplits: 1, omittingEmptySubsequences: false)[1] - let success = dependencyGraph.planBuild(key: driverPayload.uniqueID, - outputDelegate: outputDelegate, - compilerLocation: driverPayload.compilerLocation, - target: target, - args: Array(commandLine), - workingDirectory: task.workingDirectory, - tempDirPath: driverPayload.tempDirPath, - explicitModulesTempDirPath: driverPayload.explicitModulesTempDirPath, - environment: environment, - eagerCompilationEnabled: driverPayload.eagerCompilationEnabled, - casOptions: driverPayload.casOptions) + let success = dependencyGraph.planBuild( + key: driverPayload.uniqueID, + outputDelegate: outputDelegate, + compilerLocation: driverPayload.compilerLocation, + target: target, + args: Array(commandLine), + workingDirectory: task.workingDirectory, + tempDirPath: driverPayload.tempDirPath, + explicitModulesTempDirPath: driverPayload.explicitModulesTempDirPath, + environment: environment, + eagerCompilationEnabled: driverPayload.eagerCompilationEnabled, + casOptions: driverPayload.casOptions + ) guard success else { return .failed } } @@ -97,7 +99,7 @@ final public class SwiftDriverTaskAction: TaskAction, BuildValueValidatingTaskAc } if driverPayload.explicitModulesEnabled, - let dependencyValidationPayload = payload.dependencyValidationPayload + let dependencyValidationPayload = payload.dependencyValidationPayload { let payload: DependencyValidationInfo.Payload if let imports = try await dependencyGraph.mainModuleImportModuleDependencies(for: driverPayload.uniqueID) { @@ -115,8 +117,7 @@ final public class 
SwiftDriverTaskAction: TaskAction, BuildValueValidatingTaskAc JSONEncoder(outputFormatting: .sortedKeys).encode(validationInfo) ) ) - } - else { + } else { outputDelegate.incrementTaskCounter(.moduleDependenciesNotValidatedTasks) } diff --git a/Sources/SWBTaskExecution/TaskActions/SwiftHeaderToolTaskAction.swift b/Sources/SWBTaskExecution/TaskActions/SwiftHeaderToolTaskAction.swift index daaaa3bc..0c0c215c 100644 --- a/Sources/SWBTaskExecution/TaskActions/SwiftHeaderToolTaskAction.swift +++ b/Sources/SWBTaskExecution/TaskActions/SwiftHeaderToolTaskAction.swift @@ -57,7 +57,7 @@ public final class SwiftHeaderToolTaskAction: TaskAction { return args } - func setSingleOccurrence(_ result: inout T?, _ getValue : @autoclosure () throws -> T) throws -> T { + func setSingleOccurrence(_ result: inout T?, _ getValue: @autoclosure () throws -> T) throws -> T { guard result == nil else { throw StubError.error("Failed to parse arguments: expected a single \(arg) argument") } let newResult = try getValue() result = newResult @@ -135,7 +135,7 @@ public final class SwiftHeaderToolTaskAction: TaskAction { ("armv7", "__ARM_ARCH_7A__", nil), ("x86_64h", "__x86_64h__", "x86_64"), ("x86_64", "__x86_64__", nil), - ("i386", "__i386__", nil) + ("i386", "__i386__", nil), ] let unknownArchs = Set(options.inputs.keys).subtracting(knownArchs.map { $0.0 }) diff --git a/Sources/SWBTaskExecution/TaskActions/TaskAction.swift b/Sources/SWBTaskExecution/TaskActions/TaskAction.swift index 6b7fd761..272a95d0 100644 --- a/Sources/SWBTaskExecution/TaskActions/TaskAction.swift +++ b/Sources/SWBTaskExecution/TaskActions/TaskAction.swift @@ -34,8 +34,7 @@ extension BuildValueValidatingTaskAction { /// A task action encapsulates concrete work to be done for a task during a build. /// /// Task actions are primarily used to capture state and execution logic for in-process tasks. -open class TaskAction: PlannedTaskAction, PolymorphicSerializable -{ +open class TaskAction: PlannedTaskAction, PolymorphicSerializable { /// A unique identifier for the tool, used for binding in llbuild. open class var toolIdentifier: String { fatalError("This method is a subclass responsibility") @@ -57,16 +56,14 @@ open class TaskAction: PlannedTaskAction, PolymorphicSerializable return md5.signature } - public init() - { + public init() { self.serializedRepresentationSignature = computeInitialSignature() } /// Get a signature used to identify the internal state of the command. /// /// This is checked to determine if the command needs to rebuild versus the last time it was run. - open func getSignature(_ task: any ExecutableTask, executionDelegate: any TaskExecutionDelegate) -> ByteString - { + open func getSignature(_ task: any ExecutableTask, executionDelegate: any TaskExecutionDelegate) -> ByteString { let md5 = InsecureHashContext() md5.add(bytes: serializedRepresentationSignature!) let commandLine = task.type.commandLineForSignature(for: task) ?? task.commandLine.map { $0.asByteString } @@ -104,8 +101,7 @@ open class TaskAction: PlannedTaskAction, PolymorphicSerializable /// - parameter taskDelegate: The delegate for the tool to perform commonly-used operations. /// - parameter outputDelegate: The delegate for the tool to emit output during its execution. /// - returns: A command result to indicate if the task failed, succeeded, got cancelled or skipped its work. 
- open func performTaskAction(_ task: any ExecutableTask, dynamicExecutionDelegate: any DynamicTaskExecutionDelegate, executionDelegate: any TaskExecutionDelegate, clientDelegate: any TaskExecutionClientDelegate, outputDelegate: any TaskOutputDelegate) async -> CommandResult - { + open func performTaskAction(_ task: any ExecutableTask, dynamicExecutionDelegate: any DynamicTaskExecutionDelegate, executionDelegate: any TaskExecutionDelegate, clientDelegate: any TaskExecutionClientDelegate, outputDelegate: any TaskOutputDelegate) async -> CommandResult { fatalError("This method is a subclass responsibility") } @@ -117,17 +113,14 @@ open class TaskAction: PlannedTaskAction, PolymorphicSerializable // Serialization - - open func serialize(to serializer: T) - { + open func serialize(to serializer: T) { // TaskAction has no content itself to serialize, but it serializes an aggregate count of 0 so that child classes which also have no content don't have to do anything. serializer.serializeAggregate(1) { serializer.serialize(self.serializedRepresentationSignature) } } - public required init(from deserializer: any Deserializer) throws - { + public required init(from deserializer: any Deserializer) throws { try deserializer.beginAggregate(1) self.serializedRepresentationSignature = try deserializer.deserialize() } @@ -241,19 +234,15 @@ public protocol DynamicTaskExecutionDelegate: ActivityReporter { } /// Class for collecting and caching messages to emit for a task action. -class TaskActionMessageCollection -{ +class TaskActionMessageCollection { var messages = [TaskActionMessage]() - func addMessage(_ message: TaskActionMessage) - { + func addMessage(_ message: TaskActionMessage) { messages.append(message) } - func emitMessages(_ outputDelegate: any TaskOutputDelegate) - { - for message in messages - { + func emitMessages(_ outputDelegate: any TaskOutputDelegate) { + for message in messages { switch message { case .error(let value): @@ -270,8 +259,7 @@ class TaskActionMessageCollection } /// Enum describing the kinds of messages that an `TaskActionMessageCollection` can emit. -enum TaskActionMessage -{ +enum TaskActionMessage { case error(String) case warning(String) case note(String) diff --git a/Sources/SWBTaskExecution/TaskActions/ValidateDependenciesTaskAction.swift b/Sources/SWBTaskExecution/TaskActions/ValidateDependenciesTaskAction.swift index 914904b5..c205b882 100644 --- a/Sources/SWBTaskExecution/TaskActions/ValidateDependenciesTaskAction.swift +++ b/Sources/SWBTaskExecution/TaskActions/ValidateDependenciesTaskAction.swift @@ -173,7 +173,8 @@ public final class ValidateDependenciesTaskAction: TaskAction { outputPaths: [], dependencies: dependencies ) - ], errors: [] + ], + errors: [] ) let outputData = try JSONEncoder(outputFormatting: [.prettyPrinted, .sortedKeys, .withoutEscapingSlashes]).encode(dependencyInfo) diff --git a/Sources/SWBTaskExecution/TaskActions/ValidateProductTaskAction.swift b/Sources/SWBTaskExecution/TaskActions/ValidateProductTaskAction.swift index 398f01ee..f1be7302 100644 --- a/Sources/SWBTaskExecution/TaskActions/ValidateProductTaskAction.swift +++ b/Sources/SWBTaskExecution/TaskActions/ValidateProductTaskAction.swift @@ -65,52 +65,51 @@ public final class ValidateProductTaskAction: TaskAction { let generator = commandLine.makeIterator() // Skip the executable. let programName = generator.next() ?? 
"<>" - argumentParsing: - while let arg = generator.next() { - switch arg { -// Presently these options are disabled; they are only used for "offline store validation", which was disabled in the original XCWorkQueueCommandBuiltinInvocation_validationUtility and is not yet implemented here. -// When & if they are reenabled, they should be documented in emitUsage(). -/* - case "-verbose": - verbose = true - - case "-warnings": - storeIssueTreatment = .treatAsWarnings - - case "-errors": - storeIssueTreatment = .treatAsErrors -*/ - case "-validate-for-store": - validateForStore = true - - // The default is yes, so we only need an opt-out for now. This also prevents us from having to pass in `-validate-extension` everywhere, even though that is the default behavior. - case "-no-validate-extension": - validateExtension = false - - case "-no-validate-embedded-frameworks": - validateEmbeddedFrameworks = false - - case "-shallow-bundle": - isShallowBundle = true - - case "-infoplist-subpath": - guard let path = generator.next() else { - error("missing argument for option: \(arg)") - continue - } - infoplistSubpath = path - - case _ where arg.hasPrefix("-"): - error("unrecognized option: \(arg)") - - case _ where applicationPath == nil: - // Any other option is considered to be the application path. - applicationPath = Path(arg) - - default: - // But we can only have one application path. - error("multiple application paths specified") + argumentParsing: while let arg = generator.next() { + switch arg { + // Presently these options are disabled; they are only used for "offline store validation", which was disabled in the original XCWorkQueueCommandBuiltinInvocation_validationUtility and is not yet implemented here. + // When & if they are reenabled, they should be documented in emitUsage(). + + // case "-verbose": + // verbose = true + + // case "-warnings": + // storeIssueTreatment = .treatAsWarnings + + // case "-errors": + // storeIssueTreatment = .treatAsErrors + + case "-validate-for-store": + validateForStore = true + + // The default is yes, so we only need an opt-out for now. This also prevents us from having to pass in `-validate-extension` everywhere, even though that is the default behavior. + case "-no-validate-extension": + validateExtension = false + + case "-no-validate-embedded-frameworks": + validateEmbeddedFrameworks = false + + case "-shallow-bundle": + isShallowBundle = true + + case "-infoplist-subpath": + guard let path = generator.next() else { + error("missing argument for option: \(arg)") + continue } + infoplistSubpath = path + + case _ where arg.hasPrefix("-"): + error("unrecognized option: \(arg)") + + case _ where applicationPath == nil: + // Any other option is considered to be the application path. + applicationPath = Path(arg) + + default: + // But we can only have one application path. + error("multiple application paths specified") + } } // Diagnose missing inputs. 
@@ -126,8 +125,7 @@ public final class ValidateProductTaskAction: TaskAction { let ext = applicationPath.fileExtension.lowercased() if ext == "ipa" { validatingArchive = true - } - else if ext != "app" { + } else if ext != "app" { if validateExtension { error("unknown application extension '.\(ext): expected '.app' or '.ipa'") } @@ -175,8 +173,7 @@ public final class ValidateProductTaskAction: TaskAction { let infoPlistItem: PropertyListItem do { infoPlistItem = try PropertyList.fromPath(applicationPath.join(options.infoplistSubpath), fs: executionDelegate.fs) - } - catch { + } catch { outputDelegate.emitError("Failed to read Info.plist of app \(applicationPath.str): \(error.localizedDescription)") return .failed } @@ -245,8 +242,7 @@ public final class ValidateProductTaskAction: TaskAction { let infoPlistItem: PropertyListItem do { infoPlistItem = try PropertyList.fromPath(infoPlistPath, fs: fs) - } - catch let error { + } catch let error { outputDelegate.emitError("Failed to read Info.plist of framework \(frameworkPath.str): \(error)") hasErrors = true continue diff --git a/Sources/SWBTaskExecution/TaskStore.swift b/Sources/SWBTaskExecution/TaskStore.swift index dc9ff3a2..65d54e2c 100644 --- a/Sources/SWBTaskExecution/TaskStore.swift +++ b/Sources/SWBTaskExecution/TaskStore.swift @@ -50,7 +50,7 @@ package final class TaskStore { @available(*, unavailable) extension TaskStore: Sendable {} package final class FrozenTaskStore: Sendable { - fileprivate init(tasks: [TaskIdentifier : Task], stringArena: FrozenStringArena, byteStringArena: FrozenByteStringArena) { + fileprivate init(tasks: [TaskIdentifier: Task], stringArena: FrozenStringArena, byteStringArena: FrozenByteStringArena) { self.tasks = tasks self.stringArena = stringArena self.byteStringArena = byteStringArena @@ -91,7 +91,7 @@ package final class FrozenTaskStore: Sendable { } extension FrozenTaskStore: Serializable { - package func serialize(to serializer: T) where T : Serializer { + package func serialize(to serializer: T) where T: Serializer { serializer.serialize(Array(tasks.values)) } diff --git a/Sources/SWBTestSupport/AssertMatch.swift b/Sources/SWBTestSupport/AssertMatch.swift index f7118634..8467c3d9 100644 --- a/Sources/SWBTestSupport/AssertMatch.swift +++ b/Sources/SWBTestSupport/AssertMatch.swift @@ -119,9 +119,9 @@ extension StringPattern: ExpressibleByStringInterpolation { } } -package func ~=(pattern: StringPattern, value: String) -> Bool { +package func ~= (pattern: StringPattern, value: String) -> Bool { switch pattern { - // These cases never matches individual items, they are just used for matching string lists. + // These cases never matches individual items, they are just used for matching string lists. case .start, .end, .anySequence: return false @@ -155,7 +155,7 @@ package func ~=(pattern: StringPattern, value: String) -> Bool { } } -package func ~=(patterns: [StringPattern], input: [String]) -> Bool { +package func ~= (patterns: [StringPattern], input: [String]) -> Bool { let startIndex = input.startIndex let endIndex = input.endIndex @@ -224,11 +224,17 @@ package func XCTAssertMatch(_ value: @autoclosure @escaping () -> String?, _ pat XCTAssertMatchImpl(pattern ~= value, { value }, pattern, message, sourceLocation: sourceLocation) } package func XCTAssertNoMatch(_ value: @autoclosure @escaping () -> String?, _ pattern: StringPattern, _ message: String? 
= nil, sourceLocation: SourceLocation = #_sourceLocation) { - XCTAssertMatchImpl({ - // `nil` always matches, so in this case we return true to ensure the underlying XCTAssert succeeds - guard let value = value() else { return true } - return !(pattern ~= value) - }(), value, pattern, message, sourceLocation: sourceLocation) + XCTAssertMatchImpl( + { + // `nil` always matches, so in this case we return true to ensure the underlying XCTAssert succeeds + guard let value = value() else { return true } + return !(pattern ~= value) + }(), + value, + pattern, + message, + sourceLocation: sourceLocation + ) } package func XCTAssertMatch(_ value: @autoclosure @escaping () -> [String], _ pattern: [StringPattern], _ message: String? = nil, sourceLocation: SourceLocation = #_sourceLocation) { diff --git a/Sources/SWBTestSupport/BuildDescriptionBasedTests.swift b/Sources/SWBTestSupport/BuildDescriptionBasedTests.swift index f4544400..8cb60b40 100644 --- a/Sources/SWBTestSupport/BuildDescriptionBasedTests.swift +++ b/Sources/SWBTestSupport/BuildDescriptionBasedTests.swift @@ -34,7 +34,7 @@ extension CoreBasedTests { package func buildGraph(for workspaceContext: WorkspaceContext, buildRequestContext: BuildRequestContext, configuration: String = "Debug", activeRunDestination: RunDestinationInfo?, overrides: [String: String] = [:], useImplicitDependencies: Bool = false, dependencyScope: DependencyScope = .workspace, fs: any FSProxy = PseudoFS(), includingTargets predicate: (Target) -> Bool) async -> (TargetBuildGraph, BuildRequest) { // Create a fake build request to build all targets. let parameters = BuildParameters(configuration: configuration, activeRunDestination: activeRunDestination, overrides: overrides) - let buildTargets = workspaceContext.workspace.projects.flatMap{ project in + let buildTargets = workspaceContext.workspace.projects.flatMap { project in project.targets.compactMap { return predicate($0) ? BuildRequest.BuildTargetInfo(parameters: parameters, target: $0) : nil } @@ -52,7 +52,7 @@ extension CoreBasedTests { workspaceContext.updateUserPreferences(.defaultForTesting) // Configure fake user and system info. 
- workspaceContext.updateUserInfo(UserInfo(user: "exampleUser", group: "exampleGroup", uid: 1234, gid:12345, home: Path.root.join("Users/exampleUser"), environment: [:])) + workspaceContext.updateUserInfo(UserInfo(user: "exampleUser", group: "exampleGroup", uid: 1234, gid: 12345, home: Path.root.join("Users/exampleUser"), environment: [:])) workspaceContext.updateSystemInfo(SystemInfo(operatingSystemVersion: Version(99, 98, 97), productBuildVersion: "99A98", nativeArchitecture: "x86_64")) let buildRequestContext = BuildRequestContext(workspaceContext: workspaceContext) diff --git a/Sources/SWBTestSupport/BuildOperationTester.swift b/Sources/SWBTestSupport/BuildOperationTester.swift index 735bdf07..7ac6444e 100644 --- a/Sources/SWBTestSupport/BuildOperationTester.swift +++ b/Sources/SWBTestSupport/BuildOperationTester.swift @@ -59,7 +59,8 @@ extension BuildRequest { buildCommand: buildCommand, schemeCommand: schemeCommand, containerPath: containerPath, - jsonRepresentation: jsonRepresentation) + jsonRepresentation: jsonRepresentation + ) } } @@ -708,20 +709,15 @@ package final class BuildOperationTester { for line in message.split(separator: "\n") { if isInRawTrace { rawTrace.append(String(line)) - } - else if line.hasPrefix("Cycle in dependencies between targets") { + } else if line.hasPrefix("Cycle in dependencies between targets") { header = String(line) - } - else if line.hasPrefix("Cycle path") { + } else if line.hasPrefix("Cycle path") { path = String(line) - } - else if line == "Target build order preserved because “Build Order” is set to “Manual Order” in the scheme settings" { + } else if line == "Target build order preserved because “Build Order” is set to “Manual Order” in the scheme settings" { usingManualOrder = true - } - else if line.count > 2, line.hasPrefix("→ ") || line.hasPrefix("○ ") { + } else if line.count > 2, line.hasPrefix("→ ") || line.hasPrefix("○ ") { lines.append(String(line)) - } - else if line.hasPrefix("Raw dependency cycle trace") { + } else if line.hasPrefix("Raw dependency cycle trace") { isInRawTrace = true } } @@ -814,22 +810,26 @@ package final class BuildOperationTester { /// Check the output of a given task. package func checkTaskOutput(_ task: Task, sourceLocation: SourceLocation = #_sourceLocation, body: (ByteString) throws -> Void) rethrows { - try body(events.compactMap{ (event: BuildOperationTester.BuildEvent) -> ByteString? in - if case .taskHadEvent(task, event: .hadOutput(let output)) = event { - return output - } - return nil - }.reduce(.init(), +)) + try body( + events.compactMap { (event: BuildOperationTester.BuildEvent) -> ByteString? in + if case .taskHadEvent(task, event: .hadOutput(let output)) = event { + return output + } + return nil + }.reduce(.init(), +) + ) } /// Check the output of a given task. @_disfavoredOverload package func checkTaskOutput(_ task: Task, sourceLocation: SourceLocation = #_sourceLocation, body: (ByteString) async throws -> Void) async rethrows { - try await body(events.compactMap{ (event: BuildOperationTester.BuildEvent) -> ByteString? in - if case .taskHadEvent(task, event: .hadOutput(let output)) = event { - return output - } - return nil - }.reduce(.init(), +)) + try await body( + events.compactMap { (event: BuildOperationTester.BuildEvent) -> ByteString? 
in + if case .taskHadEvent(task, event: .hadOutput(let output)) = event { + return output + } + return nil + }.reduce(.init(), +) + ) } package func checkTaskResult(_ task: Task, expected result: TaskResult, sourceLocation: SourceLocation = #_sourceLocation) { @@ -943,10 +943,12 @@ package final class BuildOperationTester { } let matchings = matchingTasksWithIdentifier(conditions) - guard try matchings.first(where: { identifier, matchingTask in - let edge = try dependencyEdge(from: task, to: matchingTask, using: taskDependencies, resolveDynamicTaskRequests: resolveDynamicTaskRequests) - return edge != .none - }) != nil else { + guard + try matchings.first(where: { identifier, matchingTask in + let edge = try dependencyEdge(from: task, to: matchingTask, using: taskDependencies, resolveDynamicTaskRequests: resolveDynamicTaskRequests) + return edge != .none + }) != nil + else { Issue.record("Unable to find a dependency edge from \(task) to any of matching tasks \(matchings.map(\.task)).", sourceLocation: sourceLocation) return } @@ -1179,7 +1181,8 @@ package final class BuildOperationTester { uid: 1234, gid: 12345, home: Path("/Users/exampleUser"), - environment: ["PATH": defaultPathEntries.joined(separator: String(Path.pathEnvironmentSeparator))].addingContents(of: ProcessInfo.processInfo.cleanEnvironment.filter(keys: ["__XCODE_BUILT_PRODUCTS_DIR_PATHS", "XCODE_DEVELOPER_DIR_PATH", "DYLD_FRAMEWORK_PATH", "DYLD_LIBRARY_PATH", "TEMP", "VCToolsInstallDir"]))) + environment: ["PATH": defaultPathEntries.joined(separator: String(Path.pathEnvironmentSeparator))].addingContents(of: ProcessInfo.processInfo.cleanEnvironment.filter(keys: ["__XCODE_BUILT_PRODUCTS_DIR_PATHS", "XCODE_DEVELOPER_DIR_PATH", "DYLD_FRAMEWORK_PATH", "DYLD_LIBRARY_PATH", "TEMP", "VCToolsInstallDir"])) + ) } } @@ -1414,12 +1417,12 @@ package final class BuildOperationTester { } /// Construct the tasks for the given build parameters, and test the result. - @discardableResult package func checkBuild(_ name: String? = nil, parameters: BuildParameters? = nil, runDestination: SWBProtocol.RunDestinationInfo?, buildRequest inputBuildRequest: BuildRequest? = nil, buildCommand: BuildCommand? = nil, schemeCommand: SchemeCommand? = .launch, persistent: Bool = false, serial: Bool = false, buildOutputMap: [String:String]? = nil, signableTargets: Set = [], signableTargetInputs: [String: ProvisioningTaskInputs] = [:], clientDelegate: (any ClientDelegate)? = nil, sourceLocation: SourceLocation = #_sourceLocation, body: (BuildResults) async throws -> T) async throws -> T { + @discardableResult package func checkBuild(_ name: String? = nil, parameters: BuildParameters? = nil, runDestination: SWBProtocol.RunDestinationInfo?, buildRequest inputBuildRequest: BuildRequest? = nil, buildCommand: BuildCommand? = nil, schemeCommand: SchemeCommand? = .launch, persistent: Bool = false, serial: Bool = false, buildOutputMap: [String: String]? = nil, signableTargets: Set = [], signableTargetInputs: [String: ProvisioningTaskInputs] = [:], clientDelegate: (any ClientDelegate)? 
= nil, sourceLocation: SourceLocation = #_sourceLocation, body: (BuildResults) async throws -> T) async throws -> T { try await checkBuild(name, parameters: parameters, runDestination: runDestination, buildRequest: inputBuildRequest, buildCommand: buildCommand, schemeCommand: schemeCommand, persistent: persistent, serial: serial, buildOutputMap: buildOutputMap, signableTargets: signableTargets, signableTargetInputs: signableTargetInputs, clientDelegate: clientDelegate, sourceLocation: sourceLocation, body: body, performBuild: { try await $0.buildWithTimeout() }) } /// Construct the tasks for the given build parameters, and test the result. - @discardableResult package func checkBuild(_ name: String? = nil, parameters: BuildParameters? = nil, runDestination: RunDestinationInfo?, buildRequest inputBuildRequest: BuildRequest? = nil, operationBuildRequest: BuildRequest? = nil, buildCommand: BuildCommand? = nil, schemeCommand: SchemeCommand? = .launch, persistent: Bool = false, serial: Bool = false, buildOutputMap: [String:String]? = nil, signableTargets: Set = [], signableTargetInputs: [String: ProvisioningTaskInputs] = [:], clientDelegate: (any ClientDelegate)? = nil, sourceLocation: SourceLocation = #_sourceLocation, body: (BuildResults) async throws -> T, performBuild: @escaping (any BuildSystemOperation) async throws -> Void) async throws -> T { + @discardableResult package func checkBuild(_ name: String? = nil, parameters: BuildParameters? = nil, runDestination: RunDestinationInfo?, buildRequest inputBuildRequest: BuildRequest? = nil, operationBuildRequest: BuildRequest? = nil, buildCommand: BuildCommand? = nil, schemeCommand: SchemeCommand? = .launch, persistent: Bool = false, serial: Bool = false, buildOutputMap: [String: String]? = nil, signableTargets: Set = [], signableTargetInputs: [String: ProvisioningTaskInputs] = [:], clientDelegate: (any ClientDelegate)? = nil, sourceLocation: SourceLocation = #_sourceLocation, body: (BuildResults) async throws -> T, performBuild: @escaping (any BuildSystemOperation) async throws -> Void) async throws -> T { try await checkBuildDescription(parameters, runDestination: runDestination, buildRequest: inputBuildRequest, buildCommand: buildCommand, schemeCommand: schemeCommand, persistent: persistent, serial: serial, signableTargets: signableTargets, signableTargetInputs: signableTargetInputs, clientDelegate: clientDelegate) { results throws in // Check that there are no duplicate task identifiers - it is a fatal error if there are, unless `continueBuildingAfterErrors` is set. var tasksByTaskIdentifier: [TaskIdentifier: Task] = [:] @@ -1476,18 +1479,18 @@ package final class BuildOperationTester { // Check the results. let results = try BuildResults(core: core, workspace: workspace, buildDescriptionResults: results, tasksByTaskIdentifier: delegate.tasksByTaskIdentifier.merging(delegate.dynamicTasksByTaskIdentifier, uniquingKeysWith: { a, b in a }), fs: fs, events: events, dynamicTaskDependencies: dynamicDependencies, buildDatabasePath: persistent ? results.buildDescription.buildDatabasePath : nil) - /*@MainActor func addAttachments() { - // TODO: This `runActivity` call should be wider in scope, but this would significantly complicate the code flow due to threading requirements without having async/await. - XCTContext.runActivity(named: "Execute Build Operation" + (name.map({ " \"\($0)\"" }) ?? 
"")) { activity in - // TODO: Longer term, we should find a way to share code with CoreQualificationTester, which has a number of APIs for emitting build operation debug info. - activity.attach(name: "Build Transcript", string: results.buildTranscript) - if localFS.exists(results.buildDescription.packagePath) { - activity.attach(name: "Build Description", from: results.buildDescription.packagePath) - } - } - } + // @MainActor func addAttachments() { + // // TODO: This `runActivity` call should be wider in scope, but this would significantly complicate the code flow due to threading requirements without having async/await. + // XCTContext.runActivity(named: "Execute Build Operation" + (name.map({ " \"\($0)\"" }) ?? "")) { activity in + // // TODO: Longer term, we should find a way to share code with CoreQualificationTester, which has a number of APIs for emitting build operation debug info. + // activity.attach(name: "Build Transcript", string: results.buildTranscript) + // if localFS.exists(results.buildDescription.packagePath) { + // activity.attach(name: "Build Description", from: results.buildDescription.packagePath) + // } + // } + // } - await addAttachments()*/ + // await addAttachments() defer { let validationResults = results.validate(sourceLocation: sourceLocation) @@ -1671,13 +1674,13 @@ package final class BuildOperationTester { } @available(*, unavailable) -extension BuildOperationTester: Sendable { } +extension BuildOperationTester: Sendable {} @available(*, unavailable) -extension BuildOperationTester.BuildResults: Sendable { } +extension BuildOperationTester.BuildResults: Sendable {} @available(*, unavailable) -extension BuildOperationTester.BuildDescriptionResults: Sendable { } +extension BuildOperationTester.BuildDescriptionResults: Sendable {} package final class MockTestClientDelegate: ClientDelegate, Sendable { package init() {} @@ -1696,7 +1699,7 @@ private extension Task { } private final class BuildOperationTesterDelegate: BuildOperationDelegate { - var aggregatedCounters: [BuildOperationMetrics.Counter : Int] = [:] + var aggregatedCounters: [BuildOperationMetrics.Counter: Int] = [:] var aggregatedTaskCounters: [String: [BuildOperationMetrics.TaskCounter: Int]] = [:] typealias DiagnosticKind = BuildOperationTester.DiagnosticKind @@ -1760,15 +1763,20 @@ private final class BuildOperationTesterDelegate: BuildOperationDelegate { let diagnosticContext: DiagnosticContextData = .init(target: nil) func diagnosticsEngine(for target: ConfiguredTarget?) 
-> DiagnosticProducingDelegateProtocolPrivate { - .init(_diagnosticsEngines.withLock({ diagnosticsEngines in - diagnosticsEngines.getOrInsert(target, { - let engine = DiagnosticsEngine() - engine.addHandler { [weak self] diag in - self?.log(target, diag) - } - return engine + .init( + _diagnosticsEngines.withLock({ diagnosticsEngines in + diagnosticsEngines.getOrInsert( + target, + { + let engine = DiagnosticsEngine() + engine.addHandler { [weak self] diag in + self?.log(target, diag) + } + return engine + } + ) }) - })) + ) } private func log(_ target: ConfiguredTarget?, _ diagnostic: Diagnostic) { @@ -1791,9 +1799,8 @@ private final class BuildOperationTesterDelegate: BuildOperationDelegate { self.taskCounters[counter, default: 0] += amount } - var counters: [BuildOperationMetrics.Counter : Int] = [.clangCacheHits: 0, .clangCacheMisses: 0, .swiftCacheHits: 0, .swiftCacheMisses: 0] - var taskCounters: [BuildOperationMetrics.TaskCounter : Int] = [:] - + var counters: [BuildOperationMetrics.Counter: Int] = [.clangCacheHits: 0, .clangCacheMisses: 0, .swiftCacheHits: 0, .swiftCacheMisses: 0] + var taskCounters: [BuildOperationMetrics.TaskCounter: Int] = [:] let startTime = Date() private let _diagnosticsEngine = DiagnosticsEngine() @@ -1930,7 +1937,7 @@ private final class BuildOperationTesterDelegate: BuildOperationDelegate { // MARK: BuildOperationDelegate Implementation - func reportPathMap(_ operation: BuildOperation, copiedPathMap: [String : String], generatedFilesPathMap: [String : String]) { + func reportPathMap(_ operation: BuildOperation, copiedPathMap: [String: String], generatedFilesPathMap: [String: String]) { queue.async { self.events.append(.buildReportedPathMap(copiedPathMap: copiedPathMap, generatedFilesPathMap: generatedFilesPathMap)) } @@ -2069,7 +2076,7 @@ private final class BuildOperationTesterDelegate: BuildOperationDelegate { (delegate as? TesterTaskOutputDelegate)?.handleTaskCompletion() self.aggregatedCounters.merge(delegate.counters) { (a, b) in a + b } if !delegate.taskCounters.isEmpty { - self.aggregatedTaskCounters[task.ruleInfo[0], default: [:]].merge(delegate.taskCounters) { (a, b) in a+b } + self.aggregatedTaskCounters[task.ruleInfo[0], default: [:]].merge(delegate.taskCounters) { (a, b) in a + b } } queue.async { self.tasksByTaskIdentifier[taskIdentifier] = task diff --git a/Sources/SWBTestSupport/CoreBasedTests.swift b/Sources/SWBTestSupport/CoreBasedTests.swift index 67255a67..74e31ead 100644 --- a/Sources/SWBTestSupport/CoreBasedTests.swift +++ b/Sources/SWBTestSupport/CoreBasedTests.swift @@ -40,9 +40,11 @@ extension CoreBasedTests { let core: Result do { let path = try await simulatedInferiorProductsPath() - core = try await .success(testingCoreRegistry.value(forKey: path) { - try await Self.makeCore(simulatedInferiorProductsPath: path) - }) + core = try await .success( + testingCoreRegistry.value(forKey: path) { + try await Self.makeCore(simulatedInferiorProductsPath: path) + } + ) } catch { core = .failure(error) } @@ -242,9 +244,12 @@ extension CoreBasedTests { get async throws { let (core, defaultToolchain) = try await coreAndToolchain() let fallbacklibtool = Path("/usr/bin/libtool") - return try #require(defaultToolchain.executableSearchPaths.findExecutable(operatingSystem: core.hostOperatingSystem, basename: "libtool") - ?? defaultToolchain.executableSearchPaths.findExecutable(operatingSystem: core.hostOperatingSystem, basename: "llvm-ar") - ?? (localFS.exists(fallbacklibtool) ? 
fallbacklibtool : nil), "couldn't find libtool in default toolchain") + return try #require( + defaultToolchain.executableSearchPaths.findExecutable(operatingSystem: core.hostOperatingSystem, basename: "libtool") + ?? defaultToolchain.executableSearchPaths.findExecutable(operatingSystem: core.hostOperatingSystem, basename: "llvm-ar") + ?? (localFS.exists(fallbacklibtool) ? fallbacklibtool : nil), + "couldn't find libtool in default toolchain" + ) } } @@ -260,13 +265,13 @@ extension CoreBasedTests { package var supportsSDKImports: Bool { get async throws { #if os(macOS) - let (core, defaultToolchain) = try await coreAndToolchain() - let toolPath = try #require(defaultToolchain.executableSearchPaths.findExecutable(operatingSystem: core.hostOperatingSystem, basename: "ld"), "couldn't find ld in default toolchain") - let mockProducer = try await MockCommandProducer(core: getCore(), productTypeIdentifier: "com.apple.product-type.framework", platform: nil, useStandardExecutableSearchPaths: true, toolchain: nil, fs: PseudoFS()) - let toolsInfo = await SWBCore.discoveredLinkerToolsInfo(mockProducer, AlwaysDeferredCoreClientDelegate(), at: toolPath) - return (try? toolsInfo?.toolVersion >= .init("1164")) == true + let (core, defaultToolchain) = try await coreAndToolchain() + let toolPath = try #require(defaultToolchain.executableSearchPaths.findExecutable(operatingSystem: core.hostOperatingSystem, basename: "ld"), "couldn't find ld in default toolchain") + let mockProducer = try await MockCommandProducer(core: getCore(), productTypeIdentifier: "com.apple.product-type.framework", platform: nil, useStandardExecutableSearchPaths: true, toolchain: nil, fs: PseudoFS()) + let toolsInfo = await SWBCore.discoveredLinkerToolsInfo(mockProducer, AlwaysDeferredCoreClientDelegate(), at: toolPath) + return (try? toolsInfo?.toolVersion >= .init("1164")) == true #else - return false + return false #endif } } @@ -316,7 +321,7 @@ extension CoreBasedTests { } package func linkPath(_ targetArchitecture: String) async throws -> Path? 
{ let (core, defaultToolchain) = try await self.coreAndToolchain() - let prefixMapping = ["aarch64" : "arm64", "arm64ec" : "arm64", "armv7" : "arm", "x86_64": "x64", "i686": "x86"] + let prefixMapping = ["aarch64": "arm64", "arm64ec": "arm64", "armv7": "arm", "x86_64": "x64", "i686": "x86"] guard let prefix = prefixMapping[targetArchitecture] else { return nil @@ -371,7 +376,7 @@ private final class AlwaysDeferredCoreClientDelegate: CoreClientDelegate, CoreCl _diagnosticsEngine.hasErrors } - func executeExternalTool(commandLine: [String], workingDirectory: Path?, environment: [String : String]) async throws -> ExternalToolResult { + func executeExternalTool(commandLine: [String], workingDirectory: Path?, environment: [String: String]) async throws -> ExternalToolResult { .deferred } } diff --git a/Sources/SWBTestSupport/CoreTestSupport.swift b/Sources/SWBTestSupport/CoreTestSupport.swift index 28576f3a..5ade34fc 100644 --- a/Sources/SWBTestSupport/CoreTestSupport.swift +++ b/Sources/SWBTestSupport/CoreTestSupport.swift @@ -18,13 +18,13 @@ import SWBTaskExecution import SWBServiceCore #if USE_STATIC_PLUGIN_INITIALIZATION -private import SWBAndroidPlatform -private import SWBApplePlatform -private import SWBGenericUnixPlatform -private import SWBQNXPlatform -private import SWBUniversalPlatform -private import SWBWebAssemblyPlatform -private import SWBWindowsPlatform + private import SWBAndroidPlatform + private import SWBApplePlatform + private import SWBGenericUnixPlatform + private import SWBQNXPlatform + private import SWBUniversalPlatform + private import SWBWebAssemblyPlatform + private import SWBWindowsPlatform #endif /// Testing endpoints @@ -47,7 +47,7 @@ extension Core { /// Get an initialized Core suitable for testing. /// /// This function requires there to be no errors during loading the core. - package static func createInitializedTestingCore(skipLoadingPluginsNamed: Set, registerExtraPlugins: @PluginExtensionSystemActor (MutablePluginManager) -> Void, simulatedInferiorProductsPath: Path? = nil, environment: [String:String] = [:], delegate: TestingCoreDelegate? = nil, configurationDelegate: TestingCoreConfigurationDelegate? = nil) async throws -> Core { + package static func createInitializedTestingCore(skipLoadingPluginsNamed: Set, registerExtraPlugins: @PluginExtensionSystemActor (MutablePluginManager) -> Void, simulatedInferiorProductsPath: Path? = nil, environment: [String: String] = [:], delegate: TestingCoreDelegate? = nil, configurationDelegate: TestingCoreConfigurationDelegate? = nil) async throws -> Core { // When this code is being loaded directly via unit tests, find the running Xcode path. // // This is a "well known" launch parameter set in Xcode's schemes. @@ -145,17 +145,17 @@ extension Core { // This MUST be a compile-time check because the module dependencies on the plugins are conditional. // Minimize the amount of code that is conditionally compiled to avoid breaking the build during refactoring. 
#if USE_STATIC_PLUGIN_INITIALIZATION - staticPluginInitializers = [ - "Android": SWBAndroidPlatform.initializePlugin, - "Apple": SWBApplePlatform.initializePlugin, - "GenericUnix": SWBGenericUnixPlatform.initializePlugin, - "QNX": SWBQNXPlatform.initializePlugin, - "Universal": SWBUniversalPlatform.initializePlugin, - "WebAssembly": SWBWebAssemblyPlatform.initializePlugin, - "Windows": SWBWindowsPlatform.initializePlugin, - ] + staticPluginInitializers = [ + "Android": SWBAndroidPlatform.initializePlugin, + "Apple": SWBApplePlatform.initializePlugin, + "GenericUnix": SWBGenericUnixPlatform.initializePlugin, + "QNX": SWBQNXPlatform.initializePlugin, + "Universal": SWBUniversalPlatform.initializePlugin, + "WebAssembly": SWBWebAssemblyPlatform.initializePlugin, + "Windows": SWBWindowsPlatform.initializePlugin, + ] #else - staticPluginInitializers = [:] + staticPluginInitializers = [:] #endif if useStaticPluginInitialization { diff --git a/Sources/SWBTestSupport/DeviceFamilyBuildOperationTester.swift b/Sources/SWBTestSupport/DeviceFamilyBuildOperationTester.swift index f06fe03c..041fc595 100644 --- a/Sources/SWBTestSupport/DeviceFamilyBuildOperationTester.swift +++ b/Sources/SWBTestSupport/DeviceFamilyBuildOperationTester.swift @@ -44,7 +44,7 @@ extension CoreBasedTests { path: "", children: [ TestFile("Assets.xcassets"), - TestFile("Info.plist") + TestFile("Info.plist"), ] ), buildConfigurations: [ @@ -54,7 +54,7 @@ extension CoreBasedTests { "PRODUCT_NAME": "$(TARGET_NAME)", "CODE_SIGN_IDENTITY": "", "SDKROOT": "\(destination.sdk)", - "VERSIONING_SYSTEM": "apple-generic" + "VERSIONING_SYSTEM": "apple-generic", ] .merging(sdkVariantSettings, uniquingKeysWith: { (_, new) in new }) .merging(deviceFamilySettings, uniquingKeysWith: { (_, new) in new }) @@ -68,7 +68,7 @@ extension CoreBasedTests { TestBuildConfiguration( "Debug", buildSettings: [ - "INFOPLIST_FILE": "$(SRCROOT)/Info.plist", + "INFOPLIST_FILE": "$(SRCROOT)/Info.plist" ] ) ], @@ -82,10 +82,13 @@ extension CoreBasedTests { let tester = try await BuildOperationTester(core, testProject, simulated: false) let SRCROOT = tmpDirPath - try await tester.fs.writePlist(SRCROOT.join("Info.plist"), .plDict([ - "CFBundleDevelopmentRegion": .plString("en"), - "CFBundleExecutable": .plString("$(EXECUTABLE_NAME)") - ])) + try await tester.fs.writePlist( + SRCROOT.join("Info.plist"), + .plDict([ + "CFBundleDevelopmentRegion": .plString("en"), + "CFBundleExecutable": .plString("$(EXECUTABLE_NAME)"), + ]) + ) try await tester.fs.writeAssetCatalog(SRCROOT.join("Assets.xcassets"), .root, .appIcon("AppIcon")) diff --git a/Sources/SWBTestSupport/DiagnosticsEngine.swift b/Sources/SWBTestSupport/DiagnosticsEngine.swift index af0d93c6..8e72f874 100644 --- a/Sources/SWBTestSupport/DiagnosticsEngine.swift +++ b/Sources/SWBTestSupport/DiagnosticsEngine.swift @@ -70,7 +70,7 @@ final package class DiagnosticsEngineResult { } @available(*, unavailable) -extension DiagnosticsEngineResult: Sendable { } +extension DiagnosticsEngineResult: Sendable {} package enum StringCheck: ExpressibleByStringLiteral, Sendable { case equal(String) diff --git a/Sources/SWBTestSupport/FSUtilities.swift b/Sources/SWBTestSupport/FSUtilities.swift index 75c193c9..e5fd1c3b 100644 --- a/Sources/SWBTestSupport/FSUtilities.swift +++ b/Sources/SWBTestSupport/FSUtilities.swift @@ -11,15 +11,15 @@ //===----------------------------------------------------------------------===// #if canImport(Darwin) -import CoreGraphics -import ImageIO -import UniformTypeIdentifiers + import CoreGraphics + 
import ImageIO + import UniformTypeIdentifiers #endif package import Foundation #if canImport(FoundationXML) -import FoundationXML + import FoundationXML #endif package import SWBUtil @@ -49,44 +49,43 @@ package extension FSProxy { assert(path.fileExtension == "xcdatamodel") #if os(macOS) || targetEnvironment(macCatalyst) || !canImport(Darwin) - let model = XMLElement(name: "model") - model.setAttributesWith([ - "type": "com.apple.IDECoreDataModeler.DataModel", - "documentVersion": "1.0", - "minimumToolsVersion": "Automatic", - "sourceLanguage": { - switch language { - case .objectiveC: - return "Objective-C" - case .swift: - return "Swift" + let model = XMLElement(name: "model") + model.setAttributesWith([ + "type": "com.apple.IDECoreDataModeler.DataModel", + "documentVersion": "1.0", + "minimumToolsVersion": "Automatic", + "sourceLanguage": { + switch language { + case .objectiveC: + return "Objective-C" + case .swift: + return "Swift" + } + }(), + "userDefinedModelVersionIdentifier": "", + ]) + + for entity in entities { + switch entity { + case let .entity(name): + let entityElement = XMLElement(name: "entity") + entityElement.setAttributesWith([ + "name": name, + "representedClassName": name, + "syncable": "YES", + "codeGenerationType": "class", + ]) + model.addChild(entityElement) } - }(), - "userDefinedModelVersionIdentifier": "" - ]) - - for entity in entities { - switch entity { - case let .entity(name): - let entityElement = XMLElement(name: "entity") - entityElement.setAttributesWith([ - "name": name, - "representedClassName": name, - "syncable": "YES", - "codeGenerationType": "class", - ]) - model.addChild(entityElement) } - } - try createDirectory(path, recursive: true) - try write(path.join("contents"), contents: ByteString(XMLDocument(rootElement: model).xmlData)) + try createDirectory(path, recursive: true) + try write(path.join("contents"), contents: ByteString(XMLDocument(rootElement: model).xmlData)) #else - throw StubError.error("Not supported on this platform") + throw StubError.error("Not supported on this platform") #endif } - func writeCoreDataModelD(_ path: Path, language: CoreDataCodegenLanguage, _ entities: CoreDataEntity...) 
throws { /// Directory hierarchy is `Model.xcdatamodeld/Model.xcdatamodel/contents` assert(path.fileExtension == "xcdatamodeld") @@ -96,18 +95,22 @@ package extension FSProxy { func writeDAE(_ path: Path) throws { assert(path.fileExtension == "dae" || path.fileExtension == "DAE") - try write(path, contents: ByteString( - """ - - - - - - - - - - """)) + try write( + path, + contents: ByteString( + """ + + + + + + + + + + """ + ) + ) } func writeIntentDefinition(_ path: Path) async throws { @@ -130,10 +133,10 @@ package extension FSProxy { "INIntentParameterCombinationSupportsBackgroundExecution": .plBool(true), "INIntentParameterCombinationTitle": .plString("Test"), "INIntentParameterCombinationTitleID": .plString("cKhBdn"), - "INIntentParameterCombinationUpdatesLinked": .plBool(true) + "INIntentParameterCombinationUpdatesLinked": .plBool(true), ]) ]), - "INIntentName" : .plString("Intent"), + "INIntentName": .plString("Intent"), "INIntentParameterCombinations": .plDict([ "": .plDict([ "INIntentParameterCombinationIsPrimary": .plBool(true), @@ -146,20 +149,20 @@ package extension FSProxy { "INIntentResponseCodes": .plArray([ .plDict([ "INIntentResponseCodeName": .plString("success"), - "INIntentResponseCodeSuccess": .plBool(true) + "INIntentResponseCodeSuccess": .plBool(true), ]), .plDict([ "INIntentResponseCodeName": .plString("failure") - ]) + ]), ]) ]), "INIntentTitle": .plString("Intent"), "INIntentTitleID": .plString("m2tDjz"), "INIntentType": .plString("Custom"), - "INIntentVerb": .plString("Do") + "INIntentVerb": .plString("Do"), ]) ]), - "INTypes": .plArray([]) + "INTypes": .plArray([]), ]) try await writePlist(path, plist) @@ -174,7 +177,7 @@ package extension FSProxy { let infoDict: PropertyListItem = .plDict([ "version": .plInt(1), - "author": .plString("xcode") + "author": .plString("xcode"), ]) for component in components { @@ -182,42 +185,55 @@ package extension FSProxy { case .root: try await writeJSON(path.join("Contents.json"), ["info": infoDict]) case .appIcon(let iconName): - try await writeJSON(path.join("\(iconName).appiconset/Contents.json"), [ - "info": infoDict, - "images": .plArray([]) - ]) + try await writeJSON( + path.join("\(iconName).appiconset/Contents.json"), + [ + "info": infoDict, + "images": .plArray([]), + ] + ) case .imageSet(let imageSetName, let images): - try await writeJSON(path.join("\(imageSetName).imageset/Contents.json"), [ - "info": infoDict, - "images": .plArray(images.map { image in - return .plDict([ - "filename": .plString(image.filename), - "idiom": .plString(image.idiom.rawValue), - "scale": .plString("\(image.scale)x") - ]) - }), - ]) + try await writeJSON( + path.join("\(imageSetName).imageset/Contents.json"), + [ + "info": infoDict, + "images": .plArray( + images.map { image in + return .plDict([ + "filename": .plString(image.filename), + "idiom": .plString(image.idiom.rawValue), + "scale": .plString("\(image.scale)x"), + ]) + } + ), + ] + ) case .colorSet(let colorSetName, let colors): - try await writeJSON(path.join("\(colorSetName).colorset/Contents.json"), [ - "info": infoDict, - "colors": .plArray(colors.map { color in - switch color { - case .sRGB(let red, let green, let blue, let alpha, let idiom): - return .plDict([ - "idiom": .plString(idiom.rawValue), - "color": .plDict([ - "color-space": .plString("srgb"), - "components": .plDict([ - "red": .plDouble(red), - "green": .plDouble(green), - "blue": .plDouble(blue), - "alpha": .plDouble(alpha) + try await writeJSON( + path.join("\(colorSetName).colorset/Contents.json"), + [ + 
"info": infoDict, + "colors": .plArray( + colors.map { color in + switch color { + case .sRGB(let red, let green, let blue, let alpha, let idiom): + return .plDict([ + "idiom": .plString(idiom.rawValue), + "color": .plDict([ + "color-space": .plString("srgb"), + "components": .plDict([ + "red": .plDouble(red), + "green": .plDouble(green), + "blue": .plDouble(blue), + "alpha": .plDouble(alpha), + ]), + ]), ]) - ]) - ]) - } - }) - ]) + } + } + ), + ] + ) } } } @@ -228,57 +244,57 @@ package extension FSProxy { switch storyboardRuntime { case .iOS: contents <<< """ - - - - - - - - """ + + + + + + + + """ case .watchKit: assert(path.basename == "Interface.storyboard") contents <<< """ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - """ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + """ } } } @@ -313,13 +329,14 @@ package extension FSProxy { try await writePlist(resources.join("Info.plist"), [:]) try await writeFileContents(binary.join(path.basenameWithoutSuffix)) { stream in - try await stream <<< withTemporaryDirectory { dir in - if `static` { - return try await localFS.read(InstalledXcode.currentlySelected().compileStaticLibrary(path: dir, platform: platform, infoLookup: infoLookup, archs: archs, alwaysLipo: alwaysLipo)) - } else { - return try await localFS.read(InstalledXcode.currentlySelected().compileDynamicLibrary(path: dir, platform: platform, infoLookup: infoLookup, archs: archs, alwaysLipo: alwaysLipo)) + try await stream + <<< withTemporaryDirectory { dir in + if `static` { + return try await localFS.read(InstalledXcode.currentlySelected().compileStaticLibrary(path: dir, platform: platform, infoLookup: infoLookup, archs: archs, alwaysLipo: alwaysLipo)) + } else { + return try await localFS.read(InstalledXcode.currentlySelected().compileDynamicLibrary(path: dir, platform: platform, infoLookup: infoLookup, archs: archs, alwaysLipo: alwaysLipo)) + } } - } } try await body(contents, binary, headers, resources) @@ -352,9 +369,10 @@ package extension FSProxy { try await writeFileContents(contents.join("_CodeSignature/CodeSignature")) { $0 <<< "signature" } try await writePlist(contents.join("Info.plist"), [:]) try await writeFileContents(binary.join(path.basenameWithoutSuffix)) { stream in - try await stream <<< withTemporaryDirectory { dir in - try await localFS.read(InstalledXcode.currentlySelected().compileExecutable(path: dir, platform: platform, infoLookup: infoLookup, archs: archs, alwaysLipo: alwaysLipo)) - } + try await stream + <<< withTemporaryDirectory { dir in + try await localFS.read(InstalledXcode.currentlySelected().compileExecutable(path: dir, platform: platform, infoLookup: infoLookup, archs: archs, alwaysLipo: alwaysLipo)) + } } try await writeFileContents(contents.join("PkgInfo")) { $0 <<< "APPL????" 
} try await writePlist(contents.join("version.plist"), [:]) @@ -363,30 +381,30 @@ package extension FSProxy { func writeImage(_ path: Path, width: Int, height: Int) throws { #if canImport(Darwin) - let bitsPerComponent = 8 - let bitsPerPixel = 32 - let bytes = [UInt8](repeating: 0 /* black */, count: width * height * (bitsPerPixel / bitsPerComponent)) - try bytes.withUnsafeBufferPointer { pointer in - guard let data = CFDataCreateWithBytesNoCopy(kCFAllocatorDefault, pointer.baseAddress, pointer.count, kCFAllocatorNull) else { - throw CGImageError.initializationFailed - } - guard let space = CGColorSpace(name: CGColorSpace.sRGB) else { - throw CGImageError.initializationFailed - } - guard let provider = CGDataProvider(data: data) else { - throw CGImageError.initializationFailed - } - guard let image = CGImage(width: width, height: height, bitsPerComponent: bitsPerComponent, bitsPerPixel: bitsPerPixel, bytesPerRow: bytes.count / height, space: space, bitmapInfo: CGBitmapInfo(rawValue: CGImageAlphaInfo.first.rawValue).union(.byteOrder32Big), provider: provider, decode: nil, shouldInterpolate: false, intent: CGColorRenderingIntent.defaultIntent) else { - throw CGImageError.initializationFailed - } - guard let destination = CGImageDestinationCreateWithURL(CFURLCreateWithFileSystemPath(kCFAllocatorDefault, path.str as CFString, CFURLPathStyle.cfurlposixPathStyle, false), UTType.png.identifier as CFString, 1, nil) else { - throw CGImageError.initializationFailed + let bitsPerComponent = 8 + let bitsPerPixel = 32 + let bytes = [UInt8](repeating: 0 /* black */, count: width * height * (bitsPerPixel / bitsPerComponent)) + try bytes.withUnsafeBufferPointer { pointer in + guard let data = CFDataCreateWithBytesNoCopy(kCFAllocatorDefault, pointer.baseAddress, pointer.count, kCFAllocatorNull) else { + throw CGImageError.initializationFailed + } + guard let space = CGColorSpace(name: CGColorSpace.sRGB) else { + throw CGImageError.initializationFailed + } + guard let provider = CGDataProvider(data: data) else { + throw CGImageError.initializationFailed + } + guard let image = CGImage(width: width, height: height, bitsPerComponent: bitsPerComponent, bitsPerPixel: bitsPerPixel, bytesPerRow: bytes.count / height, space: space, bitmapInfo: CGBitmapInfo(rawValue: CGImageAlphaInfo.first.rawValue).union(.byteOrder32Big), provider: provider, decode: nil, shouldInterpolate: false, intent: CGColorRenderingIntent.defaultIntent) else { + throw CGImageError.initializationFailed + } + guard let destination = CGImageDestinationCreateWithURL(CFURLCreateWithFileSystemPath(kCFAllocatorDefault, path.str as CFString, CFURLPathStyle.cfurlposixPathStyle, false), UTType.png.identifier as CFString, 1, nil) else { + throw CGImageError.initializationFailed + } + CGImageDestinationAddImage(destination, image, nil) + CGImageDestinationFinalize(destination) } - CGImageDestinationAddImage(destination, image, nil) - CGImageDestinationFinalize(destination) - } #else - throw StubError.error("Not supported on this platform") + throw StubError.error("Not supported on this platform") #endif } diff --git a/Sources/SWBTestSupport/IndexingInfoResults.swift b/Sources/SWBTestSupport/IndexingInfoResults.swift index 8ce7bb7d..c1c02479 100644 --- a/Sources/SWBTestSupport/IndexingInfoResults.swift +++ b/Sources/SWBTestSupport/IndexingInfoResults.swift @@ -81,7 +81,7 @@ package final class IndexingInfoResults { } @available(*, unavailable) -extension IndexingInfoResults: Sendable { } +extension IndexingInfoResults: Sendable {} package final class 
IndexingInfo: Hashable, CustomStringConvertible { private enum KnownKeys: String, CaseIterable { @@ -182,7 +182,7 @@ package final class IndexingInfo: Hashable, CustomStringConvertible { } @available(*, unavailable) -extension IndexingInfo: Sendable { } +extension IndexingInfo: Sendable {} extension IndexingInfo { package func checkSourceFilePath(_ path: Path, sourceLocation: SourceLocation = #_sourceLocation) { @@ -220,7 +220,6 @@ extension IndexingInfo { } } - extension IndexingInfo { package var clang: any CommandLineCheckable { return consumeCheckableCommandLineKey(.clangASTCommandArguments) diff --git a/Sources/SWBTestSupport/KnownIssueTestSupport.swift b/Sources/SWBTestSupport/KnownIssueTestSupport.swift index 6d93b10d..8addfca1 100644 --- a/Sources/SWBTestSupport/KnownIssueTestSupport.swift +++ b/Sources/SWBTestSupport/KnownIssueTestSupport.swift @@ -11,49 +11,49 @@ //===----------------------------------------------------------------------===// #if compiler(<6.1) -public import Testing + public import Testing -extension Trait where Self == Testing.ConditionTrait { - public static func flaky(_ comment: Comment, sourceLocation: SourceLocation = #_sourceLocation) -> Self { - disabled("Custom execution traits are not supported in this build") - } + extension Trait where Self == Testing.ConditionTrait { + public static func flaky(_ comment: Comment, sourceLocation: SourceLocation = #_sourceLocation) -> Self { + disabled("Custom execution traits are not supported in this build") + } - public static func knownIssue(_ comment: Comment, sourceLocation: SourceLocation = #_sourceLocation) -> Self { - disabled("Custom execution traits are not supported in this build") + public static func knownIssue(_ comment: Comment, sourceLocation: SourceLocation = #_sourceLocation) -> Self { + disabled("Custom execution traits are not supported in this build") + } } -} #else -package import Testing + package import Testing -package struct KnownIssueTestTrait: TestTrait & SuiteTrait & TestScoping { - let comment: Comment - let isIntermittent: Bool - let sourceLocation: SourceLocation + package struct KnownIssueTestTrait: TestTrait & SuiteTrait & TestScoping { + let comment: Comment + let isIntermittent: Bool + let sourceLocation: SourceLocation - package var isRecursive: Bool { - true - } + package var isRecursive: Bool { + true + } - package func provideScope(for test: Testing.Test, testCase: Testing.Test.Case?, performing function: @Sendable () async throws -> Void) async throws { - if testCase == nil || test.isSuite { - try await function() - } else { - await withKnownIssue(comment, isIntermittent: isIntermittent, sourceLocation: sourceLocation) { + package func provideScope(for test: Testing.Test, testCase: Testing.Test.Case?, performing function: @Sendable () async throws -> Void) async throws { + if testCase == nil || test.isSuite { try await function() + } else { + await withKnownIssue(comment, isIntermittent: isIntermittent, sourceLocation: sourceLocation) { + try await function() + } } } } -} -extension Trait where Self == KnownIssueTestTrait { - /// Causes a test to be marked as a (nondeterministic) expected failure if it throws any error or records any issue. 
- package static func flaky(_ comment: Comment, sourceLocation: SourceLocation = #_sourceLocation) -> Self { - Self(comment: comment, isIntermittent: true, sourceLocation: sourceLocation) - } + extension Trait where Self == KnownIssueTestTrait { + /// Causes a test to be marked as a (nondeterministic) expected failure if it throws any error or records any issue. + package static func flaky(_ comment: Comment, sourceLocation: SourceLocation = #_sourceLocation) -> Self { + Self(comment: comment, isIntermittent: true, sourceLocation: sourceLocation) + } - /// Causes a test to be marked as a (deterministic) expected failure by requiring it to throw an error or record an issue. - package static func knownIssue(_ comment: Comment, sourceLocation: SourceLocation = #_sourceLocation) -> Self { - Self(comment: comment, isIntermittent: false, sourceLocation: sourceLocation) + /// Causes a test to be marked as a (deterministic) expected failure by requiring it to throw an error or record an issue. + package static func knownIssue(_ comment: Comment, sourceLocation: SourceLocation = #_sourceLocation) -> Self { + Self(comment: comment, isIntermittent: false, sourceLocation: sourceLocation) + } } -} #endif diff --git a/Sources/SWBTestSupport/LibraryGeneration.swift b/Sources/SWBTestSupport/LibraryGeneration.swift index 1a238077..3a693a8f 100644 --- a/Sources/SWBTestSupport/LibraryGeneration.swift +++ b/Sources/SWBTestSupport/LibraryGeneration.swift @@ -64,7 +64,7 @@ extension InstalledXcode { let linkerArgs = linkerOptions.map({ $0.args }).reduce([], +) if buildLibraryForDistribution { distributionArgs = ["-enable-library-evolution"] - _ = try await xcrun(["-sdk", platform.sdkName, "swiftc", "-target", target] + targetVariantArgs + distributionArgs as [String] + ["-emit-module-interface", "-emit-module-interface-path", swiftModuleDir.join("\(name).swiftinterface").str, "-c", sourcePath.str], workingDirectory: workingDirectory) + _ = try await xcrun(["-sdk", platform.sdkName, "swiftc", "-target", target] + targetVariantArgs + distributionArgs as [String] + ["-emit-module-interface", "-emit-module-interface-path", swiftModuleDir.join("\(name).swiftinterface").str, "-c", sourcePath.str], workingDirectory: workingDirectory) } else { distributionArgs = [] } @@ -147,8 +147,7 @@ extension InstalledXcode { let macho = try await compileSwiftFile(name: name, platform: platform, infoLookup: infoLookup, platformVariant: platformVariant, arch: arch, sourcePath: sourcePath, buildDir: buildDir, swiftModuleDir: swiftModuleDir, buildLibraryForDistribution: buildLibraryForDistribution, static: `static`, linkerOptions: linkerOptions, workingDirectory: basePath, fs: fs, object: object, needSigned: needSigned) machos.append(macho) } - } - else { + } else { let sourcePath = srcDir.join("source.c") let sourceContents = sourceContents ?? "int favorite() { return 0; }\n" try fs.write(sourcePath, contents: ByteString(encodingAsUTF8: sourceContents)) @@ -232,10 +231,12 @@ extension InstalledXcode { } let machoPath = basePath.join(baseName) - _ = try await xcrun(["-sdk", platform.sdkName, "lipo"] - .appending(contentsOf: hideARM64 ? ["-hideARM64"] : []) - .appending(contentsOf: ["-create"] + machos) - .appending(contentsOf: ["-output", machoPath.str])) + _ = try await xcrun( + ["-sdk", platform.sdkName, "lipo"] + .appending(contentsOf: hideARM64 ? 
["-hideARM64"] : []) + .appending(contentsOf: ["-create"] + machos) + .appending(contentsOf: ["-output", machoPath.str]) + ) return machoPath } @@ -263,8 +264,7 @@ extension InstalledXcode { let macho = try await compileSwiftFile(name: baseName, platform: platform, infoLookup: infoLookup, platformVariant: platformVariant, arch: arch, sourcePath: sourcePath, buildDir: buildDir, swiftModuleDir: swiftModuleDir, buildLibraryForDistribution: buildLibraryForDistribution, static: false, linkerOptions: linkerOptions, workingDirectory: basePath, fs: fs, object: false, needSigned: false) machos.append(macho.str) } - } - else { + } else { let sourcePath = srcDir.join("source.c") let sourceContents = sourceContents ?? "int favorite() { return 0; }\n" try fs.write(sourcePath, contents: ByteString(encodingAsUTF8: sourceContents)) @@ -321,8 +321,7 @@ extension InstalledXcode { if useSwift { fatalError("generated Swift static libraries is not currently supported") - } - else { + } else { // Write out a basic header. let headersDir = buildDir.join("include") try fs.createDirectory(headersDir, recursive: true) diff --git a/Sources/SWBTestSupport/MachO.swift b/Sources/SWBTestSupport/MachO.swift index fd002541..fa765ec4 100644 --- a/Sources/SWBTestSupport/MachO.swift +++ b/Sources/SWBTestSupport/MachO.swift @@ -15,9 +15,9 @@ package import SWBUtil extension MachO.Slice { package func targetTripleStrings(infoLookup: any PlatformInfoLookup) throws -> [String] { #if canImport(Darwin) - return try buildVersions().map { $0.targetTripleString(arch: self.arch, infoLookup: infoLookup) } + return try buildVersions().map { $0.targetTripleString(arch: self.arch, infoLookup: infoLookup) } #else - throw BinaryReaderError.parseError("Mach-O parsing not supported on this platform") + throw BinaryReaderError.parseError("Mach-O parsing not supported on this platform") #endif } } @@ -57,7 +57,7 @@ extension BuildVersion.Platform { return "driverkit" default: guard let llvmTargetTripleSys = infoLookup.lookupPlatformInfo(platform: self)?.llvmTargetTripleSys else { - fatalError("external Mach-O based platform \(self) must provide a llvmTargetTripleSys value") + fatalError("external Mach-O based platform \(self) must provide a llvmTargetTripleSys value") } return llvmTargetTripleSys } diff --git a/Sources/SWBTestSupport/Misc.swift b/Sources/SWBTestSupport/Misc.swift index 9e77ad92..fdf058dd 100644 --- a/Sources/SWBTestSupport/Misc.swift +++ b/Sources/SWBTestSupport/Misc.swift @@ -20,7 +20,7 @@ package import SWBProtocol package import Testing #if os(Windows) -import WinSDK + import WinSDK #endif package extension Sequence where Element: Equatable { @@ -182,16 +182,16 @@ package func XCTAssertEqualPropertyListItems(_ expression1: @autoclosure () thro extension ProcessInfo { package var isRunningInVirtualMachine: Bool { #if canImport(Darwin) - let machdep_cpu_features = "machdep.cpu.features" - var len: Int = 0 - if sysctlbyname(machdep_cpu_features, nil, &len, nil, 0) == 0 { - var p = [CChar](repeating: 0, count: len) - if sysctlbyname(machdep_cpu_features, &p, &len, nil, 0) == 0 { - if let features = p.withUnsafeBufferPointer({ $0.baseAddress.map({ String(cString: $0) }) })?.split(separator: " ") { - return features.contains("VMM") + let machdep_cpu_features = "machdep.cpu.features" + var len: Int = 0 + if sysctlbyname(machdep_cpu_features, nil, &len, nil, 0) == 0 { + var p = [CChar](repeating: 0, count: len) + if sysctlbyname(machdep_cpu_features, &p, &len, nil, 0) == 0 { + if let features = p.withUnsafeBufferPointer({ 
$0.baseAddress.map({ String(cString: $0) }) })?.split(separator: " ") { + return features.contains("VMM") + } } } - } #endif return false } @@ -199,19 +199,19 @@ extension ProcessInfo { // Get memory usage of current process in bytes package var memoryUsage: UInt64 { #if canImport(Darwin) - var info = task_vm_info_data_t() - var count = mach_msg_type_number_t(MemoryLayout.size) / 4 - let result: kern_return_t = withUnsafeMutablePointer(to: &info) { - $0.withMemoryRebound(to: integer_t.self, capacity: 1) { - task_info(mach_task_self_, task_flavor_t(TASK_VM_INFO), $0, &count) + var info = task_vm_info_data_t() + var count = mach_msg_type_number_t(MemoryLayout.size) / 4 + let result: kern_return_t = withUnsafeMutablePointer(to: &info) { + $0.withMemoryRebound(to: integer_t.self, capacity: 1) { + task_info(mach_task_self_, task_flavor_t(TASK_VM_INFO), $0, &count) + } } - } - if result == KERN_SUCCESS { - return info.phys_footprint // memory in bytes - } - return 0 + if result == KERN_SUCCESS { + return info.phys_footprint // memory in bytes + } + return 0 #else - return 0 // for non-macOS platforms + return 0 // for non-macOS platforms #endif } } @@ -257,7 +257,7 @@ extension ArenaInfo { package static func indexBuildArena(derivedDataRoot path: Path) -> ArenaInfo { let buildRoot = path.join("Build") - let indexBuildRoot = path.join("Index.noindex/Build") + let indexBuildRoot = path.join("Index.noindex/Build") return ArenaInfo( derivedDataPath: path, buildProductsPath: indexBuildRoot.join("Products"), diff --git a/Sources/SWBTestSupport/PerfTestSupport.swift b/Sources/SWBTestSupport/PerfTestSupport.swift index f05532ad..b2a875ce 100644 --- a/Sources/SWBTestSupport/PerfTestSupport.swift +++ b/Sources/SWBTestSupport/PerfTestSupport.swift @@ -26,12 +26,14 @@ extension PerfTests { var timings: [Duration] = [] for _ in 0..(into n: Int, piecesOf input: [T]) -> [[T]] { // FIXME: This is not efficient. - var results = (0 ..< n).map{ _ in [T]() } + var results = (0.. TestWorkspace { // Create all of the input files. - let files = (0 ..< numFiles).map{ i -> TestFile in + let files = (0.. TestFile in // FIXME: Pick a random file extension. let ext = "c" return TestFile("File-\(i).\(ext)") } // Create the targets, with files randomly divided. - let targets = rng.split(into: numTargets, piecesOf: files).enumerated().map{ (entry) -> TestStandardTarget in + let targets = rng.split(into: numTargets, piecesOf: files).enumerated().map { (entry) -> TestStandardTarget in let (i, files) = entry // FIXME: Pick type randomly. - return TestStandardTarget("Target-\(i)", type: .staticLibrary, buildPhases: [ + return TestStandardTarget( + "Target-\(i)", + type: .staticLibrary, + buildPhases: [ // FIXME: Pick phases randomly. - TestSourcesBuildPhase(files.map{ TestBuildFile($0.name) }) - ]) + TestSourcesBuildPhase(files.map { TestBuildFile($0.name) }) + ] + ) } // FIXME: Create the random projects. 
assert(numProjects == 1, "FIXME: Unsupported") - let project = TestProject("Project-0", + let project = TestProject( + "Project-0", groupTree: TestGroup("Sources", children: files), buildConfigurations: [ - TestBuildConfiguration("Debug", buildSettings: [ - "PRODUCT_NAME": "$(TARGET_NAME)"]), + TestBuildConfiguration( + "Debug", + buildSettings: [ + "PRODUCT_NAME": "$(TARGET_NAME)" + ] + ) ], - targets: targets) + targets: targets + ) return TestWorkspace("Random", sourceRoot: project.sourceRoot, projects: [project]) } } @available(*, unavailable) -extension RandomWorkspaceBuilder: Sendable { } +extension RandomWorkspaceBuilder: Sendable {} diff --git a/Sources/SWBTestSupport/RunDestinationTestSupport.swift b/Sources/SWBTestSupport/RunDestinationTestSupport.swift index fa29f90c..8ae3aa68 100644 --- a/Sources/SWBTestSupport/RunDestinationTestSupport.swift +++ b/Sources/SWBTestSupport/RunDestinationTestSupport.swift @@ -119,16 +119,16 @@ extension _RunDestinationInfo { /// A run destination targeting macOS, using the public SDK. package static var macOS: Self { #if os(macOS) - switch Architecture.host.stringValue { - case "arm64": - return macOSAppleSilicon - case "x86_64": - return macOSIntel - default: - preconditionFailure("Unknown architecture \(Architecture.host.stringValue ?? "")") - } + switch Architecture.host.stringValue { + case "arm64": + return macOSAppleSilicon + case "x86_64": + return macOSIntel + default: + preconditionFailure("Unknown architecture \(Architecture.host.stringValue ?? "")") + } #else - return macOSIntel + return macOSIntel #endif } @@ -150,17 +150,17 @@ extension _RunDestinationInfo { /// A run destination targeting macOS (Mac Catalyst), using the public SDK. package static var macCatalyst: Self { #if os(macOS) - switch Architecture.host.stringValue { - case "arm64": - // FIXME: Use results.runDestinationTargetArchitecture in our tests where appropriate so that this works - fallthrough // return macCatalystAppleSilicon - case "x86_64": - return macCatalystIntel - default: - preconditionFailure("Unknown architecture \(Architecture.host.stringValue ?? "")") - } + switch Architecture.host.stringValue { + case "arm64": + // FIXME: Use results.runDestinationTargetArchitecture in our tests where appropriate so that this works + fallthrough // return macCatalystAppleSilicon + case "x86_64": + return macCatalystIntel + default: + preconditionFailure("Unknown architecture \(Architecture.host.stringValue ?? "")") + } #else - return macCatalystIntel + return macCatalystIntel #endif } @@ -222,17 +222,17 @@ extension _RunDestinationInfo { /// A run destination targeting DriverKit, using the public SDK. package static var driverKit: Self { #if os(macOS) - switch Architecture.host.stringValue { - case "arm64": - // FIXME: Use results.runDestinationTargetArchitecture in our tests where appropriate so that this works - fallthrough // return driverKitAppleSilicon - case "x86_64": - return driverKitIntel - default: - preconditionFailure("Unknown architecture \(Architecture.host.stringValue ?? "")") - } + switch Architecture.host.stringValue { + case "arm64": + // FIXME: Use results.runDestinationTargetArchitecture in our tests where appropriate so that this works + fallthrough // return driverKitAppleSilicon + case "x86_64": + return driverKitIntel + default: + preconditionFailure("Unknown architecture \(Architecture.host.stringValue ?? 
"")") + } #else - return driverKitIntel + return driverKitIntel #endif } @@ -248,7 +248,7 @@ extension _RunDestinationInfo { /// A run destination targeting Windows generic device, using the public SDK. package static var windows: Self { - guard let arch = Architecture.hostStringValue else { + guard let arch = Architecture.hostStringValue else { preconditionFailure("Unknown architecture \(Architecture.host.stringValue ?? "")") } return .init(platform: "windows", sdk: "windows", sdkVariant: "windows", targetArchitecture: arch, supportedArchitectures: ["x86_64, aarch64"], disableOnlyActiveArch: false) @@ -256,7 +256,7 @@ extension _RunDestinationInfo { /// A run destination targeting Linux generic device, using the public SDK. package static var linux: Self { - guard let arch = Architecture.hostStringValue else { + guard let arch = Architecture.hostStringValue else { preconditionFailure("Unknown architecture \(Architecture.host.stringValue ?? "")") } return .init(platform: "linux", sdk: "linux", sdkVariant: "linux", targetArchitecture: arch, supportedArchitectures: ["x86_64", "aarch64"], disableOnlyActiveArch: false) @@ -264,7 +264,7 @@ extension _RunDestinationInfo { /// A run destination targeting FreeBSD generic device, using the public SDK. package static var freebsd: Self { - guard let arch = Architecture.hostStringValue else { + guard let arch = Architecture.hostStringValue else { preconditionFailure("Unknown architecture \(Architecture.host.stringValue ?? "")") } return .init(platform: "freebsd", sdk: "freebsd", sdkVariant: "freebsd", targetArchitecture: arch, supportedArchitectures: ["x86_64", "aarch64"], disableOnlyActiveArch: false) @@ -272,7 +272,7 @@ extension _RunDestinationInfo { /// A run destination targeting OpenBSD generic device, using the public SDK. package static var openbsd: Self { - guard let arch = Architecture.hostStringValue else { + guard let arch = Architecture.hostStringValue else { preconditionFailure("Unknown architecture \(Architecture.host.stringValue ?? "")") } return .init(platform: "openbsd", sdk: "openbsd", sdkVariant: "openbsd", targetArchitecture: arch, supportedArchitectures: ["x86_64", "aarch64"], disableOnlyActiveArch: false) @@ -361,7 +361,7 @@ extension _RunDestinationInfo { case "linux": return "linux-gnu" default: - return platform // watchOS, DriverKit + return platform // watchOS, DriverKit } } diff --git a/Sources/SWBTestSupport/SettingsBasedTests.swift b/Sources/SWBTestSupport/SettingsBasedTests.swift index 94ec8e2a..c0df1d92 100644 --- a/Sources/SWBTestSupport/SettingsBasedTests.swift +++ b/Sources/SWBTestSupport/SettingsBasedTests.swift @@ -32,7 +32,7 @@ extension CoreBasedTests { let context = try await WorkspaceContext(core: core.or(await getCore()), workspace: workspace, fs: fs, processExecutionCache: .sharedForTesting) // Configure fake user and system info. - context.updateUserInfo(UserInfo(user: "exampleUser", group: "exampleGroup", uid: 1234, gid:12345, home: Path.root.join("Users").join("exampleUser"), environment: environment)) + context.updateUserInfo(UserInfo(user: "exampleUser", group: "exampleGroup", uid: 1234, gid: 12345, home: Path.root.join("Users").join("exampleUser"), environment: environment)) context.updateSystemInfo(systemInfo ?? 
SystemInfo(operatingSystemVersion: Version(99, 98, 97), productBuildVersion: "99A98", nativeArchitecture: "x86_64")) return context diff --git a/Sources/SWBTestSupport/SettingsRunDestinationTestSupport.swift b/Sources/SWBTestSupport/SettingsRunDestinationTestSupport.swift index dbb90dc7..537f7acc 100644 --- a/Sources/SWBTestSupport/SettingsRunDestinationTestSupport.swift +++ b/Sources/SWBTestSupport/SettingsRunDestinationTestSupport.swift @@ -20,7 +20,7 @@ package import SWBMacro extension CoreBasedTests { package func testActiveRunDestination(_ targetType: TestStandardTarget.TargetType = .application, extraBuildSettings: [String: String] = [:], runDestination: RunDestinationInfo?, activeArchitecture: String? = nil, hostArchitecture: String? = nil, _ check: (WorkspaceContext, Settings, MacroEvaluationScope) throws -> Void, sourceLocation: SourceLocation = #_sourceLocation) async throws { var buildSettings = [ - "ONLY_ACTIVE_ARCH": "YES", + "ONLY_ACTIVE_ARCH": "YES" ] buildSettings.addContents(of: extraBuildSettings) @@ -37,8 +37,15 @@ extension CoreBasedTests { buildConfigurations: [ TestBuildConfiguration( "Debug", - buildSettings: buildSettings)], - buildPhases: [TestSourcesBuildPhase(["file.c"])])])]).load(getCore(sourceLocation: sourceLocation)) + buildSettings: buildSettings + ) + ], + buildPhases: [TestSourcesBuildPhase(["file.c"])] + ) + ] + ) + ] + ).load(getCore(sourceLocation: sourceLocation)) let context = try await contextForTestData(testWorkspace, systemInfo: hostArchitecture.map { hostArchitecture in SystemInfo(operatingSystemVersion: Version(99, 98, 97), productBuildVersion: "99A98", nativeArchitecture: hostArchitecture) } ?? nil) let buildRequestContext = BuildRequestContext(workspaceContext: context) diff --git a/Sources/SWBTestSupport/SkippedTestSupport.swift b/Sources/SWBTestSupport/SkippedTestSupport.swift index 48eb9fbf..42ef05b1 100644 --- a/Sources/SWBTestSupport/SkippedTestSupport.swift +++ b/Sources/SWBTestSupport/SkippedTestSupport.swift @@ -120,13 +120,16 @@ package final class ConditionTraitContext: CoreBasedTests, Sendable { extension Trait where Self == Testing.ConditionTrait { /// Skips a test case that requires one or more SDKs if they are not all available. package static func requireSDKs(_ knownSDKs: KnownSDK..., comment: Comment? = nil) -> Self { - enabled(comment != nil ? "required SDKs are not installed: \(comment?.description ?? "")" : "required SDKs are not installed.", { - let sdkRegistry = try await ConditionTraitContext.shared.getCore().sdkRegistry - let missingSDKs = await knownSDKs.asyncFilter { knownSDK in - sdkRegistry.lookup(knownSDK.sdkName) == nil && sdkRegistry.allSDKs.count(where: { $0.aliases.contains(knownSDK.sdkName) }) == 0 - }.sorted() - return missingSDKs.isEmpty - }) + enabled( + comment != nil ? "required SDKs are not installed: \(comment?.description ?? "")" : "required SDKs are not installed.", + { + let sdkRegistry = try await ConditionTraitContext.shared.getCore().sdkRegistry + let missingSDKs = await knownSDKs.asyncFilter { knownSDK in + sdkRegistry.lookup(knownSDK.sdkName) == nil && sdkRegistry.allSDKs.count(where: { $0.aliases.contains(knownSDK.sdkName) }) == 0 + }.sorted() + return missingSDKs.isEmpty + } + ) } /// Constructs a condition trait that causes a test to be disabled if not running on the specified host OS. 
@@ -147,9 +150,12 @@ extension Trait where Self == Testing.ConditionTrait { /// Constructs a condition trait that causes a test to be disabled if the developer directory is pointing at an Xcode developer directory. package static var skipXcodeToolchain: Self { - disabled("This test is incompatible with Xcode toolchains.", { - try await ConditionTraitContext.shared.getCore().developerPath.path.str.contains(".app/Contents/Developer") - }) + disabled( + "This test is incompatible with Xcode toolchains.", + { + try await ConditionTraitContext.shared.getCore().developerPath.path.str.contains(".app/Contents/Developer") + } + ) } /// Constructs a condition trait that causes a test to be disabled if the Foundation process spawning implementation is not thread-safe. @@ -165,9 +171,9 @@ extension Trait where Self == Testing.ConditionTrait { package static var skipSwiftPackage: Self { #if SWIFT_PACKAGE - return disabled("Test is not supported when building Swift Build as a package") + return disabled("Test is not supported when building Swift Build as a package") #else - return enabled(if: true) + return enabled(if: true) #endif } @@ -211,7 +217,7 @@ extension Trait where Self == Testing.ConditionTrait { func installCommand(packageManagerPath: Path, packageNames: String) -> String { switch packageManagerPath.basenameWithoutSuffix { case "pkg_info": - return "pkg_add \(packageNames)" // OpenBSD + return "pkg_add \(packageNames)" // OpenBSD default: return "\(packageManagerPath.basenameWithoutSuffix) install \(packageNames)" } @@ -273,10 +279,13 @@ extension Trait where Self == Testing.ConditionTrait { extension Trait where Self == Testing.ConditionTrait { /// Constructs a condition trait that causes a test to be disabled if running against the exact given version of Xcode. package static func skipXcodeBuildVersion(_ version: String, sourceLocation: SourceLocation = #_sourceLocation) -> Self { - skipXcodeBuildVersions(in: try { - let v: ProductBuildVersion = try ProductBuildVersion(version) - return v...v - }(), sourceLocation: sourceLocation) + skipXcodeBuildVersions( + in: try { + let v: ProductBuildVersion = try ProductBuildVersion(version) + return v...v + }(), + sourceLocation: sourceLocation + ) } /// Constructs a condition trait that causes a test to be disabled if running against the exact given version of Xcode. @@ -286,9 +295,13 @@ extension Trait where Self == Testing.ConditionTrait { /// Constructs a condition trait that causes a test to be disabled if running against a version of Xcode within the given range. package static func skipXcodeBuildVersions(in range: @Sendable @autoclosure @escaping () throws -> R, sourceLocation: SourceLocation = #_sourceLocation) -> Self where R.Bound == ProductBuildVersion { - disabled("Xcode version is not suitable", sourceLocation: sourceLocation, { - return try await range().contains(InstalledXcode.currentlySelected().productBuildVersion()) - }) + disabled( + "Xcode version is not suitable", + sourceLocation: sourceLocation, + { + return try await range().contains(InstalledXcode.currentlySelected().productBuildVersion()) + } + ) } /// Constructs a condition trait that causes a test to be disabled if not running against at least the given version of Xcode. @@ -297,12 +310,16 @@ extension Trait where Self == Testing.ConditionTrait { } package static func requireXcode16(sourceLocation: SourceLocation = #_sourceLocation) -> Self { - enabled("Xcode version is not suitable", sourceLocation: sourceLocation, { - guard let installedVersion = try? 
await InstalledXcode.currentlySelected().productBuildVersion() else { - return true + enabled( + "Xcode version is not suitable", + sourceLocation: sourceLocation, + { + guard let installedVersion = try? await InstalledXcode.currentlySelected().productBuildVersion() else { + return true + } + return installedVersion > (try ProductBuildVersion("16A242d")) } - return installedVersion > (try ProductBuildVersion("16A242d")) - }) + ) } package static func requireXcode26(sourceLocation: SourceLocation = #_sourceLocation) -> Self { @@ -316,12 +333,16 @@ extension Trait where Self == Testing.ConditionTrait { /// Constructs a condition trait that causes a test to be disabled if not running against a version of Xcode within the given range. package static func requireXcodeBuildVersions(in range: @Sendable @autoclosure @escaping () throws -> R, sourceLocation: SourceLocation = #_sourceLocation) -> Self where R.Bound == ProductBuildVersion { - enabled("Xcode version is not suitable", sourceLocation: sourceLocation, { - guard let installedVersion = try? await InstalledXcode.currentlySelected().productBuildVersion() else { - return true + enabled( + "Xcode version is not suitable", + sourceLocation: sourceLocation, + { + guard let installedVersion = try? await InstalledXcode.currentlySelected().productBuildVersion() else { + return true + } + return try range().contains(installedVersion) } - return try range().contains(installedVersion) - }) + ) } /// Constructs a condition trait that causes a test to be disabled if not running against a version of Xcode including at least the given version of a particular SDK. @@ -331,10 +352,14 @@ extension Trait where Self == Testing.ConditionTrait { /// Constructs a condition trait that causes a test to be disabled if not running against a version of Xcode including at least the given version of a particular SDK. package static func requireMinimumSDKBuildVersion(sdkName: String, requiredVersion: @Sendable @autoclosure @escaping () throws -> ProductBuildVersion, sourceLocation: SourceLocation = #_sourceLocation) -> Self { - disabled("SDK build version is too old", sourceLocation: sourceLocation, { - let sdkVersion = try await InstalledXcode.currentlySelected().productBuildVersion(sdkCanonicalName: sdkName) - return try sdkVersion < requiredVersion() - }) + disabled( + "SDK build version is too old", + sourceLocation: sourceLocation, + { + let sdkVersion = try await InstalledXcode.currentlySelected().productBuildVersion(sdkCanonicalName: sdkName) + return try sdkVersion < requiredVersion() + } + ) } /// Constructs a condition trait that causes a test to be disabled if not running against a version of Xcode including the SDK which is equal to or newer than at least one of the given versions within the same release. @@ -344,20 +369,24 @@ extension Trait where Self == Testing.ConditionTrait { /// Constructs a condition trait that causes a test to be disabled if not running against a version of Xcode including the SDK which is equal to or newer than at least one of the given versions within the same release. 
package static func requireMinimumSDKBuildVersion(sdkName: String, requiredVersions: @Sendable @autoclosure @escaping () throws -> [ProductBuildVersion], sourceLocation: SourceLocation = #_sourceLocation) -> Self { - disabled("SDK build version is too old", sourceLocation: sourceLocation, { - let sdkVersion = try await InstalledXcode.currentlySelected().productBuildVersion(sdkCanonicalName: sdkName) - - // For each required version, check to see if it is from the same release as the SDK version. If it is, then we will check against it. - for requiredVersion in try requiredVersions() { - if sdkVersion.major == requiredVersion.major, sdkVersion.train == requiredVersion.train { - return sdkVersion < requiredVersion + disabled( + "SDK build version is too old", + sourceLocation: sourceLocation, + { + let sdkVersion = try await InstalledXcode.currentlySelected().productBuildVersion(sdkCanonicalName: sdkName) + + // For each required version, check to see if it is from the same release as the SDK version. If it is, then we will check against it. + for requiredVersion in try requiredVersions() { + if sdkVersion.major == requiredVersion.major, sdkVersion.train == requiredVersion.train { + return sdkVersion < requiredVersion + } } - } - // If the SDK version is not from the same release as any of required versions, then we assume we don't need to skip. This is to handle the common case where we've moved on to newer releases and don't want to be forced to clean up these skips as soon as we do so. It assumes we won't start running the test against older releases of the SDK. - // We could do something more sophisticated here to handle versions outside of the specific releases we were passed, but this meets our needs for now. - return false - }) + // If the SDK version is not from the same release as any of required versions, then we assume we don't need to skip. This is to handle the common case where we've moved on to newer releases and don't want to be forced to clean up these skips as soon as we do so. It assumes we won't start running the test against older releases of the SDK. + // We could do something more sophisticated here to handle versions outside of the specific releases we were passed, but this meets our needs for now. 
+ return false + } + ) } } @@ -482,7 +511,7 @@ fileprivate enum XcodeVersionInfoProvider { case let .installedXcode(xcode): return try xcode.productBuildVersion() case .noXcode: - return try ProductBuildVersion("99T999") // same fallback version that Core uses + return try ProductBuildVersion("99T999") // same fallback version that Core uses } } diff --git a/Sources/SWBTestSupport/SpecTestSupport.swift b/Sources/SWBTestSupport/SpecTestSupport.swift index ed389f39..857f1708 100644 --- a/Sources/SWBTestSupport/SpecTestSupport.swift +++ b/Sources/SWBTestSupport/SpecTestSupport.swift @@ -24,7 +24,7 @@ package class CapturingTaskParserDelegate: TaskOutputParserDelegate { package func skippedSubtask(signature: ByteString) {} package func startSubtask(buildOperationIdentifier: BuildSystemOperationIdentifier, taskName: String, id: ByteString, signature: ByteString, ruleInfo: String, executionDescription: String, commandLine: [ByteString], additionalOutput: [String], interestingPath: Path?, workingDirectory: Path?, serializedDiagnosticsPaths: [Path]) -> any TaskOutputParserDelegate { fatalError() } package func emitOutput(_ data: ByteString) { output <<< data } - package func taskCompleted(exitStatus: Processes.ExitStatus) { } + package func taskCompleted(exitStatus: Processes.ExitStatus) {} package func close() {} } diff --git a/Sources/SWBTestSupport/TargetBuildGraphTestSupport.swift b/Sources/SWBTestSupport/TargetBuildGraphTestSupport.swift index de2bad24..b3f42e1b 100644 --- a/Sources/SWBTestSupport/TargetBuildGraphTestSupport.swift +++ b/Sources/SWBTestSupport/TargetBuildGraphTestSupport.swift @@ -38,7 +38,7 @@ package final class EmptyTargetDependencyResolverDelegate: TargetDependencyResol diagnosticsEngine(for: nil).emit(diagnostic) } - package func updateProgress(statusMessage: String, showInLog: Bool) { } + package func updateProgress(statusMessage: String, showInLog: Bool) {} } extension EmptyTargetDependencyResolverDelegate { @@ -55,7 +55,6 @@ extension EmptyTargetDependencyResolverDelegate { } } - extension TargetBuildGraph { /// Convenience initializer which uses an empty delegate implementation for testing. package init(workspaceContext: WorkspaceContext, buildRequest: BuildRequest, buildRequestContext: BuildRequestContext) async { diff --git a/Sources/SWBTestSupport/TaskConstructionTester.swift b/Sources/SWBTestSupport/TaskConstructionTester.swift index 48816c10..3f702d79 100644 --- a/Sources/SWBTestSupport/TaskConstructionTester.swift +++ b/Sources/SWBTestSupport/TaskConstructionTester.swift @@ -77,7 +77,7 @@ package final class TaskConstructionTester { package var checkedNotes: Bool = false package var checkedRemarks: Bool = false - init(_ core: Core, _ workspace: Workspace, _ existingFiles: [Path], _ buildRequest: BuildRequest, _ buildPlanRequest: BuildPlanRequest,_ buildPlan: BuildPlan, _ delegate: TestTaskPlanningDelegate) { + init(_ core: Core, _ workspace: Workspace, _ existingFiles: [Path], _ buildRequest: BuildRequest, _ buildPlanRequest: BuildPlanRequest, _ buildPlan: BuildPlan, _ delegate: TestTaskPlanningDelegate) { self.core = core self.workspace = workspace self.existingFiles = Set(existingFiles) @@ -164,7 +164,7 @@ package final class TaskConstructionTester { /// /// It is a test error if multiple such targets exist. package func checkTarget(_ name: String, platformDiscriminator: String? = nil, sdkroot: String? 
= nil, sourceLocation: SourceLocation = #_sourceLocation, body: (ConfiguredTarget) throws -> Void) rethrows { - let matchedTargets = plannedTargets.filter { $0.target.name == name && (platformDiscriminator == nil || $0.platformDiscriminator == platformDiscriminator) && (sdkroot == nil || $0.parameters.overrides["SDKROOT"] == sdkroot) } + let matchedTargets = plannedTargets.filter { $0.target.name == name && (platformDiscriminator == nil || $0.platformDiscriminator == platformDiscriminator) && (sdkroot == nil || $0.parameters.overrides["SDKROOT"] == sdkroot) } if matchedTargets.isEmpty { Issue.record("unable to find target with name '\(name)'\(platformDiscriminator.map{", platform '\($0)'"} ?? "")", sourceLocation: sourceLocation) } else if matchedTargets.count > 1 { @@ -356,9 +356,7 @@ package final class TaskConstructionTester { do { if let distance = try taskGraph.predecessorDistance(from: task, to: antecedent) { let path = try taskGraph.shortestPath(from: task, to: antecedent) ?? [] - Issue.record(Comment(rawValue: "task '\(task.testIssueDescription)' has an edge forcing it to follow '\(antecedent.testIssueDescription)' " + - "by \(distance) tasks, but it was not expected to have one:\n\n" + - "\(taskGraphPathDebugInfo(path))"), sourceLocation: sourceLocation) + Issue.record(Comment(rawValue: "task '\(task.testIssueDescription)' has an edge forcing it to follow '\(antecedent.testIssueDescription)' " + "by \(distance) tasks, but it was not expected to have one:\n\n" + "\(taskGraphPathDebugInfo(path))"), sourceLocation: sourceLocation) } } catch { Issue.record(error, sourceLocation: sourceLocation) @@ -478,7 +476,7 @@ package final class TaskConstructionTester { Issue.record("missing creator task for mutated node '\(mutatedNodeRef.instance.path.str)'", sourceLocation: sourceLocation) continue } - let creators = Set>(Array(producers).map{ Ref($0) }).subtracting(taskGraph.mutatingTasks) + let creators = Set>(Array(producers).map { Ref($0) }).subtracting(taskGraph.mutatingTasks) guard !creators.isEmpty else { Issue.record("missing creator task for mutated node '\(mutatedNodeRef.instance.path.str)' mutated by task '\(producers.first!.ruleInfo.quotedDescription)'", sourceLocation: sourceLocation) continue @@ -492,17 +490,21 @@ package final class TaskConstructionTester { /// - parameter ignoring: The node to ignore. This means that the node will be disregarded when traversing the graph. This is useful when there are known to be multiple nodes connecting two tasks, as is the case in mutating tasks, which have the node being mutated as both an input and an output, but also a virtual node connecting the task to its predecessor. The concrete node is ignored and only the virtual nodes are used to traverse the graph. /// - returns: The distance between the two nodes, or `nil` if `predecessor` does not precede `origin` in the graph. func predecessorDistance(from origin: any PlannedTask, to predecessor: any PlannedTask, ignoring: any PlannedNode) -> Int? { - return minimumDistance(from: Ref(origin), to: Ref(predecessor), successors: { taskRef in + return minimumDistance( + from: Ref(origin), + to: Ref(predecessor), + successors: { taskRef in let inputNodes = taskGraph.taskInputs[taskRef] ?? [] - let inputs = inputNodes.flatMap { input -> [Ref] in + let inputs = inputNodes.flatMap { input -> [Ref] in if input.uniqueNode === ignoring { return [] } - return taskGraph.producers[Ref(input.uniqueNode)]?.map{ Ref($0) } ?? [] + return taskGraph.producers[Ref(input.uniqueNode)]?.map { Ref($0) } ?? 
[] } return inputs - }) + } + ) } // Get the mutating tasks for this node and order them topologically, ignoring the node being mutated. @@ -564,7 +566,7 @@ package final class TaskConstructionTester { // Go through all the input nodes and work back through their producer tasks. This iterates through all the nodes in a nondeterministic order, which is fine because we just want coverage. for input in taskGraph.taskInputs[Ref(producer)] ?? [] { - let inputRef = Ref(input.uniqueNode) + let inputRef = Ref(input.uniqueNode) for inputProducer in taskGraph.producers[inputRef] ?? [] { lookForCycle(from: inputRef, through: inputProducer, nodeList: newNodeList, path: newPath) // If we detected a cycle then bail out. @@ -631,7 +633,7 @@ package final class TaskConstructionTester { return [] } else { - return producers[Ref(input.uniqueNode)]?.map{ Ref($0) } ?? [] + return producers[Ref(input.uniqueNode)]?.map { Ref($0) } ?? [] } } } @@ -703,7 +705,7 @@ package final class TaskConstructionTester { } // Compute information about mutated tasks and nodes in the graph. - let mutatedNodes = Set>(task.inputs.map{ Ref(uniqueNode(for: $0)) }).intersection(Set>(task.outputs.map{ Ref(uniqueNode(for: $0)) })) + let mutatedNodes = Set>(task.inputs.map { Ref(uniqueNode(for: $0)) }).intersection(Set>(task.outputs.map { Ref(uniqueNode(for: $0)) })) if !mutatedNodes.isEmpty { for mutatedNode in mutatedNodes { let mutatingTasks = mutatedNodesToTasks.getOrInsert(mutatedNode, { [any PlannedTask]() }) @@ -768,21 +770,22 @@ package final class TaskConstructionTester { let parameters = parameters ?? BuildParameters(configuration: "Debug") // If the build parameters don't specify a run destination, but we were passed one, then use the one we were passed. (checkBuild() defaults this to .macOS.) - let activeRunDestination: RunDestinationInfo? = switch (parameters.activeRunDestination, runDestination) { - case let (.some(lhs), (.some(rhs))): - preconditionFailure("Specified run destinations from both explicit build parameters and default destination: \(lhs), \(rhs)") - case let (.some(destination), nil): - destination - case let (nil, .some(destination)): - destination - case (nil, nil): - nil - } + let activeRunDestination: RunDestinationInfo? = + switch (parameters.activeRunDestination, runDestination) { + case let (.some(lhs), (.some(rhs))): + preconditionFailure("Specified run destinations from both explicit build parameters and default destination: \(lhs), \(rhs)") + case let (.some(destination), nil): + destination + case let (nil, .some(destination)): + destination + case (nil, nil): + nil + } // Define a default set of overrides. var overrides = [ // Always use separate headermaps by forcing ALWAYS_SEARCH_USER_PATHS off, unless the build parameters passed to checkBuild() explicitly enables it. (Since traditional headermaps are currently not supported by Swift Build, doing so is not presently useful.) Doing this also suppresses the warning of traditional headermaps being unsupported. - "ALWAYS_SEARCH_USER_PATHS": "NO", + "ALWAYS_SEARCH_USER_PATHS": "NO" ] if useDefaultToolChainOverride { @@ -808,8 +811,7 @@ package final class TaskConstructionTester { // If we were passed a build request, then reconstruct it using the parameters. 
let buildTargets = buildRequest.buildTargets.map({ BuildRequest.BuildTargetInfo(parameters: parameters, target: $0.target) }) return buildRequest.with(parameters: parameters, buildTargets: buildTargets) - } - else { + } else { // If we weren't passed a build request, then create one with some default characteristics we use in most task construction tests. let project = workspace.projects[0] let target: Target @@ -838,7 +840,7 @@ package final class TaskConstructionTester { let workspaceContext = WorkspaceContext(core: core, workspace: workspace, fs: fs, processExecutionCache: .sharedForTesting) // Configure fake user and system info. - workspaceContext.updateUserInfo(UserInfo(user: "exampleUser", group: "exampleGroup", uid: 1234, gid:12345, home: Path("/Users/whoever"), environment: processEnvironment)) + workspaceContext.updateUserInfo(UserInfo(user: "exampleUser", group: "exampleGroup", uid: 1234, gid: 12345, home: Path("/Users/whoever"), environment: processEnvironment)) workspaceContext.updateSystemInfo(systemInfo ?? SystemInfo(operatingSystemVersion: Version(99, 98, 97), productBuildVersion: "99A98", nativeArchitecture: "x86_64")) workspaceContext.updateUserPreferences(userPreferences ?? UserPreferences.defaultForTesting) @@ -847,8 +849,7 @@ package final class TaskConstructionTester { if let inputBuildRequest, inputBuildRequest.parameters.action == .indexBuild { // Indexing tests pass exactly the build request they want to use, so we don't mess with it. buildRequest = inputBuildRequest - } - else { + } else { buildRequest = effectiveBuildRequest(inputBuildRequest, targetName: targetName, parameters: parameters) } @@ -868,15 +869,15 @@ package final class TaskConstructionTester { } let results = PlanningResults(core, workspace, sourceFiles, buildRequest, buildPlanRequest, buildPlan, delegate) - /*@MainActor func addAttachments() { - // TODO: This `runActivity` call should be wider in scope, but this would significantly complicate the code flow due to threading requirements without having async/await. - XCTContext.runActivity(named: "Plan Build") { activity in - // TODO: Longer term, we should find a way to share code with CoreQualificationTester, which has a number of APIs for emitting build operation debug info. - activity.attach(name: "Task Graph", string: results.dumpGraph()) - } - } + // @MainActor func addAttachments() { + // // TODO: This `runActivity` call should be wider in scope, but this would significantly complicate the code flow due to threading requirements without having async/await. + // XCTContext.runActivity(named: "Plan Build") { activity in + // // TODO: Longer term, we should find a way to share code with CoreQualificationTester, which has a number of APIs for emitting build operation debug info. + // activity.attach(name: "Task Graph", string: results.dumpGraph()) + // } + // } - await addAttachments()*/ + // await addAttachments() // Check the results. try await body(results) @@ -911,7 +912,8 @@ package final class TaskConstructionTester { appIdentifierPrefix: overrides.appIdentifierPrefix ?? inputs.appIdentifierPrefix, teamIdentifierPrefix: overrides.teamIdentifierPrefix ?? inputs.teamIdentifierPrefix, isEnterpriseTeam: overrides.isEnterpriseTeam ?? inputs.isEnterpriseTeam, - keychainPath: overrides.keychainPath ?? inputs.keychainPath) + keychainPath: overrides.keychainPath ?? inputs.keychainPath + ) } // Otherwise evaluated $(CODE_SIGN_IDENTITY) to use a default set of inputs. 
@@ -933,14 +935,14 @@ package final class TaskConstructionTester { simulatedEntitlements = [:] case "macosx": signedEntitlements = [ - "com.apple.security.get-task-allow": 1, + "com.apple.security.get-task-allow": 1 ] simulatedEntitlements = [:] default: if settings.platform?.isSimulator == true { // For a simulator platform, the signed entitlements get passed to codesign for the simulator bundle (they're macOS entitlements), while the simulated entitlements get passed to the linker (they're the entitlements for the platform itself, i.e. that would be passed to codesign when building for a device). signedEntitlements = [ - "com.apple.security.get-task-allow": 1, + "com.apple.security.get-task-allow": 1 ] simulatedEntitlements = [ "application-identifier": .plString(appIdentifier), @@ -950,8 +952,7 @@ package final class TaskConstructionTester { appIdentifier ], ] - } - else { + } else { signedEntitlements = [ "application-identifier": .plString(appIdentifier), "com.apple.developer.team-identifier": .plString(teamIdentifierPrefix), @@ -967,7 +968,7 @@ package final class TaskConstructionTester { // Create the provisioning task inputs. let isMacOS = settings.platform?.familyName == "macOS" switch codeSignIdentity { - // FIXME: Remove workaround for projects in ExternalTests data which are still using the deprecated value "Don't Code Sign" for CODE_SIGN_IDENTITY. + // FIXME: Remove workaround for projects in ExternalTests data which are still using the deprecated value "Don't Code Sign" for CODE_SIGN_IDENTITY. case "", "Don't Code Sign": // Don't sign return ProvisioningTaskInputs() @@ -975,20 +976,23 @@ package final class TaskConstructionTester { // Ad-hoc signing - we support all platforms here. return mergeProvisioningTaskInputs( inputs: ProvisioningTaskInputs(identityHash: "-", identityName: "-", signedEntitlements: .plDict(signedEntitlements), simulatedEntitlements: .plDict(simulatedEntitlements)), - overrides: provisioningOverrides) + overrides: provisioningOverrides + ) case "Mac Developer", - "Apple Development" where isMacOS: + "Apple Development" where isMacOS: // Mac developer signing - we never have simulated entitlements here. return mergeProvisioningTaskInputs( inputs: ProvisioningTaskInputs(identityHash: "3ACDE4E702E4", identityName: codeSignIdentity, signedEntitlements: .plDict(signedEntitlements)), - overrides: provisioningOverrides) + overrides: provisioningOverrides + ) case "iOS Developer", "iPhone Developer", - "Apple Development" where !isMacOS: + "Apple Development" where !isMacOS: // iOS developer signing let profileUUID = "8db0e92c-592c-4f06-bfed-9d945841b78d" return mergeProvisioningTaskInputs( inputs: ProvisioningTaskInputs(identityHash: "105DE4E702E4", identityName: codeSignIdentity, profileName: "iOS Team Provisioning Profile: *", profileUUID: profileUUID, profilePath: workspaceContext.userInfo!.home.join("Library/MobileDevice/Provisioning Profiles/\(profileUUID).mobileprovision"), signedEntitlements: .plDict(signedEntitlements), simulatedEntitlements: .plDict(simulatedEntitlements), appIdentifierPrefix: "\(appIdentifierPrefix).", teamIdentifierPrefix: "\(teamIdentifierPrefix).", isEnterpriseTeam: false), - overrides: provisioningOverrides) + overrides: provisioningOverrides + ) default: // We have no default inputs for this identity. 
fatalError("unsupported CODE_SIGN_IDENTITY '\(codeSignIdentity)' - no default provisioning task inputs available for this identity") @@ -997,10 +1001,10 @@ package final class TaskConstructionTester { } @available(*, unavailable) -extension TaskConstructionTester: Sendable { } +extension TaskConstructionTester: Sendable {} @available(*, unavailable) -extension TaskConstructionTester.PlanningResults: Sendable { } +extension TaskConstructionTester.PlanningResults: Sendable {} extension TaskConstructionTester { /// Construct the tasks for an index build operation, and test the result. @@ -1024,7 +1028,6 @@ extension TaskConstructionTester { } } - package final class TestHeadermapContents { package let rawBytes: [UInt8] package private(set) var contents = [String: String]() @@ -1061,7 +1064,7 @@ package final class TestHeadermapContents { } @available(*, unavailable) -extension TestHeadermapContents: Sendable { } +extension TestHeadermapContents: Sendable {} /// Helper protocol to share command-line checking functions /// between PlannedTaskBuilder and MockTestTask. @@ -1330,8 +1333,7 @@ extension EnvironmentBindings { } else { Issue.record("environment value '\(key)' should be defined but is nil", sourceLocation: sourceLocation) } - } - else { + } else { #expect(valueOpt == nil, "environment value '\(key)' should be nil but is '\(valueOpt!)'", sourceLocation: sourceLocation) } } diff --git a/Sources/SWBTestSupport/TaskExecutionTestSupport.swift b/Sources/SWBTestSupport/TaskExecutionTestSupport.swift index 555f47db..fb00c4e3 100644 --- a/Sources/SWBTestSupport/TaskExecutionTestSupport.swift +++ b/Sources/SWBTestSupport/TaskExecutionTestSupport.swift @@ -27,7 +27,7 @@ package final class MockTestBuildDescriptionConstructionDelegate: BuildDescripti .init(diagnosticsEngines.withLock { $0.getOrInsert(target, { DiagnosticsEngine() }) }) } - package var diagnostics: [ConfiguredTarget? : [Diagnostic]] { + package var diagnostics: [ConfiguredTarget?: [Diagnostic]] { diagnosticsEngines.withLock { $0.mapValues { $0.diagnostics } } } @@ -46,10 +46,10 @@ package final class MockTestBuildDescriptionConstructionDelegate: BuildDescripti package func updateProgress(statusMessage: String, showInLog: Bool) {} package func beginActivity(ruleInfo: String, executionDescription: String, signature: ByteString, target: ConfiguredTarget?, parentActivity: ActivityID?) -> ActivityID { .init(rawValue: -1) } - package func endActivity(id: ActivityID, signature: ByteString, status: BuildOperationTaskEnded.Status) { } - package func emit(data: [UInt8], for activity: ActivityID, signature: ByteString) { } + package func endActivity(id: ActivityID, signature: ByteString, status: BuildOperationTaskEnded.Status) {} + package func emit(data: [UInt8], for activity: ActivityID, signature: ByteString) {} package func emit(diagnostic: Diagnostic, for activity: ActivityID, signature: ByteString) { - diagnosticsEngine(for: nil).emit(diagnostic) // FIXME: Technically this should be a "global task" diagnostic + diagnosticsEngine(for: nil).emit(diagnostic) // FIXME: Technically this should be a "global task" diagnostic } package func emit(_ diagnostic: Diagnostic) { @@ -94,7 +94,7 @@ package struct TestManifest: Sendable { extension BuildDescription { /// Convenience testing method which omits the `capturedBuildInfo:` parameter. 
- static package func construct(workspace: Workspace, tasks: [any PlannedTask], path: Path, signature: BuildDescriptionSignature, buildCommand: BuildCommand, diagnostics: [ConfiguredTarget?: [Diagnostic]] = [:], indexingInfo: [(forTarget: ConfiguredTarget?, path: Path, indexingInfo: any SourceFileIndexingInfo)] = [], fs: any FSProxy = localFS, bypassActualTasks: Bool = false, moduleSessionFilePath: Path? = nil, invalidationPaths: [Path] = [], recursiveSearchPathResults: [RecursiveSearchPathResolver.CachedResult] = [], copiedPathMap: [String: String] = [:], rootPathsPerTarget: [ConfiguredTarget:[Path]] = [:], moduleCachePathsPerTarget: [ConfiguredTarget: [Path]] = [:], artifactInfoPerTarget: [ConfiguredTarget: ArtifactInfo] = [:], casValidationInfos: [BuildDescription.CASValidationInfo] = [], staleFileRemovalIdentifierPerTarget: [ConfiguredTarget: String] = [:], settingsPerTarget: [ConfiguredTarget: Settings] = [:], delegate: any BuildDescriptionConstructionDelegate, targetDependencies: [TargetDependencyRelationship] = [], definingTargetsByModuleName: [String: OrderedSet] = [:]) async throws -> BuildDescription? { + static package func construct(workspace: Workspace, tasks: [any PlannedTask], path: Path, signature: BuildDescriptionSignature, buildCommand: BuildCommand, diagnostics: [ConfiguredTarget?: [Diagnostic]] = [:], indexingInfo: [(forTarget: ConfiguredTarget?, path: Path, indexingInfo: any SourceFileIndexingInfo)] = [], fs: any FSProxy = localFS, bypassActualTasks: Bool = false, moduleSessionFilePath: Path? = nil, invalidationPaths: [Path] = [], recursiveSearchPathResults: [RecursiveSearchPathResolver.CachedResult] = [], copiedPathMap: [String: String] = [:], rootPathsPerTarget: [ConfiguredTarget: [Path]] = [:], moduleCachePathsPerTarget: [ConfiguredTarget: [Path]] = [:], artifactInfoPerTarget: [ConfiguredTarget: ArtifactInfo] = [:], casValidationInfos: [BuildDescription.CASValidationInfo] = [], staleFileRemovalIdentifierPerTarget: [ConfiguredTarget: String] = [:], settingsPerTarget: [ConfiguredTarget: Settings] = [:], delegate: any BuildDescriptionConstructionDelegate, targetDependencies: [TargetDependencyRelationship] = [], definingTargetsByModuleName: [String: OrderedSet] = [:]) async throws -> BuildDescription? 
{ return try await construct(workspace: workspace, tasks: tasks, path: path, signature: signature, buildCommand: buildCommand, diagnostics: diagnostics, indexingInfo: indexingInfo, fs: fs, bypassActualTasks: bypassActualTasks, moduleSessionFilePath: moduleSessionFilePath, invalidationPaths: invalidationPaths, recursiveSearchPathResults: recursiveSearchPathResults, copiedPathMap: copiedPathMap, rootPathsPerTarget: rootPathsPerTarget, moduleCachePathsPerTarget: moduleCachePathsPerTarget, artifactInfoPerTarget: artifactInfoPerTarget, casValidationInfos: casValidationInfos, staleFileRemovalIdentifierPerTarget: staleFileRemovalIdentifierPerTarget, settingsPerTarget: settingsPerTarget, delegate: delegate, targetDependencies: targetDependencies, definingTargetsByModuleName: definingTargetsByModuleName, userPreferences: .defaultForTesting) } } diff --git a/Sources/SWBTestSupport/TaskPlanningTestSupport.swift b/Sources/SWBTestSupport/TaskPlanningTestSupport.swift index dc2c6988..b500aff2 100644 --- a/Sources/SWBTestSupport/TaskPlanningTestSupport.swift +++ b/Sources/SWBTestSupport/TaskPlanningTestSupport.swift @@ -197,7 +197,7 @@ open class MockTestTaskPlanningClientDelegate: TaskPlanningClientDelegate, @unch switch commandLine.first.map(Path.init)?.basenameWithoutSuffix { case "actool" where args == ["--version", "--output-format", "xml1"]: return .deferred - case "cat": // docc + case "cat": // docc return .deferred case "clang" where args.first == "-v": return .deferred @@ -247,9 +247,11 @@ package class TestTaskPlanningDelegate: TaskPlanningDelegate, @unchecked Sendabl package let diagnosticContext: DiagnosticContextData package func diagnosticsEngine(for target: ConfiguredTarget?) -> DiagnosticProducingDelegateProtocolPrivate { - .init(_diagnosticsEngines.withLock { diagnosticsEngines in - diagnosticsEngines.getOrInsert(target, { DiagnosticsEngine() }) - }) + .init( + _diagnosticsEngines.withLock { diagnosticsEngines in + diagnosticsEngines.getOrInsert(target, { DiagnosticsEngine() }) + } + ) } var diagnostics: [ConfiguredTarget?: [Diagnostic]] { @@ -277,7 +279,7 @@ package class TestTaskPlanningDelegate: TaskPlanningDelegate, @unchecked Sendabl } package var cancelled: Bool { return false } - package func updateProgress(statusMessage: String, showInLog: Bool) { } + package func updateProgress(statusMessage: String, showInLog: Bool) {} package func createVirtualNode(_ name: String) -> PlannedVirtualNode { return MakePlannedVirtualNode(name) @@ -504,26 +506,26 @@ package final class CancellingTaskPlanningDelegate: TestTaskPlanningDelegate, @u } package override var cancelled: Bool { - return (queue.blocking_sync{ numNodesSeen }) > afterNodes || (queue.blocking_sync{ numTasksSeen }) > afterTasks + return (queue.blocking_sync { numNodesSeen }) > afterNodes || (queue.blocking_sync { numTasksSeen }) > afterTasks } package override func createVirtualNode(_ name: String) -> PlannedVirtualNode { - queue.blocking_sync{ numNodesSeen += 1 } + queue.blocking_sync { numNodesSeen += 1 } return super.createVirtualNode(name) } package override func createDirectoryTreeNode(absolutePath path: Path, excluding: [String]) -> PlannedDirectoryTreeNode { - queue.blocking_sync{ numNodesSeen += 1 } + queue.blocking_sync { numNodesSeen += 1 } return super.createDirectoryTreeNode(absolutePath: path, excluding: excluding) } package override func createNode(absolutePath path: Path) -> PlannedPathNode { - queue.blocking_sync{ numNodesSeen += 1 } + queue.blocking_sync { numNodesSeen += 1 } return 
super.createNode(absolutePath: path) } package override func createTask(_ builder: inout PlannedTaskBuilder) -> any PlannedTask { - queue.blocking_sync{ numTasksSeen += 1 } + queue.blocking_sync { numTasksSeen += 1 } return super.createTask(&builder) } } diff --git a/Sources/SWBTestSupport/TasksCheckingResult.swift b/Sources/SWBTestSupport/TasksCheckingResult.swift index d53387ae..2e2a6835 100644 --- a/Sources/SWBTestSupport/TasksCheckingResult.swift +++ b/Sources/SWBTestSupport/TasksCheckingResult.swift @@ -123,7 +123,7 @@ extension TasksCheckingResult { // Run the matcher. return try body(task) } else { - return try #require(nil) // findOneMatchingTask has already emitted a failure message with a nice error + return try #require(nil) // findOneMatchingTask has already emitted a failure message with a nice error } } @@ -220,12 +220,10 @@ extension CommandLineCheckable { // Report that we couldn't find this string. if let lastStringFound = lastStringFound { return "couldn't find string '\(string)' after string '\(lastStringFound)' in command line: \(commandLineString)" - } - else { + } else { return "couldn't find string '\(string)' in command line: \(commandLineString)" } - } - else { + } else { // If we found a match, then remember the last string found and advance the startSearchIdx to search for the next string. lastStringFound = string startSearchIdx = curIdx @@ -271,15 +269,17 @@ extension CommandLineCheckable { } package func checkCommandLineMatches(_ patterns: [StringPattern], sourceLocation: SourceLocation = #_sourceLocation) { - let directlyComparable = _commandLineAsStrings.count == patterns.count && !patterns.contains(where: { - switch $0 { + let directlyComparable = + _commandLineAsStrings.count == patterns.count + && !patterns.contains(where: { + switch $0 { // These cases never matches individual items, they are just used for matching string lists. - case .start, .end, .anySequence: - return true - case .any, .contains, .equal, .regex, .prefix, .suffix, .and, .or, .not, .pathEqual: - return false - } - }) + case .start, .end, .anySequence: + return true + case .any, .contains, .equal, .regex, .prefix, .suffix, .and, .or, .not, .pathEqual: + return false + } + }) let matchMessage: String? if directlyComparable { var messageComponents = _commandLineAsStrings.enumerated().compactMap { index, value in diff --git a/Sources/SWBTestSupport/TestWorkspaces.swift b/Sources/SWBTestSupport/TestWorkspaces.swift index da442f11..b09aa49c 100644 --- a/Sources/SWBTestSupport/TestWorkspaces.swift +++ b/Sources/SWBTestSupport/TestWorkspaces.swift @@ -74,8 +74,7 @@ private protocol TestInternalObjectItem: TestInternalItem { func toObject(_ resolver: any Resolver) throws -> PropertyListItem } - -package protocol TestStructureItem { } +package protocol TestStructureItem {} private protocol TestInternalStructureItem: TestInternalItem, TestStructureItem, Sendable { func toProtocol(_ resolver: any Resolver) throws -> SWBProtocol.GroupTreeReference } @@ -115,7 +114,7 @@ extension TestTarget { package enum TestSourceTree: Equatable, Sendable { case absolute case groupRelative - case buildSetting(String) // FIXME: This should be a MacroExpressionSource. + case buildSetting(String) // FIXME: This should be a MacroExpressionSource. } extension TestSourceTree { @@ -366,7 +365,7 @@ package final class TestGroup: TestInternalItem, TestInternalStructureItem, Cust fileprivate func toProtocol(_ resolver: any Resolver, isRoot: Bool) throws -> SWBProtocol.FileGroup { let sourceTree = self.sourceTree ?? (isRoot ? 
.buildSetting("PROJECT_DIR") : .groupRelative) - return try SWBProtocol.FileGroup(guid: guid, sourceTree: sourceTree.toProtocol(), path: .string(path ?? (sourceTree == .buildSetting("PROJECT_DIR") ? "" : name)), name: name, children: children.map{ try $0.toProtocol(resolver) }) + return try SWBProtocol.FileGroup(guid: guid, sourceTree: sourceTree.toProtocol(), path: .string(path ?? (sourceTree == .buildSetting("PROJECT_DIR") ? "" : name)), name: name, children: children.map { try $0.toProtocol(resolver) }) } package var description: String { @@ -386,7 +385,7 @@ package final class TestVariantGroup: TestInternalItem, TestInternalStructureIte } fileprivate func toProtocol(_ resolver: any Resolver) throws -> SWBProtocol.GroupTreeReference { - return try SWBProtocol.VariantGroup(guid: guid, sourceTree: .groupRelative, path: .string(""), name: name, children: children.map{ try $0.toProtocol(resolver) }) + return try SWBProtocol.VariantGroup(guid: guid, sourceTree: .groupRelative, path: .string(""), name: name, children: children.map { try $0.toProtocol(resolver) }) } package var description: String { @@ -411,7 +410,7 @@ package final class TestVersionGroup: TestInternalItem, TestInternalStructureIte fileprivate func toProtocol(_ resolver: any Resolver) throws -> SWBProtocol.GroupTreeReference { let sourceTree = self.sourceTree ?? .groupRelative - return try SWBProtocol.VersionGroup(guid: guid, sourceTree: .groupRelative, path: .string(path ?? (sourceTree == .buildSetting("PROJECT_DIR") ? "" : name)), children: children.map{ try $0.toProtocol(resolver) }) + return try SWBProtocol.VersionGroup(guid: guid, sourceTree: .groupRelative, path: .string(path ?? (sourceTree == .buildSetting("PROJECT_DIR") ? "" : name)), children: children.map { try $0.toProtocol(resolver) }) } package var description: String { @@ -523,7 +522,7 @@ package final class TestBuildFile: TestInternalItem, Sendable { case let .namedReference(name, fileTypeIdentifier): buildableItemGUID = .namedReference(name: name, fileTypeIdentifier: fileTypeIdentifier) } - return SWBProtocol.BuildFile(guid: guid, buildableItemGUID: buildableItemGUID, additionalArgs: additionalArgs.map{ .stringList($0) }, decompress: decompress, headerVisibility: headerVisibility?.toProtocol(), migCodegenFiles: migCodegenFiles?.toProtocol(), intentsCodegenVisibility: intentsCodegenVisibility, resourceRule: resourceRule.toProtocol(), codeSignOnCopy: codeSignOnCopy ?? false, removeHeadersOnCopy: removeHeadersOnCopy ?? false, shouldLinkWeakly: shouldLinkWeakly ?? false, assetTags: assetTags, platformFilters: platformFilters, shouldWarnIfNoRuleToProcess: shouldWarnIfNoRuleToProcess) + return SWBProtocol.BuildFile(guid: guid, buildableItemGUID: buildableItemGUID, additionalArgs: additionalArgs.map { .stringList($0) }, decompress: decompress, headerVisibility: headerVisibility?.toProtocol(), migCodegenFiles: migCodegenFiles?.toProtocol(), intentsCodegenVisibility: intentsCodegenVisibility, resourceRule: resourceRule.toProtocol(), codeSignOnCopy: codeSignOnCopy ?? false, removeHeadersOnCopy: removeHeadersOnCopy ?? false, shouldLinkWeakly: shouldLinkWeakly ?? 
false, assetTags: assetTags, platformFilters: platformFilters, shouldWarnIfNoRuleToProcess: shouldWarnIfNoRuleToProcess) } } @@ -571,7 +570,7 @@ extension TestBuildFile: ExpressibleByStringLiteral { } } -package protocol TestBuildPhase { } +package protocol TestBuildPhase {} fileprivate protocol TestInternalBuildPhase: TestInternalItem, TestBuildPhase, Sendable { func toProtocol(_ resolver: any Resolver) throws -> SWBProtocol.BuildPhase } @@ -622,7 +621,8 @@ package final class TestCopyFilesBuildPhase: TestInternalBuildPhase { buildFiles: buildFiles.map { try $0.toProtocol(resolver) }, destinationSubfolder: .string(destinationSubfolder), destinationSubpath: .string(destinationSubpath), - runOnlyForDeploymentPostprocessing: onlyForDeployment) + runOnlyForDeploymentPostprocessing: onlyForDeployment + ) } } @@ -636,7 +636,7 @@ package final class TestAppleScriptBuildPhase: TestInternalBuildPhase { } fileprivate func toProtocol(_ resolver: any Resolver) throws -> SWBProtocol.BuildPhase { - return try SWBProtocol.AppleScriptBuildPhase(guid: guid, buildFiles: buildFiles.map{ try $0.toProtocol(resolver) }) + return try SWBProtocol.AppleScriptBuildPhase(guid: guid, buildFiles: buildFiles.map { try $0.toProtocol(resolver) }) } } @@ -650,7 +650,7 @@ package final class TestFrameworksBuildPhase: TestInternalBuildPhase { } fileprivate func toProtocol(_ resolver: any Resolver) throws -> SWBProtocol.BuildPhase { - return try SWBProtocol.FrameworksBuildPhase(guid: guid, buildFiles: buildFiles.map{ try $0.toProtocol(resolver) }) + return try SWBProtocol.FrameworksBuildPhase(guid: guid, buildFiles: buildFiles.map { try $0.toProtocol(resolver) }) } } @@ -664,7 +664,7 @@ package final class TestHeadersBuildPhase: TestInternalBuildPhase { } fileprivate func toProtocol(_ resolver: any Resolver) throws -> SWBProtocol.BuildPhase { - return try SWBProtocol.HeadersBuildPhase(guid: guid, buildFiles: buildFiles.map{ try $0.toProtocol(resolver) }) + return try SWBProtocol.HeadersBuildPhase(guid: guid, buildFiles: buildFiles.map { try $0.toProtocol(resolver) }) } } package final class TestShellScriptBuildPhase: TestInternalBuildPhase { @@ -709,10 +709,10 @@ package final class TestShellScriptBuildPhase: TestInternalBuildPhase { } fileprivate func toProtocol(_ resolver: any Resolver) -> SWBProtocol.BuildPhase { - let inputs = self.inputs.map{ MacroExpressionSource.string($0) } - let outputs = self.outputs.map{ MacroExpressionSource.string($0) } - let inputFileLists = self.inputFileLists.map{ MacroExpressionSource.string($0) } - let outputFileLists = self.outputFileLists.map{ MacroExpressionSource.string($0) } + let inputs = self.inputs.map { MacroExpressionSource.string($0) } + let outputs = self.outputs.map { MacroExpressionSource.string($0) } + let inputFileLists = self.inputFileLists.map { MacroExpressionSource.string($0) } + let outputFileLists = self.outputFileLists.map { MacroExpressionSource.string($0) } return SWBProtocol.ShellScriptBuildPhase( guid: guid, name: name, @@ -744,7 +744,7 @@ package final class TestResourcesBuildPhase: TestInternalBuildPhase { } fileprivate func toProtocol(_ resolver: any Resolver) throws -> SWBProtocol.BuildPhase { - return try SWBProtocol.ResourcesBuildPhase(guid: guid, buildFiles: buildFiles.map{ try $0.toProtocol(resolver) }) + return try SWBProtocol.ResourcesBuildPhase(guid: guid, buildFiles: buildFiles.map { try $0.toProtocol(resolver) }) } } @@ -758,7 +758,7 @@ package final class TestRezBuildPhase: TestInternalBuildPhase { } fileprivate func toProtocol(_ resolver: any 
Resolver) throws -> SWBProtocol.BuildPhase { - return try SWBProtocol.RezBuildPhase(guid: guid, buildFiles: buildFiles.map{ try $0.toProtocol(resolver) }) + return try SWBProtocol.RezBuildPhase(guid: guid, buildFiles: buildFiles.map { try $0.toProtocol(resolver) }) } } @@ -772,7 +772,7 @@ package final class TestSourcesBuildPhase: TestInternalBuildPhase { } fileprivate func toProtocol(_ resolver: any Resolver) throws -> SWBProtocol.BuildPhase { - return try SWBProtocol.SourcesBuildPhase(guid: guid, buildFiles: buildFiles.map{ try $0.toProtocol(resolver) }) + return try SWBProtocol.SourcesBuildPhase(guid: guid, buildFiles: buildFiles.map { try $0.toProtocol(resolver) }) } } @@ -811,15 +811,15 @@ package final class TestBuildRule: TestInternalItem, Sendable { self.name = name self.inputSpecifier = inputSpecifier - let outputs = outputs.enumerated().map{ (entry) -> SWBProtocol.BuildRule.ShellScriptOutputInfo in - let (i, output) = entry - if i < outputFilesCompilerFlags.count { - return .init(path: .string(output), additionalCompilerFlags: .stringList(outputFilesCompilerFlags[i])) - } else { - return .init(path: .string(output), additionalCompilerFlags: nil) - } + let outputs = outputs.enumerated().map { (entry) -> SWBProtocol.BuildRule.ShellScriptOutputInfo in + let (i, output) = entry + if i < outputFilesCompilerFlags.count { + return .init(path: .string(output), additionalCompilerFlags: .stringList(outputFilesCompilerFlags[i])) + } else { + return .init(path: .string(output), additionalCompilerFlags: nil) } - self.actionSpecifier = .shellScript(contents: script, inputs: inputs.map{ .string($0) }, inputFileLists: inputFileLists.map { .string($0) }, outputs: outputs, outputFileLists: outputFileLists.map { .string($0) }, dependencyInfo: dependencyInfo, runOncePerArchitecture: runOncePerArchitecture ?? true) + } + self.actionSpecifier = .shellScript(contents: script, inputs: inputs.map { .string($0) }, inputFileLists: inputFileLists.map { .string($0) }, outputs: outputs, outputFileLists: outputFileLists.map { .string($0) }, dependencyInfo: dependencyInfo, runOncePerArchitecture: runOncePerArchitecture ?? 
true) } fileprivate func toProtocol(_ resolver: any Resolver) -> SWBProtocol.BuildRule { @@ -837,7 +837,7 @@ package final class TestCustomTask: Sendable { package let enableSandboxing: Bool package let preparesForIndexing: Bool - package init(commandLine: [String], environment: [String : String], workingDirectory: String, executionDescription: String, inputs: [String], outputs: [String], enableSandboxing: Bool, preparesForIndexing: Bool) { + package init(commandLine: [String], environment: [String: String], workingDirectory: String, executionDescription: String, inputs: [String], outputs: [String], enableSandboxing: Bool, preparesForIndexing: Bool) { self.commandLine = commandLine self.environment = environment self.workingDirectory = workingDirectory @@ -904,7 +904,7 @@ extension TestInternalTarget { return .plDict([ "signature": .plString(signature), "type": .plString("target"), - "contents": .plDict(["data": .plArray(serializer.byteString.bytes.map { .plInt(Int($0)) })]) + "contents": .plDict(["data": .plArray(serializer.byteString.bytes.map { .plInt(Int($0)) })]), ]) } } @@ -1022,18 +1022,18 @@ package final class TestStandardTarget: TestInternalTarget, Sendable { func computeProductReferenceName(_ name: String) -> String { switch self { case .application, - .watchKit1App, - .watchKitApp, - .watchKitAppContainer, - .messagesApp, - .appClip: + .watchKit1App, + .watchKitApp, + .watchKitAppContainer, + .messagesApp, + .appClip: return "\(name).app" case .commandLineTool, - .hostBuildTool, - .swiftpmTestRunner: + .hostBuildTool, + .swiftpmTestRunner: return "\(name)" case .framework, - .staticFramework: + .staticFramework: return "\(name).framework" case .staticLibrary: return "lib\(name).a" @@ -1052,11 +1052,11 @@ package final class TestStandardTarget: TestInternalTarget, Sendable { case .xpcService: return "\(name).xpc" case .applicationExtension, - .extensionKitExtension, - .xcodeExtension, - .watchKitExtension, - .messagesExtension, - .messagesStickerPackExtension: + .extensionKitExtension, + .xcodeExtension, + .watchKitExtension, + .messagesExtension, + .messagesStickerPackExtension: return "\(name).appex" case .unitTest, .uiTest, .multiDeviceUITest: return "\(name).xctest" @@ -1100,24 +1100,26 @@ package final class TestStandardTarget: TestInternalTarget, Sendable { self.buildRules = buildRules self.customTasks = customTasks self.dependencies = dependencies - self.explicitProductReferenceName = productReferenceName ?? { - // Try to correctly determine the product reference if not specified explicitly - let productNames = Set((buildConfigurations ?? []).compactMap { $0.buildSettings["PRODUCT_NAME"] }) - if productNames.count > 1 { - preconditionFailure("productReferenceName must be explicitly set for this target because it cannot be determined automatically in this context") - } + self.explicitProductReferenceName = + productReferenceName + ?? { + // Try to correctly determine the product reference if not specified explicitly + let productNames = Set((buildConfigurations ?? 
[]).compactMap { $0.buildSettings["PRODUCT_NAME"] }) + if productNames.count > 1 { + preconditionFailure("productReferenceName must be explicitly set for this target because it cannot be determined automatically in this context") + } - // Just return nil; we'll end up using the target name - if productNames.first == "$(TARGET_NAME)" { - return nil - } + // Just return nil; we'll end up using the target name + if productNames.first == "$(TARGET_NAME)" { + return nil + } - if productNames.first?.contains("$") == true { - preconditionFailure("productReferenceName must be explicitly set for this target because it cannot be determined automatically in this context (build setting references are not evaluated here)") - } + if productNames.first?.contains("$") == true { + preconditionFailure("productReferenceName must be explicitly set for this target because it cannot be determined automatically in this context (build setting references are not evaluated here)") + } - return productNames.first.map { type.computeProductReferenceName($0) } - }() + return productNames.first.map { type.computeProductReferenceName($0) } + }() self.predominantSourceCodeLanguage = predominantSourceCodeLanguage self.provisioningSourceData = provisioningSourceData self.dynamicTargetVariantName = dynamicTargetVariantName @@ -1135,7 +1137,7 @@ package final class TestStandardTarget: TestInternalTarget, Sendable { fileprivate func toProtocol(_ resolver: any Resolver) throws -> SWBProtocol.Target { let ref = SWBProtocol.ProductReference(guid: productReferenceGUID, name: productReferenceName) let performanceTestsBaselinePath = try (type == .unitTest) ? resolver.findProject(for: self).getPath(resolver).join("xcshareddata/xcbaselines").join("\(guid).xcbaseline") : nil - return try SWBProtocol.StandardTarget(guid: guid, name: name, buildConfigurations: buildConfigurations.map{ try $0.toProtocol(resolver) }, customTasks: customTasks.map { $0.toProtocol(resolver) }, dependencies: dependencies.map{ $0.toProtocol(resolver) }, buildPhases: buildPhases.map{ try $0.toProtocol(resolver) }, buildRules: buildRules.map{ $0.toProtocol(resolver) }, productTypeIdentifier: type.productTypeIdentifier, productReference: ref, performanceTestsBaselinesPath: performanceTestsBaselinePath?.str, predominantSourceCodeLanguage: predominantSourceCodeLanguage.description, provisioningSourceData: provisioningSourceData, dynamicTargetVariantGuid: dynamicTargetVariantName?.nilIfEmpty.map(resolver.findTarget)??.guid, approvedByUser: approvedByUser) + return try SWBProtocol.StandardTarget(guid: guid, name: name, buildConfigurations: buildConfigurations.map { try $0.toProtocol(resolver) }, customTasks: customTasks.map { $0.toProtocol(resolver) }, dependencies: dependencies.map { $0.toProtocol(resolver) }, buildPhases: buildPhases.map { try $0.toProtocol(resolver) }, buildRules: buildRules.map { $0.toProtocol(resolver) }, productTypeIdentifier: type.productTypeIdentifier, productReference: ref, performanceTestsBaselinesPath: performanceTestsBaselinePath?.str, predominantSourceCodeLanguage: predominantSourceCodeLanguage.description, provisioningSourceData: provisioningSourceData, dynamicTargetVariantGuid: dynamicTargetVariantName?.nilIfEmpty.map(resolver.findTarget)??.guid, approvedByUser: approvedByUser) } } @@ -1167,7 +1169,7 @@ package final class TestAggregateTarget: TestInternalTarget { fileprivate func toProtocol(_ resolver: any Resolver) throws -> SWBProtocol.Target { let deps = dependencies.map { SWBProtocol.TargetDependency(guid: 
resolver.findTarget($0)?.guid ?? $0, name: $0) } - return try SWBProtocol.AggregateTarget(guid: guid, name: name, buildConfigurations: buildConfigurations.map{ try $0.toProtocol(resolver) }, customTasks: customTasks.map { $0.toProtocol(resolver) }, dependencies: deps, buildPhases: buildPhases.map{ try $0.toProtocol(resolver) }) + return try SWBProtocol.AggregateTarget(guid: guid, name: name, buildConfigurations: buildConfigurations.map { try $0.toProtocol(resolver) }, customTasks: customTasks.map { $0.toProtocol(resolver) }, dependencies: deps, buildPhases: buildPhases.map { try $0.toProtocol(resolver) }) } } @@ -1206,7 +1208,7 @@ package final class TestExternalTarget: TestInternalTarget { } fileprivate func toProtocol(_ resolver: any Resolver) throws -> SWBProtocol.Target { - return try SWBProtocol.ExternalTarget(guid: actualGUID, name: name, buildConfigurations: buildConfigurations.map{ try $0.toProtocol(resolver) }, customTasks: customTasks.map { $0.toProtocol(resolver) }, dependencies: dependencies.map { TargetDependency(guid: resolver.findTarget($0)?.guid ?? $0, name: $0) }, toolPath: .string(toolPath), arguments: .string(arguments), workingDirectory: .string(workingDirectory), passBuildSettingsInEnvironment: passBuildSettingsInEnvironment ?? true) + return try SWBProtocol.ExternalTarget(guid: actualGUID, name: name, buildConfigurations: buildConfigurations.map { try $0.toProtocol(resolver) }, customTasks: customTasks.map { $0.toProtocol(resolver) }, dependencies: dependencies.map { TargetDependency(guid: resolver.findTarget($0)?.guid ?? $0, name: $0) }, toolPath: .string(toolPath), arguments: .string(arguments), workingDirectory: .string(workingDirectory), passBuildSettingsInEnvironment: passBuildSettingsInEnvironment ?? true) } } @@ -1240,7 +1242,7 @@ package final class TestPackageProductTarget: TestInternalTarget { fileprivate func toProtocol(_ resolver: any Resolver) throws -> SWBProtocol.Target { let deps = dependencies.map { SWBProtocol.TargetDependency(guid: resolver.findTarget($0)?.guid ?? $0, name: $0) } - return try SWBProtocol.PackageProductTarget(guid: guid, name: name, buildConfigurations: buildConfigurations.map{ try $0.toProtocol(resolver) }, customTasks: customTasks.map { $0.toProtocol(resolver) }, dependencies: deps, frameworksBuildPhase: frameworksBuildPhase.toProtocol(resolver) as! SWBProtocol.FrameworksBuildPhase, dynamicTargetVariantGuid: dynamicTargetVariantName?.nilIfEmpty.map(resolver.findTarget)??.guid, approvedByUser: approvedByUser) + return try SWBProtocol.PackageProductTarget(guid: guid, name: name, buildConfigurations: buildConfigurations.map { try $0.toProtocol(resolver) }, customTasks: customTasks.map { $0.toProtocol(resolver) }, dependencies: deps, frameworksBuildPhase: frameworksBuildPhase.toProtocol(resolver) as! 
SWBProtocol.FrameworksBuildPhase, dynamicTargetVariantGuid: dynamicTargetVariantName?.nilIfEmpty.map(resolver.findTarget)??.guid, approvedByUser: approvedByUser) } } @@ -1261,8 +1263,8 @@ package final class TestBuildConfiguration: TestInternalItem, Sendable { } fileprivate func toProtocol(_ resolver: any Resolver) throws -> SWBProtocol.BuildConfiguration { - let baseConfigGUID = try self.baseConfig.map{ try resolver.findFile($0) } - return SWBProtocol.BuildConfiguration(name: name, buildSettings: buildSettings.map{ .init(key: $0.0, value: .string($0.1)) }, baseConfigurationFileReferenceGUID: baseConfigGUID, impartedBuildProperties: impartedBuildProperties.toProtocol(resolver)) + let baseConfigGUID = try self.baseConfig.map { try resolver.findFile($0) } + return SWBProtocol.BuildConfiguration(name: name, buildSettings: buildSettings.map { .init(key: $0.0, value: .string($0.1)) }, baseConfigurationFileReferenceGUID: baseConfigGUID, impartedBuildProperties: impartedBuildProperties.toProtocol(resolver)) } } @@ -1276,7 +1278,7 @@ package final class TestImpartedBuildProperties: TestInternalItem, Sendable { } fileprivate func toProtocol(_ resolver: any Resolver) -> SWBProtocol.ImpartedBuildProperties { - return SWBProtocol.ImpartedBuildProperties(buildSettings: buildSettings.map{ .init(key: $0.0, value: .string($0.1)) }) + return SWBProtocol.ImpartedBuildProperties(buildSettings: buildSettings.map { .init(key: $0.0, value: .string($0.1)) }) } } @@ -1328,13 +1330,13 @@ package class TestProject: TestInternalObjectItem, @unchecked Sendable { return .plDict([ "signature": .plString(signature), "type": .plString("project"), - "contents": .plDict(["data": .plArray(serializer.byteString.bytes.map { .plInt(Int($0)) })]) + "contents": .plDict(["data": .plArray(serializer.byteString.bytes.map { .plInt(Int($0)) })]), ]) } fileprivate func toProtocol(_ resolver: any Resolver) throws -> SWBProtocol.Project { let path = getPath(resolver) - return try SWBProtocol.Project(guid: guid, isPackage: isPackage, xcodeprojPath: path, sourceRoot: sourceRoot ?? path.dirname, targetSignatures: _targets.map{ $0.signature }, groupTree: groupTree.toProtocol(resolver, isRoot: true), buildConfigurations: buildConfigurations.map{ try $0.toProtocol(resolver) }, defaultConfigurationName: defaultConfigurationName, developmentRegion: developmentRegion, classPrefix: classPrefix, appPreferencesBuildSettings: appPreferencesBuildSettings.map{ .init(key: $0.0, value: .string($0.1)) }) + return try SWBProtocol.Project(guid: guid, isPackage: isPackage, xcodeprojPath: path, sourceRoot: sourceRoot ?? path.dirname, targetSignatures: _targets.map { $0.signature }, groupTree: groupTree.toProtocol(resolver, isRoot: true), buildConfigurations: buildConfigurations.map { try $0.toProtocol(resolver) }, defaultConfigurationName: defaultConfigurationName, developmentRegion: developmentRegion, classPrefix: classPrefix, appPreferencesBuildSettings: appPreferencesBuildSettings.map { .init(key: $0.0, value: .string($0.1)) }) } } @@ -1376,12 +1378,12 @@ package final class TestWorkspace: Resolver, TestInternalItem, Sendable { } /// Load the test workspace into a helper which can provide various derived objects. 
- package func loadHelper(_ core: Core) throws-> WorkspaceTestHelper { + package func loadHelper(_ core: Core) throws -> WorkspaceTestHelper { return WorkspaceTestHelper(try load(core), core: core) } package func toObjects() throws -> [PropertyListItem] { - return try [toObject(self)] + projects.map{ try $0.toObject(self) } + projects.flatMap{ try $0._targets.map{ try $0.toObject(self) } } + return try [toObject(self)] + projects.map { try $0.toObject(self) } + projects.flatMap { try $0._targets.map { try $0.toObject(self) } } } fileprivate func toObject(_ resolver: any Resolver) -> PropertyListItem { @@ -1390,12 +1392,12 @@ package final class TestWorkspace: Resolver, TestInternalItem, Sendable { return .plDict([ "signature": .plString(signature), "type": .plString("workspace"), - "contents": .plDict(["data": .plArray(serializer.byteString.bytes.map { .plInt(Int($0)) })]) + "contents": .plDict(["data": .plArray(serializer.byteString.bytes.map { .plInt(Int($0)) })]), ]) } fileprivate func toProtocol(_ resolver: any Resolver) -> SWBProtocol.Workspace { - return SWBProtocol.Workspace(guid: guid, name: name, path: sourceRoot.join("\(name).xcworkspace"), projectSignatures: projects.map{ $0.signature }) + return SWBProtocol.Workspace(guid: guid, name: name, path: sourceRoot.join("\(name).xcworkspace"), projectSignatures: projects.map { $0.signature }) } var workspaceName: String { @@ -1562,7 +1564,7 @@ package final class WorkspaceTestHelper: Sendable { self.core = core self.workspace = workspace self.workspaceContext = WorkspaceContext(core: core, workspace: workspace, processExecutionCache: .sharedForTesting) - self.workspaceContext.updateUserInfo(UserInfo(user: "exampleUser", group: "exampleGroup", uid: 1234, gid:12345, home: Path("/Users/exampleUser"), environment: [:])) + self.workspaceContext.updateUserInfo(UserInfo(user: "exampleUser", group: "exampleGroup", uid: 1234, gid: 12345, home: Path("/Users/exampleUser"), environment: [:])) self.workspaceContext.updateSystemInfo(SystemInfo(operatingSystemVersion: Version(99, 98, 97), productBuildVersion: "99A98", nativeArchitecture: "x86_64")) } @@ -1597,7 +1599,8 @@ extension UserPreferences { enableBuildSystemCaching: true, activityTextShorteningLevel: .default, usePerConfigurationBuildLocations: nil, - allowsExternalToolExecution: false) + allowsExternalToolExecution: false + ) package func with( enableDebugActivityLogs: Bool? 
= nil, diff --git a/Sources/SWBTestSupport/UserDefaultTestTrait.swift b/Sources/SWBTestSupport/UserDefaultTestTrait.swift index 31739a7f..6e27a928 100644 --- a/Sources/SWBTestSupport/UserDefaultTestTrait.swift +++ b/Sources/SWBTestSupport/UserDefaultTestTrait.swift @@ -11,41 +11,41 @@ //===----------------------------------------------------------------------===// #if compiler(<6.1) -public import Testing + public import Testing -extension Trait where Self == Testing.ConditionTrait { - public static func userDefaults(_ userDefaults: [String: String], clean: Bool = false, sourceLocation: SourceLocation = #_sourceLocation) -> Self { - disabled("Custom execution traits are not supported in this build") + extension Trait where Self == Testing.ConditionTrait { + public static func userDefaults(_ userDefaults: [String: String], clean: Bool = false, sourceLocation: SourceLocation = #_sourceLocation) -> Self { + disabled("Custom execution traits are not supported in this build") + } } -} #else -package import Testing -@_spi(Testing) import SWBUtil -import Foundation + package import Testing + @_spi(Testing) import SWBUtil + import Foundation -package struct UserDefaultsTestTrait: TestTrait & SuiteTrait & TestScoping { - let userDefaults: [String: String] - let clean: Bool + package struct UserDefaultsTestTrait: TestTrait & SuiteTrait & TestScoping { + let userDefaults: [String: String] + let clean: Bool - package var isRecursive: Bool { - true - } + package var isRecursive: Bool { + true + } - package func provideScope(for test: Testing.Test, testCase: Testing.Test.Case?, performing function: @Sendable () async throws -> Void) async throws { - if testCase == nil || test.isSuite { - try await function() - } else { - try await UserDefaults.withEnvironment(userDefaults, clean: clean) { + package func provideScope(for test: Testing.Test, testCase: Testing.Test.Case?, performing function: @Sendable () async throws -> Void) async throws { + if testCase == nil || test.isSuite { try await function() + } else { + try await UserDefaults.withEnvironment(userDefaults, clean: clean) { + try await function() + } } } } -} -extension Trait where Self == UserDefaultsTestTrait { - /// Causes a test to be executed while the specified user defaults are applied. - package static func userDefaults(_ userDefaults: [String: String], clean: Bool = false, sourceLocation: SourceLocation = #_sourceLocation) -> Self { - Self(userDefaults: userDefaults, clean: clean) + extension Trait where Self == UserDefaultsTestTrait { + /// Causes a test to be executed while the specified user defaults are applied. 
+ package static func userDefaults(_ userDefaults: [String: String], clean: Bool = false, sourceLocation: SourceLocation = #_sourceLocation) -> Self { + Self(userDefaults: userDefaults, clean: clean) + } } -} #endif diff --git a/Sources/SWBUniversalPlatform/BareMetal.swift b/Sources/SWBUniversalPlatform/BareMetal.swift index 7a30cfea..b572e7d3 100644 --- a/Sources/SWBUniversalPlatform/BareMetal.swift +++ b/Sources/SWBUniversalPlatform/BareMetal.swift @@ -18,15 +18,18 @@ import Foundation struct BareMetalPlatformExtension: PlatformInfoExtension { func additionalPlatforms(context: any PlatformInfoExtensionAdditionalPlatformsContext) throws -> [(path: Path, data: [String: PropertyListItem])] { [ - (.root, [ - "Type": .plString("Platform"), - "Name": .plString("none"), - "Identifier": .plString("none"), - "Description": .plString("Bare Metal"), - "FamilyName": .plString("None"), - "FamilyIdentifier": .plString("none"), - "IsDeploymentPlatform": .plString("YES"), - ]) + ( + .root, + [ + "Type": .plString("Platform"), + "Name": .plString("none"), + "Identifier": .plString("none"), + "Description": .plString("Bare Metal"), + "FamilyName": .plString("None"), + "FamilyIdentifier": .plString("none"), + "IsDeploymentPlatform": .plString("YES"), + ] + ) ] } } @@ -38,24 +41,31 @@ struct BareMetalPlatformExtension: PlatformInfoExtension { } let defaultProperties: [String: PropertyListItem] = [ - "SDK_STAT_CACHE_ENABLE": "NO", + "SDK_STAT_CACHE_ENABLE": "NO" ] - return [(.root, platform, [ - "Type": .plString("SDK"), - "Version": .plString("0.0.0"), - "CanonicalName": .plString("none"), - "IsBaseSDK": .plBool(true), - "DefaultProperties": .plDict([ - "PLATFORM_NAME": .plString("none"), - ].merging(defaultProperties, uniquingKeysWith: { _, new in new })), - "CustomProperties": .plDict([:]), - "SupportedTargets": .plDict([ - "none": .plDict([ - "Archs": .plArray([]), - "LLVMTargetTripleSys": .plString("none"), - ]) - ]), - ])] + return [ + ( + .root, platform, + [ + "Type": .plString("SDK"), + "Version": .plString("0.0.0"), + "CanonicalName": .plString("none"), + "IsBaseSDK": .plBool(true), + "DefaultProperties": .plDict( + [ + "PLATFORM_NAME": .plString("none") + ].merging(defaultProperties, uniquingKeysWith: { _, new in new }) + ), + "CustomProperties": .plDict([:]), + "SupportedTargets": .plDict([ + "none": .plDict([ + "Archs": .plArray([]), + "LLVMTargetTripleSys": .plString("none"), + ]) + ]), + ] + ) + ] } } diff --git a/Sources/SWBUniversalPlatform/CppTool.swift b/Sources/SWBUniversalPlatform/CppTool.swift index 08fc3de7..ba01a8bd 100644 --- a/Sources/SWBUniversalPlatform/CppTool.swift +++ b/Sources/SWBUniversalPlatform/CppTool.swift @@ -12,6 +12,6 @@ import SWBCore -final class CppToolSpec : GenericCommandLineToolSpec, SpecIdentifierType, @unchecked Sendable { +final class CppToolSpec: GenericCommandLineToolSpec, SpecIdentifierType, @unchecked Sendable { static let identifier = "com.apple.compilers.cpp" } diff --git a/Sources/SWBUniversalPlatform/LexCompiler.swift b/Sources/SWBUniversalPlatform/LexCompiler.swift index 36f441e7..4df408f5 100644 --- a/Sources/SWBUniversalPlatform/LexCompiler.swift +++ b/Sources/SWBUniversalPlatform/LexCompiler.swift @@ -14,7 +14,7 @@ import SWBUtil import SWBCore import SWBMacro -final class LexCompilerSpec : CompilerSpec, SpecIdentifierType, @unchecked Sendable { +final class LexCompilerSpec: CompilerSpec, SpecIdentifierType, @unchecked Sendable { static let identifier = "com.apple.compilers.lex" static let extensionMappings = [ @@ -22,7 +22,8 @@ final class 
LexCompilerSpec : CompilerSpec, SpecIdentifierType, @unchecked Senda ".lmm": ".mm", ".LMM": ".MM", ".lp": ".cp", ".LP": ".CP", ".lpp": ".cpp", ".LPP": ".CPP", - ".lxx": ".cxx", ".LXX": ".CXX"] + ".lxx": ".cxx", ".LXX": ".CXX", + ] override var toolBasenameAliases: [String] { return ["flex"] @@ -36,7 +37,7 @@ final class LexCompilerSpec : CompilerSpec, SpecIdentifierType, @unchecked Senda // Compute the input and output path. let input = cbc.input let inputBasename = input.absolutePath.basename - let (inputPrefix,inputExt) = Path(inputBasename).splitext() + let (inputPrefix, inputExt) = Path(inputBasename).splitext() let outputExt = LexCompilerSpec.extensionMappings[inputExt] ?? ".c" let outputPath = cbc.scope.evaluate(BuiltinMacros.DERIVED_FILE_DIR).join(inputPrefix + ".yy" + outputExt) let lexFlags = cbc.scope.evaluate(BuiltinMacros.LEXFLAGS) diff --git a/Sources/SWBUniversalPlatform/Plugin.swift b/Sources/SWBUniversalPlatform/Plugin.swift index 91b2392e..5086da63 100644 --- a/Sources/SWBUniversalPlatform/Plugin.swift +++ b/Sources/SWBUniversalPlatform/Plugin.swift @@ -39,7 +39,7 @@ struct UniversalPlatformSpecsExtension: SpecificationsExtension { func specificationImplementations() -> [any SpecImplementationType.Type] { [ - DiffToolSpec.self, + DiffToolSpec.self ] } @@ -83,7 +83,7 @@ struct UniversalPlatformTaskProducerExtension: TaskProducerExtension { } struct UniversalPlatformTaskActionExtension: TaskActionExtension { - var taskActionImplementations: [SWBUtil.SerializableTypeCode : any SWBUtil.PolymorphicSerializable.Type] { + var taskActionImplementations: [SWBUtil.SerializableTypeCode: any SWBUtil.PolymorphicSerializable.Type] { [44: TestEntryPointGenerationTaskAction.self] } } diff --git a/Sources/SWBUniversalPlatform/TestEntryPointGenerationTaskAction.swift b/Sources/SWBUniversalPlatform/TestEntryPointGenerationTaskAction.swift index bad72bbd..e64c52c7 100644 --- a/Sources/SWBUniversalPlatform/TestEntryPointGenerationTaskAction.swift +++ b/Sources/SWBUniversalPlatform/TestEntryPointGenerationTaskAction.swift @@ -50,70 +50,75 @@ class TestEntryPointGenerationTaskAction: TaskAction { } } - try executionDelegate.fs.write(options.output, contents: ByteString(encodingAsUTF8: """ - #if canImport(Testing) - import Testing - #endif - - \(testObservationFragment) - - public import XCTest - \(discoveredTestsFragment(tests: tests, options: options)) - - @main - @available(macOS 10.15, iOS 11, watchOS 4, tvOS 11, visionOS 1, *) - @available(*, deprecated, message: "Not actually deprecated. Marked as deprecated to allow inclusion of deprecated tests (which test deprecated functionality) without warnings") - struct Runner { - private static func testingLibrary() -> String { - var iterator = CommandLine.arguments.makeIterator() - while let argument = iterator.next() { - if argument == "--testing-library", let libraryName = iterator.next() { - return libraryName.lowercased() - } - } - - // Fallback if not specified: run XCTest (legacy behavior) - return "xctest" - } - - private static func testOutputPath() -> String? 
{ - var iterator = CommandLine.arguments.makeIterator() - while let argument = iterator.next() { - if argument == "--testing-output-path", let outputPath = iterator.next() { - return outputPath - } - } - return nil - } + try executionDelegate.fs.write( + options.output, + contents: ByteString( + encodingAsUTF8: """ + #if canImport(Testing) + import Testing + #endif + + \(testObservationFragment) + + public import XCTest + \(discoveredTestsFragment(tests: tests, options: options)) + + @main + @available(macOS 10.15, iOS 11, watchOS 4, tvOS 11, visionOS 1, *) + @available(*, deprecated, message: "Not actually deprecated. Marked as deprecated to allow inclusion of deprecated tests (which test deprecated functionality) without warnings") + struct Runner { + private static func testingLibrary() -> String { + var iterator = CommandLine.arguments.makeIterator() + while let argument = iterator.next() { + if argument == "--testing-library", let libraryName = iterator.next() { + return libraryName.lowercased() + } + } + + // Fallback if not specified: run XCTest (legacy behavior) + return "xctest" + } - #if os(Linux) - @_silgen_name("$ss13_runAsyncMainyyyyYaKcF") - private static func _runAsyncMain(_ asyncFun: @Sendable @escaping () async throws -> ()) + private static func testOutputPath() -> String? { + var iterator = CommandLine.arguments.makeIterator() + while let argument = iterator.next() { + if argument == "--testing-output-path", let outputPath = iterator.next() { + return outputPath + } + } + return nil + } - static func main() { - let testingLibrary = Self.testingLibrary() - #if canImport(Testing) - if testingLibrary == "swift-testing" { - _runAsyncMain { - await Testing.__swiftPMEntryPoint() as Never + #if os(Linux) + @_silgen_name("$ss13_runAsyncMainyyyyYaKcF") + private static func _runAsyncMain(_ asyncFun: @Sendable @escaping () async throws -> ()) + + static func main() { + let testingLibrary = Self.testingLibrary() + #if canImport(Testing) + if testingLibrary == "swift-testing" { + _runAsyncMain { + await Testing.__swiftPMEntryPoint() as Never + } + } + #endif + \(xctestFragment(enableExperimentalTestOutput: options.enableExperimentalTestOutput, disable: !options.discoverTests)) + } + #else + static func main() async { + let testingLibrary = Self.testingLibrary() + #if canImport(Testing) + if testingLibrary == "swift-testing" { + await Testing.__swiftPMEntryPoint() as Never + } + #endif + \(xctestFragment(enableExperimentalTestOutput: options.enableExperimentalTestOutput, disable: !options.discoverTests)) + } + #endif } - } - #endif - \(xctestFragment(enableExperimentalTestOutput: options.enableExperimentalTestOutput, disable: !options.discoverTests)) - } - #else - static func main() async { - let testingLibrary = Self.testingLibrary() - #if canImport(Testing) - if testingLibrary == "swift-testing" { - await Testing.__swiftPMEntryPoint() as Never - } - #endif - \(xctestFragment(enableExperimentalTestOutput: options.enableExperimentalTestOutput, disable: !options.discoverTests)) - } - #endif - } - """)) + """ + ) + ) return .succeeded } catch { @@ -142,11 +147,11 @@ class TestEntryPointGenerationTaskAction: TaskAction { fragment += "@testable import \(moduleName)\n" } fragment += """ - @available(*, deprecated, message: "Not actually deprecated. 
Marked as deprecated to allow inclusion of deprecated tests (which test deprecated functionality) without warnings") - public func __allDiscoveredTests() -> [XCTestCaseEntry] { - return [ + @available(*, deprecated, message: "Not actually deprecated. Marked as deprecated to allow inclusion of deprecated tests (which test deprecated functionality) without warnings") + public func __allDiscoveredTests() -> [XCTestCaseEntry] { + return [ - """ + """ for testClass in tests { let testTuples = testClass.testMethods.map { method in @@ -160,9 +165,9 @@ class TestEntryPointGenerationTaskAction: TaskAction { fragment += " testCase([\(testTuples.joined(separator: ",\n"))]),\n" } fragment += """ - ] - } - """ + ] + } + """ return fragment } @@ -171,17 +176,17 @@ class TestEntryPointGenerationTaskAction: TaskAction { return "" } return """ - if testingLibrary == "xctest" { - #if !os(Windows) && \(enableExperimentalTestOutput) - _ = Self.testOutputPath().map { __SwiftPMXCTestObserver(testOutputPath: testOutputPath) } - #endif - #if os(WASI) - await XCTMain(__allDiscoveredTests()) as Never - #else - XCTMain(__allDiscoveredTests()) as Never - #endif - } - """ + if testingLibrary == "xctest" { + #if !os(Windows) && \(enableExperimentalTestOutput) + _ = Self.testOutputPath().map { __SwiftPMXCTestObserver(testOutputPath: testOutputPath) } + #endif + #if os(WASI) + await XCTMain(__allDiscoveredTests()) as Never + #else + XCTMain(__allDiscoveredTests()) as Never + #endif + } + """ } private var testObservationFragment: String = diff --git a/Sources/SWBUniversalPlatform/TestEntryPointGenerationTool.swift b/Sources/SWBUniversalPlatform/TestEntryPointGenerationTool.swift index ae18b283..decf6034 100644 --- a/Sources/SWBUniversalPlatform/TestEntryPointGenerationTool.swift +++ b/Sources/SWBUniversalPlatform/TestEntryPointGenerationTool.swift @@ -25,7 +25,7 @@ final class TestEntryPointGenerationToolSpec: GenericCommandLineToolSpec, SpecId let format = cbc.scope.evaluate(BuiltinMacros.LINKER_FILE_LIST_FORMAT) args.append(contentsOf: ["--linker-file-list-format", .literal(.init(encodingAsUTF8: format.rawValue))]) - for toolchainLibrarySearchPath in cbc.producer.toolchains.map({ StackedSearchPath(paths: $0.librarySearchPaths.paths + $0.fallbackLibrarySearchPaths.paths, fs: $0.librarySearchPaths.fs) } ) { + for toolchainLibrarySearchPath in cbc.producer.toolchains.map({ StackedSearchPath(paths: $0.librarySearchPaths.paths + $0.fallbackLibrarySearchPaths.paths, fs: $0.librarySearchPaths.fs) }) { if let path = toolchainLibrarySearchPath.findLibrary(operatingSystem: cbc.producer.hostOperatingSystem, basename: "IndexStore") { args.append(contentsOf: ["--index-store-library-path", .path(path)]) break diff --git a/Sources/SWBUniversalPlatform/YaccCompiler.swift b/Sources/SWBUniversalPlatform/YaccCompiler.swift index 004a744d..4fef98ab 100644 --- a/Sources/SWBUniversalPlatform/YaccCompiler.swift +++ b/Sources/SWBUniversalPlatform/YaccCompiler.swift @@ -14,7 +14,7 @@ import SWBUtil import SWBCore import SWBMacro -final class YaccCompilerSpec : CompilerSpec, SpecIdentifierType, @unchecked Sendable { +final class YaccCompilerSpec: CompilerSpec, SpecIdentifierType, @unchecked Sendable { static let identifier = "com.apple.compilers.yacc" static let extensionMappings = [ @@ -22,7 +22,8 @@ final class YaccCompilerSpec : CompilerSpec, SpecIdentifierType, @unchecked Send ".ymm": ".mm", ".YMM": ".MM", ".yp": ".cp", ".YP": ".CP", ".ypp": ".cpp", ".YPP": ".CPP", - ".yxx": ".cxx", ".YXX": ".CXX"] + ".yxx": ".cxx", ".YXX": ".CXX", + ] 
override var toolBasenameAliases: [String] { return ["bison"] @@ -36,7 +37,7 @@ final class YaccCompilerSpec : CompilerSpec, SpecIdentifierType, @unchecked Send // Compute the input and output path. let input = cbc.input let inputBasename = input.absolutePath.basename - let (inputPrefix,inputExt) = Path(inputBasename).splitext() + let (inputPrefix, inputExt) = Path(inputBasename).splitext() let outputPrefix: String if cbc.scope.evaluate(BuiltinMacros.YACC_GENERATED_FILE_STEM) == "InputFileStem" { diff --git a/Sources/SWBUtil/Architecture.swift b/Sources/SWBUtil/Architecture.swift index d7518ec8..99e4ac6b 100644 --- a/Sources/SWBUtil/Architecture.swift +++ b/Sources/SWBUtil/Architecture.swift @@ -14,11 +14,11 @@ import Foundation import SWBLibc #if canImport(Darwin) -import Darwin -import MachO -#if canImport(MachO.dyld.utils) -import MachO.dyld.utils -#endif + import Darwin + import MachO + #if canImport(MachO.dyld.utils) + import MachO.dyld.utils + #endif #endif public struct Architecture: Sendable { @@ -31,13 +31,13 @@ public struct Architecture: Sendable { /// Returns the native architecture of the host machine, not including subtypes like arm64e and x86_64h. public static let host: Architecture = { #if canImport(Darwin) && canImport(MachO.dyld.utils) - if let value = macho_arch_name_for_mach_header(nil) { - var cputype: cpu_type_t = 0 - var cpusubtype: cpu_subtype_t = 0 - if macho_cpu_type_for_arch_name(value, &cputype, &cpusubtype) { - return Self(cputype: cputype) + if let value = macho_arch_name_for_mach_header(nil) { + var cputype: cpu_type_t = 0 + var cpusubtype: cpu_subtype_t = 0 + if macho_cpu_type_for_arch_name(value, &cputype, &cpusubtype) { + return Self(cputype: cputype) + } } - } #endif return Self() }() @@ -45,83 +45,84 @@ public struct Architecture: Sendable { /// Returns the 32-bit counterpart of the architecture, which may be the same value. public var as32bit: Architecture { #if canImport(Darwin) - return Self(cputype: cputype & ~CPU_ARCH_ABI64) + return Self(cputype: cputype & ~CPU_ARCH_ABI64) #else - return Self() + return Self() #endif } /// Returns the 64-bit counterpart of the architecture, which may be the same value. public var as64bit: Architecture { #if canImport(Darwin) - return Self(cputype: cputype | CPU_ARCH_ABI64) + return Self(cputype: cputype | CPU_ARCH_ABI64) #else - return Self() + return Self() #endif } /// Returns the string representation of the architecture, or `nil` if it cannot be determined. public var stringValue: String? { #if canImport(Darwin) - // This only needs to consider the known 4 values as it's only for computing the host architecture build settings. - switch cputype { - case CPU_TYPE_ARM: - return "arm" - case CPU_TYPE_ARM64: - return "arm64" - case CPU_TYPE_I386: - return "i386" - case CPU_TYPE_X86_64: - return "x86_64" - default: - break - } + // This only needs to consider the known 4 values as it's only for computing the host architecture build settings. + switch cputype { + case CPU_TYPE_ARM: + return "arm" + case CPU_TYPE_ARM64: + return "arm64" + case CPU_TYPE_I386: + return "i386" + case CPU_TYPE_X86_64: + return "x86_64" + default: + break + } #endif return nil } public static var hostStringValue: String? { - return host.stringValue ?? { () -> String? 
in - #if os(Windows) - var sysInfo = SYSTEM_INFO() - GetSystemInfo(&sysInfo) - switch Int32(sysInfo.wProcessorArchitecture) { - case PROCESSOR_ARCHITECTURE_AMD64: - return "x86_64" - case PROCESSOR_ARCHITECTURE_ARM64: - return "aarch64" - default: - return nil - } - #else - var buf = utsname() - if uname(&buf) == 0 { - return withUnsafeBytes(of: &buf.machine) { buf in - let data = Data(buf) - let value = String(decoding: data[0...(data.lastIndex(where: { $0 != 0 }) ?? 0)], as: UTF8.self) - #if os(FreeBSD) - switch value { - case "amd64": + return host.stringValue + ?? { () -> String? in + #if os(Windows) + var sysInfo = SYSTEM_INFO() + GetSystemInfo(&sysInfo) + switch Int32(sysInfo.wProcessorArchitecture) { + case PROCESSOR_ARCHITECTURE_AMD64: return "x86_64" - case "arm64": + case PROCESSOR_ARCHITECTURE_ARM64: return "aarch64" default: - break + return nil } - #endif - return value - } - } - return nil - #endif - }() + #else + var buf = utsname() + if uname(&buf) == 0 { + return withUnsafeBytes(of: &buf.machine) { buf in + let data = Data(buf) + let value = String(decoding: data[0...(data.lastIndex(where: { $0 != 0 }) ?? 0)], as: UTF8.self) + #if os(FreeBSD) + switch value { + case "amd64": + return "x86_64" + case "arm64": + return "aarch64" + default: + break + } + #endif + return value + } + } + return nil + #endif + }() } static func stringValue(cputype: cpu_type_t, cpusubtype: cpu_subtype_t) -> String? { #if canImport(Darwin) && canImport(MachO.dyld.utils) - return macho_arch_name_for_cpu_type(cputype, cpusubtype).map(String.init(cString:)) + return macho_arch_name_for_cpu_type(cputype, cpusubtype).map(String.init(cString:)) #else - return nil + return nil #endif } } diff --git a/Sources/SWBUtil/ArgumentSplitting.swift b/Sources/SWBUtil/ArgumentSplitting.swift index b0580849..fcb5667b 100644 --- a/Sources/SWBUtil/ArgumentSplitting.swift +++ b/Sources/SWBUtil/ArgumentSplitting.swift @@ -130,7 +130,7 @@ public final class UNIXShellCommandCodec: CommandSequenceEncodable, Sendable { if $0 == "\n" { return "'\n'" } else if isSpecialShellCharacter(ch: $0) { - return "\\\($0)" + return "\\\($0)" } else { return "\($0)" } @@ -289,7 +289,7 @@ public final class WindowsProcessArgumentsCodec: CommandSequenceEncodable, Senda public func encode(_ sequence: [String]) -> String { return sequence.lazy .map { arg in - if !arg.contains(where: {" \t\n\"".contains($0)}) { + if !arg.contains(where: { " \t\n\"".contains($0) }) { return arg } diff --git a/Sources/SWBUtil/Array.swift b/Sources/SWBUtil/Array.swift index 5f01ef96..c7e105b9 100644 --- a/Sources/SWBUtil/Array.swift +++ b/Sources/SWBUtil/Array.swift @@ -10,16 +10,14 @@ // //===----------------------------------------------------------------------===// -public extension Array -{ +public extension Array { // Typechecking of operator+ can be extraordinarily slow, even for small expressions; use this instead when needed. func appending(contentsOf other: [Element]) -> [Element] { return self + other } } -public extension Array where Element: Equatable -{ +public extension Array where Element: Equatable { /// Returns the element immediately following the first contiguous subsequence of elements equal to `elements`. func elementAfterElements(_ elements: [Element]) -> Element? 
{ if let range = self.firstRange(of: elements), range.upperBound != endIndex { @@ -110,7 +108,7 @@ public extension Array where Element: FloatingPoint { return Element(0) } let average = self.average() - let v = self.reduce(0) { (acc: Element, next: Element) in acc + (next-average)*(next-average) } + let v = self.reduce(0) { (acc: Element, next: Element) in acc + (next - average) * (next - average) } return (v / (Element(self.count) - 1)).squareRoot() } @@ -135,7 +133,7 @@ extension Sequence { return elements } - public func asyncFlatMap(_ transform: (Self.Element) async throws(E) -> SegmentOfResult) async throws(E) -> [SegmentOfResult.Element] where SegmentOfResult : Sequence { + public func asyncFlatMap(_ transform: (Self.Element) async throws(E) -> SegmentOfResult) async throws(E) -> [SegmentOfResult.Element] where SegmentOfResult: Sequence { var elements: [SegmentOfResult.Element] = [] for element in self { try await elements.append(contentsOf: transform(element)) @@ -210,13 +208,13 @@ public func nWayMerge(_ arrays: [[T]]) -> [NWayMergeEle var merged = arrays[0].map { NWayMergeElement(element: $0, elementOf: Set([0])) } for (idx, array) in arrays.enumerated().dropFirst() { merged = merged.map { NWayMergeElement(element: $0.element, elementOf: $0.elementOf.union([idx])) } - let next = array.map { NWayMergeElement(element: $0, elementOf: Set([idx])) } + let next = array.map { NWayMergeElement(element: $0, elementOf: Set([idx])) } let diff = next.difference(from: merged, by: { first, second in first.element == second.element }) for change in diff { switch change { case .insert(offset: let offset, element: let element, associatedWith: _): let adjustment = diff.removals.filter { $0.offset <= offset }.count - merged.insert(element, at: offset+adjustment) + merged.insert(element, at: offset + adjustment) case .remove(offset: let offset, element: _, associatedWith: _): merged[offset].elementOf.remove(idx) } diff --git a/Sources/SWBUtil/AsyncCache.swift b/Sources/SWBUtil/AsyncCache.swift index 6ce4c41c..8475e0a2 100644 --- a/Sources/SWBUtil/AsyncCache.swift +++ b/Sources/SWBUtil/AsyncCache.swift @@ -24,7 +24,7 @@ public actor AsyncCache { private var cache: [Key: KeyState] = [:] /// Creates a new cache. - public init() { } + public init() {} /// Retrieves the value for the specified key, invoking the `body` closure to cache the value if it is not already present. 
/// diff --git a/Sources/SWBUtil/AsyncFlatteningSequence.swift b/Sources/SWBUtil/AsyncFlatteningSequence.swift index 019eb7d6..023f7aeb 100644 --- a/Sources/SWBUtil/AsyncFlatteningSequence.swift +++ b/Sources/SWBUtil/AsyncFlatteningSequence.swift @@ -89,7 +89,7 @@ extension AsyncSequence where Self.Element: RandomAccessCollection, Element.Elem } } -extension AsyncFlatteningSequence: Sendable where Base: Sendable { } +extension AsyncFlatteningSequence: Sendable where Base: Sendable {} @available(*, unavailable) -extension AsyncFlatteningSequence.AsyncIterator: Sendable { } +extension AsyncFlatteningSequence.AsyncIterator: Sendable {} diff --git a/Sources/SWBUtil/AsyncLock.swift b/Sources/SWBUtil/AsyncLock.swift index 5a7c3d34..81ab087c 100644 --- a/Sources/SWBUtil/AsyncLock.swift +++ b/Sources/SWBUtil/AsyncLock.swift @@ -30,7 +30,7 @@ public actor ActorLock { if let next = queue.popFirst() { next.resume(returning: ()) } else { - queue = [] // reallocate buffer if it's empty + queue = [] // reallocate buffer if it's empty } } return try await body() diff --git a/Sources/SWBUtil/AsyncOperationQueue.swift b/Sources/SWBUtil/AsyncOperationQueue.swift index 1ef3b936..ea550631 100644 --- a/Sources/SWBUtil/AsyncOperationQueue.swift +++ b/Sources/SWBUtil/AsyncOperationQueue.swift @@ -102,31 +102,31 @@ public final class AsyncOperationQueue: @unchecked Sendable { } switch waitingTasks[index] { - case .cancelled: - // If the task was cancelled in between creating the task cancellation handler and acquiring the lock, - // we should resume the continuation with a `CancellationError`. + case .cancelled: + // If the task was cancelled in between creating the task cancellation handler and acquiring the lock, + // we should resume the continuation with a `CancellationError`. + waitingTasks.remove(at: index) + return .cancel(continuation) + case .creating, .running, .waiting: + // A task may have completed since we initially checked if we should wait. Check again in this locked + // section and if we can start it, remove it from the waiting tasks and start it immediately. + if waitingTasks.count >= concurrentTasks { + waitingTasks[index] = .waiting(taskId, continuation) + return nil + } else { waitingTasks.remove(at: index) - return .cancel(continuation) - case .creating, .running, .waiting: - // A task may have completed since we initially checked if we should wait. Check again in this locked - // section and if we can start it, remove it from the waiting tasks and start it immediately. - if waitingTasks.count >= concurrentTasks { - waitingTasks[index] = .waiting(taskId, continuation) - return nil - } else { - waitingTasks.remove(at: index) - return .start(continuation) - } + return .start(continuation) + } } } switch action { - case .some(.cancel(let continuation)): - continuation.resume(throwing: _Concurrency.CancellationError()) - case .some(.start(let continuation)): - continuation.resume() - case .none: - return + case .some(.cancel(let continuation)): + continuation.resume(throwing: _Concurrency.CancellationError()) + case .some(.start(let continuation)): + continuation.resume() + case .none: + return } } } onCancel: { @@ -136,20 +136,20 @@ public final class AsyncOperationQueue: @unchecked Sendable { } switch self.waitingTasks[taskIndex] { - case .waiting(_, let continuation): - self.waitingTasks.remove(at: taskIndex) - - // If the parent task is cancelled then we need to manually handle resuming the - // continuation for the waiting task with a `CancellationError`. 
Return the continuation - // here so it can be resumed once the `waitingTasksLock` is released. - return continuation - case .creating, .running: - // If the task was still being created, mark it as cancelled in `waitingTasks` so that - // the handler for `withCheckedThrowingContinuation` can immediately cancel it. - self.waitingTasks[taskIndex] = .cancelled(taskId) - return nil - case .cancelled: - preconditionFailure("Attempting to cancel a task that was already cancelled") + case .waiting(_, let continuation): + self.waitingTasks.remove(at: taskIndex) + + // If the parent task is cancelled then we need to manually handle resuming the + // continuation for the waiting task with a `CancellationError`. Return the continuation + // here so it can be resumed once the `waitingTasksLock` is released. + return continuation + case .creating, .running: + // If the task was still being created, mark it as cancelled in `waitingTasks` so that + // the handler for `withCheckedThrowingContinuation` can immediately cancel it. + self.waitingTasks[taskIndex] = .cancelled(taskId) + return nil + case .cancelled: + preconditionFailure("Attempting to cancel a task that was already cancelled") } } diff --git a/Sources/SWBUtil/AsyncSingleValueCache.swift b/Sources/SWBUtil/AsyncSingleValueCache.swift index 4310faaf..bc3faa18 100644 --- a/Sources/SWBUtil/AsyncSingleValueCache.swift +++ b/Sources/SWBUtil/AsyncSingleValueCache.swift @@ -13,7 +13,7 @@ public struct AsyncSingleValueCache: ~Copyable, Sendable { private let value = AsyncLockedValue(nil) - public init() { } + public init() {} public func value(body: sending () async throws(E) -> sending Value) async throws(E) -> sending Value { try await value.withLock { value throws(E) in diff --git a/Sources/SWBUtil/ByteCount.swift b/Sources/SWBUtil/ByteCount.swift index feadcdd1..dbaad72b 100644 --- a/Sources/SWBUtil/ByteCount.swift +++ b/Sources/SWBUtil/ByteCount.swift @@ -38,7 +38,7 @@ extension ByteCount: Serializable { self.count = try .init(from: deserializer) } - public func serialize(to serializer: T) where T : Serializer { + public func serialize(to serializer: T) where T: Serializer { self.count.serialize(to: serializer) } } diff --git a/Sources/SWBUtil/ByteString.swift b/Sources/SWBUtil/ByteString.swift index f13375c5..ad6c8be4 100644 --- a/Sources/SWBUtil/ByteString.swift +++ b/Sources/SWBUtil/ByteString.swift @@ -91,11 +91,11 @@ public struct ByteString: ExpressibleByArrayLiteral, Sendable { _bytes.hasSuffix(suffix._bytes) } - public static func +=(lhs: inout ByteString, rhs: ByteString) { + public static func += (lhs: inout ByteString, rhs: ByteString) { lhs = lhs + rhs } - public static func +(lhs: ByteString, rhs: ByteString) -> ByteString { + public static func + (lhs: ByteString, rhs: ByteString) -> ByteString { return ByteString(lhs._bytes + rhs.bytes) } } @@ -109,10 +109,10 @@ extension ByteString: CustomStringConvertible { } /// Hashable conformance for a ByteString. -extension ByteString: Hashable { } +extension ByteString: Hashable {} /// Comparison with strings (as UTF8). -public func ==(lhs: ByteString, rhs: String) -> Bool { +public func == (lhs: ByteString, rhs: String) -> Bool { // FIXME: Is Swift's String.UTF8View.count O(1)? 
let utf8 = rhs.utf8 if lhs.bytes.count != utf8.count { diff --git a/Sources/SWBUtil/Cache.swift b/Sources/SWBUtil/Cache.swift index eaa9e4e0..484ebb5c 100644 --- a/Sources/SWBUtil/Cache.swift +++ b/Sources/SWBUtil/Cache.swift @@ -58,7 +58,7 @@ public final class Cache: NSObject, KeyValueStorage, NSCac private let cache: UnsafeNSCacheSendableWrapper private let willEvictCallback: (@Sendable (Value) -> Void)? - public init(willEvictCallback: (@Sendable (Value)->Void)? = nil, totalCostLimit: Int? = nil) { + public init(willEvictCallback: (@Sendable (Value) -> Void)? = nil, totalCostLimit: Int? = nil) { self.cache = .init(value: NSCache()) self.willEvictCallback = willEvictCallback super.init() @@ -88,19 +88,19 @@ public final class Cache: NSObject, KeyValueStorage, NSCac } set { #if os(Linux) - if let newValue = newValue { - cache.value.setObject(ValueWrapper(newValue), forKey: KeyWrapper(key)) - } else { - cache.value.removeObject(forKey: KeyWrapper(key)) - } + if let newValue = newValue { + cache.value.setObject(ValueWrapper(newValue), forKey: KeyWrapper(key)) + } else { + cache.value.removeObject(forKey: KeyWrapper(key)) + } #else - if let newValue, let cacheableValue = newValue as? (any CacheableValue) { - cache.value.setObject(ValueWrapper(newValue), forKey: KeyWrapper(key), cost: cacheableValue.cost) - } else if let newValue = newValue { - cache.value.setObject(ValueWrapper(newValue), forKey: KeyWrapper(key)) - } else { - cache.value.removeObject(forKey: KeyWrapper(key)) - } + if let newValue, let cacheableValue = newValue as? (any CacheableValue) { + cache.value.setObject(ValueWrapper(newValue), forKey: KeyWrapper(key), cost: cacheableValue.cost) + } else if let newValue = newValue { + cache.value.setObject(ValueWrapper(newValue), forKey: KeyWrapper(key)) + } else { + cache.value.removeObject(forKey: KeyWrapper(key)) + } #endif } } @@ -121,14 +121,14 @@ public final class Cache: NSObject, KeyValueStorage, NSCac let value = try body() #if os(Linux) - cache.value.setObject(ValueWrapper(value), forKey: wrappedKey) + cache.value.setObject(ValueWrapper(value), forKey: wrappedKey) #else - if let cacheableValue = value as? (any CacheableValue) { - cache.value.setObject(ValueWrapper(value), forKey: wrappedKey, cost: cacheableValue.cost) + if let cacheableValue = value as? (any CacheableValue) { + cache.value.setObject(ValueWrapper(value), forKey: wrappedKey, cost: cacheableValue.cost) - } else { - cache.value.setObject(ValueWrapper(value), forKey: wrappedKey) - } + } else { + cache.value.setObject(ValueWrapper(value), forKey: wrappedKey) + } #endif return value } @@ -140,6 +140,6 @@ public final class Cache: NSObject, KeyValueStorage, NSCac } } -extension Cache: Sendable where Key: Sendable, Value: Sendable { } -extension KeyWrapper: Sendable where T: Sendable { } -extension ValueWrapper: Sendable where T: Sendable { } +extension Cache: Sendable where Key: Sendable, Value: Sendable {} +extension KeyWrapper: Sendable where T: Sendable {} +extension ValueWrapper: Sendable where T: Sendable {} diff --git a/Sources/SWBUtil/CountedSet.swift b/Sources/SWBUtil/CountedSet.swift index a1f7287a..77eb4fb1 100644 --- a/Sources/SWBUtil/CountedSet.swift +++ b/Sources/SWBUtil/CountedSet.swift @@ -13,7 +13,7 @@ /// A collection of unique `Element` instances with no defined ordering, but with a per-element count of the number of times that element is present in the counted set. // FIXME: We should make CountedSet a CollectionType. // FIXME: We should also make CountedSet a SequenceType. 
-public struct CountedSet : CustomStringConvertible { +public struct CountedSet: CustomStringConvertible { /// Private mapping from elements to counts. private var _elmsToCounts = Dictionary() @@ -22,7 +22,7 @@ public struct CountedSet : CustomStringConvertible { private var _totalCount = 0 /// Initializes a new CountedSet with the given elements. - public init(_ elements: T ...) { + public init(_ elements: T...) { for elm in elements { insert(elm) } @@ -32,8 +32,7 @@ public struct CountedSet : CustomStringConvertible { public mutating func insert(_ element: T) { if let count = _elmsToCounts[element] { _elmsToCounts[element] = count + 1 - } - else { + } else { _elmsToCounts[element] = 1 } _totalCount += 1 diff --git a/Sources/SWBUtil/Debugger.swift b/Sources/SWBUtil/Debugger.swift index 7bec1c4b..374499e4 100644 --- a/Sources/SWBUtil/Debugger.swift +++ b/Sources/SWBUtil/Debugger.swift @@ -16,31 +16,31 @@ import SWBLibc public enum Debugger: Sendable { public static func pause() { #if os(Windows) - DebugActiveProcess(GetCurrentProcessId()) + DebugActiveProcess(GetCurrentProcessId()) #else - raise(SIGSTOP) + raise(SIGSTOP) #endif } public static func resume() { #if os(Windows) - DebugActiveProcessStop(GetCurrentProcessId()) + DebugActiveProcessStop(GetCurrentProcessId()) #else - raise(SIGCONT) + raise(SIGCONT) #endif } public static func isAttached() throws -> Bool { #if canImport(Darwin) - var info = kinfo_proc() - var mib: [Int32] = [CTL_KERN, KERN_PROC, KERN_PROC_PID, getpid()] - var size = MemoryLayout.stride - if sysctl(&mib, u_int(mib.count), &info, &size, nil, 0) != 0 { - throw POSIXError(errno, context: "sysctl") - } - return (info.kp_proc.p_flag & P_TRACED) != 0 + var info = kinfo_proc() + var mib: [Int32] = [CTL_KERN, KERN_PROC, KERN_PROC_PID, getpid()] + var size = MemoryLayout.stride + if sysctl(&mib, u_int(mib.count), &info, &size, nil, 0) != 0 { + throw POSIXError(errno, context: "sysctl") + } + return (info.kp_proc.p_flag & P_TRACED) != 0 #else - throw StubError.error("Debugger detection is not supported on this platform") + throw StubError.error("Debugger detection is not supported on this platform") #endif } @@ -61,44 +61,44 @@ public enum Debugger: Sendable { public static func waitForXcodeAutoAttachIfEnabled() async throws { #if canImport(Darwin) - if isXcodeAutoAttachEnabled { - try await waitForAttachment() { - // Exit if parent process died while waiting for debugger - if kill(getppid(), 0) != 0 { // ignore-unacceptable-language; POSIX API - throw StubError.error("Parent process exited while waiting for debugger") + if isXcodeAutoAttachEnabled { + try await waitForAttachment() { + // Exit if parent process died while waiting for debugger + if kill(getppid(), 0) != 0 { // ignore-unacceptable-language; POSIX API + throw StubError.error("Parent process exited while waiting for debugger") + } + return true } - return true } - } #else - throw StubError.error("Debugger detection is not supported on this platform") + throw StubError.error("Debugger detection is not supported on this platform") #endif } public static func requestXcodeAutoAttachIfEnabled(_ remoteToolPID: Int32) throws { #if canImport(Darwin) - if isXcodeAutoAttachEnabled, let sendPipeName = ProcessInfo.processInfo.environment["IB_AUTO_ATTACH_PIPE_NAME"] { - let returnPipeName = "/tmp/Xcode.IBCTTAutoAttachPipe.Return.\(getpid()).\(remoteToolPID)" - unlink(returnPipeName) - let success = mkfifo(returnPipeName, S_IRWXU | S_IRWXG | S_IRWXO) == 0 - - if let writeHandle = FileHandle(forWritingAtPath: sendPipeName) { - 
let payload = "\(getpid()).\(remoteToolPID).IBCocoaFramework;" - try writeHandle.write(contentsOf: Data(payload.utf8)) - try writeHandle.close() - } + if isXcodeAutoAttachEnabled, let sendPipeName = ProcessInfo.processInfo.environment["IB_AUTO_ATTACH_PIPE_NAME"] { + let returnPipeName = "/tmp/Xcode.IBCTTAutoAttachPipe.Return.\(getpid()).\(remoteToolPID)" + unlink(returnPipeName) + let success = mkfifo(returnPipeName, S_IRWXU | S_IRWXG | S_IRWXO) == 0 - if success { - if let readHandle = FileHandle(forReadingAtPath: returnPipeName) { - _ = try readHandle.readToEnd() - try readHandle.close() + if let writeHandle = FileHandle(forWritingAtPath: sendPipeName) { + let payload = "\(getpid()).\(remoteToolPID).IBCocoaFramework;" + try writeHandle.write(contentsOf: Data(payload.utf8)) + try writeHandle.close() } - unlink(returnPipeName) + if success { + if let readHandle = FileHandle(forReadingAtPath: returnPipeName) { + _ = try readHandle.readToEnd() + try readHandle.close() + } + + unlink(returnPipeName) + } } - } #else - throw StubError.error("Debugger detection is not supported on this platform") + throw StubError.error("Debugger detection is not supported on this platform") #endif } } diff --git a/Sources/SWBUtil/DependencyInfo.swift b/Sources/SWBUtil/DependencyInfo.swift index a39141fe..23b28c41 100644 --- a/Sources/SWBUtil/DependencyInfo.swift +++ b/Sources/SWBUtil/DependencyInfo.swift @@ -208,7 +208,7 @@ extension DependencyInfo { { Opcode.version.bytes(for: [try self.version.validatingEncodability()]) }, { Opcode.input.bytes(for: try self.inputs.validatingEncodability()) }, { Opcode.missing.bytes(for: try self.missing.validatingEncodability()) }, - { Opcode.output.bytes(for: try self.outputs.validatingEncodability()) } + { Opcode.output.bytes(for: try self.outputs.validatingEncodability()) }, ] return try operations.map({ try $0() }).reduce(into: [], { result, element in result += element }) } diff --git a/Sources/SWBUtil/DiagnosticsEngine.swift b/Sources/SWBUtil/DiagnosticsEngine.swift index db52c9cc..6942418d 100644 --- a/Sources/SWBUtil/DiagnosticsEngine.swift +++ b/Sources/SWBUtil/DiagnosticsEngine.swift @@ -33,7 +33,7 @@ public enum Component: Serializable, Equatable, Hashable, Sendable, Codable { case "targetIntegrity": self = .targetIntegrity - // Compatibility cases + // Compatibility cases case "swiftCompiler": self = .clangCompiler(categoryName: "Swift Compiler Error") case "parseIssue": @@ -61,7 +61,7 @@ public enum Component: Serializable, Equatable, Hashable, Sendable, Codable { } } - public func serialize(to serializer: T) where T : Serializer { + public func serialize(to serializer: T) where T: Serializer { serializer.serialize(name) } @@ -103,7 +103,7 @@ public struct Diagnostic: Equatable, Hashable, Serializable, Sendable, Codable { /// - parameter identifier: An opaque string identifying the object. case object(identifier: String) - public func serialize(to serializer: T) where T : Serializer { + public func serialize(to serializer: T) where T: Serializer { serializer.serializeAggregate(2) { switch self { case let .textual(line, column): @@ -224,7 +224,7 @@ public struct Diagnostic: Equatable, Hashable, Serializable, Sendable, Codable { switch self { case .unknown: serializer.serialize(LocationType.unknown) - serializer.serialize("") // This is needed so that the number of items in the aggregate is constant no matter the case. + serializer.serialize("") // This is needed so that the number of items in the aggregate is constant no matter the case. 
case let .path(path, fileLocation): serializer.serialize(LocationType.path) serializer.beginAggregate(2) @@ -250,19 +250,23 @@ public struct Diagnostic: Equatable, Hashable, Serializable, Sendable, Codable { try deserializer.beginAggregate(2) switch try deserializer.deserialize() as LocationType { case .unknown: - _ = try deserializer.deserialize() as String // This is just a mock for the aggregate and we can ignore the value. + _ = try deserializer.deserialize() as String // This is just a mock for the aggregate and we can ignore the value. self = .unknown case .path: try deserializer.beginAggregate(2) - self = .path(try deserializer.deserialize(), - fileLocation: try deserializer.deserialize()) + self = .path( + try deserializer.deserialize(), + fileLocation: try deserializer.deserialize() + ) case .buildSettings: try deserializer.beginAggregate(1) self = .buildSettings(names: try deserializer.deserialize()) case .buildFiles: try deserializer.beginAggregate(2) - self = .buildFiles(try deserializer.deserialize(), - targetGUID: try deserializer.deserialize()) + self = .buildFiles( + try deserializer.deserialize(), + targetGUID: try deserializer.deserialize() + ) } } } @@ -323,7 +327,7 @@ public struct Diagnostic: Equatable, Hashable, Serializable, Sendable, Codable { public let endLine: Int public let endColumn: Int - public func serialize(to serializer: T) where T : Serializer { + public func serialize(to serializer: T) where T: Serializer { serializer.serializeAggregate(5) { serializer.serialize(path) serializer.serialize(startLine) @@ -539,7 +543,7 @@ public struct Diagnostic: Equatable, Hashable, Serializable, Sendable, Codable { self.childDiagnostics = try deserializer.deserialize() } - public static func ==(lhs: Diagnostic, rhs: Diagnostic) -> Bool { + public static func == (lhs: Diagnostic, rhs: Diagnostic) -> Bool { // Not the best for performance, but this is only used in unit tests, and making DiagnosticID and DiagnosticData conform to Equatable is nontrivial. lhs.formatLocalizedDescription(.debug) == rhs.formatLocalizedDescription(.debug) } diff --git a/Sources/SWBUtil/Dictionary.swift b/Sources/SWBUtil/Dictionary.swift index 9cdc1e77..e9548a82 100644 --- a/Sources/SWBUtil/Dictionary.swift +++ b/Sources/SWBUtil/Dictionary.swift @@ -10,8 +10,7 @@ // //===----------------------------------------------------------------------===// -public extension Dictionary -{ +public extension Dictionary { /// Check if the dictionary contains an entry for `element`. func contains(_ element: Key) -> Bool { return index(forKey: element) != nil @@ -77,7 +76,7 @@ public extension Dictionary { } /// Returns the elements of the dictionary, sorted using the given key path as the comparison between elements. 
- @inlinable func sorted( _ areInIncreasingOrder: (KeyValue, KeyValue) throws -> Bool, byKey predicate: (Key) -> KeyValue) rethrows -> [(key: Key, value: Value)] { + @inlinable func sorted(_ areInIncreasingOrder: (KeyValue, KeyValue) throws -> Bool, byKey predicate: (Key) -> KeyValue) rethrows -> [(key: Key, value: Value)] { return try sorted(byKey: { try areInIncreasingOrder(predicate($0), predicate($1)) }) } diff --git a/Sources/SWBUtil/ElapsedTimer.swift b/Sources/SWBUtil/ElapsedTimer.swift index 03680c0c..6cd8f80d 100644 --- a/Sources/SWBUtil/ElapsedTimer.swift +++ b/Sources/SWBUtil/ElapsedTimer.swift @@ -69,11 +69,11 @@ public struct ElapsedTimerInterval: Hashable, Sendable { } public var nanoseconds: UInt64 { - UInt64(duration.nanoseconds) // always positive + UInt64(duration.nanoseconds) // always positive } public var microseconds: UInt64 { - UInt64(duration.microseconds) // always positive + UInt64(duration.microseconds) // always positive } public var seconds: TimeInterval { diff --git a/Sources/SWBUtil/EmptyState.swift b/Sources/SWBUtil/EmptyState.swift index 04bfd27b..26f74b7e 100644 --- a/Sources/SWBUtil/EmptyState.swift +++ b/Sources/SWBUtil/EmptyState.swift @@ -25,8 +25,8 @@ extension EmptyState { } // Add more types here as needed -extension Array: EmptyState { } -extension Path: EmptyState { } -extension String: EmptyState { } -extension Substring: EmptyState { } -extension Set: EmptyState { } +extension Array: EmptyState {} +extension Path: EmptyState {} +extension String: EmptyState {} +extension Substring: EmptyState {} +extension Set: EmptyState {} diff --git a/Sources/SWBUtil/Environment.swift b/Sources/SWBUtil/Environment.swift index f9fd7087..702adb84 100644 --- a/Sources/SWBUtil/Environment.swift +++ b/Sources/SWBUtil/Environment.swift @@ -114,7 +114,8 @@ extension Environment: Collection { extension Environment: CustomStringConvertible { public var description: String { - let body = self + let body = + self .sorted { $0.key < $1.key } .map { "\"\($0.rawValue)=\($1)\"" } .joined(separator: ", ") diff --git a/Sources/SWBUtil/EnvironmentKey.swift b/Sources/SWBUtil/EnvironmentKey.swift index 5d27598f..39f490a3 100644 --- a/Sources/SWBUtil/EnvironmentKey.swift +++ b/Sources/SWBUtil/EnvironmentKey.swift @@ -51,9 +51,9 @@ extension EnvironmentKey: Encodable { extension EnvironmentKey: Equatable { public static func == (_ lhs: Self, _ rhs: Self) -> Bool { #if os(Windows) - lhs.rawValue.lowercased() == rhs.rawValue.lowercased() + lhs.rawValue.lowercased() == rhs.rawValue.lowercased() #else - lhs.rawValue == rhs.rawValue + lhs.rawValue == rhs.rawValue #endif } } @@ -73,9 +73,9 @@ extension EnvironmentKey: Decodable { extension EnvironmentKey: Hashable { public func hash(into hasher: inout Hasher) { #if os(Windows) - self.rawValue.lowercased().hash(into: &hasher) + self.rawValue.lowercased().hash(into: &hasher) #else - self.rawValue.hash(into: &hasher) + self.rawValue.hash(into: &hasher) #endif } } diff --git a/Sources/SWBUtil/Error.swift b/Sources/SWBUtil/Error.swift index 9225b0f5..05e1adb6 100644 --- a/Sources/SWBUtil/Error.swift +++ b/Sources/SWBUtil/Error.swift @@ -43,52 +43,51 @@ public enum StubError: Error, LocalizedError, CustomStringConvertible, Serializa } #if canImport(Darwin) -public struct MacError: Error, CustomStringConvertible, LocalizedError { - public let code: OSStatus - private let error: NSError + public struct MacError: Error, CustomStringConvertible, LocalizedError { + public let code: OSStatus + private let error: NSError - public init(_ code: 
OSStatus) { - self.code = code - self.error = NSError(domain: NSOSStatusErrorDomain, code: Int(code), userInfo: nil) - } - - public var description: String { - // No supported API is available for retrieving this information in a nicer way - let prefix = "Error Domain=NSOSStatusErrorDomain Code=\(code) \"" - let suffix = "\"" - var rawDescription = error.description - guard rawDescription.hasPrefix(prefix) else { - return rawDescription + public init(_ code: OSStatus) { + self.code = code + self.error = NSError(domain: NSOSStatusErrorDomain, code: Int(code), userInfo: nil) } - rawDescription = rawDescription.withoutPrefix(prefix) - if rawDescription.hasSuffix(suffix) { - rawDescription = rawDescription.withoutSuffix(suffix) - } - let (first, second) = rawDescription.split(":") - let constant: String - let message: String - if !second.isEmpty { - constant = first - message = String(second.dropFirst()) - } else { - constant = "" - message = first - } + public var description: String { + // No supported API is available for retrieving this information in a nicer way + let prefix = "Error Domain=NSOSStatusErrorDomain Code=\(code) \"" + let suffix = "\"" + var rawDescription = error.description + guard rawDescription.hasPrefix(prefix) else { + return rawDescription + } + rawDescription = rawDescription.withoutPrefix(prefix) + if rawDescription.hasSuffix(suffix) { + rawDescription = rawDescription.withoutSuffix(suffix) + } + let (first, second) = rawDescription.split(":") - if !constant.isEmpty { - return "\(message) (\(constant), \(code))" - } else { - if message == "(null)" || message.isEmpty { - // Provide a friendlier message in these odd cases. - return "The operation couldn’t be completed. (\(code))" + let constant: String + let message: String + if !second.isEmpty { + constant = first + message = String(second.dropFirst()) + } else { + constant = "" + message = first } - else { - return "\(message) (\(code))" + + if !constant.isEmpty { + return "\(message) (\(constant), \(code))" + } else { + if message == "(null)" || message.isEmpty { + // Provide a friendlier message in these odd cases. + return "The operation couldn’t be completed. (\(code))" + } else { + return "\(message) (\(code))" + } } } - } - public var errorDescription: String? { return description } -} + public var errorDescription: String? { return description } + } #endif diff --git a/Sources/SWBUtil/FSProxy.swift b/Sources/SWBUtil/FSProxy.swift index 7f907cc2..8431ac7b 100644 --- a/Sources/SWBUtil/FSProxy.swift +++ b/Sources/SWBUtil/FSProxy.swift @@ -13,9 +13,9 @@ import SWBLibc #if canImport(System) -public import System + public import System #else -public import SystemPackage + public import SystemPackage #endif public import struct Foundation.CocoaError @@ -34,14 +34,13 @@ public import struct Foundation.FileAttributeKey public import struct Foundation.TimeInterval public import class Foundation.NSDictionary #if canImport(Darwin) -import struct ObjectiveC.ObjCBool + import struct ObjectiveC.ObjCBool #endif #if os(Windows) -public import struct WinSDK.HANDLE + public import struct WinSDK.HANDLE #endif - /// File system information for a particular file. /// /// This is a simple wrapper for stat() information. @@ -112,7 +111,7 @@ public struct FileInfo: Equatable, Sendable { return _readFileAttributePrimitive(fileAttrs[.systemNumber], as: Int32.self) ?? 
0 } - public static func ==(lhs: FileInfo, rhs: FileInfo) -> Bool { + public static func == (lhs: FileInfo, rhs: FileInfo) -> Bool { return NSDictionary(dictionary: lhs.fileAttrs).isEqual(NSDictionary(dictionary: rhs.fileAttrs)) } } @@ -357,10 +356,10 @@ class LocalFS: FSProxy, @unchecked Sendable { public func moveInSameVolume(_ path: Path, to: Path) throws { #if canImport(Darwin) - _ = try fileManager.replaceItemAt(URL(fileURLWithPath: to.str), withItemAt: URL(fileURLWithPath: path.str), options: FileManager.ItemReplacementOptions.usingNewMetadataOnly) + _ = try fileManager.replaceItemAt(URL(fileURLWithPath: to.str), withItemAt: URL(fileURLWithPath: path.str), options: FileManager.ItemReplacementOptions.usingNewMetadataOnly) #else - // `replaceItemAt` doesn't work on swift-corelibs-foundation - try move(path, to: to) + // `replaceItemAt` doesn't work on swift-corelibs-foundation + try move(path, to: to) #endif } @@ -373,17 +372,17 @@ class LocalFS: FSProxy, @unchecked Sendable { /// /// If the given path is a symlink to a directory, then this will return true if the destination of the symlink is a directory. func isDirectory(_ path: Path) -> Bool { -#if canImport(Darwin) - var isDirectory: ObjCBool = false - if fileManager.fileExists(atPath: path.str, isDirectory: &isDirectory) { - return isDirectory.boolValue - } -#else - var isDirectory = false - if fileManager.fileExists(atPath: path.str, isDirectory: &isDirectory) { - return isDirectory - } -#endif + #if canImport(Darwin) + var isDirectory: ObjCBool = false + if fileManager.fileExists(atPath: path.str, isDirectory: &isDirectory) { + return isDirectory.boolValue + } + #else + var isDirectory = false + if fileManager.fileExists(atPath: path.str, isDirectory: &isDirectory) { + return isDirectory + } + #endif return false } @@ -419,19 +418,16 @@ class LocalFS: FSProxy, @unchecked Sendable { if isDirectory(path) { // If the item at the path is a directory, then we're good. This includes if it's a symlink which points to a directory. return - } - else if isSymlink(path, &destinationExists) { + } else if isSymlink(path, &destinationExists) { // If the item at the path is a symlink, then we check whether it's a broken symlink or points to something that is not a directory. if destinationExists { // The destination does exist, so it's not a directory. throw StubError.error("File is a symbolic link which references a path which is not a directory: \(path.str)") - } - else { + } else { // The destination does not exist - throw an exception because we have a broken symlink. throw StubError.error("File is a broken symbolic link: \(path.str)") } - } - else { + } else { /// The path exists but is not a directory throw StubError.error("File exists but is not a directory: \(path.str)") } @@ -458,26 +454,26 @@ class LocalFS: FSProxy, @unchecked Sendable { func createTemporaryDirectory(parent: Path) throws -> Path { #if os(Windows) - return parent.join("swbuild.tmp." + UUID().description) + return parent.join("swbuild.tmp." + UUID().description) #else - // FIXME: This is an inappropriate location for general purpose infrastructure. - let template = [UInt8](parent.join("swbuild.tmp.XXXXXXXX").str.utf8) - - // Create the temp path. - let name = UnsafeMutablePointer.allocate(capacity: template.count + 1) - template.withUnsafeBufferPointer { buf in - memcpy(name, buf.baseAddress!, buf.count) - name[buf.count] = 0 - } + // FIXME: This is an inappropriate location for general purpose infrastructure. 
+ let template = [UInt8](parent.join("swbuild.tmp.XXXXXXXX").str.utf8) + + // Create the temp path. + let name = UnsafeMutablePointer.allocate(capacity: template.count + 1) + template.withUnsafeBufferPointer { buf in + memcpy(name, buf.baseAddress!, buf.count) + name[buf.count] = 0 + } - defer { name.deallocate() } + defer { name.deallocate() } - // Create the temporary directory. - guard mkdtemp(name) != nil else { - throw POSIXError(errno, context: "mkdtemp", String(cString: name)) - } + // Create the temporary directory. + guard mkdtemp(name) != nil else { + throw POSIXError(errno, context: "mkdtemp", String(cString: name)) + } - return Path(String(cString: name)) + return Path(String(cString: name)) #endif } @@ -645,108 +641,108 @@ class LocalFS: FSProxy, @unchecked Sendable { func listExtendedAttributes(_ path: Path) throws -> [String] { #if os(Windows) - // Implement ADS on Windows? See also https://github.com/swiftlang/swift-foundation/issues/1166 - return [] + // Implement ADS on Windows? See also https://github.com/swiftlang/swift-foundation/issues/1166 + return [] #elseif os(FreeBSD) - // FreeBSD blocked on https://github.com/swiftlang/swift/pull/77836 - return [] + // FreeBSD blocked on https://github.com/swiftlang/swift/pull/77836 + return [] #elseif os(OpenBSD) - // OpenBSD no longer supports extended attributes - return [] - #else - #if canImport(Darwin) - var size = listxattr(path.str, nil, 0, 0) - #else - var size = listxattr(path.str, nil, 0) - #endif - if size == -1 { - throw POSIXError(errno, context: "listxattr", path.str) - } - guard size > 0 else { return [] } - let keyList = UnsafeMutableBufferPointer.allocate(capacity: size) - defer { keyList.deallocate() } - #if canImport(Darwin) - size = listxattr(path.str, keyList.baseAddress!, size, 0) + // OpenBSD no longer supports extended attributes + return [] #else - size = listxattr(path.str, keyList.baseAddress!, size) - #endif - if size == -1 { - throw POSIXError(errno, context: "listxattr", path.str) - } - guard size > 0 else { return [] } - - var extendedAttrs: [String] = [] - var current = keyList.baseAddress! - let end = keyList.baseAddress!.advanced(by: keyList.count) - while current < end { - let currentKey = String(cString: current) - defer { current = current.advanced(by: currentKey.utf8.count) + 1 /* pass null byte */ } - extendedAttrs.append(currentKey) - } - return extendedAttrs + #if canImport(Darwin) + var size = listxattr(path.str, nil, 0, 0) + #else + var size = listxattr(path.str, nil, 0) + #endif + if size == -1 { + throw POSIXError(errno, context: "listxattr", path.str) + } + guard size > 0 else { return [] } + let keyList = UnsafeMutableBufferPointer.allocate(capacity: size) + defer { keyList.deallocate() } + #if canImport(Darwin) + size = listxattr(path.str, keyList.baseAddress!, size, 0) + #else + size = listxattr(path.str, keyList.baseAddress!, size) + #endif + if size == -1 { + throw POSIXError(errno, context: "listxattr", path.str) + } + guard size > 0 else { return [] } + + var extendedAttrs: [String] = [] + var current = keyList.baseAddress! + let end = keyList.baseAddress!.advanced(by: keyList.count) + while current < end { + let currentKey = String(cString: current) + defer { current = current.advanced(by: currentKey.utf8.count) + 1 /* pass null byte */ } + extendedAttrs.append(currentKey) + } + return extendedAttrs #endif } func setExtendedAttribute(_ path: Path, key: String, value: ByteString) throws { #if os(Windows) - // Implement ADS on Windows? 
See also https://github.com/swiftlang/swift-foundation/issues/1166 + // Implement ADS on Windows? See also https://github.com/swiftlang/swift-foundation/issues/1166 #elseif os(FreeBSD) - // FreeBSD blocked on https://github.com/swiftlang/swift/pull/77836 + // FreeBSD blocked on https://github.com/swiftlang/swift/pull/77836 #elseif os(OpenBSD) - // OpenBSD no longer supports extended attributes + // OpenBSD no longer supports extended attributes #else - try value.bytes.withUnsafeBufferPointer { buf throws -> Void in - #if canImport(Darwin) - let result = setxattr(path.str, key, buf.baseAddress, buf.count, 0, XATTR_NOFOLLOW) - #else - let result = lsetxattr(path.str, key, buf.baseAddress, buf.count, 0) - #endif - guard result == 0 else { - throw POSIXError(errno, context: "setxattr", path.str, key, value.unsafeStringValue) + try value.bytes.withUnsafeBufferPointer { buf throws -> Void in + #if canImport(Darwin) + let result = setxattr(path.str, key, buf.baseAddress, buf.count, 0, XATTR_NOFOLLOW) + #else + let result = lsetxattr(path.str, key, buf.baseAddress, buf.count, 0) + #endif + guard result == 0 else { + throw POSIXError(errno, context: "setxattr", path.str, key, value.unsafeStringValue) + } } - } #endif } func getExtendedAttribute(_ path: Path, key: String) throws -> ByteString? { #if os(Windows) - // Implement ADS on Windows? See also https://github.com/swiftlang/swift-foundation/issues/1166 - return nil + // Implement ADS on Windows? See also https://github.com/swiftlang/swift-foundation/issues/1166 + return nil #elseif os(FreeBSD) - // FreeBSD blocked on https://github.com/swiftlang/swift/pull/77836 - return nil + // FreeBSD blocked on https://github.com/swiftlang/swift/pull/77836 + return nil #elseif os(OpenBSD) - // OpenBSD no longer supports extended attributes - return nil + // OpenBSD no longer supports extended attributes + return nil #else - var bufferSize = 4096 - repeat { - var data = [UInt8].init(repeating: 0, count: bufferSize) - let count: ssize_t = data.withUnsafeMutableBytes { - #if canImport(Darwin) - return getxattr(path.str, key, $0.baseAddress, $0.count, 0, XATTR_NOFOLLOW) - #else - return lgetxattr(path.str, key, $0.baseAddress, $0.count) - #endif - } - if count < 0 { - switch errno { - #if os(Linux) || os(Android) - case ENODATA: - return nil - #else - case ENOATTR: - return nil - #endif - case ERANGE: - bufferSize *= 2 - continue - default: - throw POSIXError(errno, context: "getxattr", path.str, key) + var bufferSize = 4096 + repeat { + var data = [UInt8].init(repeating: 0, count: bufferSize) + let count: ssize_t = data.withUnsafeMutableBytes { + #if canImport(Darwin) + return getxattr(path.str, key, $0.baseAddress, $0.count, 0, XATTR_NOFOLLOW) + #else + return lgetxattr(path.str, key, $0.baseAddress, $0.count) + #endif } - } - return ByteString(data[0.. Bool { #if os(macOS) - var fs = statfs() - guard statfs(path.str, &fs) == 0 else { - // Conservatively assume path may be remote. - return true - } - return (fs.f_flags & UInt32(bitPattern: MNT_LOCAL)) == 0 + var fs = statfs() + guard statfs(path.str, &fs) == 0 else { + // Conservatively assume path may be remote. + return true + } + return (fs.f_flags & UInt32(bitPattern: MNT_LOCAL)) == 0 #else - return false + return false #endif } @@ -794,7 +790,6 @@ class LocalFS: FSProxy, @unchecked Sendable { } } - /// Concrete FSProxy implementation which simulates an empty disk. 
/// /// This class is thread-safe and supports @@ -820,8 +815,7 @@ public class PseudoFS: FSProxy, @unchecked Sendable { for item in try listdir(path) { try _copy(path.join(item), to: to.join(item)) } - } - else { + } else { try _copy(path, to: to) } } @@ -917,7 +911,7 @@ public class PseudoFS: FSProxy, @unchecked Sendable { var device: dev_t // The extended attributes of the node. - var xattrs = [String:ByteString]() + var xattrs = [String: ByteString]() init(_ contents: NodeContents, permissions: Int, timestamp: Int, inode: ino_t, device: dev_t = 1) { self.contents = contents @@ -1066,7 +1060,6 @@ public class PseudoFS: FSProxy, @unchecked Sendable { throw POSIXError(ENOTDIR) } - // Check if the node already exists. if let node = directory.contents[path.basename] { // Verify it is a directory. @@ -1191,8 +1184,7 @@ public class PseudoFS: FSProxy, @unchecked Sendable { if let node = directory.contents[path.basename] { guard case let .file(fileContents) = node.contents else { throw POSIXError(EISDIR) } existingContents = append ? fileContents : ByteString() - } - else { + } else { existingContents = ByteString() } @@ -1237,7 +1229,8 @@ public class PseudoFS: FSProxy, @unchecked Sendable { // Get the parent node's content if its a directory. guard let parent = getNode(path.dirname), - case .directory(let contents) = parent.contents else { + case .directory(let contents) = parent.contents + else { return } // Set it to nil to release the contents. @@ -1264,12 +1257,13 @@ public class PseudoFS: FSProxy, @unchecked Sendable { } let info: [FileAttributeKey: any Sendable] = [ - .modificationDate : Date(timeIntervalSince1970: TimeInterval(node.timestamp)), + .modificationDate: Date(timeIntervalSince1970: TimeInterval(node.timestamp)), .type: type, .size: size, .posixPermissions: node.permissions, .systemNumber: node.device, - .systemFileNumber: node.inode] + .systemFileNumber: node.inode, + ] return createFileInfo(info) } } @@ -1387,37 +1381,37 @@ public func createFS(simulated: Bool, ignoreFileSystemDeviceInodeChanges: Bool) } #if os(Windows) -extension HANDLE { - /// Runs a closure and then closes the HANDLE, even if an error occurs. - /// - /// - Parameter body: The closure to run. - /// If the closure throws an error, - /// this method closes the file descriptor before it rethrows that error. - /// - /// - Returns: The value returned by the closure. - /// - /// If `body` throws an error - /// or an error occurs while closing the file descriptor, - /// this method rethrows that error. - public func closeAfter(_ body: () throws -> R) throws -> R { - // No underscore helper, since the closure's throw isn't necessarily typed. - let result: R - do { - result = try body() - } catch { - _ = try? self.close() // Squash close error and throw closure's - throw error + extension HANDLE { + /// Runs a closure and then closes the HANDLE, even if an error occurs. + /// + /// - Parameter body: The closure to run. + /// If the closure throws an error, + /// this method closes the file descriptor before it rethrows that error. + /// + /// - Returns: The value returned by the closure. + /// + /// If `body` throws an error + /// or an error occurs while closing the file descriptor, + /// this method rethrows that error. + public func closeAfter(_ body: () throws -> R) throws -> R { + // No underscore helper, since the closure's throw isn't necessarily typed. + let result: R + do { + result = try body() + } catch { + _ = try? 
self.close() // Squash close error and throw closure's + throw error + } + try self.close() + return result } - try self.close() - return result - } - fileprivate func close() throws { - if !CloseHandle(self) { - throw Win32Error(GetLastError()) + fileprivate func close() throws { + if !CloseHandle(self) { + throw Win32Error(GetLastError()) + } } } -} #endif extension FileDescriptor { @@ -1438,7 +1432,7 @@ extension FileDescriptor { do { result = try await body() } catch { - _ = try? self.close() // Squash close error and throw closure's + _ = try? self.close() // Squash close error and throw closure's throw error } try self.close() diff --git a/Sources/SWBUtil/GraphAlgorithms.swift b/Sources/SWBUtil/GraphAlgorithms.swift index e93c7f88..57d1cc2d 100644 --- a/Sources/SWBUtil/GraphAlgorithms.swift +++ b/Sources/SWBUtil/GraphAlgorithms.swift @@ -16,7 +16,9 @@ /// /// - Returns: The distance (in number of edges) of the shortest path from the source to the destination, or nil if there is no such path. public func minimumDistance( - from source: T, to destination: T, successors: (T) throws -> [T] + from source: T, + to destination: T, + successors: (T) throws -> [T] ) rethrows -> Int? { var queue = Queue([(distance: 0, source)]) var visited = Set([source]) @@ -48,7 +50,9 @@ public func minimumDistance( /// /// - Returns: The shortest path (starting with the source and ending with the destination), or nil if there is no such path. public func shortestPath( - from source: T, to destination: T, successors: (T) throws -> [T] + from source: T, + to destination: T, + successors: (T) throws -> [T] ) rethrows -> [T]? { var queue = Queue([[source]]) var visited = Set([source]) @@ -80,8 +84,9 @@ public func shortestPath( /// not automatically include `nodes` unless present in the relation defined by /// `successors`. public func transitiveClosure( - _ nodes: [T], successors: (T) throws -> [T] - ) rethrows -> (result: OrderedSet, dupes: OrderedSet) { + _ nodes: [T], + successors: (T) throws -> [T] +) rethrows -> (result: OrderedSet, dupes: OrderedSet) { var dupes = OrderedSet() var result = OrderedSet() diff --git a/Sources/SWBUtil/HashContext.swift b/Sources/SWBUtil/HashContext.swift index 916adc37..a7b9a922 100644 --- a/Sources/SWBUtil/HashContext.swift +++ b/Sources/SWBUtil/HashContext.swift @@ -11,9 +11,9 @@ //===----------------------------------------------------------------------===// #if os(Windows) -import WinSDK + import WinSDK #elseif canImport(CryptoKit) -private import CryptoKit + private import CryptoKit #endif public import Foundation @@ -45,104 +45,104 @@ extension HashContext { } #if os(Windows) -fileprivate final class BCryptHashContext: HashContext { - private let digestLength: Int - private var hAlgorithm: BCRYPT_ALG_HANDLE? - private var hash: BCRYPT_HASH_HANDLE? - - @usableFromInline - internal var result: ByteString? - - public init(algorithm: String, digestLength: Int) { - self.digestLength = digestLength - algorithm.withCString(encodedAs: UTF16.self) { wName in - precondition(BCryptOpenAlgorithmProvider(&hAlgorithm, wName, nil, 0) == 0) + fileprivate final class BCryptHashContext: HashContext { + private let digestLength: Int + private var hAlgorithm: BCRYPT_ALG_HANDLE? + private var hash: BCRYPT_HASH_HANDLE? + + @usableFromInline + internal var result: ByteString? 
+ + public init(algorithm: String, digestLength: Int) { + self.digestLength = digestLength + algorithm.withCString(encodedAs: UTF16.self) { wName in + precondition(BCryptOpenAlgorithmProvider(&hAlgorithm, wName, nil, 0) == 0) + } + precondition(BCryptCreateHash(hAlgorithm, &hash, nil, 0, nil, 0, 0) == 0) } - precondition(BCryptCreateHash(hAlgorithm, &hash, nil, 0, nil, 0, 0) == 0) - } - - deinit { - precondition(BCryptDestroyHash(hash) == 0) - precondition(BCryptCloseAlgorithmProvider(hAlgorithm, 0) == 0) - } - public func add(bytes: D) { - precondition(result == nil, "tried to add additional context to a finalized HashContext") - var byteArray = Array(bytes) - byteArray.withUnsafeMutableBufferPointer { buffer in - precondition(BCryptHashData(hash, buffer.baseAddress, numericCast(buffer.count), 0) == 0) + deinit { + precondition(BCryptDestroyHash(hash) == 0) + precondition(BCryptCloseAlgorithmProvider(hAlgorithm, 0) == 0) } - } - public var signature: ByteString { - guard let result = self.result else { - let digest = withUnsafeTemporaryAllocation(of: UInt8.self, capacity: digestLength) { - precondition(BCryptFinishHash(hash, $0.baseAddress, numericCast($0.count), 0) == 0) - return Array($0) + public func add(bytes: D) { + precondition(result == nil, "tried to add additional context to a finalized HashContext") + var byteArray = Array(bytes) + byteArray.withUnsafeMutableBufferPointer { buffer in + precondition(BCryptHashData(hash, buffer.baseAddress, numericCast(buffer.count), 0) == 0) } - let byteCount = digestLength + } + + public var signature: ByteString { + guard let result = self.result else { + let digest = withUnsafeTemporaryAllocation(of: UInt8.self, capacity: digestLength) { + precondition(BCryptFinishHash(hash, $0.baseAddress, numericCast($0.count), 0) == 0) + return Array($0) + } + let byteCount = digestLength - var result = [UInt8](repeating: 0, count: Int(byteCount) * 2) + var result = [UInt8](repeating: 0, count: Int(byteCount) * 2) - digest.withUnsafeBytes { ptr in - for i in 0..> 4) - result[i*2 + 1] = hexchar(value & 0x0F) + digest.withUnsafeBytes { ptr in + for i in 0..> 4) + result[i * 2 + 1] = hexchar(value & 0x0F) + } } - } - let tmp = ByteString(result) - self.result = tmp - return tmp + let tmp = ByteString(result) + self.result = tmp + return tmp + } + return result } - return result } -} -@available(*, unavailable) -extension BCryptHashContext: Sendable { } + @available(*, unavailable) + extension BCryptHashContext: Sendable {} #elseif canImport(CryptoKit) -fileprivate final class SwiftCryptoHashContext: HashContext { - @usableFromInline - internal var hash = HF() + fileprivate final class SwiftCryptoHashContext: HashContext { + @usableFromInline + internal var hash = HF() - @usableFromInline - internal var result: ByteString? + @usableFromInline + internal var result: ByteString? 
- public init() { - } + public init() { + } - public func add(bytes: D) { - precondition(result == nil, "tried to add additional context to a finalized HashContext") - hash.update(data: bytes) - } + public func add(bytes: D) { + precondition(result == nil, "tried to add additional context to a finalized HashContext") + hash.update(data: bytes) + } - public var signature: ByteString { - guard let result = self.result else { - let digest = hash.finalize() - let byteCount = type(of: digest).byteCount + public var signature: ByteString { + guard let result = self.result else { + let digest = hash.finalize() + let byteCount = type(of: digest).byteCount - var result = [UInt8](repeating: 0, count: Int(byteCount) * 2) + var result = [UInt8](repeating: 0, count: Int(byteCount) * 2) - digest.withUnsafeBytes { ptr in - for i in 0..> 4) - result[i*2 + 1] = hexchar(value & 0x0F) + digest.withUnsafeBytes { ptr in + for i in 0..> 4) + result[i * 2 + 1] = hexchar(value & 0x0F) + } } - } - let tmp = ByteString(result) - self.result = tmp - return tmp + let tmp = ByteString(result) + self.result = tmp + return tmp + } + return result } - return result } -} -@available(*, unavailable) -extension SwiftCryptoHashContext: Sendable { } + @available(*, unavailable) + extension SwiftCryptoHashContext: Sendable {} #endif fileprivate final class VendoredSHA256HashContext: HashContext { @@ -154,7 +154,7 @@ fileprivate final class VendoredSHA256HashContext: HashContext { /// The initial hash value. private static let initalHashValue: [UInt32] = [ - 0x6a09e667, 0xbb67ae85, 0x3c6ef372, 0xa54ff53a, 0x510e527f, 0x9b05688c, 0x1f83d9ab, 0x5be0cd19 + 0x6a09e667, 0xbb67ae85, 0x3c6ef372, 0xa54ff53a, 0x510e527f, 0x9b05688c, 0x1f83d9ab, 0x5be0cd19, ] /// The constants in the algorithm (K). @@ -166,7 +166,7 @@ fileprivate final class VendoredSHA256HashContext: HashContext { 0x27b70a85, 0x2e1b2138, 0x4d2c6dfc, 0x53380d13, 0x650a7354, 0x766a0abb, 0x81c2c92e, 0x92722c85, 0xa2bfe8a1, 0xa81a664b, 0xc24b8b70, 0xc76c51a3, 0xd192e819, 0xd6990624, 0xf40e3585, 0x106aa070, 0x19a4c116, 0x1e376c08, 0x2748774c, 0x34b0bcb5, 0x391c0cb3, 0x4ed8aa4a, 0x5b9cca4f, 0x682e6ff3, - 0x748f82ee, 0x78a5636f, 0x84c87814, 0x8cc70208, 0x90befffa, 0xa4506ceb, 0xbef9a3f7, 0xc67178f2 + 0x748f82ee, 0x78a5636f, 0x84c87814, 0x8cc70208, 0x90befffa, 0xa4506ceb, 0xbef9a3f7, 0xc67178f2, ] private var bytes: OutputByteStream @@ -175,19 +175,19 @@ fileprivate final class VendoredSHA256HashContext: HashContext { self.bytes = .init() } - func add(bytes: D) where D : DataProtocol { + func add(bytes: D) where D: DataProtocol { self.bytes.write(bytes) } var signature: ByteString { - let digest = self.hash(self.bytes.bytes) + let digest = self.hash(self.bytes.bytes) var result = [UInt8](repeating: 0, count: Int(digest.count) * 2) digest.bytes.withUnsafeBufferPointer { ptr in for i in 0..> 4) - result[i*2 + 1] = hexchar(value & 0x0F) + result[i * 2 + 0] = hexchar(value >> 4) + result[i * 2 + 1] = hexchar(value & 0x0F) } } @@ -234,14 +234,14 @@ fileprivate final class VendoredSHA256HashContext: HashContext { case 0...15: let index = block.startIndex.advanced(by: t * 4) // Put 4 bytes in each message. 
- W[t] = UInt32(block[index + 0]) << 24 + W[t] = UInt32(block[index + 0]) << 24 W[t] |= UInt32(block[index + 1]) << 16 W[t] |= UInt32(block[index + 2]) << 8 W[t] |= UInt32(block[index + 3]) default: - let σ1 = W[t-2].rotateRight(by: 17) ^ W[t-2].rotateRight(by: 19) ^ (W[t-2] >> 10) - let σ0 = W[t-15].rotateRight(by: 7) ^ W[t-15].rotateRight(by: 18) ^ (W[t-15] >> 3) - W[t] = σ1 &+ W[t-7] &+ σ0 &+ W[t-16] + let σ1 = W[t - 2].rotateRight(by: 17) ^ W[t - 2].rotateRight(by: 19) ^ (W[t - 2] >> 10) + let σ0 = W[t - 15].rotateRight(by: 7) ^ W[t - 15].rotateRight(by: 18) ^ (W[t - 15] >> 3) + W[t] = σ1 &+ W[t - 7] &+ σ0 &+ W[t - 16] } } @@ -367,29 +367,29 @@ public class DelegatedHashContext: HashContext { public final class InsecureHashContext: DelegatedHashContext { public init() { #if os(Windows) - super.init(impl: BCryptHashContext(algorithm: "MD5", digestLength: 16)) + super.init(impl: BCryptHashContext(algorithm: "MD5", digestLength: 16)) #elseif canImport(CryptoKit) - super.init(impl: SwiftCryptoHashContext()) + super.init(impl: SwiftCryptoHashContext()) #else - super.init(impl: VendoredSHA256HashContext()) + super.init(impl: VendoredSHA256HashContext()) #endif } } @available(*, unavailable) -extension InsecureHashContext: Sendable { } +extension InsecureHashContext: Sendable {} public final class SHA256Context: DelegatedHashContext { public init() { #if os(Windows) - super.init(impl: BCryptHashContext(algorithm: "SHA256", digestLength: 32)) + super.init(impl: BCryptHashContext(algorithm: "SHA256", digestLength: 32)) #elseif canImport(CryptoKit) - super.init(impl: SwiftCryptoHashContext()) + super.init(impl: SwiftCryptoHashContext()) #else - super.init(impl: VendoredSHA256HashContext()) + super.init(impl: VendoredSHA256HashContext()) #endif } } @available(*, unavailable) -extension SHA256Context: Sendable { } +extension SHA256Context: Sendable {} diff --git a/Sources/SWBUtil/Headermap.swift b/Sources/SWBUtil/Headermap.swift index 87c09c70..0ed5c819 100644 --- a/Sources/SWBUtil/Headermap.swift +++ b/Sources/SWBUtil/Headermap.swift @@ -147,7 +147,7 @@ public struct Headermap: Sendable { self.numEntries = Int(header.numEntries) let numBuckets = Int(header.numBuckets) - self.buckets = bytes.withUnsafeBufferPointer{ + self.buckets = bytes.withUnsafeBufferPointer { let bucketsStart = $0.baseAddress! + MemoryLayout
.size return bucketsStart.withMemoryRebound(to: Bucket.self, capacity: numBuckets) { return [Bucket](UnsafeBufferPointer(start: $0, count: numBuckets)) @@ -170,7 +170,7 @@ public struct Headermap: Sendable { // Validate each of the buckets. let stringTableSize = Int32(stringTable.count) for bucket in self.buckets { - guard bucket.keyIndex < stringTableSize && bucket.prefixIndex < stringTableSize && bucket.suffixIndex < stringTableSize else { + guard bucket.keyIndex < stringTableSize && bucket.prefixIndex < stringTableSize && bucket.suffixIndex < stringTableSize else { throw Error.invalidHeadermap("invalid header (invalid bucket index)") } } @@ -190,7 +190,7 @@ public struct Headermap: Sendable { // Insert the new entry bucket. let key = [UInt8](key.str.utf8) let keyIndex = getStringIndex(key) - let (dirname,basename) = value.split() + let (dirname, basename) = value.split() let prefixIndex: Int32, suffixIndex: Int32 if dirname.isEmpty { prefixIndex = 0 @@ -199,7 +199,7 @@ public struct Headermap: Sendable { prefixIndex = getStringIndex([UInt8](dirname.str.utf8) + [Path.pathSeparatorUTF8]) suffixIndex = getStringIndex([UInt8](basename.utf8)) } - insertItem(key[0 ..< key.endIndex], keyIndex: keyIndex, prefixIndex: prefixIndex, suffixIndex: suffixIndex, replace: replace) + insertItem(key[0.., keyIndex: Int32, prefixIndex: Int32, suffixIndex: Int32, replace: Bool = true) { assert(buckets.count.isPowerOfTwo()) let hash = Headermap.hashBytes(key) - for i in 0 ..< buckets.count { + for i in 0..: _Heavy fileprivate final class Entry: Sendable { /// Empty helper type to prove exclusive access to `accessTime` without storing a mutex for each instance. - struct Witness: ~Copyable { } + struct Witness: ~Copyable {} /// The actual value. let value: Value @@ -224,8 +224,7 @@ public final class HeavyCache: _Heavy // Prune one item at a time. This is not efficient for pruning large numbers of items, but that is not the intended use case currently. // // We take some care to make sure we drop keys already evicted by the underlying cache before anything else. - whileLoop: - while _keys.count > max { + whileLoop: while _keys.count > max { // Prune the oldest entry. var oldest: (key: Key, entry: Entry)? = nil for key in _keys { diff --git a/Sources/SWBUtil/IO.swift b/Sources/SWBUtil/IO.swift index 07420339..f79d03aa 100644 --- a/Sources/SWBUtil/IO.swift +++ b/Sources/SWBUtil/IO.swift @@ -13,9 +13,9 @@ import Foundation #if canImport(System) -public import System + public import System #else -public import SystemPackage + public import SystemPackage #endif public struct IOPipe: Sendable { @@ -32,9 +32,9 @@ public struct IOPipe: Sendable { extension FileDescriptor { public func setBinaryMode() throws { #if os(Windows) - if _setmode(rawValue, _O_BINARY) == -1 { - throw Errno(rawValue: errno) - } + if _setmode(rawValue, _O_BINARY) == -1 { + throw Errno(rawValue: errno) + } #endif } } diff --git a/Sources/SWBUtil/IndexStore.swift b/Sources/SWBUtil/IndexStore.swift index 8b4515fc..b4938653 100644 --- a/Sources/SWBUtil/IndexStore.swift +++ b/Sources/SWBUtil/IndexStore.swift @@ -89,7 +89,7 @@ private final class IndexStoreImpl { for objectFile in objectFiles { // Get the records of this object file. - guard let unitReader = try? self.api.call ({ self.api.fn.unit_reader_create(store, unitName(object: objectFile), &$0) }) else { + guard let unitReader = try? 
self.api.call({ self.api.fn.unit_reader_create(store, unitName(object: objectFile), &$0) }) else { continue } let records = try getRecords(unitReader: unitReader) @@ -113,7 +113,7 @@ private final class IndexStoreImpl { if let parentClassName = inheritance[moduleName]?[className] { let parentMethods = flatten(moduleName: moduleName, className: parentClassName) - allMethods.merge(parentMethods, uniquingKeysWith: { (lhs, _) in lhs }) + allMethods.merge(parentMethods, uniquingKeysWith: { (lhs, _) in lhs }) } for method in testMethods[moduleName]?[className] ?? [] { @@ -136,11 +136,10 @@ private final class IndexStoreImpl { return testCaseClasses } - @available(*, deprecated, message: "use listTests(in:) instead") public func listTests(inObjectFile object: Path) throws -> [TestCaseClass] { // Get the records of this object file. - let unitReader = try api.call{ self.api.fn.unit_reader_create(store, unitName(object: object), &$0) } + let unitReader = try api.call { self.api.fn.unit_reader_create(store, unitName(object: object), &$0) } let records = try getRecords(unitReader: unitReader) // Get the test classes. @@ -178,15 +177,15 @@ private final class IndexStoreImpl { return testCaseClasses } - private func getTestsInfo(record: String) throws -> (inheritance: [String: String], testMethods: [String: [(name: String, async: Bool)]] ) { - let recordReader = try api.call{ self.api.fn.record_reader_create(store, record, &$0) } + private func getTestsInfo(record: String) throws -> (inheritance: [String: String], testMethods: [String: [(name: String, async: Bool)]]) { + let recordReader = try api.call { self.api.fn.record_reader_create(store, record, &$0) } // scan for inheritance let inheritanceStoreRef = StoreRef([String: String](), api: self.api) let inheritancePointer = unsafeBitCast(Unmanaged.passUnretained(inheritanceStoreRef), to: UnsafeMutableRawPointer.self) - _ = self.api.fn.record_reader_occurrences_apply_f(recordReader, inheritancePointer) { inheritancePointer , occ -> Bool in + _ = self.api.fn.record_reader_occurrences_apply_f(recordReader, inheritancePointer) { inheritancePointer, occ -> Bool in let inheritanceStoreRef = Unmanaged>.fromOpaque(inheritancePointer!).takeUnretainedValue() let fn = inheritanceStoreRef.api.fn @@ -197,7 +196,7 @@ private final class IndexStoreImpl { if symbolProperties & UInt64(INDEXSTORE_SYMBOL_PROPERTY_UNITTEST.rawValue) == 0 { return true } - if fn.symbol_get_kind(sym) != INDEXSTORE_SYMBOL_KIND_CLASS{ + if fn.symbol_get_kind(sym) != INDEXSTORE_SYMBOL_KIND_CLASS { return true } @@ -232,7 +231,7 @@ private final class IndexStoreImpl { let testMethodsStoreRef = StoreRef([String: [(name: String, async: Bool)]](), api: api) let testMethodsPointer = unsafeBitCast(Unmanaged.passUnretained(testMethodsStoreRef), to: UnsafeMutableRawPointer.self) - _ = self.api.fn.record_reader_occurrences_apply_f(recordReader, testMethodsPointer) { testMethodsPointer , occ -> Bool in + _ = self.api.fn.record_reader_occurrences_apply_f(recordReader, testMethodsPointer) { testMethodsPointer, occ -> Bool in let testMethodsStoreRef = Unmanaged>.fromOpaque(testMethodsPointer!).takeUnretainedValue() let fn = testMethodsStoreRef.api.fn @@ -285,7 +284,7 @@ private final class IndexStoreImpl { let builder = StoreRef([String](), api: api) let ctx = unsafeBitCast(Unmanaged.passUnretained(builder), to: UnsafeMutableRawPointer.self) - _ = self.api.fn.unit_reader_dependencies_apply_f(unitReader, ctx) { ctx , unit -> Bool in + _ = self.api.fn.unit_reader_dependencies_apply_f(unitReader, ctx) { ctx, 
unit -> Bool in let store = Unmanaged<StoreRef<[String]>>.fromOpaque(ctx!).takeUnretainedValue() let fn = store.api.fn if fn.unit_dependency_get_kind(unit) == INDEXSTORE_UNIT_DEPENDENCY_RECORD { @@ -356,22 +355,22 @@ private final class IndexStoreAPIImpl { var api = swiftbuild_indexstore_functions_t() api.store_create = Library.lookup(dylib, "indexstore_store_create") - api.store_get_unit_name_from_output_path = Library.lookup(dylib, "indexstore_store_get_unit_name_from_output_path") - api.unit_reader_create = Library.lookup(dylib, "indexstore_unit_reader_create") - api.error_get_description = Library.lookup(dylib, "indexstore_error_get_description") - api.unit_reader_dependencies_apply_f = Library.lookup(dylib, "indexstore_unit_reader_dependencies_apply_f") - api.unit_reader_get_module_name = Library.lookup(dylib, "indexstore_unit_reader_get_module_name") - api.unit_dependency_get_kind = Library.lookup(dylib, "indexstore_unit_dependency_get_kind") - api.unit_dependency_get_name = Library.lookup(dylib, "indexstore_unit_dependency_get_name") - api.record_reader_create = Library.lookup(dylib, "indexstore_record_reader_create") - api.symbol_get_name = Library.lookup(dylib, "indexstore_symbol_get_name") - api.symbol_get_properties = Library.lookup(dylib, "indexstore_symbol_get_properties") - api.symbol_get_kind = Library.lookup(dylib, "indexstore_symbol_get_kind") - api.record_reader_occurrences_apply_f = Library.lookup(dylib, "indexstore_record_reader_occurrences_apply_f") - api.occurrence_get_symbol = Library.lookup(dylib, "indexstore_occurrence_get_symbol") - api.occurrence_relations_apply_f = Library.lookup(dylib, "indexstore_occurrence_relations_apply_f") - api.symbol_relation_get_symbol = Library.lookup(dylib, "indexstore_symbol_relation_get_symbol") - api.symbol_relation_get_roles = Library.lookup(dylib, "indexstore_symbol_relation_get_roles") + api.store_get_unit_name_from_output_path = Library.lookup(dylib, "indexstore_store_get_unit_name_from_output_path") + api.unit_reader_create = Library.lookup(dylib, "indexstore_unit_reader_create") + api.error_get_description = Library.lookup(dylib, "indexstore_error_get_description") + api.unit_reader_dependencies_apply_f = Library.lookup(dylib, "indexstore_unit_reader_dependencies_apply_f") + api.unit_reader_get_module_name = Library.lookup(dylib, "indexstore_unit_reader_get_module_name") + api.unit_dependency_get_kind = Library.lookup(dylib, "indexstore_unit_dependency_get_kind") + api.unit_dependency_get_name = Library.lookup(dylib, "indexstore_unit_dependency_get_name") + api.record_reader_create = Library.lookup(dylib, "indexstore_record_reader_create") + api.symbol_get_name = Library.lookup(dylib, "indexstore_symbol_get_name") + api.symbol_get_properties = Library.lookup(dylib, "indexstore_symbol_get_properties") + api.symbol_get_kind = Library.lookup(dylib, "indexstore_symbol_get_kind") + api.record_reader_occurrences_apply_f = Library.lookup(dylib, "indexstore_record_reader_occurrences_apply_f") + api.occurrence_get_symbol = Library.lookup(dylib, "indexstore_occurrence_get_symbol") + api.occurrence_relations_apply_f = Library.lookup(dylib, "indexstore_occurrence_relations_apply_f") + api.symbol_relation_get_symbol = Library.lookup(dylib, "indexstore_symbol_relation_get_symbol") + api.symbol_relation_get_roles = Library.lookup(dylib, "indexstore_symbol_relation_get_roles") self.fn = api } diff --git a/Sources/SWBUtil/InterningArena.swift b/Sources/SWBUtil/InterningArena.swift index 11ca1435..7a7b1077 100644 --- a/Sources/SWBUtil/InterningArena.swift +++
b/Sources/SWBUtil/InterningArena.swift @@ -54,7 +54,7 @@ public final class InterningArena { } @available(*, unavailable) -extension InterningArena: Sendable { } +extension InterningArena: Sendable {} public final class FrozenInterningArena: Sendable { private let items: [T] diff --git a/Sources/SWBUtil/JSONEncoder.swift index ba58273d..f763d699 100644 --- a/Sources/SWBUtil/JSONEncoder.swift +++ b/Sources/SWBUtil/JSONEncoder.swift @@ -27,7 +27,7 @@ extension JSONEncoder { } extension JSONDecoder { - public func decode<T>(_ type: T.Type, from path: Path, fs: any FSProxy) throws -> T where T : Decodable { + public func decode<T>(_ type: T.Type, from path: Path, fs: any FSProxy) throws -> T where T: Decodable { let data = try fs.read(path) do { return try decode(type, from: Data(data)) diff --git a/Sources/SWBUtil/LazyCache.swift index b9a64799..ff0c9c22 100644 --- a/Sources/SWBUtil/LazyCache.swift +++ b/Sources/SWBUtil/LazyCache.swift @@ -15,36 +15,36 @@ import Synchronization // Workaround compiler crash: rdar://138854389 (Compiler crashes when compiling code using a Mutex with an Optional) #if os(Windows) -private struct Storage<T> { - let _value: Any? + private struct Storage<T> { + let _value: Any? - init(value: T? = nil) { _value = value } + init(value: T? = nil) { _value = value } - var value: T? { - _value as! T? - } + var value: T? { + _value as! T? + } - static var none: Storage { - .init() - } + static var none: Storage { + .init() + } - static func some(_ value: T) -> Storage { - .init(value: value) + static func some(_ value: T) -> Storage { + .init(value: value) + } } -} -private typealias LazyStorage<T> = Storage<T> + private typealias LazyStorage<T> = Storage<T> #else -private typealias LazyStorage<T> = Optional<T> -fileprivate extension Optional { - var value: Wrapped? { - switch self { - case let .some(wrapped): - return wrapped - case .none: - return nil + private typealias LazyStorage<T> = Optional<T> + fileprivate extension Optional { + var value: Wrapped? { + switch self { + case let .some(wrapped): + return wrapped + case .none: + return nil + } } } -} #endif /// Wrapper for thread-safe lazily computed values.
diff --git a/Sources/SWBUtil/Library.swift b/Sources/SWBUtil/Library.swift index 3412bbb1..0afd2cc0 100644 --- a/Sources/SWBUtil/Library.swift +++ b/Sources/SWBUtil/Library.swift @@ -14,9 +14,9 @@ import SWBCLibc public import SWBLibc #if canImport(System) -import System + import System #else -import SystemPackage + import SystemPackage #endif public enum Library: Sendable { @@ -33,54 +33,54 @@ public enum Library: Sendable { @_alwaysEmitIntoClient public static func open(_ path: Path) throws -> LibraryHandle { #if os(Windows) - guard let handle = try path.withPlatformString({ p in try p.withCanonicalPathRepresentation({ LoadLibraryW($0) }) }) else { - throw LibraryOpenError(message: Win32Error(GetLastError()).description) - } - return LibraryHandle(rawValue: handle) - #else - #if canImport(Darwin) - let flags = RTLD_LAZY | RTLD_FIRST + guard let handle = try path.withPlatformString({ p in try p.withCanonicalPathRepresentation({ LoadLibraryW($0) }) }) else { + throw LibraryOpenError(message: Win32Error(GetLastError()).description) + } + return LibraryHandle(rawValue: handle) #else - let flags = RTLD_LAZY - #endif - guard let handle = path.withPlatformString({ (p: UnsafePointer<CChar>) in dlopen(p, flags) }) else { - #if os(Android) - throw LibraryOpenError(message: String(cString: dlerror()!)) + #if canImport(Darwin) + let flags = RTLD_LAZY | RTLD_FIRST #else - throw LibraryOpenError(message: String(cString: dlerror())) + let flags = RTLD_LAZY #endif - } - return LibraryHandle(rawValue: handle) + guard let handle = path.withPlatformString({ (p: UnsafePointer<CChar>) in dlopen(p, flags) }) else { + #if os(Android) + throw LibraryOpenError(message: String(cString: dlerror()!)) + #else + throw LibraryOpenError(message: String(cString: dlerror())) + #endif + } + return LibraryHandle(rawValue: handle) #endif } public static func lookup<T>(_ handle: LibraryHandle, _ symbol: String) -> T? { #if os(Windows) - guard let ptr = GetProcAddress(handle.rawValue, symbol) else { return nil } + guard let ptr = GetProcAddress(handle.rawValue, symbol) else { return nil } #else - guard let ptr = dlsym(handle.rawValue, symbol) else { return nil } + guard let ptr = dlsym(handle.rawValue, symbol) else { return nil } #endif return unsafeBitCast(ptr, to: T.self) } public static func locate<T>(_ pointer: T.Type) throws -> Path { #if os(Windows) - var handle: HMODULE? - guard GetModuleHandleExW(DWORD(GET_MODULE_HANDLE_EX_FLAG_FROM_ADDRESS | GET_MODULE_HANDLE_EX_FLAG_UNCHANGED_REFCOUNT), unsafeBitCast(pointer, to: LPCWSTR?.self), &handle) else { - throw SymbolLookupError(underlyingError: Win32Error(GetLastError())) - } - return try Path(SWB_GetModuleFileNameW(handle)) + var handle: HMODULE?
+ guard GetModuleHandleExW(DWORD(GET_MODULE_HANDLE_EX_FLAG_FROM_ADDRESS | GET_MODULE_HANDLE_EX_FLAG_UNCHANGED_REFCOUNT), unsafeBitCast(pointer, to: LPCWSTR?.self), &handle) else { + throw SymbolLookupError(underlyingError: Win32Error(GetLastError())) + } + return try Path(SWB_GetModuleFileNameW(handle)) #else - var info = Dl_info() - #if os(Android) - dladdr(unsafeBitCast(pointer, to: UnsafeMutableRawPointer.self), &info) - #else - dladdr(unsafeBitCast(pointer, to: UnsafeMutableRawPointer?.self), &info) - #endif - guard let dli_fname = info.dli_fname else { - throw SymbolLookupError(underlyingError: nil) - } - return Path(platformString: UnsafeMutablePointer(mutating: dli_fname)) + var info = Dl_info() + #if os(Android) + dladdr(unsafeBitCast(pointer, to: UnsafeMutableRawPointer.self), &info) + #else + dladdr(unsafeBitCast(pointer, to: UnsafeMutableRawPointer?.self), &info) + #endif + guard let dli_fname = info.dli_fname else { + throw SymbolLookupError(underlyingError: nil) + } + return Path(platformString: UnsafeMutablePointer(mutating: dli_fname)) #endif } } @@ -118,9 +118,9 @@ public struct SymbolLookupError: Error, CustomStringConvertible, Sendable { // Library handles just store an opaque reference to the dlopen/LoadLibrary-returned pointer, and so are Sendable in practice based on how they are used. public struct LibraryHandle: @unchecked Sendable { #if os(Windows) - @usableFromInline typealias PlatformHandle = HMODULE + @usableFromInline typealias PlatformHandle = HMODULE #else - @usableFromInline typealias PlatformHandle = UnsafeMutableRawPointer + @usableFromInline typealias PlatformHandle = UnsafeMutableRawPointer #endif fileprivate let rawValue: PlatformHandle diff --git a/Sources/SWBUtil/LineReader.swift b/Sources/SWBUtil/LineReader.swift index e35d2652..4f808df3 100644 --- a/Sources/SWBUtil/LineReader.swift +++ b/Sources/SWBUtil/LineReader.swift @@ -13,9 +13,9 @@ public import Foundation #if canImport(System) -import System + import System #else -import SystemPackage + import SystemPackage #endif public final class LineReader { @@ -69,4 +69,4 @@ public final class LineReader { } @available(*, unavailable) -extension LineReader: Sendable { } +extension LineReader: Sendable {} diff --git a/Sources/SWBUtil/Lock.swift b/Sources/SWBUtil/Lock.swift index b45625c6..5f06ccf7 100644 --- a/Sources/SWBUtil/Lock.swift +++ b/Sources/SWBUtil/Lock.swift @@ -11,79 +11,79 @@ //===----------------------------------------------------------------------===// #if canImport(os) -public import os + public import os #elseif os(Windows) -public import WinSDK + public import WinSDK #else -public import SWBLibc + public import SWBLibc #endif // FIXME: Replace the contents of this file with the Swift standard library's Mutex type once it's available everywhere we deploy. /// A more efficient lock than a DispatchQueue (esp. under contention). 
#if canImport(os) -public typealias Lock = OSAllocatedUnfairLock + public typealias Lock = OSAllocatedUnfairLock #else -public final class Lock: @unchecked Sendable { - #if os(Windows) - @usableFromInline - let mutex: UnsafeMutablePointer = UnsafeMutablePointer.allocate(capacity: 1) - #elseif os(FreeBSD) || os(OpenBSD) - @usableFromInline - let mutex: UnsafeMutablePointer = UnsafeMutablePointer.allocate(capacity: 1) - #else - @usableFromInline - let mutex: UnsafeMutablePointer = UnsafeMutablePointer.allocate(capacity: 1) - #endif - - public init() { + public final class Lock: @unchecked Sendable { #if os(Windows) - InitializeSRWLock(self.mutex) + @usableFromInline + let mutex: UnsafeMutablePointer = UnsafeMutablePointer.allocate(capacity: 1) + #elseif os(FreeBSD) || os(OpenBSD) + @usableFromInline + let mutex: UnsafeMutablePointer = UnsafeMutablePointer.allocate(capacity: 1) #else - let err = pthread_mutex_init(self.mutex, nil) - precondition(err == 0) + @usableFromInline + let mutex: UnsafeMutablePointer = UnsafeMutablePointer.allocate(capacity: 1) #endif - } - deinit { - #if os(Windows) - // SRWLOCK does not need to be freed - #else - let err = pthread_mutex_destroy(self.mutex) - precondition(err == 0) - #endif - mutex.deallocate() - } + public init() { + #if os(Windows) + InitializeSRWLock(self.mutex) + #else + let err = pthread_mutex_init(self.mutex, nil) + precondition(err == 0) + #endif + } - @usableFromInline - func lock() { - #if os(Windows) - AcquireSRWLockExclusive(self.mutex) - #else - let err = pthread_mutex_lock(self.mutex) - precondition(err == 0) - #endif - } + deinit { + #if os(Windows) + // SRWLOCK does not need to be freed + #else + let err = pthread_mutex_destroy(self.mutex) + precondition(err == 0) + #endif + mutex.deallocate() + } - @usableFromInline - func unlock() { - #if os(Windows) - ReleaseSRWLockExclusive(self.mutex) - #else - let err = pthread_mutex_unlock(self.mutex) - precondition(err == 0) - #endif - } + @usableFromInline + func lock() { + #if os(Windows) + AcquireSRWLockExclusive(self.mutex) + #else + let err = pthread_mutex_lock(self.mutex) + precondition(err == 0) + #endif + } - @inlinable - public func withLock(_ body: () throws -> T) rethrows -> T { - self.lock() - defer { - self.unlock() + @usableFromInline + func unlock() { + #if os(Windows) + ReleaseSRWLockExclusive(self.mutex) + #else + let err = pthread_mutex_unlock(self.mutex) + precondition(err == 0) + #endif + } + + @inlinable + public func withLock(_ body: () throws -> T) rethrows -> T { + self.lock() + defer { + self.unlock() + } + return try body() } - return try body() } -} #endif /// Small wrapper to provide only locked access to its value. 
@@ -122,15 +122,15 @@ extension LockedValue where Value: Sendable { } #if canImport(Darwin) -@available(macOS, deprecated: 15.0, renamed: "Synchronization.Mutex") -@available(iOS, deprecated: 18.0, renamed: "Synchronization.Mutex") -@available(tvOS, deprecated: 18.0, renamed: "Synchronization.Mutex") -@available(watchOS, deprecated: 11.0, renamed: "Synchronization.Mutex") -@available(visionOS, deprecated: 2.0, renamed: "Synchronization.Mutex") -public typealias SWBMutex = LockedValue + @available(macOS, deprecated: 15.0, renamed: "Synchronization.Mutex") + @available(iOS, deprecated: 18.0, renamed: "Synchronization.Mutex") + @available(tvOS, deprecated: 18.0, renamed: "Synchronization.Mutex") + @available(watchOS, deprecated: 11.0, renamed: "Synchronization.Mutex") + @available(visionOS, deprecated: 2.0, renamed: "Synchronization.Mutex") + public typealias SWBMutex = LockedValue #else -public import Synchronization -public typealias SWBMutex = Mutex + public import Synchronization + public typealias SWBMutex = Mutex #endif extension SWBMutex where Value: ~Copyable, Value == Void { diff --git a/Sources/SWBUtil/MachO.swift b/Sources/SWBUtil/MachO.swift index 275394f8..51651468 100644 --- a/Sources/SWBUtil/MachO.swift +++ b/Sources/SWBUtil/MachO.swift @@ -13,18 +13,18 @@ import SWBLibc #if canImport(Darwin) -import Darwin -public import MachO + import Darwin + public import MachO #endif #if canImport(Darwin.ar) -import Darwin.ar + import Darwin.ar #endif #if canImport(System) -import System + import System #else -import SystemPackage + import SystemPackage #endif public import struct Foundation.CharacterSet @@ -360,7 +360,7 @@ public final class BinaryReader: BinaryReaderView { } @available(*, unavailable) -extension BinaryReader: Sendable { } +extension BinaryReader: Sendable {} extension ByteString: BinaryData { @@ -371,11 +371,14 @@ extension ByteString: BinaryData { } var value = T() - try withUnsafeMutableBytes(of: &value, { (to: UnsafeMutableRawBufferPointer) throws -> Void in - self.bytes.withUnsafeBytes { from in - let _ = memcpy(to.baseAddress!, from.baseAddress! + offset, MemoryLayout.size) + try withUnsafeMutableBytes( + of: &value, + { (to: UnsafeMutableRawBufferPointer) throws -> Void in + self.bytes.withUnsafeBytes { from in + let _ = memcpy(to.baseAddress!, from.baseAddress! + offset, MemoryLayout.size) + } } - }) + ) return value } @@ -410,22 +413,21 @@ extension FileHandle: BinaryData { public var size: Int { #if os(Windows) - var info = LARGE_INTEGER() - guard GetFileSizeEx(_handle, &info) else { - return Int.max - } - return Int(info.QuadPart) + var info = LARGE_INTEGER() + guard GetFileSizeEx(_handle, &info) else { + return Int.max + } + return Int(info.QuadPart) #else - var info = stat() - guard fstat(self.fileDescriptor, &info) != -1 else { - return Int.max - } - return Int(info.st_size) + var info = stat() + guard fstat(self.fileDescriptor, &info) != -1 else { + return Int.max + } + return Int(info.st_size) #endif } } - /// Expresses an error when reading a binary file. public enum BinaryReaderError: Error, CustomStringConvertible { /// Expresses an error which indicates the file does not appear to be a Mach-O or static archive at all, based on its magic. 
@@ -461,33 +463,32 @@ public enum BinaryReaderError: Error, CustomStringConvertible { } #if !canImport(Darwin) -fileprivate let FAT_MAGIC: UInt32 = 0xcafebabe -fileprivate let FAT_CIGAM: UInt32 = 0xbebafeca -fileprivate let FAT_MAGIC_64: UInt32 = 0xcafebabf -fileprivate let FAT_CIGAM_64: UInt32 = 0xbfbafeca -fileprivate let MH_MAGIC: UInt32 = 0xfeedface -fileprivate let MH_CIGAM: UInt32 = 0xcefaedfe -fileprivate let MH_MAGIC_64: UInt32 = 0xfeedfacf -fileprivate let MH_CIGAM_64: UInt32 = 0xcffaedfe -fileprivate let MH_OBJECT: UInt32 = 0x1 -fileprivate let MH_EXECUTE: UInt32 = 0x2 -fileprivate let MH_DYLIB: UInt32 = 0x6 -fileprivate let MH_BUNDLE: UInt32 = 0x8 -fileprivate let SYMDEF = "__.SYMDEF" -fileprivate let SYMDEF_SORTED = "__.SYMDEF SORTED" -fileprivate let SYMDEF_64 = "__.SYMDEF_64" -fileprivate let SYMDEF_64_SORTED = "__.SYMDEF_64 SORTED" -public typealias cpu_type_t = Int32 -public typealias cpu_subtype_t = Int32 -public typealias vm_prot_t = Int32 -public let CPU_TYPE_ANY: Int32 = -1 + fileprivate let FAT_MAGIC: UInt32 = 0xcafebabe + fileprivate let FAT_CIGAM: UInt32 = 0xbebafeca + fileprivate let FAT_MAGIC_64: UInt32 = 0xcafebabf + fileprivate let FAT_CIGAM_64: UInt32 = 0xbfbafeca + fileprivate let MH_MAGIC: UInt32 = 0xfeedface + fileprivate let MH_CIGAM: UInt32 = 0xcefaedfe + fileprivate let MH_MAGIC_64: UInt32 = 0xfeedfacf + fileprivate let MH_CIGAM_64: UInt32 = 0xcffaedfe + fileprivate let MH_OBJECT: UInt32 = 0x1 + fileprivate let MH_EXECUTE: UInt32 = 0x2 + fileprivate let MH_DYLIB: UInt32 = 0x6 + fileprivate let MH_BUNDLE: UInt32 = 0x8 + fileprivate let SYMDEF = "__.SYMDEF" + fileprivate let SYMDEF_SORTED = "__.SYMDEF SORTED" + fileprivate let SYMDEF_64 = "__.SYMDEF_64" + fileprivate let SYMDEF_64_SORTED = "__.SYMDEF_64 SORTED" + public typealias cpu_type_t = Int32 + public typealias cpu_subtype_t = Int32 + public typealias vm_prot_t = Int32 + public let CPU_TYPE_ANY: Int32 = -1 #endif // MARK: - Mach-O String Parsing fileprivate let machOStringEncoding = String.Encoding.utf8 - /// Some MachO strings are encoded in fixed size char arrays, e.g. `struct S { char[16] str; }`. These may contain a null terminator or use the full length of the array. 
fileprivate func parseMachOFixedSizeString(_ bytes: (Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8)) throws -> String { var mutableBytes = bytes @@ -514,43 +515,43 @@ fileprivate func parseMachONullTerminatedString(_ ptr: UnsafeRawBufferPointer) t // MARK: - Mach-O wrappers -extension UInt8: BinaryDataType{} -extension UInt32: BinaryDataType{} +extension UInt8: BinaryDataType {} +extension UInt32: BinaryDataType {} #if canImport(Darwin) -extension fat_header: BinaryDataType{} -extension mach_header: BinaryDataType{} -extension mach_header_64: BinaryDataType{} -extension fat_arch: BinaryDataType{} -extension fat_arch_64: BinaryDataType{} -extension load_command: BinaryDataType{} -extension dylib_command: BinaryDataType{} -extension uuid_command: BinaryDataType{} -extension segment_command: BinaryDataType{} -extension segment_command_64: BinaryDataType{} -extension section: BinaryDataType{} -extension section_64: BinaryDataType{} -extension build_version_command: BinaryDataType{} -extension version_min_command: BinaryDataType{} -extension rpath_command: BinaryDataType{} - -#if DONT_HAVE_LC_ATOM_INFO || SWIFT_PACKAGE -fileprivate let LC_ATOM_INFO = (0x36) -#endif + extension fat_header: BinaryDataType {} + extension mach_header: BinaryDataType {} + extension mach_header_64: BinaryDataType {} + extension fat_arch: BinaryDataType {} + extension fat_arch_64: BinaryDataType {} + extension load_command: BinaryDataType {} + extension dylib_command: BinaryDataType {} + extension uuid_command: BinaryDataType {} + extension segment_command: BinaryDataType {} + extension segment_command_64: BinaryDataType {} + extension section: BinaryDataType {} + extension section_64: BinaryDataType {} + extension build_version_command: BinaryDataType {} + extension version_min_command: BinaryDataType {} + extension rpath_command: BinaryDataType {} + + #if DONT_HAVE_LC_ATOM_INFO || SWIFT_PACKAGE + fileprivate let LC_ATOM_INFO = (0x36) + #endif #endif #if canImport(Darwin.ar) -extension ar_hdr: BinaryDataType{} + extension ar_hdr: BinaryDataType {} #endif // MARK: - Mach-O byte swapping helpers fileprivate protocol ByteSwappable { - var byteSwapped: Self {get} + var byteSwapped: Self { get } } extension Int16: ByteSwappable {} @@ -591,35 +592,35 @@ public protocol MachOFatHeader { #if canImport(Darwin) -extension fat_arch: MachOFatHeader { - public func offset(byteSwappedIfNeeded swap: Bool) -> UInt64 { - return UInt64(offset.byteSwappedIfNeeded(swap)) - } + extension fat_arch: MachOFatHeader { + public func offset(byteSwappedIfNeeded swap: Bool) -> UInt64 { + return UInt64(offset.byteSwappedIfNeeded(swap)) + } - public func size(byteSwappedIfNeeded swap: Bool) -> UInt64 { - return UInt64(size.byteSwappedIfNeeded(swap)) - } + public func size(byteSwappedIfNeeded swap: Bool) -> UInt64 { + return UInt64(size.byteSwappedIfNeeded(swap)) + } - /// Returns the size, in bytes, of the `fat_arch` struct. - public var structSize: Int { - return MemoryLayout.size + /// Returns the size, in bytes, of the `fat_arch` struct. 
+ public var structSize: Int { + return MemoryLayout.size + } } -} -extension fat_arch_64: MachOFatHeader { - public func offset(byteSwappedIfNeeded swap: Bool) -> UInt64 { - return offset.byteSwappedIfNeeded(swap) - } + extension fat_arch_64: MachOFatHeader { + public func offset(byteSwappedIfNeeded swap: Bool) -> UInt64 { + return offset.byteSwappedIfNeeded(swap) + } - public func size(byteSwappedIfNeeded swap: Bool) -> UInt64 { - return size.byteSwappedIfNeeded(swap) - } + public func size(byteSwappedIfNeeded swap: Bool) -> UInt64 { + return size.byteSwappedIfNeeded(swap) + } - /// Returns the size, in bytes, of the `fat_arch_64` struct. - public var structSize: Int { - return MemoryLayout.size + /// Returns the size, in bytes, of the `fat_arch_64` struct. + public var structSize: Int { + return MemoryLayout.size + } } -} #endif @@ -627,7 +628,7 @@ extension fat_arch_64: MachOFatHeader { public protocol MachOHeader { // These are direct mappings of the mach_header. var magic: UInt32 { get } - var cputype: cpu_type_t {get } + var cputype: cpu_type_t { get } var cpusubtype: cpu_subtype_t { get } var filetype: UInt32 { get } var ncmds: UInt32 { get } @@ -641,39 +642,39 @@ public protocol MachOHeader { #if canImport(Darwin) -extension mach_header: MachOHeader { - /// Returns the size, in bytes, of the `mach_header` struct. - public var structSize: Int { - return MemoryLayout.size + extension mach_header: MachOHeader { + /// Returns the size, in bytes, of the `mach_header` struct. + public var structSize: Int { + return MemoryLayout.size + } } -} -extension mach_header_64: MachOHeader { - /// Returns the size, in bytes, of the `mach_header_64` struct. - public var structSize: Int { - return MemoryLayout.size + extension mach_header_64: MachOHeader { + /// Returns the size, in bytes, of the `mach_header_64` struct. 
+ public var structSize: Int { + return MemoryLayout.size + } } -} #endif // MARK: - public protocol MachOSegmentLoadCommand { - var cmd: UInt32 {get} - var cmdsize: UInt32 {get} - var segname: (Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8) {get} - - var vmaddr64: UInt64 {get} - var vmsize64: UInt64 {get} - var fileoff64: UInt64 {get} - var filesize64: UInt64 {get} - - var maxprot: vm_prot_t {get} - var initprot: vm_prot_t {get} - var nsects: UInt32 {get} - var flags: UInt32 {get} + var cmd: UInt32 { get } + var cmdsize: UInt32 { get } + var segname: (Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8) { get } + + var vmaddr64: UInt64 { get } + var vmsize64: UInt64 { get } + var fileoff64: UInt64 { get } + var filesize64: UInt64 { get } + + var maxprot: vm_prot_t { get } + var initprot: vm_prot_t { get } + var nsects: UInt32 { get } + var flags: UInt32 { get } - var structSize: Int {get} + var structSize: Int { get } } public extension MachOSegmentLoadCommand { @@ -684,42 +685,42 @@ public extension MachOSegmentLoadCommand { #if canImport(Darwin) -extension segment_command: MachOSegmentLoadCommand { - public var vmaddr64: UInt64 { return UInt64(self.vmaddr) } - public var vmsize64: UInt64 { return UInt64(self.vmsize) } - public var fileoff64: UInt64 { return UInt64(self.fileoff) } - public var filesize64: UInt64 { return UInt64(self.filesize) } + extension segment_command: MachOSegmentLoadCommand { + public var vmaddr64: UInt64 { return UInt64(self.vmaddr) } + public var vmsize64: UInt64 { return UInt64(self.vmsize) } + public var fileoff64: UInt64 { return UInt64(self.fileoff) } + public var filesize64: UInt64 { return UInt64(self.filesize) } - public var structSize: Int { - return MemoryLayout.size + public var structSize: Int { + return MemoryLayout.size + } } -} -extension segment_command_64: MachOSegmentLoadCommand { - public var vmaddr64: UInt64 { return self.vmaddr } - public var vmsize64: UInt64 { return self.vmsize } - public var fileoff64: UInt64 { return self.fileoff } - public var filesize64: UInt64 { return self.filesize } + extension segment_command_64: MachOSegmentLoadCommand { + public var vmaddr64: UInt64 { return self.vmaddr } + public var vmsize64: UInt64 { return self.vmsize } + public var fileoff64: UInt64 { return self.fileoff } + public var filesize64: UInt64 { return self.filesize } - public var structSize: Int { - return MemoryLayout.size + public var structSize: Int { + return MemoryLayout.size + } } -} #endif // MARK: - Mach-O section information public protocol MachOSection { - var sectname: (Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8) {get} - var segname: (Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8) {get} - var addr64: UInt64 {get} - var size64: UInt64 {get} - var offset: UInt32 {get} - var align: UInt32 {get} - var reloff: UInt32 {get} - var nreloc: UInt32 {get} - var flags: UInt32 {get} + var sectname: (Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8) { get } + var segname: (Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8) { get } + var addr64: UInt64 { get } + var size64: UInt64 { get } + var offset: UInt32 { get } + var align: UInt32 { get } + var reloff: UInt32 { get } + var nreloc: UInt32 { get } + var flags: UInt32 { get } } extension MachOSection { @@ -734,25 +735,25 
@@ extension MachOSection { #if canImport(Darwin) -extension section: MachOSection { - public var addr64: UInt64 { - return UInt64(self.addr) - } + extension section: MachOSection { + public var addr64: UInt64 { + return UInt64(self.addr) + } - public var size64: UInt64 { - return UInt64(self.size) + public var size64: UInt64 { + return UInt64(self.size) + } } -} -extension section_64: MachOSection { - public var addr64: UInt64 { - return self.addr - } + extension section_64: MachOSection { + public var addr64: UInt64 { + return self.addr + } - public var size64: UInt64 { - return self.size + public var size64: UInt64 { + return self.size + } } -} #endif @@ -790,8 +791,7 @@ public final class MachO { let magic2: UInt32 = try reader.peek(offset: MemoryLayout.size) if magic2 == UInt32(bigEndian: StaticArchive.ARMAG2) { self = .archive - } - else { + } else { throw BinaryReaderError.unrecognizedFileType(magicWord1: magic, magicWord2: magic2) } @@ -842,7 +842,7 @@ public final class MachO { /// Returns an array of slices representing the Mach-O file contents and the overall linkage being used. /// /// - seealso: Slice - public func slicesIncludingLinkage() throws -> (slices: [Slice], linkage: WrappedFileType) { + public func slicesIncludingLinkage() throws -> (slices: [Slice], linkage: WrappedFileType) { // Ensure that this is re-entrant and that no lasting state modification happens within this function. reader.push() defer { reader.pop() } @@ -897,14 +897,14 @@ public final class MachO { let magic: UInt32 = try reader.peek() switch magic { #if canImport(Darwin) - case MH_MAGIC, MH_CIGAM: - self = try .mach_header(reader.peek()) + case MH_MAGIC, MH_CIGAM: + self = try .mach_header(reader.peek()) - case MH_MAGIC_64, MH_CIGAM_64: - self = try .mach_header_64(reader.peek()) + case MH_MAGIC_64, MH_CIGAM_64: + self = try .mach_header_64(reader.peek()) #else - case MH_MAGIC, MH_CIGAM, MH_MAGIC_64, MH_CIGAM_64: - throw BinaryReaderError.parseError("Mach-O parsing not supported on this platform") + case MH_MAGIC, MH_CIGAM, MH_MAGIC_64, MH_CIGAM_64: + throw BinaryReaderError.parseError("Mach-O parsing not supported on this platform") #endif case UInt32(bigEndian: StaticArchive.ARMAG1): @@ -921,21 +921,20 @@ public final class MachO { } #if canImport(Darwin) - case mach_header(mach_header) - case mach_header_64(mach_header_64) + case mach_header(mach_header) + case mach_header_64(mach_header_64) #endif } - var headers: [(Slice, any MachOHeader)] { #if canImport(Darwin) - switch self.header { - case .mach_header(let x): return [(self, x)] - case .mach_header_64(let x): return [(self, x)] - } + switch self.header { + case .mach_header(let x): return [(self, x)] + case .mach_header_64(let x): return [(self, x)] + } #else - assertionFailure("not implemented") - return [] + assertionFailure("not implemented") + return [] #endif } @@ -943,11 +942,11 @@ public final class MachO { precondition(headers.count <= 1, "invalid to call arch when there are multiple headers") #if canImport(Darwin) - if let (_, header) = headers.only { - if let name = Architecture.stringValue(cputype: header.cputype, cpusubtype: header.cpusubtype) { - return name + if let (_, header) = headers.only { + if let name = Architecture.stringValue(cputype: header.cputype, cpusubtype: header.cpusubtype) { + return name + } } - } #endif return "unknown" @@ -964,49 +963,49 @@ public final class MachO { /// Return the names and linkage types of all linked libraries. 
public func linkedLibraries() throws -> [(pathStr: String, linkageType: LinkageType)] { #if canImport(Darwin) - return try loadCommands().compactMap { lc throws -> (String, LinkageType)? in - let cmd: dylib_command - let linkageType: LinkageType + return try loadCommands().compactMap { lc throws -> (String, LinkageType)? in + let cmd: dylib_command + let linkageType: LinkageType - switch lc.value { - case .loadDylib(let dl): - cmd = dl - linkageType = .normal + switch lc.value { + case .loadDylib(let dl): + cmd = dl + linkageType = .normal - case .lazyLoadDylib(let dl): - cmd = dl - linkageType = .lazy + case .lazyLoadDylib(let dl): + cmd = dl + linkageType = .lazy - case .loadWeakDylib(let dl): - cmd = dl - linkageType = .weak + case .loadWeakDylib(let dl): + cmd = dl + linkageType = .weak - case .upwardDylib(let dl): - cmd = dl - linkageType = .upward + case .upwardDylib(let dl): + cmd = dl + linkageType = .upward - case .reexportDylib(let dl): - cmd = dl - linkageType = .reexport + case .reexportDylib(let dl): + cmd = dl + linkageType = .reexport - default: - return nil - } + default: + return nil + } - let cmdSize = Int(cmd.cmdsize.byteSwappedIfNeeded(lc.swap)) - let nameOffset = Int(cmd.dylib.name.offset.byteSwappedIfNeeded(lc.swap)) - guard nameOffset <= cmdSize else { throw BinaryReaderError.parseError("Failed to parse dylib name: offset out of bounds") } + let cmdSize = Int(cmd.cmdsize.byteSwappedIfNeeded(lc.swap)) + let nameOffset = Int(cmd.dylib.name.offset.byteSwappedIfNeeded(lc.swap)) + guard nameOffset <= cmdSize else { throw BinaryReaderError.parseError("Failed to parse dylib name: offset out of bounds") } - let reader = BinaryReader(data: lc.reader.data, startingAt: lc.reader.offset + nameOffset) - let bytes: [UInt8] = try reader.read(count: cmdSize) + let reader = BinaryReader(data: lc.reader.data, startingAt: lc.reader.offset + nameOffset) + let bytes: [UInt8] = try reader.read(count: cmdSize) - let pathStr = try bytes.withUnsafeBytes { ptr throws -> String in - return try parseMachONullTerminatedString(ptr) + let pathStr = try bytes.withUnsafeBytes { ptr throws -> String in + return try parseMachONullTerminatedString(ptr) + } + return (pathStr, linkageType) } - return (pathStr, linkageType) - } #else - throw BinaryReaderError.parseError("Mach-O parsing not supported on this platform") + throw BinaryReaderError.parseError("Mach-O parsing not supported on this platform") #endif } @@ -1017,31 +1016,31 @@ public final class MachO { public func installName() throws -> String? { #if canImport(Darwin) - let installNames = try loadCommands().compactMap { (lc: LoadCommand) throws -> String? in - switch lc.value { - case let .idDylib(cmd): - let cmdSize = Int(cmd.cmdsize.byteSwappedIfNeeded(lc.swap)) - let nameOffset = Int(cmd.dylib.name.offset.byteSwappedIfNeeded(lc.swap)) - guard nameOffset <= cmdSize else { throw BinaryReaderError.parseError("Failed to parse dylib name: offset out of bounds") } - - let reader = BinaryReader(data: lc.reader.data, startingAt: lc.reader.offset + nameOffset) - let bytes: [UInt8] = try reader.read(count: cmdSize) - - return try bytes.withUnsafeBytes { ptr throws -> String in - return try parseMachONullTerminatedString(ptr) + let installNames = try loadCommands().compactMap { (lc: LoadCommand) throws -> String? 
in + switch lc.value { + case let .idDylib(cmd): + let cmdSize = Int(cmd.cmdsize.byteSwappedIfNeeded(lc.swap)) + let nameOffset = Int(cmd.dylib.name.offset.byteSwappedIfNeeded(lc.swap)) + guard nameOffset <= cmdSize else { throw BinaryReaderError.parseError("Failed to parse dylib name: offset out of bounds") } + + let reader = BinaryReader(data: lc.reader.data, startingAt: lc.reader.offset + nameOffset) + let bytes: [UInt8] = try reader.read(count: cmdSize) + + return try bytes.withUnsafeBytes { ptr throws -> String in + return try parseMachONullTerminatedString(ptr) + } + default: + return nil } - default: - return nil } - } - if installNames.count > 1 { - throw BinaryReaderError.parseError("Encountered multiple LC_ID_DYLIB load commands") - } + if installNames.count > 1 { + throw BinaryReaderError.parseError("Encountered multiple LC_ID_DYLIB load commands") + } - return installNames.only + return installNames.only #else - throw BinaryReaderError.parseError("Mach-O parsing not supported on this platform") + throw BinaryReaderError.parseError("Mach-O parsing not supported on this platform") #endif } @@ -1049,412 +1048,412 @@ public final class MachO { // A simple fix is to create FileTypes 'object' and other valid, and check when providing slice linkage information. public var linkFileType: WrappedFileType { switch (header) { -#if canImport(Darwin) - case .mach_header(let header): - return .macho(FileType(rawValue: header.filetype)) - case .mach_header_64(let header): - return .macho(FileType(rawValue: header.filetype)) -#endif + #if canImport(Darwin) + case .mach_header(let header): + return .macho(FileType(rawValue: header.filetype)) + case .mach_header_64(let header): + return .macho(FileType(rawValue: header.filetype)) + #endif } } public func uuid() throws -> UUID? { #if canImport(Darwin) - let uuids = try loadCommands().compactMap { (lc: LoadCommand) throws -> UUID? in - switch lc.value { - case .uuid(let ulc): return UUID(uuid: ulc.uuid) - default: return nil + let uuids = try loadCommands().compactMap { (lc: LoadCommand) throws -> UUID? 
in + switch lc.value { + case .uuid(let ulc): return UUID(uuid: ulc.uuid) + default: return nil + } } - } - guard !uuids.isEmpty else { return nil } - guard uuids.count == 1 else { throw BinaryReaderError.parseError("Encountered multiple UUID load commands") } - return uuids[0] + guard !uuids.isEmpty else { return nil } + guard uuids.count == 1 else { throw BinaryReaderError.parseError("Encountered multiple UUID load commands") } + return uuids[0] #else - throw BinaryReaderError.parseError("Mach-O parsing not supported on this platform") + throw BinaryReaderError.parseError("Mach-O parsing not supported on this platform") #endif } #if canImport(Darwin) - public struct LoadCommand { - let reader: BinaryReader - let swap: Bool - @_spi(Testing) public let base: load_command - let value: ConcreteValue + public struct LoadCommand { + let reader: BinaryReader + let swap: Bool + @_spi(Testing) public let base: load_command + let value: ConcreteValue - init(slice: Slice, reader: BinaryReader, swap: Bool) throws { - self.reader = reader - self.swap = swap + init(slice: Slice, reader: BinaryReader, swap: Bool) throws { + self.reader = reader + self.swap = swap - reader.push() + reader.push() - let base: load_command = try reader.peek() - self.base = base - self.value = try { () throws -> ConcreteValue in - switch base.cmd { - case UInt32(LC_ID_DYLIB): - return try .idDylib(reader.read()) + let base: load_command = try reader.peek() + self.base = base + self.value = try { () throws -> ConcreteValue in + switch base.cmd { + case UInt32(LC_ID_DYLIB): + return try .idDylib(reader.read()) - case UInt32(LC_LOAD_DYLIB): - return try .loadDylib(reader.read()) + case UInt32(LC_LOAD_DYLIB): + return try .loadDylib(reader.read()) - case UInt32(LC_LAZY_LOAD_DYLIB): - return try .lazyLoadDylib(reader.read()) + case UInt32(LC_LAZY_LOAD_DYLIB): + return try .lazyLoadDylib(reader.read()) - case UInt32(LC_LOAD_WEAK_DYLIB): - return try .loadWeakDylib(reader.read()) + case UInt32(LC_LOAD_WEAK_DYLIB): + return try .loadWeakDylib(reader.read()) - case UInt32(LC_LOAD_UPWARD_DYLIB): - return try .upwardDylib(reader.read()) + case UInt32(LC_LOAD_UPWARD_DYLIB): + return try .upwardDylib(reader.read()) - case UInt32(LC_REEXPORT_DYLIB): - return try .reexportDylib(reader.read()) + case UInt32(LC_REEXPORT_DYLIB): + return try .reexportDylib(reader.read()) - case UInt32(LC_UUID): - return try .uuid(reader.read()) + case UInt32(LC_UUID): + return try .uuid(reader.read()) - case UInt32(LC_SEGMENT): - return try .segment(reader.read()) + case UInt32(LC_SEGMENT): + return try .segment(reader.read()) - case UInt32(LC_SEGMENT_64): - return try .segment_64(reader.read()) + case UInt32(LC_SEGMENT_64): + return try .segment_64(reader.read()) - case UInt32(LC_BUILD_VERSION): - return try .build_version(reader.read()) + case UInt32(LC_BUILD_VERSION): + return try .build_version(reader.read()) - case UInt32(LC_VERSION_MIN_MACOSX), - UInt32(LC_VERSION_MIN_IPHONEOS), - UInt32(LC_VERSION_MIN_TVOS), - UInt32(LC_VERSION_MIN_WATCHOS): - return try .version_min(reader.read()) + case UInt32(LC_VERSION_MIN_MACOSX), + UInt32(LC_VERSION_MIN_IPHONEOS), + UInt32(LC_VERSION_MIN_TVOS), + UInt32(LC_VERSION_MIN_WATCHOS): + return try .version_min(reader.read()) - case UInt32(LC_RPATH): - return try .rpath(reader.read()) + case UInt32(LC_RPATH): + return try .rpath(reader.read()) - case UInt32(LC_ATOM_INFO): - // We are not yet parsing the contents of this command. 
- return .atom_info(base) + case UInt32(LC_ATOM_INFO): + // We are not yet parsing the contents of this command. + return .atom_info(base) - default: - return .other(base) - } - }() + default: + return .other(base) + } + }() - reader.pop() - } + reader.pop() + } - enum ConcreteValue { - case idDylib(dylib_command) - case loadDylib(dylib_command) - case lazyLoadDylib(dylib_command) - case loadWeakDylib(dylib_command) - case upwardDylib(dylib_command) - case reexportDylib(dylib_command) - case uuid(uuid_command) - case segment(segment_command) - case segment_64(segment_command_64) - case build_version(build_version_command) - case version_min(version_min_command) - case rpath(rpath_command) - case atom_info(load_command) - - case other(load_command) + enum ConcreteValue { + case idDylib(dylib_command) + case loadDylib(dylib_command) + case lazyLoadDylib(dylib_command) + case loadWeakDylib(dylib_command) + case upwardDylib(dylib_command) + case reexportDylib(dylib_command) + case uuid(uuid_command) + case segment(segment_command) + case segment_64(segment_command_64) + case build_version(build_version_command) + case version_min(version_min_command) + case rpath(rpath_command) + case atom_info(load_command) + + case other(load_command) + } } - } - /// The load commands of the Mach-O file, or in the case of a static - /// archive, the combined load commands of all of the contained Mach-O - /// object files. - public func loadCommands() throws -> [LoadCommand] { - return try headers.map { (slice, header) in - let swap = shouldSwap(magic: header.magic) - let ncmds = Int(header.ncmds.byteSwappedIfNeeded(swap)) - var offset = header.structSize - - return try (0 ..< ncmds).map { index throws -> LoadCommand in - let loadCommand = try LoadCommand(slice: slice, reader: BinaryReader(data: reader.data, startingAt: reader.offset + offset), swap: swap) - let size = loadCommand.base.cmdsize.byteSwappedIfNeeded(swap) - offset += Int(size) - return loadCommand - } - }.reduce([], { $0 + $1 }) - } + /// The load commands of the Mach-O file, or in the case of a static + /// archive, the combined load commands of all of the contained Mach-O + /// object files. + public func loadCommands() throws -> [LoadCommand] { + return try headers.map { (slice, header) in + let swap = shouldSwap(magic: header.magic) + let ncmds = Int(header.ncmds.byteSwappedIfNeeded(swap)) + var offset = header.structSize + + return try (0.. LoadCommand in + let loadCommand = try LoadCommand(slice: slice, reader: BinaryReader(data: reader.data, startingAt: reader.offset + offset), swap: swap) + let size = loadCommand.base.cmdsize.byteSwappedIfNeeded(swap) + offset += Int(size) + return loadCommand + } + }.reduce([], { $0 + $1 }) + } #endif public func rpaths() throws -> [String] { #if canImport(Darwin) - return try self.loadCommands().compactMap({ (lc: LoadCommand) -> String? in - guard case let .rpath(rpath) = lc.value else { - return nil - } + return try self.loadCommands().compactMap({ (lc: LoadCommand) -> String? in + guard case let .rpath(rpath) = lc.value else { + return nil + } - lc.reader.push() - defer { lc.reader.pop() } + lc.reader.push() + defer { lc.reader.pop() } - try lc.reader.seek(by: Int(rpath.path.offset)) - var data = try lc.reader.read(count: Int(rpath.cmdsize - rpath.path.offset)) + try lc.reader.seek(by: Int(rpath.path.offset)) + var data = try lc.reader.read(count: Int(rpath.cmdsize - rpath.path.offset)) - // Clean the rpath from 0-padding bytes. 
- while data.last == 0 { - data.removeLast() - } + // Clean the rpath from 0-padding bytes. + while data.last == 0 { + data.removeLast() + } - return String(bytes: data, encoding: .utf8) - }) + return String(bytes: data, encoding: .utf8) + }) #else - throw BinaryReaderError.parseError("Mach-O parsing not supported on this platform") + throw BinaryReaderError.parseError("Mach-O parsing not supported on this platform") #endif } #if canImport(Darwin) - public struct Segment { - let loadCommand: LoadCommand - let value: ConcreteValue + public struct Segment { + let loadCommand: LoadCommand + let value: ConcreteValue - init(loadCommand: LoadCommand, value: ConcreteValue) { - self.loadCommand = loadCommand - self.value = value - } + init(loadCommand: LoadCommand, value: ConcreteValue) { + self.loadCommand = loadCommand + self.value = value + } - enum ConcreteValue { - case segment(segment_command) - case segment_64(segment_command_64) + enum ConcreteValue { + case segment(segment_command) + case segment_64(segment_command_64) - var segment: any MachOSegmentLoadCommand { - switch self { - case .segment(let s): return s - case .segment_64(let s): return s + var segment: any MachOSegmentLoadCommand { + switch self { + case .segment(let s): return s + case .segment_64(let s): return s + } } } - } - @_spi(Testing) public enum Section: Sendable { - case section(section) - case section_64(section_64) + @_spi(Testing) public enum Section: Sendable { + case section(section) + case section_64(section_64) - var section: any MachOSection { - switch self { - case .section(let s): return s - case .section_64(let s): return s + var section: any MachOSection { + switch self { + case .section(let s): return s + case .section_64(let s): return s + } } } - } - @_spi(Testing) public func sections() throws -> [Section] { - let value = self.value - let nsects = Int(value.segment.nsects.byteSwappedIfNeeded(loadCommand.swap)) - let offset = value.segment.structSize + @_spi(Testing) public func sections() throws -> [Section] { + let value = self.value + let nsects = Int(value.segment.nsects.byteSwappedIfNeeded(loadCommand.swap)) + let offset = value.segment.structSize - let reader = BinaryReader(data: loadCommand.reader.data, startingAt: loadCommand.reader.offset + offset) + let reader = BinaryReader(data: loadCommand.reader.data, startingAt: loadCommand.reader.offset + offset) - func readSection() throws -> Section { - switch value { - case .segment: - return try .section(reader.read()) + func readSection() throws -> Section { + switch value { + case .segment: + return try .section(reader.read()) - case .segment_64: - return try .section_64(reader.read()) + case .segment_64: + return try .section_64(reader.read()) + } } - } - return try (0 ..< nsects).map { _ in - return try readSection() + return try (0.. [Segment] { - return try self.loadCommands().compactMap { (lc: LoadCommand) -> Segment? in - switch lc.value { - case .segment(let sc): - return Segment(loadCommand: lc, value: .segment(sc)) + public func segments() throws -> [Segment] { + return try self.loadCommands().compactMap { (lc: LoadCommand) -> Segment? 
in + switch lc.value { + case .segment(let sc): + return Segment(loadCommand: lc, value: .segment(sc)) - case .segment_64(let sc): - return Segment(loadCommand: lc, value: .segment_64(sc)) + case .segment_64(let sc): + return Segment(loadCommand: lc, value: .segment_64(sc)) - default: return nil + default: return nil + } } } - } #endif public func buildVersions() throws -> [BuildVersion] { #if canImport(Darwin) - return try loadCommands().compactMap { loadCommand in - switch loadCommand.value { - case .build_version(let cmd): - guard let platform = BuildVersion.Platform(rawValue: cmd.platform) else { - throw BinaryReaderError.parseError("Unrecognized value '\(cmd.platform)' for LC_BUILD_VERSION platform field") - } - return BuildVersion(platform: platform, minOSVersion: Version(machOVersion: cmd.minos), sdkVersion: Version(machOVersion: cmd.sdk)) - case .version_min(let cmd): - guard let (_, header) = headers.first else { throw BinaryReaderError.parseError("much call buildVersions with valid headers") } - - let platform: BuildVersion.Platform - switch Int32(cmd.cmd) { - case LC_VERSION_MIN_MACOSX: - platform = .macOS - case LC_VERSION_MIN_IPHONEOS: - platform = [CPU_TYPE_X86, CPU_TYPE_X86_64].contains(header.cputype) ? .iOSSimulator : .iOS - case LC_VERSION_MIN_TVOS: - platform = [CPU_TYPE_X86, CPU_TYPE_X86_64].contains(header.cputype) ? .tvOSSimulator : .tvOS - case LC_VERSION_MIN_WATCHOS: - platform = [CPU_TYPE_X86, CPU_TYPE_X86_64].contains(header.cputype) ? .watchOSSimulator : .watchOS + return try loadCommands().compactMap { loadCommand in + switch loadCommand.value { + case .build_version(let cmd): + guard let platform = BuildVersion.Platform(rawValue: cmd.platform) else { + throw BinaryReaderError.parseError("Unrecognized value '\(cmd.platform)' for LC_BUILD_VERSION platform field") + } + return BuildVersion(platform: platform, minOSVersion: Version(machOVersion: cmd.minos), sdkVersion: Version(machOVersion: cmd.sdk)) + case .version_min(let cmd): + guard let (_, header) = headers.first else { throw BinaryReaderError.parseError("much call buildVersions with valid headers") } + + let platform: BuildVersion.Platform + switch Int32(cmd.cmd) { + case LC_VERSION_MIN_MACOSX: + platform = .macOS + case LC_VERSION_MIN_IPHONEOS: + platform = [CPU_TYPE_X86, CPU_TYPE_X86_64].contains(header.cputype) ? .iOSSimulator : .iOS + case LC_VERSION_MIN_TVOS: + platform = [CPU_TYPE_X86, CPU_TYPE_X86_64].contains(header.cputype) ? .tvOSSimulator : .tvOS + case LC_VERSION_MIN_WATCHOS: + platform = [CPU_TYPE_X86, CPU_TYPE_X86_64].contains(header.cputype) ? 
.watchOSSimulator : .watchOS + default: + throw BinaryReaderError.parseError("Unrecognized LC_VERSION_MIN_* command (\(cmd.cmd))") + } + return BuildVersion(platform: platform, minOSVersion: Version(machOVersion: cmd.version), sdkVersion: Version(machOVersion: cmd.sdk)) default: - throw BinaryReaderError.parseError("Unrecognized LC_VERSION_MIN_* command (\(cmd.cmd))") + return nil } - return BuildVersion(platform: platform, minOSVersion: Version(machOVersion: cmd.version), sdkVersion: Version(machOVersion: cmd.sdk)) - default: - return nil } - } #else - throw BinaryReaderError.parseError("Mach-O parsing not supported on this platform") + throw BinaryReaderError.parseError("Mach-O parsing not supported on this platform") #endif } public func containsAtomInfo() throws -> Bool { #if canImport(Darwin) - return try loadCommands().contains(where: { loadCommand in - switch loadCommand.value { - case .atom_info(_): - return true - default: - return false - } - }) + return try loadCommands().contains(where: { loadCommand in + switch loadCommand.value { + case .atom_info(_): + return true + default: + return false + } + }) #else - throw BinaryReaderError.parseError("Mach-O parsing not supported on this platform") + throw BinaryReaderError.parseError("Mach-O parsing not supported on this platform") #endif } #if canImport(Darwin) - private func sectionsMatching(_ segmentSectionNames: [(segmentName: String, sectionName: String)]) throws -> [(Bool, MachO.Slice.Segment.Section)] { - let isObject = self.headers.allSatisfy { (slice, header) in return header.filetype == MH_OBJECT } - return try self.segments().filter { (segment: Segment) throws -> Bool in - // If it's an object file, all the sections are part of a single segment with no name. - if isObject { - return try segment.value.segment.segname().isEmpty - } else { - return try segmentSectionNames.map{$0.segmentName}.contains(segment.value.segment.segname()) + private func sectionsMatching(_ segmentSectionNames: [(segmentName: String, sectionName: String)]) throws -> [(Bool, MachO.Slice.Segment.Section)] { + let isObject = self.headers.allSatisfy { (slice, header) in return header.filetype == MH_OBJECT } + return try self.segments().filter { (segment: Segment) throws -> Bool in + // If it's an object file, all the sections are part of a single segment with no name. + if isObject { + return try segment.value.segment.segname().isEmpty + } else { + return try segmentSectionNames.map { $0.segmentName }.contains(segment.value.segment.segname()) + } + } + .flatMap { (segment: Segment) throws -> [(Bool, Segment.Section)] in + return try segment.sections().filter { (section: Segment.Section) throws -> Bool in + let s = section.section + return try segmentSectionNames.contains { try $0.segmentName == s.segname() && $0.sectionName == s.sectname() } + }.map { (segment.loadCommand.swap, $0) } } } - .flatMap{ (segment: Segment) throws -> [(Bool, Segment.Section)] in - return try segment.sections().filter { (section: Segment.Section) throws -> Bool in - let s = section.section - return try segmentSectionNames.contains { try $0.segmentName == s.segname() && $0.sectionName == s.sectname() } - }.map { (segment.loadCommand.swap, $0) } - } - } #endif public func swiftABIVersion() throws -> SwiftABIVersion? { #if canImport(Darwin) - // The Swift ABI version is in the ObjC image info section, which might be in either the __DATA, __DATA_CONST, or the __OBJC segment. 
- let segmentSectionNames = [ - ("__DATA", "__objc_imageinfo"), - ("__DATA_CONST", "__objc_imageinfo"), - ("__OBJC", "__image_info"), + // The Swift ABI version is in the ObjC image info section, which might be in either the __DATA, __DATA_CONST, or the __OBJC segment. + let segmentSectionNames = [ + ("__DATA", "__objc_imageinfo"), + ("__DATA_CONST", "__objc_imageinfo"), + ("__OBJC", "__image_info"), ] - let sections = try sectionsMatching(segmentSectionNames) + let sections = try sectionsMatching(segmentSectionNames) - // We expect a single ObjC image info section, otherwise we bail - guard !sections.isEmpty else { return nil } - guard sections.count == 1 else { throw BinaryReaderError.parseError("Found multiple ObjC image info sections") } - let (swap, section) = sections[0] + // We expect a single ObjC image info section, otherwise we bail + guard !sections.isEmpty else { return nil } + guard sections.count == 1 else { throw BinaryReaderError.parseError("Found multiple ObjC image info sections") } + let (swap, section) = sections[0] - reader.push() - defer { reader.pop() } + reader.push() + defer { reader.pop() } - // Read objc_image_info struct - let objcInfo = try reader.seek(by: Int(section.section.offset.byteSwappedIfNeeded(swap))) - let flags = (try objcInfo.seek(by: 4).read() as UInt32).byteSwappedIfNeeded(swap) + // Read objc_image_info struct + let objcInfo = try reader.seek(by: Int(section.section.offset.byteSwappedIfNeeded(swap))) + let flags = (try objcInfo.seek(by: 4).read() as UInt32).byteSwappedIfNeeded(swap) - // FIXME: We should be using a shared definition for this. - let swiftVersion = (flags >> 8) & 0xff - guard swiftVersion != 0 else { return nil } + // FIXME: We should be using a shared definition for this. + let swiftVersion = (flags >> 8) & 0xff + guard swiftVersion != 0 else { return nil } - // NOTE: Swift 5.0 uses 7 as the ABI value. This is the first Swift with a stable ABI. + // NOTE: Swift 5.0 uses 7 as the ABI value. This is the first Swift with a stable ABI. - let vers = Int(swiftVersion) - return vers < 7 ? .unstable(vers) : .stable(vers) + let vers = Int(swiftVersion) + return vers < 7 ? .unstable(vers) : .stable(vers) #else - throw BinaryReaderError.parseError("Mach-O parsing not supported on this platform") + throw BinaryReaderError.parseError("Mach-O parsing not supported on this platform") #endif } public func simulatedEntitlements() throws -> PropertyListItem? 
{ #if canImport(Darwin) - let sections = try sectionsMatching([("__TEXT", "__entitlements")]) + let sections = try sectionsMatching([("__TEXT", "__entitlements")]) - // We expect a single section, otherwise we bail - guard !sections.isEmpty else { return nil } - guard sections.count == 1 else { throw BinaryReaderError.parseError("Found multiple __entitlements sections") } - let (swap, section) = sections[0] + // We expect a single section, otherwise we bail + guard !sections.isEmpty else { return nil } + guard sections.count == 1 else { throw BinaryReaderError.parseError("Found multiple __entitlements sections") } + let (swap, section) = sections[0] - reader.push() - defer { reader.pop() } + reader.push() + defer { reader.pop() } - let sectionReader = try reader.seek(by: Int(section.section.offset.byteSwappedIfNeeded(swap))) - return try PropertyList.fromBytes(sectionReader.read(count: Int(section.section.size64))) + let sectionReader = try reader.seek(by: Int(section.section.offset.byteSwappedIfNeeded(swap))) + return try PropertyList.fromBytes(sectionReader.read(count: Int(section.section.size64))) #else - throw BinaryReaderError.parseError("Mach-O parsing not supported on this platform") + throw BinaryReaderError.parseError("Mach-O parsing not supported on this platform") #endif } public func simulatedDEREntitlements() throws -> [UInt8]? { #if canImport(Darwin) - let sections = try sectionsMatching([("__TEXT", "__ents_der")]) + let sections = try sectionsMatching([("__TEXT", "__ents_der")]) - // We expect a single section, otherwise we bail - guard !sections.isEmpty else { return nil } - guard sections.count == 1 else { throw BinaryReaderError.parseError("Found multiple __ents_der sections") } - let (swap, section) = sections[0] + // We expect a single section, otherwise we bail + guard !sections.isEmpty else { return nil } + guard sections.count == 1 else { throw BinaryReaderError.parseError("Found multiple __ents_der sections") } + let (swap, section) = sections[0] - reader.push() - defer { reader.pop() } + reader.push() + defer { reader.pop() } - let sectionReader = try reader.seek(by: Int(section.section.offset.byteSwappedIfNeeded(swap))) - return try sectionReader.read(count: Int(section.section.size64)) + let sectionReader = try reader.seek(by: Int(section.section.offset.byteSwappedIfNeeded(swap))) + return try sectionReader.read(count: Int(section.section.size64)) #else - throw BinaryReaderError.parseError("Mach-O parsing not supported on this platform") + throw BinaryReaderError.parseError("Mach-O parsing not supported on this platform") #endif } public func remarks() throws -> Data? 
         public func remarks() throws -> Data? {
 #if canImport(Darwin)
-            let sections = try sectionsMatching([("__LLVM", "__remarks")])
+                let sections = try sectionsMatching([("__LLVM", "__remarks")])

-            // We expect a single section, otherwise we bail
-            guard !sections.isEmpty else { return nil }
-            guard sections.count == 1 else { throw BinaryReaderError.parseError("Found multiple __remarks sections") }
-            let (swap, section) = sections[0]
+                // We expect a single section, otherwise we bail
+                guard !sections.isEmpty else { return nil }
+                guard sections.count == 1 else { throw BinaryReaderError.parseError("Found multiple __remarks sections") }
+                let (swap, section) = sections[0]

-            reader.push()
-            defer { reader.pop() }
+                reader.push()
+                defer { reader.pop() }

-            let sectionReader = try reader.seek(by: Int(section.section.offset.byteSwappedIfNeeded(swap)))
-            return Data(try sectionReader.read(count: Int(section.section.size64)))
+                let sectionReader = try reader.seek(by: Int(section.section.offset.byteSwappedIfNeeded(swap)))
+                return Data(try sectionReader.read(count: Int(section.section.size64)))
 #else
-            throw BinaryReaderError.parseError("Mach-O parsing not supported on this platform")
+                throw BinaryReaderError.parseError("Mach-O parsing not supported on this platform")
 #endif
         }
     }
 }

 @available(*, unavailable)
-extension MachO: Sendable { }
+extension MachO: Sendable {}

 @available(*, unavailable)
-extension MachO.Slice: Sendable { }
+extension MachO.Slice: Sendable {}

 #if canImport(Darwin)
-@available(*, unavailable)
-extension MachO.Slice.LoadCommand: Sendable { }
+    @available(*, unavailable)
+    extension MachO.Slice.LoadCommand: Sendable {}

-@available(*, unavailable)
-extension MachO.Slice.Segment: Sendable { }
+    @available(*, unavailable)
+    extension MachO.Slice.Segment: Sendable {}
 #endif

 fileprivate extension Version {
@@ -1497,8 +1496,7 @@
         let magic2: UInt32 = try reader.peek(offset: MemoryLayout<UInt32>.size)
         if magic2 == UInt32(bigEndian: ARMAG2) {
             self = .thin
-        }
-        else {
+        } else {
             throw BinaryReaderError.parseError("Unknown header: 0x\(String(magic, radix: 16)) 0x\(String(magic2, radix: 16))")
         }

@@ -1527,49 +1525,50 @@
         switch archiveType {
         case .thin:
 #if canImport(Darwin.ar)
-            // The magic bytes '!<arch>\n' has already been established, so skip past that.
-            try self.reader.seek(by: Int(SARMAG))
-
-            var machOs: [MachO] = []
-
-            // Attempt to read all of the archive headers out of the content.
-            while !reader.eof {
-                let archiveHeader: ar_hdr = try reader.read()
+                // The magic bytes '!<arch>\n' has already been established, so skip past that.
+                try self.reader.seek(by: Int(SARMAG))
+
+                var machOs: [MachO] = []
+
+                // Attempt to read all of the archive headers out of the content.
+                while !reader.eof {
+                    let archiveHeader: ar_hdr = try reader.read()
+
+                    // Capture the current reader state so we can properly increment past each archive.
+                    reader.push()
+
+                    let archiveName =
+                        try { () throws -> String? in
+                            if let nameSize = archiveHeader.extendedFormatArchiveNameSize {
+                                return [UInt8](try reader.read(count: nameSize)).withUnsafeBytes({ rawPtr in
+                                    if let cString = rawPtr.baseAddress?.assumingMemoryBound(to: CChar.self) {
+                                        // The length of the filename from the header always seems to include 4 NULL padding bytes...
+                                        return FileManager.default.string(withFileSystemRepresentation: cString, length: rawPtr.count).trimmingCharacters(in: CharacterSet(["\0"]))
+                                    }
+                                    return nil
+                                })
+                            }
+                            return archiveHeader.rawArchiveName
+                        }() ?? ""
-                // Capture the current reader state so we can properly increment past each archive.
-                reader.push()
+                    guard let objectSize = archiveHeader.objectSize, objectSize > 0 else {
+                        throw BinaryReaderError.parseError("Encountered zero-length static archive")
+                    }
-                let archiveName = try { () throws -> String? in
-                    if let nameSize = archiveHeader.extendedFormatArchiveNameSize {
-                        return [UInt8](try reader.read(count: nameSize)).withUnsafeBytes({ rawPtr in
-                            if let cString = rawPtr.baseAddress?.assumingMemoryBound(to: CChar.self) {
-                                // The length of the filename from the header always seems to include 4 NULL padding bytes...
-                                return FileManager.default.string(withFileSystemRepresentation: cString, length: rawPtr.count).trimmingCharacters(in: CharacterSet(["\0"]))
-                            }
-                            return nil
-                        })
+                    // Make sure to skip the archive member which is the symbol table
+                    if !StaticArchive.symbolTables.contains(archiveName) {
+                        machOs.append(try MachO(reader: BinaryReader(data: reader.data, startingAt: reader.offset, size: objectSize)))
                     }
-                    return archiveHeader.rawArchiveName
-                }() ?? ""
-                guard let objectSize = archiveHeader.objectSize, objectSize > 0 else {
-                    throw BinaryReaderError.parseError("Encountered zero-length static archive")
-                }
+                    // Return back to the reader's state to when the header was read.
+                    reader.pop()
-                // Make sure to skip the archive member which is the symbol table
-                if !StaticArchive.symbolTables.contains(archiveName) {
-                    machOs.append(try MachO(reader: BinaryReader(data: reader.data, startingAt: reader.offset, size: objectSize)))
+                    try reader.seek(by: objectSize.nextEvenNumber() + (archiveHeader.extendedFormatArchiveNameSize ?? 0))
                 }
-                // Return back to the reader's state to when the header was read.
-                reader.pop()
-
-                try reader.seek(by: objectSize.nextEvenNumber() + (archiveHeader.extendedFormatArchiveNameSize ?? 0))
-            }
-
-            return machOs
+                return machOs
 #else
-            throw BinaryReaderError.parseError("Mach-O parsing not supported on this platform")
+                throw BinaryReaderError.parseError("Mach-O parsing not supported on this platform")
 #endif
         case let .fat(magic):
             let archs: [any MachOFatHeader] = try .init(reader: reader)
@@ -1585,82 +1584,82 @@
 }

 @available(*, unavailable)
-extension StaticArchive: Sendable { }
+extension StaticArchive: Sendable {}

 #if canImport(Darwin.ar)
-extension ar_hdr {
-    public var rawArchiveName: String? {
-        var tmp = ar_name
-        return withUnsafeBytes(of: &tmp) { rawPtr in
-            if let cString = rawPtr.baseAddress?.assumingMemoryBound(to: CChar.self) {
-                return FileManager.default.string(withFileSystemRepresentation: cString, length: rawPtr.count)
+    extension ar_hdr {
+        public var rawArchiveName: String? {
+            var tmp = ar_name
+            return withUnsafeBytes(of: &tmp) { rawPtr in
+                if let cString = rawPtr.baseAddress?.assumingMemoryBound(to: CChar.self) {
+                    return FileManager.default.string(withFileSystemRepresentation: cString, length: rawPtr.count)
+                }
+                return nil
             }
-            return nil
         }
-    }

-    public var extendedFormatArchiveNameSize: Int? {
-        if let rawArchiveName, rawArchiveName.hasPrefix(AR_EFMT1) {
-            return Int(rawArchiveName.withoutPrefix(AR_EFMT1).trimmingCharacters(in: .whitespaces))
+        public var extendedFormatArchiveNameSize: Int? {
+            if let rawArchiveName, rawArchiveName.hasPrefix(AR_EFMT1) {
+                return Int(rawArchiveName.withoutPrefix(AR_EFMT1).trimmingCharacters(in: .whitespaces))
+            }
+            return nil
         }
-        return nil
-    }

-    public var objectSize: Int? {
-        var tmp = ar_size
-        return withUnsafeBytes(of: &tmp) { rawPtr in
-            if let s = String(bytes: rawPtr, encoding: .utf8), let size = Int(s.trimmingCharacters(in: .whitespaces)) {
-                return size - (extendedFormatArchiveNameSize ?? 0)
+        public var objectSize: Int? {
+            var tmp = ar_size
+            return withUnsafeBytes(of: &tmp) { rawPtr in
+                if let s = String(bytes: rawPtr, encoding: .utf8), let size = Int(s.trimmingCharacters(in: .whitespaces)) {
+                    return size - (extendedFormatArchiveNameSize ?? 0)
+                }
+                return nil
             }
-            return nil
         }
-    }

-    /// The number of bytes the `ar_hdr` occupies.
-    public var structSize: Int {
-        return MemoryLayout<ar_hdr>.size
+        /// The number of bytes the `ar_hdr` occupies.
+        public var structSize: Int {
+            return MemoryLayout<ar_hdr>.size
+        }
     }
-}
 #endif

 extension Array where Element == any MachOFatHeader {
     init(reader: BinaryReader) throws {
 #if canImport(Darwin)
-        let fh: fat_header = try reader.read()
-        let swap = shouldSwap(magic: fh.magic)
-        let nfat_arch = fh.nfat_arch.byteSwappedIfNeeded(swap)
+            let fh: fat_header = try reader.read()
+            let swap = shouldSwap(magic: fh.magic)
+            let nfat_arch = fh.nfat_arch.byteSwappedIfNeeded(swap)

-        let is64bit = fh.magic == FAT_MAGIC_64 || fh.magic == FAT_CIGAM_64
+            let is64bit = fh.magic == FAT_MAGIC_64 || fh.magic == FAT_CIGAM_64

-        var archs: [any MachOFatHeader] = try (0 ..< nfat_arch).map { _ in
-            if is64bit {
-                return try reader.read() as fat_arch_64
-            } else {
-                return try reader.read() as fat_arch
-            }
-        }
-
-        // Special case hidden CPU_TYPE_ARM64. nfat_arch says N, but arm64 is at N+1, to hide it from OS versions which cannot process it correctly. Note that lipo itself is capable of _writing_ multiple hidden arm64 slices, but it will only _read_ a single hidden arm64 slice. We'll only read one, since that seems to have been the intent (and predates arm64e).
-
-        if let firstSliceOffset = archs.map({ $0.offset(byteSwappedIfNeeded: swap) }).min(), reader.offset < firstSliceOffset - UInt64(is64bit ? MemoryLayout<fat_arch_64>.size : MemoryLayout<fat_arch>.size) {
-            let hiddenSliceReader = BinaryReader(data: reader.data, startingAt: reader.offset, size: Int(firstSliceOffset) - reader.offset)
-            if !hiddenSliceReader.eof {
-                let arch: any MachOFatHeader
+            var archs: [any MachOFatHeader] = try (0..