import Algorithms
import AsyncCollections

public enum DataLoaderValue<T: Sendable>: Sendable {
    case success(T)
    case failure(Error)
}

public typealias BatchLoadFunction<Key: Hashable & Sendable, Value: Sendable> = @Sendable (_ keys: [Key]) async throws -> [DataLoaderValue<Value>]
private typealias LoaderQueue<Key: Hashable & Sendable, Value: Sendable> = [(key: Key, channel: Channel<Value, Error>)]

/// DataLoader creates a public API for loading data from a particular
/// data back-end with unique keys, such as the `id` column of an SQL table
/// or the document name in a MongoDB database, given a batch loading function.
///
/// Each DataLoader instance contains a unique memoized cache. Use caution
/// in long-lived applications, or in those which serve many users with
/// different access permissions, and consider creating a new instance
/// per data request.
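///
/// A minimal usage sketch, assuming a hypothetical `User` type and a
/// hypothetical `fetchUsersByID(_:)` helper that returns `[Int: User]`:
///
/// ```swift
/// let userLoader = DataLoader<Int, User> { ids in
///     // One round trip to the back-end for the whole batch of ids.
///     let usersByID = try await fetchUsersByID(ids)
///
///     // The batch function must return one result per key, in key order.
///     return ids.map { id -> DataLoaderValue<User> in
///         if let user = usersByID[id] {
///             return .success(user)
///         }
///         return .failure(DataLoaderError.noValueForKey("Did not return value for key: \(id)"))
///     }
/// }
///
/// let user = try await userLoader.load(key: 1)
/// ```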
public actor DataLoader<Key: Hashable & Sendable, Value: Sendable> {
    private let batchLoadFunction: BatchLoadFunction<Key, Value>
    private let options: DataLoaderOptions<Key, Value>

    private var cache = [Key: Channel<Value, Error>]()
    private var queue = LoaderQueue<Key, Value>()

    private var dispatchScheduled = false

    public init(
        options: DataLoaderOptions<Key, Value> = DataLoaderOptions(),
        batchLoadFunction: @escaping BatchLoadFunction<Key, Value>
    ) {
        self.options = options
        self.batchLoadFunction = batchLoadFunction
    }

    /// Loads a key, returning the value represented by that key.
    public func load(key: Key) async throws -> Value {
        let cacheKey = options.cacheKeyFunction?(key) ?? key

        if options.cachingEnabled, let cached = cache[cacheKey] {
            return try await cached.value
        }

        let channel = Channel<Value, Error>()

        if options.batchingEnabled {
            queue.append((key: key, channel: channel))

            if let executionPeriod = options.executionPeriod, !dispatchScheduled {
                Task.detached {
                    try await Task.sleep(nanoseconds: executionPeriod)
                    try await self.execute()
                }

                dispatchScheduled = true
            }
        } else {
            Task.detached {
                do {
                    let results = try await self.batchLoadFunction([key])

                    if results.isEmpty {
                        await channel.fail(DataLoaderError.noValueForKey("Did not return value for key: \(key)"))
                    } else {
                        let result = results[0]

                        switch result {
                        case let .success(value):
                            await channel.fulfill(value)
                        case let .failure(error):
                            await channel.fail(error)
                        }
                    }
                } catch {
                    await channel.fail(error)
                }
            }
        }

        if options.cachingEnabled {
            cache[cacheKey] = channel
        }

        return try await channel.value
    }

    /// Loads multiple keys, promising an array of values:
    ///
    /// ```swift
    /// async let aAndB = try myLoader.loadMany(keys: [ "a", "b" ])
    /// ```
    ///
    /// This is equivalent to the more verbose:
    ///
    /// ```swift
    /// async let aAndB = [
    ///     try myLoader.load(key: "a"),
    ///     try myLoader.load(key: "b")
    /// ]
    /// ```
    ///
    /// or
    ///
    /// ```swift
    /// async let a = try myLoader.load(key: "a")
    /// async let b = try myLoader.load(key: "b")
    /// ```
    public func loadMany(keys: [Key]) async throws -> [Value] {
        guard !keys.isEmpty else {
            return []
        }

        return try await keys.concurrentMap { try await self.load(key: $0) }
    }

    /// Clears the value at `key` from the cache, if it exists. Returns itself for
    /// method chaining.
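    ///
    /// Because the loader is returned, multiple clears can be chained under a
    /// single `await` (using the hypothetical `userLoader` above):
    ///
    /// ```swift
    /// await userLoader.clear(key: 1).clear(key: 2)
    /// ```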
    @discardableResult
    public func clear(key: Key) -> DataLoader<Key, Value> {
        let cacheKey = options.cacheKeyFunction?(key) ?? key

        cache.removeValue(forKey: cacheKey)

        return self
    }

    /// Clears the entire cache. To be used when some event results in unknown
    /// invalidations across this particular `DataLoader`. Returns itself for
    /// method chaining.
    @discardableResult
    public func clearAll() -> DataLoader<Key, Value> {
        cache.removeAll()

        return self
    }

    /// Adds the provided key and value to the cache. If the key already exists, no
    /// change is made. Returns itself for method chaining.
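    ///
    /// A short sketch of warming the cache ahead of time, assuming the
    /// hypothetical `userLoader` above and a hypothetical `cachedUser` value:
    ///
    /// ```swift
    /// try await userLoader.prime(key: 1, value: cachedUser)
    /// let user = try await userLoader.load(key: 1) // served from the cache
    /// ```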
    @discardableResult
    public func prime(key: Key, value: Value) async throws -> DataLoader<Key, Value> {
        let cacheKey = options.cacheKeyFunction?(key) ?? key

        if cache[cacheKey] == nil {
            let channel = Channel<Value, Error>()

            Task.detached {
                await channel.fulfill(value)
            }

            cache[cacheKey] = channel
        }

        return self
    }

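    /// Dispatches the queued keys to the batch load function and resolves
    /// their channels.
    ///
    /// A dispatch is scheduled automatically whenever `options.executionPeriod`
    /// is set (the default). If `executionPeriod` was explicitly set to `nil`,
    /// this method must be called manually, along these lines (a sketch that
    /// assumes `myLoader` was built with `executionPeriod: nil`; the short
    /// sleep gives the child task time to enqueue its key first):
    ///
    /// ```swift
    /// async let value = try myLoader.load(key: "a")
    /// try await Task.sleep(nanoseconds: 1_000_000)
    /// try await myLoader.execute()
    /// let a = try await value
    /// ```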
    public func execute() async throws {
        // Take the current loader queue, replacing it with an empty queue.
        let batch = queue

        queue = []

        if dispatchScheduled {
            dispatchScheduled = false
        }

        guard !batch.isEmpty else {
            return
        }

        // If a maxBatchSize was provided and the queue is longer, then segment the
        // queue into multiple batches, otherwise treat the queue as a single batch.
        if let maxBatchSize = options.maxBatchSize, maxBatchSize > 0, maxBatchSize < batch.count {
            try await batch.chunks(ofCount: maxBatchSize).asyncForEach { slicedBatch in
                try await self.executeBatch(batch: Array(slicedBatch))
            }
        } else {
            try await executeBatch(batch: batch)
        }
    }

    private func executeBatch(batch: LoaderQueue<Key, Value>) async throws {
        let keys = batch.map { $0.key }

        if keys.isEmpty {
            return
        }

        // Step through the values, resolving or rejecting each channel in the
        // batch.
        do {
            let values = try await batchLoadFunction(keys)

            if values.count != keys.count {
                throw DataLoaderError.typeError("The function did not return an array of the same length as the array of keys. \nKeys count: \(keys.count)\nValues count: \(values.count)")
            }

            for entry in batch.enumerated() {
                let result = values[entry.offset]

                switch result {
                case let .failure(error):
                    await entry.element.channel.fail(error)
                case let .success(value):
                    await entry.element.channel.fulfill(value)
                }
            }
        } catch {
            await failedExecution(batch: batch, error: error)
        }
    }

    private func failedExecution(batch: LoaderQueue<Key, Value>, error: Error) async {
        for (key, channel) in batch {
            _ = clear(key: key)

            await channel.fail(error)
        }
    }
}