A high-performance, thread-safe, feature-complete Least Recently Used cache implementation for Swift, inspired by the popular Node.js lru-cache package.
import SwiftLRUCache
/// Simple LRU cache with max 100 items
let cache = LRUCache<String, String>(
configuration: try! Configuration(max: 100)
)
Task {
/// Store value
await cache.set("key", value: "value")
/// Retrieve value
if let value = await cache.get("key") {
print(value)
}
}
import SwiftLRUCache
/// Create a cache with maximum 100 items
let config = try Configuration<String, Data>(max: 100)
let cache = LRUCache<String, Data>(configuration: config)
/// Set values (async)
await cache.set("key1", value: data1)
await cache.set("key2", value: data2)
/// Get values (async)
if let data = await cache.get("key1") {
// Use the data
}
/// Check existence (async)
if await cache.has("key2") {
// Key exists
}
/// Delete items (async)
await cache.delete("key1")
/// Clear cache (async)
await cache.clear()
/// Cache with default TTL of 5 minutes
let config = try Configuration<String, String>(max: 1000, ttl: 300)
let cache = LRUCache<String, String>(configuration: config)
/// Set item with custom TTL
await cache.set("session", value: "abc123", ttl: 3600) // 1 hour
/// Get remaining TTL
if let remaining = await cache.getRemainingTTL("session") {
print("Session expires in \(remaining) seconds")
}
/// Allow stale items
var config = try Configuration<String, String>(max: 100, ttl: 60)
config.allowStale = true
let cache = LRUCache<String, String>(configuration: config)
/// Returns stale value if expired
let value = await cache.get("key", options: GetOptions(allowStale: true))
var config = try Configuration<String, Data>(maxSize: 1024 * 1024) // 1MB total
config.sizeCalculation = { data, _ in
return data.count
}
let cache = LRUCache<String, Data>(configuration: config)
/// Items will be evicted when total size exceeds 1MB
await cache.set("image1", value: imageData)
var config = try Configuration<String, FileHandle>(max: 10)
config.dispose = { handle, key, reason in
/// Clean up when items are removed
handle.closeFile()
print("Disposed \(key) due to \(reason)")
}
let cache = LRUCache<String, FileHandle>(configuration: config)
/// Pop least recently used item
if let (key, value) = await cache.pop() {
print("Removed LRU item: \(key) = \(value)")
}
/// Iterate over all items (MRU to LRU order)
await cache.forEach { key, value in
print("\(key): \(value)")
}
/// Get all entries, keys, or values
let entries = await cache.entries()
let keys = await cache.keys()
let values = await cache.values()
/// Peek at value without updating LRU order
let value = await cache.peek("key1")
/// Get cache statistics
let currentSize = await cache.size
let totalSize = await cache.calculatedSize
let maxItems = cache.max
/// Debug representation
let debugInfo = await cache.dump()
print(debugInfo)
Add the following to your Package.swift file:
dependencies: [
.package(url: "https://github.com/tornikegomareli/swift-lru-cache.git", from: "0.4.0")
]
- Swift 6.0+
- macOS 14.0+ / iOS 17.0+ / tvOS 17.0+ / watchOS 10.0+ / visionOS 1.0+
Please feel free to submit a Pull Request. For major changes, please open an issue first to discuss what you would like to change.
This project is licensed under the MIT License - see the LICENSE file for details.
- Inspired by isaacs/node-lru-cache
- Built with Swift 6.0 and Swift Testing framework