super_cache_disk 1.0.1
Install by adding "super_cache_disk: ^1.0.1" to the dependencies in pubspec.yaml.

File-per-entry persistent cache for Flutter and Dart. SHA-256 integrity, optional gzip compression, and async write queue. Part of the super_cache family.

example/main.dart

// ignore_for_file: avoid_print
import 'dart:io';

import 'package:super_cache_disk/super_cache_disk.dart';

/// super_cache_disk walkthrough
///
/// Simulates the lifecycle of a "product catalog" app that needs data to
/// survive app restarts. Covers each DiskCache feature step-by-step:
///
///   1. Persist data across an app restart
///   2. TTL expiry survives restarts (wall-clock based, not session-based)
///   3. Batch warm-up with putAll
///   4. Custom JSON codec for structured types
///   5. SHA-256 integrity check — corrupted file returns null, not bad data
///
/// Run with:
///   dart run example/main.dart
void main() async {
  // A throwaway directory stands in for the real app cache directory.
  final appCacheDir =
      await Directory.systemTemp.createTemp('super_cache_disk_demo_');

  // Table of (title, demo body) pairs, run in order below.
  final demos = <(String, Future<void> Function(Directory))>[
    ('1. Persist across an app restart', _demo1Persistence),
    ('2. TTL survives restart', _demo2TtlPersistence),
    ('3. Batch warm-up with putAll', _demo3PutAll),
    ('4. Custom JSON codec', _demo4JsonCodec),
    ('5. Integrity check — corrupted file', _demo5Integrity),
  ];

  try {
    for (final (title, demo) in demos) {
      await _section(title, () => demo(appCacheDir));
    }
  } finally {
    // Remove the temp directory no matter how the demos finished.
    await appCacheDir.delete(recursive: true);
  }

  print('\nDone!');
}

// ---------------------------------------------------------------------------
// 1. Persist across an app restart
//
// DiskCache writes one file per entry using a SHA-256 hash of the key as the
// filename, so plaintext keys are never stored on disk. A new DiskCache
// instance pointed at the same directory picks up all existing entries after
// calling initialize().
// ---------------------------------------------------------------------------

/// Demo 1: data written in one DiskCache "session" is readable by a fresh
/// instance opened over the same directory — i.e. it survives an app restart.
Future<void> _demo1Persistence(Directory root) async {
  final dir = await root.createTemp('restart_');
  const codec = StringCacheCodec();

  // First launch: populate the catalog.
  print('[Session 1] writing catalog data...');
  final firstLaunch = DiskCache<String, String>(
    directory: dir,
    codec: codec,
    defaultTTL: const Duration(hours: 24),
  );
  await firstLaunch.initialize();

  await firstLaunch.put('product:1', 'Mechanical Keyboard');
  await firstLaunch.put('product:2', 'USB-C Hub');
  await firstLaunch.put('config:version', '2.4.1');

  print('  product:1   → ${await firstLaunch.get("product:1")}');
  print('  product:2   → ${await firstLaunch.get("product:2")}');

  // dispose() flushes the async write queue — call it before "restarting".
  await firstLaunch.dispose();
  print('[Session 1] disposed (write queue flushed)\n');

  // Second launch: a brand-new instance over the same directory.
  print('[Session 2] reading from disk after restart...');
  final relaunched = DiskCache<String, String>(
    directory: dir,
    codec: codec,
  );
  await relaunched.initialize(); // picks up the existing .dat files on disk

  print(
      '  product:1   → ${await relaunched.get("product:1")}'); // Mechanical Keyboard
  print('  product:2   → ${await relaunched.get("product:2")}'); // USB-C Hub
  print('  config:version → ${await relaunched.get("config:version")}'); // 2.4.1

  await relaunched.dispose();
}

// ---------------------------------------------------------------------------
// 2. TTL survives restart
//
// TTL is wall-clock based. If you write an entry with a 1-hour TTL and the
// app restarts 30 minutes later, the entry has only 30 minutes left — it
// does not get a fresh 1-hour TTL just because the cache was re-opened.
// ---------------------------------------------------------------------------

/// Demo 2: TTL is wall-clock based, so an entry that expires while the app
/// is "closed" is already gone when a new instance re-opens the directory.
Future<void> _demo2TtlPersistence(Directory root) async {
  final dir = await root.createTemp('ttl_restart_');

  // A deliberately tiny TTL so the expiry is observable within the demo.
  final writer = DiskCache<String, String>(
    directory: dir,
    codec: const StringCacheCodec(),
    defaultTTL: const Duration(milliseconds: 300),
  );
  await writer.initialize();
  await writer.put('flash_deal', 'SAVE50');
  print('  written: ${await writer.get("flash_deal")}'); // SAVE50
  await writer.dispose();

  // Let the TTL lapse, then "restart" by constructing a new instance.
  await Future<void>.delayed(const Duration(milliseconds: 350));

  final reader = DiskCache<String, String>(
    directory: dir,
    codec: const StringCacheCodec(),
  );
  await reader.initialize();
  print(
      '  after TTL + restart: ${await reader.get("flash_deal")}'); // null ← expired
  await reader.dispose();
}

// ---------------------------------------------------------------------------
// 3. Batch warm-up with putAll
//
// On app start you can pre-populate the cache with remote config, feature
// flags, or pre-fetched data in a single call instead of many sequential puts.
// ---------------------------------------------------------------------------

/// Demo 3: putAll seeds many entries in one call — handy for warming the
/// cache from a remote-config or pre-fetch response at app start.
Future<void> _demo3PutAll(Directory root) async {
  final dir = await root.createTemp('batch_');

  final cache = DiskCache<String, String>(
    directory: dir,
    codec: const StringCacheCodec(),
    defaultTTL: const Duration(hours: 1),
  );
  await cache.initialize();

  // Feature flags as they might arrive from a remote config service.
  final remoteConfig = <String, String>{
    'flag:dark_mode': 'true',
    'flag:new_checkout': 'false',
    'flag:ai_search': 'true',
    'flag:loyalty_points': 'true',
  };
  await cache.putAll(remoteConfig);

  print('  Feature flags:');
  for (final MapEntry(key: flag) in remoteConfig.entries) {
    print('    $flag → ${await cache.get(flag)}');
  }

  await cache.dispose();
}

// ---------------------------------------------------------------------------
// 4. Custom JSON codec
//
// DiskCache<K, V> is fully generic. Provide a CacheCodec<V> to encode any
// type to bytes and decode it back. The example uses a simple UserProfile model.
// ---------------------------------------------------------------------------

/// Demo 4: DiskCache is generic over the value type — the built-in
/// JsonCacheCodec only needs fromJson/toJson to persist structured objects.
Future<void> _demo4JsonCodec(Directory root) async {
  final dir = await root.createTemp('json_');

  // No hand-rolled codec required for JSON-serialisable types.
  final codec = JsonCacheCodec<UserProfile>(
    fromJson: UserProfile.fromJson,
    toJson: (profile) => profile.toJson(),
  );

  final store = DiskCache<String, UserProfile>(
    directory: dir,
    codec: codec,
    defaultTTL: const Duration(days: 7),
  );
  await store.initialize();

  // Persist two structured values.
  await store.put('user:alice',
      const UserProfile(id: 'alice', name: 'Alice Smith', score: 9800));
  await store.put(
      'user:bob', const UserProfile(id: 'bob', name: 'Bob Jones', score: 4200));

  // "Restart": close this instance, then open a fresh one over the same dir.
  await store.dispose();

  final reopened = DiskCache<String, UserProfile>(
    directory: dir,
    codec: codec,
  );
  await reopened.initialize();

  final alice = await reopened.get('user:alice');
  final bob = await reopened.get('user:bob');

  print(
      '  alice: ${alice?.name} (score: ${alice?.score})'); // Alice Smith (score: 9800)
  print(
      '  bob  : ${bob?.name} (score: ${bob?.score})'); // Bob Jones   (score: 4200)

  await reopened.dispose();
}

// A simple user profile model — only needs toJson / fromJson for JsonCacheCodec.
/// Minimal user-profile model for the JsonCacheCodec demo.
///
/// Only [toJson] and [UserProfile.fromJson] are needed to round-trip it
/// through the cache.
final class UserProfile {
  const UserProfile({
    required this.id,
    required this.name,
    required this.score,
  });

  /// Rebuilds a profile from a decoded JSON map.
  factory UserProfile.fromJson(Map<String, dynamic> json) => UserProfile(
        id: json['id'] as String,
        name: json['name'] as String,
        score: json['score'] as int,
      );

  final String id;
  final String name;
  final int score;

  /// Encodes this profile as a JSON-serialisable map.
  Map<String, dynamic> toJson() =>
      <String, dynamic>{'id': id, 'name': name, 'score': score};
}

// ---------------------------------------------------------------------------
// 5. Integrity check
//
// Each .dat file contains a SHA-256 checksum of the payload. If the file is
// corrupted (disk error, incomplete write, tampered) the checksum fails and
// DiskCache returns null instead of serving bad data.
// ---------------------------------------------------------------------------

/// Demo 5: each stored file carries a SHA-256 checksum; a corrupted file
/// fails the check and reads back as null instead of bad data.
Future<void> _demo5Integrity(Directory root) async {
  final dir = await root.createTemp('integrity_');

  final cache = DiskCache<String, String>(
    directory: dir,
    codec: const StringCacheCodec(),
  );
  await cache.initialize();
  await cache.put('critical', 'important-data');

  // Give the async write queue a moment to reach the disk.
  await Future<void>.delayed(const Duration(milliseconds: 100));
  await cache.dispose();

  // Overwrite the stored .dat file with garbage to mimic a disk error.
  final datFiles = [
    for (final entry in dir.listSync())
      if (entry is File && entry.path.endsWith('.dat')) entry,
  ];
  print('  .dat files on disk: ${datFiles.length}');
  if (datFiles.isNotEmpty) {
    await datFiles.first.writeAsBytes([0xDE, 0xAD, 0xBE, 0xEF]);
    print('  file corrupted intentionally');
  }

  // A fresh instance detects the checksum mismatch when reading.
  final reopened = DiskCache<String, String>(
    directory: dir,
    codec: const StringCacheCodec(),
  );
  await reopened.initialize();

  final result = await reopened.get('critical');
  print(
      '  read after corruption: $result'); // null — safe miss, not corrupt data
  await reopened.dispose();
}

// ---------------------------------------------------------------------------
// Helpers
// ---------------------------------------------------------------------------

/// Prints a framed section header for [title], then runs [demo] to completion.
Future<void> _section(String title, Future<void> Function() demo) async {
  final rule = '─' * 55;
  print('\n$rule');
  print('  $title');
  print(rule);
  await demo();
}
1 likes · 150 points · 100 downloads

Publisher

verified publisher: jihedmrouki.com

Weekly Downloads

File-per-entry persistent cache for Flutter and Dart. SHA-256 integrity, optional gzip compression, and async write queue. Part of the super_cache family.

Repository (GitHub)
View/report issues

Topics

#cache #caching #storage #disk #persistence

Documentation

API reference

License

MIT (license)

Dependencies

hashlib, super_cache

More

Packages that depend on super_cache_disk