Caching

Go has several options for implementing caching, both in-memory and distributed. This tutorial walks through implementations of several cache types.
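
The solution below covers the in-memory side. As a minimal sketch of the distributed option (assuming the github.com/redis/go-redis/v9 client and a Redis server reachable at localhost:6379; adjust to your setup), a Redis-backed cache could be used like this:

package main

import (
    "context"
    "fmt"
    "time"

    "github.com/redis/go-redis/v9"
)

func main() {
    ctx := context.Background()

    // Assumes a Redis server at localhost:6379 (hypothetical address).
    rdb := redis.NewClient(&redis.Options{Addr: "localhost:6379"})

    // Store a value with a 10-second TTL.
    if err := rdb.Set(ctx, "key1", "value1", 10*time.Second).Err(); err != nil {
        fmt.Println("set failed:", err)
        return
    }

    // Read it back; redis.Nil signals a cache miss.
    val, err := rdb.Get(ctx, "key1").Result()
    switch {
    case err == redis.Nil:
        fmt.Println("key1 not found")
    case err != nil:
        fmt.Println("get failed:", err)
    default:
        fmt.Println("key1:", val)
    }
}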

Problem

How to:

  1. Implement an in-memory cache
  2. Add expiration to cached entries
  3. Make the cache thread-safe
  4. Build an LRU cache

Solution

package main

import (
    "container/list"
    "fmt"
    "sync"
    "time"
)

// 1. Basic cache item
type Item struct {
    Value      interface{}
    Expiration *time.Time
}

func (i *Item) Expired() bool {
    if i.Expiration == nil {
        return false
    }
    return time.Now().After(*i.Expiration)
}

// 2. Simple in-memory cache
type SimpleCache struct {
    sync.RWMutex
    items map[string]Item
}

func NewSimpleCache() *SimpleCache {
    return &SimpleCache{
        items: make(map[string]Item),
    }
}

func (c *SimpleCache) Set(key string, value interface{},
    expiration time.Duration) {
    c.Lock()
    defer c.Unlock()

    var exp *time.Time
    if expiration > 0 {
        t := time.Now().Add(expiration)
        exp = &t
    }

    c.items[key] = Item{
        Value:      value,
        Expiration: exp,
    }
}

func (c *SimpleCache) Get(key string) (interface{}, bool) {
    c.RLock()
    defer c.RUnlock()

    item, exists := c.items[key]
    if !exists {
        return nil, false
    }

    if item.Expired() {
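        // The stale entry stays in the map; TimedCache below removes expired entries automatically.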
        return nil, false
    }

    return item.Value, true
}

func (c *SimpleCache) Delete(key string) {
    c.Lock()
    defer c.Unlock()
    delete(c.items, key)
}

// 3. LRU Cache
type LRUCache struct {
    sync.RWMutex
    capacity int
    items    map[string]*list.Element
    queue    *list.List
}

type LRUItem struct {
    key   string
    value interface{}
}

func NewLRUCache(capacity int) *LRUCache {
    return &LRUCache{
        capacity: capacity,
        items:    make(map[string]*list.Element),
        queue:    list.New(),
    }
}

func (c *LRUCache) Set(key string, value interface{}) {
    c.Lock()
    defer c.Unlock()

    // Check if key exists
    if element, exists := c.items[key]; exists {
        c.queue.MoveToFront(element)
        element.Value.(*LRUItem).value = value
        return
    }

    // Add new item
    element := c.queue.PushFront(&LRUItem{
        key:   key,
        value: value,
    })
    c.items[key] = element

    // Remove oldest if capacity exceeded
    if c.queue.Len() > c.capacity {
        oldest := c.queue.Back()
        if oldest != nil {
            c.queue.Remove(oldest)
            delete(c.items, oldest.Value.(*LRUItem).key)
        }
    }
}

func (c *LRUCache) Get(key string) (interface{}, bool) {
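    // A full write lock is used (not RLock) because Get mutates the recency list via MoveToFront.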
    c.Lock()
    defer c.Unlock()

    element, exists := c.items[key]
    if !exists {
        return nil, false
    }

    c.queue.MoveToFront(element)
    return element.Value.(*LRUItem).value, true
}

// 4. Timed cache with cleanup
type TimedCache struct {
    sync.RWMutex
    items    map[string]Item
    interval time.Duration
    stop     chan struct{}
}

func NewTimedCache(cleanupInterval time.Duration) *TimedCache {
    cache := &TimedCache{
        items:    make(map[string]Item),
        interval: cleanupInterval,
        stop:     make(chan struct{}),
    }
    
    go cache.cleanup()
    return cache
}

func (c *TimedCache) cleanup() {
    ticker := time.NewTicker(c.interval)
    defer ticker.Stop()

    for {
        select {
        case <-ticker.C:
            c.Lock()
            for key, item := range c.items {
                if item.Expired() {
                    delete(c.items, key)
                }
            }
            c.Unlock()
        case <-c.stop:
            return
        }
    }
}

func (c *TimedCache) Stop() {
    close(c.stop)
}

func (c *TimedCache) Set(key string, value interface{},
    expiration time.Duration) {
    c.Lock()
    defer c.Unlock()

    var exp *time.Time
    if expiration > 0 {
        t := time.Now().Add(expiration)
        exp = &t
    }

    c.items[key] = Item{
        Value:      value,
        Expiration: exp,
    }
}

func (c *TimedCache) Get(key string) (interface{}, bool) {
    c.RLock()
    defer c.RUnlock()

    item, exists := c.items[key]
    if !exists {
        return nil, false
    }

    if item.Expired() {
        return nil, false
    }

    return item.Value, true
}

// 5. Cache statistics
type CacheStats struct {
    Hits      int64
    Misses    int64
    ItemCount int64
}

type StatsCache struct {
    sync.RWMutex
    items map[string]Item
    stats CacheStats
}

func NewStatsCache() *StatsCache {
    return &StatsCache{
        items: make(map[string]Item),
    }
}

func (c *StatsCache) Set(key string, value interface{},
    expiration time.Duration) {
    c.Lock()
    defer c.Unlock()

    var exp *time.Time
    if expiration > 0 {
        t := time.Now().Add(expiration)
        exp = &t
    }

    // Count only new keys so overwriting an existing key does not inflate ItemCount.
    if _, exists := c.items[key]; !exists {
        c.stats.ItemCount++
    }

    c.items[key] = Item{
        Value:      value,
        Expiration: exp,
    }
}

func (c *StatsCache) Get(key string) (interface{}, bool) {
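    // A full write lock is used because hit/miss counters are updated on every lookup.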
    c.Lock()
    defer c.Unlock()

    item, exists := c.items[key]
    if !exists {
        c.stats.Misses++
        return nil, false
    }

    if item.Expired() {
        c.stats.Misses++
        return nil, false
    }

    c.stats.Hits++
    return item.Value, true
}

func (c *StatsCache) Stats() CacheStats {
    c.RLock()
    defer c.RUnlock()
    return c.stats
}

func main() {
    // Example 1: Simple Cache
    fmt.Println("Simple Cache Example:")
    cache := NewSimpleCache()
    
    // Set values
    cache.Set("key1", "value1", 2*time.Second)
    cache.Set("key2", "value2", 0) // No expiration
    
    // Get values
    if val, ok := cache.Get("key1"); ok {
        fmt.Printf("key1: %v\n", val)
    }
    
    time.Sleep(3 * time.Second)
    if _, ok := cache.Get("key1"); !ok {
        fmt.Println("key1 expired")
    }

    // Example 2: LRU Cache
    fmt.Println("\nLRU Cache Example:")
    lru := NewLRUCache(2)
    
    lru.Set("A", 1)
    lru.Set("B", 2)
    lru.Set("C", 3) // This will evict A
    
    if _, ok := lru.Get("A"); !ok {
        fmt.Println("A was evicted")
    }
    if val, ok := lru.Get("B"); ok {
        fmt.Printf("B is still present: %v\n", val)
    }

    // Example 3: Timed Cache
    fmt.Println("\nTimed Cache Example:")
    timedCache := NewTimedCache(time.Second)
    defer timedCache.Stop()
    
    timedCache.Set("temp", "temporary", 2*time.Second)
    timedCache.Set("perm", "permanent", 0)
    
    time.Sleep(3 * time.Second)
    if _, ok := timedCache.Get("temp"); !ok {
        fmt.Println("temp was automatically cleaned up")
    }
    if val, ok := timedCache.Get("perm"); ok {
        fmt.Printf("perm is still present: %v\n", val)
    }

    // Example 4: Stats Cache
    fmt.Println("\nStats Cache Example:")
    statsCache := NewStatsCache()
    
    // Some operations
    statsCache.Set("key1", "value1", time.Minute)
    statsCache.Get("key1")
    statsCache.Get("nonexistent")
    
    stats := statsCache.Stats()
    fmt.Printf("Cache stats - Hits: %d, Misses: %d, Items: %d\n",
        stats.Hits, stats.Misses, stats.ItemCount)
}

Code Explanation

  1. Cache Types

    • Simple cache: a map guarded by sync.RWMutex with optional per-item expiration
    • LRU cache: a map plus a container/list queue that evicts the least recently used entry once capacity is exceeded
    • Timed cache: like the simple cache, but with a background goroutine that removes expired entries on a ticker
    • Stats cache: records hits, misses, and item count alongside the stored values
  2. Features

    • Expiration via an optional per-item deadline (*time.Time)
    • Thread safety via the embedded sync.RWMutex
    • Automatic cleanup driven by time.Ticker and a stop channel
  3. Best Practices

    • Memory management: evict or clean up entries so the cache cannot grow without bound
    • Concurrency: take a write lock whenever a read path also mutates state (see the sketch after this list)
    • Statistics: expose hit/miss counters so cache effectiveness can be measured
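
As a minimal sketch of the concurrency point above (assuming the function is appended to the program from the solution, so SimpleCache and its imports are already in scope), concurrent readers and writers can share one cache safely:

func concurrencyDemo() {
    cache := NewSimpleCache()
    var wg sync.WaitGroup

    // Ten writers and ten readers use the same cache at the same time;
    // the embedded RWMutex serializes access to the underlying map.
    for i := 0; i < 10; i++ {
        wg.Add(2)
        go func(n int) {
            defer wg.Done()
            cache.Set(fmt.Sprintf("key%d", n), n, time.Minute)
        }(i)
        go func(n int) {
            defer wg.Done()
            cache.Get(fmt.Sprintf("key%d", n))
        }(i)
    }
    wg.Wait()
    fmt.Println("concurrent access finished")
}

Running it under the race detector (go run -race) is a quick way to confirm the locking is correct.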

Output

Simple Cache Example:
key1: value1
key1 expired

LRU Cache Example:
A was evicted
B is still present: 2

Timed Cache Example:
temp was automatically cleaned up
perm is still present: permanent

Stats Cache Example:
Cache stats - Hits: 1, Misses: 1, Items: 1

Tips

  • Choose the cache type that fits your workload
  • Handle concurrent access explicitly
  • Monitor memory usage (see the sketch below)
  • Set expiration times that match how quickly the data goes stale
  • Run a cleanup routine so expired entries are actually freed
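
As a rough sketch of the memory-monitoring tip (assuming the function is added to the program from the solution and that "runtime" is added to its imports), a periodic reporter can combine cache statistics with Go's runtime memory counters:

func monitorCache(c *StatsCache, interval time.Duration, stop <-chan struct{}) {
    ticker := time.NewTicker(interval)
    defer ticker.Stop()

    for {
        select {
        case <-ticker.C:
            // Snapshot heap usage and cache statistics on every tick.
            var m runtime.MemStats
            runtime.ReadMemStats(&m)
            stats := c.Stats()
            fmt.Printf("heap: %d KB, hits: %d, misses: %d, items: %d\n",
                m.HeapAlloc/1024, stats.Hits, stats.Misses, stats.ItemCount)
        case <-stop:
            return
        }
    }
}

Start it with go monitorCache(statsCache, time.Minute, stopCh) and close stopCh on shutdown; the stopCh channel here is a hypothetical make(chan struct{}).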