Mahakam comes with built-in cache support. Here is the list of supported cache providers:
- Map (Simple in-memory cache)
- Redis
- Memcached
Example
Here is an example of a cache middleware using the Map cache provider.
// main.go
package main

import (
	"encoding/json"
	"log"
	"net/http"
	"time"

	"github.com/seiortech/mahakam"
	"github.com/seiortech/mahakam/extensions"
)

type response struct {
	Id   int    `json:"id"`
	Name string `json:"name"`
}

func main() {
	cache := extensions.NewMapCache()

	mux := http.NewServeMux()

	mux.HandleFunc("/cache/{name}", func(w http.ResponseWriter, r *http.Request) {
		name := r.PathValue("name")
		if name == "" {
			http.Error(w, "Name is required", http.StatusBadRequest)
			return
		}

		if c, ok := cache.Get(name); ok {
			log.Println("Cache hit for ", name)

			w.Header().Set("Content-Type", "application/json")
			w.WriteHeader(http.StatusOK)
			if err := json.NewEncoder(w).Encode(c); err != nil {
				http.Error(w, "Failed to encode response", http.StatusInternalServerError)
				return
			}
			return
		} else {
			log.Println("Cache miss for ", name)
			data := response{
				Id:   int(time.Now().UnixMilli()),
				Name: name,
			}

			if err := cache.Set(name, data); err != nil {
				http.Error(w, "Failed to set cache", http.StatusInternalServerError)
				return
			}

			w.Header().Set("Content-Type", "application/json")
			w.WriteHeader(http.StatusOK)
			if err := json.NewEncoder(w).Encode(data); err != nil {
				http.Error(w, "Failed to encode response", http.StatusInternalServerError)
				return
			}
		}
	})

	// cache using middleware
	mux.HandleFunc("/cache-middleware/{name}", extensions.CacheMiddleware(cache, func(w http.ResponseWriter, r *http.Request) {
		name := r.PathValue("name")
		if name == "" {
			http.Error(w, "Name is required", http.StatusBadRequest)
			return
		}

		data := response{
			Id:   int(time.Now().UnixMilli()),
			Name: name,
		}

		w.Header().Set("Content-Type", "application/json")
		w.WriteHeader(http.StatusOK)
		if err := json.NewEncoder(w).Encode(data); err != nil {
			http.Error(w, "Failed to encode response", http.StatusInternalServerError)
			return
		}
	}))

	if err := mahakam.NewServer(":8080", mux).ListenAndServe(); err != nil {
		log.Fatalf("Failed to start server: %v", err)
	}
}
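If you want to sanity-check the caching behavior, you can exercise the middleware route with the standard library's httptest package. The test below is only an illustrative sketch, not part of Mahakam: it assumes CacheMiddleware short-circuits on a cache hit (as described in the next section), so the wrapped handler should run only once for repeated requests to the same path.

// main_test.go (illustrative file name)
package main

import (
	"fmt"
	"net/http"
	"net/http/httptest"
	"sync/atomic"
	"testing"

	"github.com/seiortech/mahakam/extensions"
)

func TestCacheMiddleware(t *testing.T) {
	cache := extensions.NewMapCache()

	// Count how many times the wrapped handler actually runs.
	var hits int32

	mux := http.NewServeMux()
	mux.HandleFunc("/cache-middleware/{name}", extensions.CacheMiddleware(cache, func(w http.ResponseWriter, r *http.Request) {
		atomic.AddInt32(&hits, 1)
		w.Header().Set("Content-Type", "application/json")
		fmt.Fprintf(w, `{"name": %q}`, r.PathValue("name"))
	}))

	srv := httptest.NewServer(mux)
	defer srv.Close()

	// Request the same path twice; the second response should come from the cache.
	for i := 0; i < 2; i++ {
		res, err := http.Get(srv.URL + "/cache-middleware/gopher")
		if err != nil {
			t.Fatalf("request failed: %v", err)
		}
		res.Body.Close()
	}

	// Assuming the middleware serves repeats from the cache, the handler ran once.
	if got := atomic.LoadInt32(&hits); got != 1 {
		t.Fatalf("expected the handler to run once, it ran %d times", got)
	}
}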
Cache Middleware
You can use the CacheMiddleware to cache the response of a handler. It caches response data for HTTP requests, matching them by request path and method. Here is an example of using the CacheMiddleware.
cacheProvider := extensions.NewMapCache()

mux.HandleFunc("/cache-middleware/{name}", extensions.CacheMiddleware(cacheProvider, func(w http.ResponseWriter, r *http.Request) {
	name := r.PathValue("name")
	if name == "" {
		http.Error(w, "Name is required", http.StatusBadRequest)
		return
	}

	data := response{
		Id:   int(time.Now().UnixMilli()),
		Name: name,
	}

	w.Header().Set("Content-Type", "application/json")
	w.WriteHeader(http.StatusOK)
	if err := json.NewEncoder(w).Encode(data); err != nil {
		http.Error(w, "Failed to encode response", http.StatusInternalServerError)
		return
	}
}))
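Under the hood, a middleware like this generally works by recording what the handler writes and replaying it on later requests. The framework's actual implementation may differ; the sketch below only illustrates the pattern, using a local cacheStore interface that mirrors the Get and Set methods of the Cache interface documented at the bottom of this page.

// Package cacheexample sketches how a response-caching middleware can work.
// This is NOT Mahakam's implementation; all names here are illustrative.
package cacheexample

import (
	"bytes"
	"log"
	"net/http"
)

// cacheStore mirrors the Get/Set part of the documented Cache interface.
type cacheStore interface {
	Get(key string) (interface{}, bool)
	Set(key string, value interface{}) error
}

// responseRecorder copies everything the handler writes so it can be cached.
type responseRecorder struct {
	http.ResponseWriter
	status int
	body   bytes.Buffer
}

func (r *responseRecorder) WriteHeader(status int) {
	r.status = status
	r.ResponseWriter.WriteHeader(status)
}

func (r *responseRecorder) Write(b []byte) (int, error) {
	r.body.Write(b)
	return r.ResponseWriter.Write(b)
}

// cachingMiddleware serves a stored response when the key (method + path) is
// found in the cache; otherwise it runs the handler, records the body, and
// stores it for the next request.
func cachingMiddleware(cache cacheStore, next http.HandlerFunc) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		key := r.Method + " " + r.URL.Path

		if v, ok := cache.Get(key); ok {
			if body, ok := v.([]byte); ok {
				w.Header().Set("Content-Type", "application/json")
				w.Write(body)
				return
			}
		}

		rec := &responseRecorder{ResponseWriter: w, status: http.StatusOK}
		next(rec, r)

		// Only cache successful responses.
		if rec.status < 300 {
			if err := cache.Set(key, rec.body.Bytes()); err != nil {
				log.Println("failed to cache response:", err)
			}
		}
	}
}

Because the key in this sketch is built from the method and path, different paths never collide, but query strings and request bodies are ignored.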
Cache Providers
Currently, we support the following cache providers:
- Map (Simple in-memory cache)
- Redis
- Memcached
Map
Map is a simple in-memory cache that stores data in a map. It keeps everything in your application's process memory and is suitable for small-scale applications.
Avoid using the Map cache provider in production; you can't scale it beyond a single process.
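You can also use the cache directly, outside of an HTTP handler. Here is a minimal sketch using the methods of the Cache interface documented at the bottom of this page; note that the unit of the SetWithExpiration value is provider-specific and is only assumed here.

package main

import (
	"fmt"
	"log"

	"github.com/seiortech/mahakam/extensions"
)

func main() {
	cache := extensions.NewMapCache()
	defer cache.Close()

	// Store a value and read it back.
	if err := cache.Set("greeting", "hello"); err != nil {
		log.Fatalf("failed to set cache: %v", err)
	}
	if v, ok := cache.Get("greeting"); ok {
		fmt.Println("cached value:", v)
	}

	// Exists reports whether a key is present without fetching it.
	fmt.Println("exists:", cache.Exists("greeting"))

	// SetWithExpiration stores a value with an expiration; the int64 unit
	// (seconds, milliseconds, ...) depends on the provider, so check the
	// implementation you use before relying on it.
	if err := cache.SetWithExpiration("temporary", "bye", 60); err != nil {
		log.Fatalf("failed to set cache with expiration: %v", err)
	}

	// Delete removes a key from the cache.
	if err := cache.Delete("greeting"); err != nil {
		log.Fatalf("failed to delete cache entry: %v", err)
	}
}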
Redis
Redis is a popular in-memory data store that is widely used for caching. It runs as a separate service, which makes it suitable for large-scale applications.
Example
Here is an example of using the Redis cache provider.
package main

import (
	"encoding/json"
	"log"
	"net/http"
	"time"

	"github.com/redis/go-redis/v9"
	"github.com/seiortech/mahakam"
	"github.com/seiortech/mahakam/extensions"
)

type response struct {
	Id   int    `json:"id"`
	Name string `json:"name"`
}

func main() {
	redisClient := redis.NewClient(&redis.Options{
		Addr:     "localhost:6379",
		Password: "",
		DB:       0,
	})

	cache, err := extensions.NewRedisCache(redisClient)
	if err != nil {
		log.Fatalf("Failed to create Redis cache: %v", err)
	}

	mux := http.NewServeMux()

	mux.HandleFunc("/cache/{name}", func(w http.ResponseWriter, r *http.Request) {
		name := r.PathValue("name")
		if name == "" {
			http.Error(w, "Name is required", http.StatusBadRequest)
			return
		}

		if c, ok := cache.Get(name); ok {
			log.Println("Cache hit for ", name)

			w.Header().Set("Content-Type", "application/json")
			w.WriteHeader(http.StatusOK)
			if err := json.NewEncoder(w).Encode(c); err != nil {
				http.Error(w, "Failed to encode response", http.StatusInternalServerError)
				return
			}
			return
		}

		log.Println("Cache miss for", name)
		data := response{
			Id:   int(time.Now().UnixMilli()),
			Name: name,
		}

		var cacheData []byte
		if cacheData, err = json.Marshal(data); err != nil {
			log.Println("Failed to marshal data:", err)
			http.Error(w, "Failed to marshal data", http.StatusInternalServerError)
			return
		}

		if err := cache.Set(name, cacheData); err != nil {
			log.Println("Failed to set cache:", err)
			http.Error(w, "Failed to set cache", http.StatusInternalServerError)
			return
		}

		w.Header().Set("Content-Type", "application/json")
		w.WriteHeader(http.StatusOK)
		if err := json.NewEncoder(w).Encode(data); err != nil {
			http.Error(w, "Failed to encode response", http.StatusInternalServerError)
			return
		}
	})

	// cache using middleware
	mux.HandleFunc("/cache-middleware/{name}", extensions.CacheMiddleware(cache, func(w http.ResponseWriter, r *http.Request) {
		name := r.PathValue("name")
		if name == "" {
			http.Error(w, "Name is required", http.StatusBadRequest)
			return
		}

		data := response{
			Id:   int(time.Now().UnixMilli()),
			Name: name,
		}

		w.Header().Set("Content-Type", "application/json")
		w.WriteHeader(http.StatusOK)
		if err := json.NewEncoder(w).Encode(data); err != nil {
			http.Error(w, "Failed to encode response", http.StatusInternalServerError)
			return
		}
	}))

	if err := mahakam.NewServer(":8080", mux).ListenAndServe(); err != nil {
		log.Fatalf("Failed to start server: %v", err)
	}
}
Now we need to set up Redis. I'm going to use Docker Compose for this.
services:
  redis:
    image: redis:8.0.2-alpine
    container_name: redis
    ports:
      - "6379:6379"
    volumes:
      - redis_data:/data
    command: redis-server --appendonly yes

volumes:
  redis_data:
Memcached
SOON
Custom Cache Provider
You can implement your own cache provider by implementing the Cache interface.
// Cache is an interface that defines methods for caching data.
type Cache interface {
	// Get retrieves a value from the cache by key.
	Get(key string) (interface{}, bool)

	// Set stores a value in the cache storage with the specified key.
	Set(key string, value interface{}) error

	// SetWithExpiration adds a value to the cache with a specific expiration time.
	SetWithExpiration(key string, value interface{}, expiration int64) error

	// Delete removes a value from the cache by key.
	Delete(key string) error

	// Exists checks if a key exists in the cache.
	Exists(key string) bool

	// Close closes the cache storage, releasing any resources it holds.
	Close() error
}
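For example, a minimal in-memory provider with per-key expiration could look like the sketch below. It's only an illustration of satisfying the interface, not code from Mahakam, and it treats the expiration argument as seconds, which is an assumption.

// Package customcache sketches a custom provider: a mutex-guarded map with
// per-key expiration that satisfies the Cache interface above.
package customcache

import (
	"sync"
	"time"
)

// entry holds a cached value together with an optional expiry time.
type entry struct {
	value     interface{}
	expiresAt time.Time // zero value means "never expires"
}

type TTLCache struct {
	mu    sync.RWMutex
	items map[string]entry
}

func NewTTLCache() *TTLCache {
	return &TTLCache{items: make(map[string]entry)}
}

func (c *TTLCache) Get(key string) (interface{}, bool) {
	c.mu.RLock()
	e, ok := c.items[key]
	c.mu.RUnlock()
	if !ok || (!e.expiresAt.IsZero() && time.Now().After(e.expiresAt)) {
		return nil, false
	}
	return e.value, true
}

func (c *TTLCache) Set(key string, value interface{}) error {
	c.mu.Lock()
	c.items[key] = entry{value: value}
	c.mu.Unlock()
	return nil
}

// SetWithExpiration treats expiration as a number of seconds (an assumption;
// use whatever unit your application standardizes on).
func (c *TTLCache) SetWithExpiration(key string, value interface{}, expiration int64) error {
	c.mu.Lock()
	c.items[key] = entry{
		value:     value,
		expiresAt: time.Now().Add(time.Duration(expiration) * time.Second),
	}
	c.mu.Unlock()
	return nil
}

func (c *TTLCache) Delete(key string) error {
	c.mu.Lock()
	delete(c.items, key)
	c.mu.Unlock()
	return nil
}

func (c *TTLCache) Exists(key string) bool {
	_, ok := c.Get(key)
	return ok
}

func (c *TTLCache) Close() error {
	c.mu.Lock()
	c.items = nil
	c.mu.Unlock()
	return nil
}

Anything that satisfies this interface can then be passed to CacheMiddleware or used anywhere the built-in providers are used.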