Nyx by Example

Microservice

A microservice that consumes from nyx-queue and uses nyx-kv as a cache. Cache-aside pattern: check cache first, fall through to DB on miss, populate cache for next request. EXPIRE bounds how long stale data can live.

Code

// Microservice pattern — nyx-queue consumer with nyx-kv cache

// Encode an argument list as a RESP command: an array header followed by
// one bulk string ("$<len>\r\n<data>\r\n") per argument.
fn resp_cmd(parts: Array) -> String {
    var out: StringBuilder = StringBuilder.new()
    let count: int = parts.length()
    out.append("*" + int_to_string(count) + "\r\n")
    var idx: int = 0
    while idx < count {
        let arg: String = parts[idx]
        out.append("$" + int_to_string(arg.length()) + "\r\n")
        out.append(arg + "\r\n")
        idx = idx + 1
    }
    return out.to_string()
}

// Send a RESP command and read a simple response line
// Open a fresh connection, send one encoded RESP command, and return the
// first reply line with surrounding whitespace trimmed.
// Returns "" when the connection cannot be established.
fn send_cmd(host: String, port: int, cmd: String) -> String {
    let sock: int = tcp_connect(host, port)
    if sock < 0 {
        return ""
    }
    tcp_write(sock, cmd)
    let line: String = tcp_read_line(sock)
    tcp_close(sock)
    return line.trim()
}

// Enrich a raw order event with cached user data
// Enrich a raw order event with cached user data (cache-aside pattern).
// Returns true once the order result has been written to nyx-kv.
fn process_order(order_id: String, user_id: String) -> bool {
    // Check nyx-kv cache for user profile
    let cache_key: String = "user:" + user_id
    let cached: String = send_cmd("127.0.0.1", 6380, resp_cmd(["GET", cache_key]))

    // BUG FIX: a RESP cache miss is the null bulk reply "$-1" — a NON-empty
    // line — so testing length alone would report a hit for every miss.
    // A hit's first line is a "$<len>" header instead. (send_cmd reads only
    // one line, so on a hit the value itself is never consumed here.)
    var hit: bool = false
    if cached.length() > 0 {
        if cached != "$-1" {
            hit = true
        }
    }

    if hit {
        print("  [cache hit] " + cache_key)
    } else {
        // Would fetch from DB and cache here
        print("  [cache miss] fetching user " + user_id)
        send_cmd("127.0.0.1", 6380, resp_cmd(["SET", cache_key, "{\"name\":\"Alice\"}"]))
        // 1-hour TTL bounds staleness even without explicit invalidation
        send_cmd("127.0.0.1", 6380, resp_cmd(["EXPIRE", cache_key, "3600"]))
    }

    // Process and store result
    let result_key: String = "order:" + order_id
    send_cmd("127.0.0.1", 6380, resp_cmd(["HSET", result_key, "status", "processed", "user", user_id]))
    print("  [processed] " + order_id)
    return true
}

// Entry point: simulates draining three order events from nyx-queue and
// processing each one through the cache-aside path.
fn main() -> int {
    // Simulate consuming 3 orders from nyx-queue
    print("microservice: order processor")
    process_order("ord_001", "u42")  // first access to u42 -> cache miss, populates cache
    process_order("ord_002", "u42")  // cache hit
    process_order("ord_003", "u7")   // different user -> another miss

    print("")
    print("architecture:")
    print("  API -> ENQUEUE orders -> nyx-queue -> DEQUEUE -> this service")
    print("  this service <-> nyx-kv (cache) <-> nyx-db (persistent)")
    return 0
}

Output

microservice: order processor
  [cache miss] fetching user u42
  [processed] ord_001
  [cache hit] user:u42
  [processed] ord_002
  [cache miss] fetching user u7
  [processed] ord_003

architecture:
  API -> ENQUEUE orders -> nyx-queue -> DEQUEUE -> this service
  this service <-> nyx-kv (cache) <-> nyx-db (persistent)

Explanation

Cache-aside is the most common caching strategy because it fails gracefully: if the cache is down, requests still work — just slower. The first request for user u42 was a miss, so we fetched from the DB and wrote to nyx-kv with a 1-hour TTL. The second request for the same user was a hit, served in microseconds. EXPIRE guarantees staleness can't grow unbounded — profile changes propagate within an hour even without explicit invalidation. Scale horizontally by running more instances of this same binary against the same queue.

← Previous Next →

Source: examples/by-example/98-microservice.nx