Stock synchronisation design between web shop and legacy system

I am writing an e-commerce shop in Node.js for a clothing company, and I have an issue with stock synchronization between the web application and the back-office accounting package, which runs on Opera 2 (FoxPro DBF files).

Here is the design I have thus far and want to get your input if it can be improved.

stock synchronisation

Basically, the Opera quantity is made up of goods in, returns, and adjustments such as spoiled goods that need to be removed from the stock.

I have a small node.js application running on the Opera App server:

watch = require "watch"
path = require "path"
Promise = require "bluebird"
DBFParser = require "node-dbf"
request = require "request"

fakeChanges = true
serverUrl = "http://localhost:3000/api/stock"

# Map a raw c_cname.dbf row onto the stock payload pushed to the web shop.
# When fakeChanges is enabled, the free-stock figure is jittered by up to
# ±10% so demo runs show visible movement.
convert = (dbfr) ->
  qty = dbfr.CN_FREEST
  qty = Math.round(dbfr.CN_FREEST * (0.9 + 0.2 * Math.random())) if fakeChanges
  {
    sku: dbfr.CN_REF
    description: dbfr.CN_DESC
    sellPrice: dbfr.CN_SELL
    inStock: qty
    comCode: dbfr.CN_COMCODE
    countryOfOrigin: dbfr.CN_CNTORIG
  }

# Parse the given DBF file and resolve with the array of converted live
# records. Rows flagged as deleted, rows where CN_DORMANT is not "F", and
# rows with no CN_CAT value are skipped.
getRecords = (fname) ->
  isLive = (row) -> not row['@deleted'] and row.CN_DORMANT is "F" and row.CN_CAT
  new Promise (resolve, reject) ->
    results = []
    parser = new DBFParser fname
    parser.on "record", (row) ->
      results.push convert(row) if isLive(row)
    parser.on "error", reject
    parser.on "end", -> resolve results
    parser.parse()

# POST the full record set to the web shop's stock API as JSON.
# Failures are only logged; the next file-change event retries naturally.
uploadRecords = (records) ->
  options =
    url: serverUrl
    json: true
    body: records
  request.post options, (err, response, body) ->
    return console.error(err) if err
    console.log("Pushed #{records.length} records", body)

# Only react to changes on the Opera stock table itself.
filefilter = (fname) -> "c_cname.dbf" is path.basename fname
# Re-read the whole table and push it upstream; errors are logged, not fatal.
onFileChanged = ->
  getRecords("data/c_cname.dbf")
    .then(uploadRecords)
    .catch (err) -> console.error err

console.log "Watching files..."
# NOTE(review): the `watch` package passes directories through the filter as
# well — confirm the "data" directory itself still passes (or is traversed)
# with this basename filter, otherwise change events may never fire.
watch.watchTree "data", {filter: filefilter}, onFileChanged
# In demo mode, also force a push every second so the UI visibly updates.
setInterval onFileChanged, 1000 if fakeChanges

The sync script basically watches for changes on the c_cname.dbf file and pushes the changes.

On the web app I have server.coffee as:

cluster = require "cluster"
numCPUs = require("os").cpus().length
app = require "./app"
config = require "config"
engine = require "engine.io"
http = require "http"
Promise = require "bluebird"

# Master process: fork one worker per CPU, respawn workers that die
# unexpectedly, and exercise a detach/disconnect handshake against each
# worker shortly after it starts listening.
if cluster.isMaster and config.server.enableCluster
  # Refork only when the worker died without being asked to; disconnect()
  # marks the death as deliberate via worker.suicide.
  # NOTE(review): worker.suicide is the legacy name for
  # exitedAfterDisconnect — confirm against the Node version in use.
  RespawnWorker = (worker, code, signal) ->
    if not worker.suicide
      console.log "worker #{worker.process.pid} died"
      cluster.fork()

  for i in [1 .. numCPUs]
    cluster.fork()

  # re-create worker upon unnatural death
  cluster.on 'exit', RespawnWorker

  # One second after a worker reports 'listening', start the detach
  # handshake against it.
  cluster.on 'listening', (worker) ->
    console.log 'MASTER: worker[%s] listening', worker.process.pid
    setTimeout (whackAMole.bind null, worker), 1000

  # Send 'detach' to the worker; when it answers 'detached', disconnect()
  # it. The disconnect is deliberate, so RespawnWorker will not refork it.
  whackAMole = (worker) ->
    console.log 'MASTER: sending \'detach\' to worker %s', worker.process.pid
    worker.send 'detach'

    # Listener is attached in the same tick as send(), so the reply cannot
    # be missed.
    worker.once 'message', (msg) ->
      console.log 'MASTER: worker[%s] response: %s', worker.process.pid, msg
      if msg == 'detached'
        console.log 'MASTER: disconnect()ing worker[%s]', worker.process.pid
        worker.disconnect()
        worker.once 'disconnect', () ->
          console.log 'MASTER: \'disconnect\' from worker[%s]: %s', worker.suicide, worker.process.pid

# Worker process (or single-process mode): serve HTTP plus engine.io
# websockets, send each new socket the current stock map from Redis, and
# relay every published "stock.change" message to it.
else
  port = app.get "port"
  httpServer = http.createServer(app).listen(port)
  console.log "Starting Web Server on Port #{port}"
  wsServer = engine.attach httpServer
  redisUrl = require("redis-url")
  # Two Redis connections: a subscribed client cannot issue regular
  # commands, so dbClient reads data while channelClient only subscribes.
  dbClient = redisUrl.connect(config.database.redis.url)
  channelClient = redisUrl.connect(config.database.redis.url)
  channelClient.subscribe "stock.change"
  # NOTE(review): promisify(fn, receiver) is the bluebird 2.x signature;
  # bluebird 3.x expects {context: dbClient} — confirm the version in use.
  hgetallPr = Promise.promisify dbClient.hgetall, dbClient
  wsServer.on "connection", (socket) ->
    hgetallPr "stock"
      .then (map) ->
        # Push the full stock hash on connect, then stream each published
        # change; remove the listener on close to avoid leaking one
        # listener per departed socket.
        socket.send JSON.stringify(map)
        onStockChange = (chanel, mapJson) -> socket.send mapJson
        channelClient.addListener "message", onStockChange
        socket.on "close", -> channelClient.removeListener "message", onStockChange    
      .catch (err) -> console.error err

And on the client, main.coffee:

$(document).ready ()->

  # Live stock display: open an engine.io socket back to the serving host
  # and refresh every element tagged .real-time-stock whenever the server
  # pushes a new stock map.
  stockInputs = $(".real-time-stock")
  if stockInputs.length
    # Match the page's scheme so HTTPS pages use a secure websocket.
    scheme = window.location.protocol
    if scheme == 'https:'
      ws = "wss://" + window.location.host
    else
      ws = "ws://" + window.location.host
    socket = new eio.Socket(ws)
    socket.on "open", ->
      socket.on "message", (data) ->
        # data is the JSON-encoded stock map: { sku: quantity, ... }
        map = JSON.parse(data)
        stockInputs.each ->
          jinput = $(this)
          sku = jinput.data("sku")
          # Existence check rather than truthiness: a quantity of 0 must
          # render as "0", not as an empty string.
          stock = if map[sku]? then map[sku].toString() else ""
          isInput = jinput.prop("tagName") is "INPUT"
          if isInput
            # Inputs keep any typed value; stock goes in the placeholder.
            jinput.attr "placeholder", stock
          else
            jinput.text stock

This works up to a point, in that changes on the Opera app are synced to the web shop. The issue I have is how to ensure the two applications agree on how much stock is available, taking into account that there will be a lag between placing the order on the web shop and processing it on the back-office application.

opera_qty - this is the value that is taken from the FreeStock on the DBF file for that sku

sku_qty - this is the initial quantity stored for the specific sku

total_carts_qty_for_this_sku - this is the total qty for all the carts on the shop

So I think the available_sku_qty is derived from this formula:

available_sku_qty = sku_qty + ((sku_qty - total_carts_qty_for_this_sku) - opera_qty)

If we have sku_qty and opera_qty both at 1000, and 0 items in carts, then

available_sku_qty = 1000 + ((1000-0)- 1000) equals 1000

If there are 200 in the carts, then

available_sku_qty = 1000 + ((1000-200)- 1000) equals 800

Then once the cart items have been processed and opera_qty has been updated, we get:

available_sku_qty = 800 + ((800-0) - 800) equals 800

The problem is that the update on Opera can lag by up to 30 minutes, so during that window the two sides will hold different values, and I don't see how best to design around this.

I am using Redis to keep track of these, and I want to know if this is the correct approach or if there is a different way to do this.