How to store MongoDB documents in Redis as a cache and flush them by timeout

I have a Node.js application that handles nested JSON documents like the following:

var post = {
  id: 123,
  title: 'Sterling Archer',    
  comments: [
    {text: 'Comment text', tags: ['tag1', 'tag2', 'tag3']},
    {text: 'Comment test', tags: ['tag2', 'tag5']}
  ]  
};

The application stores them in a MongoDB database. My documents require frequent updates, but MongoDB is too slow on writes for this workload. To resolve this problem I've decided to store the documents in Redis and flush them to MongoDB after some timeout (after 1-2 hours, for example).

So here is a code example of my update approach:

var redis_cli = require("redis").createClient();

app.post('/update/:id', function (req, res, next) {

  var id = req.params.id;
  redis_cli.get(id, function (err, document) {
      if (err) return next(err);

      // If there is no key, try to get from MongoDB
      if (!document) {
         DocumentModel.findById(id, function (err, document) {
             if (err) return next(err);
             if (!document) return next(new Error("Document with id " + id + " does not exist"));

             // Document in MongoDB, so store in redis
             redis_cli.set(id, JSON.stringify(document), function (err) {
                  if (err) return next(err);

                  updateDocument(document);
             });                    
         });    
      } else {
          updateDocument(JSON.parse(document));
      }

      function updateDocument (document) {
          // Do some updates ...
          document.title = "Updated post title";

          // Store to redis
          redis_cli.set(id, JSON.stringify(document), function (err) {
              if (err) return next(err);

              // Document updated successfully
              return res.status(200).send('OK');
          });              
      }          
  });
});

My first question is: what do you think of this approach to working with documents? Are there any problems with it?

And the second question is: how do I flush documents from Redis back to MongoDB and remove them from Redis? My goal is the following: I want to keep documents in Redis only while my users actively work with them, so if a document isn't being worked on it should be stored in MongoDB instead of Redis.

Your approach seems reasonable.

To "flush" the documents, keep a sorted set in Redis of the ids of documents that were updated, their scores set to the update timestamp. Periodically, e.g. every minute, do a ZRANGE on that set to get "old" (e.g. last update more than an hour ago) document ids, and for each id do a GET of the document, write it to Mongo, DEL the document and ZREM from the ordered set.

EDIT: untested and totally made-up code samples in pseudo-Node.js:

 function updateDocument (document) {
      // Do some updates ...
      document.title = "Updated post title";

      // Store to redis and record the update time in a sorted set
      var multi = redis_cli.multi();
      multi.set(id, JSON.stringify(document));
      multi.zadd('last_update', Math.floor(Date.now() / 1000), id);
      multi.exec(function (err, replies) {
          if (err) return next(err);

          // Document updated successfully
          return res.status(200).send('OK');
      });
  }

  // call this function periodically, e.g. every minute or so
  function flushOldDocuments () {
      var fromTime = Math.floor(Date.now() / 1000) - 3600;

      (function flushNext () {
          // no scores, offset 0, limit 1 -> get the oldest document id
          redis_cli.zrangebyscore('last_update', '-inf', fromTime, 'LIMIT', 0, 1, function (err, ids) {
              if (err || ids.length === 0) return; // done: nothing older than an hour

              var id = ids[0];
              // watch the key so a concurrent update aborts our transaction
              redis_cli.watch(id, function (err) {
                  if (err) return;

                  redis_cli.get(id, function (err, document) {
                      if (err || !document) return;
                      Mongo.write(JSON.parse(document)); // or something like that

                      var multi = redis_cli.multi();
                      multi.zrem('last_update', id);
                      multi.del(id);
                      multi.exec(function (err, replies) {
                          // if the watch fails the exec, no harm done and the
                          // document will be flushed an hour after its next update
                          flushNext(); // move on to the next old document
                      });
                  });
              });
          });
      })();
  }
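
To run the flush periodically, a plain setInterval is enough. Again, this is an untested sketch: writeToMongo is a hypothetical stand-in for the Mongo.write placeholder above, assuming the Mongoose DocumentModel from your question; adjust it to your actual schema.

  // run the flush job every minute while the process is alive
  setInterval(flushOldDocuments, 60 * 1000);

  // hypothetical stand-in for Mongo.write, assuming the Mongoose
  // DocumentModel from the question
  function writeToMongo (document, callback) {
      var id = document._id || document.id; // whichever id field the cached JSON carries
      delete document._id;                  // _id is immutable, so don't try to $set it
      DocumentModel.update({ _id: id }, document, { upsert: true }, callback);
  }

Note that because updateDocument refreshes the sorted-set score on every write, documents your users keep touching stay in Redis, and only idle ones get written back to MongoDB and evicted, which matches the behaviour you described.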