I have code like this:
var subscribeNewMessages = require('redis').createClient(config.redis.port, config.redis.host);
subscribeNewMessages.subscribe('new-messages');
io.of('/new-messages').on('connection', function(client) {
  subscribeNewMessages.on('message', function(channel, message) {
    var obj = JSON.parse(message);
    if (client.userId == obj.toUserId || client.guestId == obj.toUserId) {
      client.send(message);
    }
    obj = null;
  });
});
How can I optimize it? This code parses the JSON string once per connected client for every new message.
Also, when I publish to the Redis channel, I have to JSON.stringify the data:
redis1.publish(channelPrefix, JSON.stringify(clientData));
There isn't going to be a way to avoid JSON.parse()/JSON.stringify() as long as you're passing JavaScript objects around. You could use a different serialization format like msgpack, but you're still calling functions to serialize/deserialize your data (and JSON.parse()/JSON.stringify() are already pretty hard to beat performance-wise in node).
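For what it's worth, here's a minimal sketch of the msgpack route, assuming the msgpack-lite package (any MessagePack implementation works similarly) and node_redis's return_buffers option, since msgpack.decode() expects a Buffer rather than a string:

var msgpack = require('msgpack-lite');

// Publisher side: encode to a Buffer instead of a JSON string
// ('pub' here stands for your publishing redis client)
pub.publish(channelPrefix, msgpack.encode(clientData));

// Subscriber side: return_buffers makes node_redis deliver Buffers
var sub = require('redis').createClient(config.redis.port, config.redis.host, { return_buffers: true });
sub.subscribe('new-messages');
sub.on('message', function(channel, message) {
  var obj = msgpack.decode(message);
  // ...same routing logic as with JSON.parse()...
});

Again, you're still serializing on both ends either way, so don't expect much of a win from swapping formats.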
I think the only real performance adjustment you can make with the code you've provided is to parse the JSON once per message for all clients, instead of once per client. Example:
var subscribeNewMessages = require('redis').createClient(config.redis.port, config.redis.host);
subscribeNewMessages.subscribe('new-messages');
var nsNewMsgs = io.of('/new-messages');
// One 'message' listener for the whole namespace, instead of one per connected socket
subscribeNewMessages.on('message', function(channel, message) {
  var obj = JSON.parse(message),        // parse once per message
      clients = nsNewMsgs.connected,    // all sockets in this namespace, keyed by socket.id
      ids = Object.keys(clients);
  for (var i = 0, len = ids.length, client; i < len; ++i) {
    client = clients[ids[i]];
    if (client.userId == obj.toUserId || client.guestId == obj.toUserId)
      client.send(message);             // forward the original string; no re-stringify needed
  }
});
Depending on your application, you might even be able to avoid the for-loop entirely: if you can store socket.id values in your published messages, you can simply look up clients[obj.userSockId] and clients[obj.guestSockId], because connected is keyed on socket.id.
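A rough sketch of that lookup, assuming your publisher adds userSockId/guestSockId fields (which your current data doesn't have):

subscribeNewMessages.on('message', function(channel, message) {
  var obj = JSON.parse(message),
      user = nsNewMsgs.connected[obj.userSockId],   // O(1) lookups by socket.id
      guest = nsNewMsgs.connected[obj.guestSockId];
  if (user) user.send(message);
  if (guest) guest.send(message);
});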