Using a Redis pipeline to batch-insert hash data
import org.apache.commons.lang.math.RandomUtils;
import redis.clients.jedis.Jedis;
import redis.clients.jedis.Pipeline;

public class RedisEasyTest {

    private static Jedis jedis = new Jedis("xx.xx.xx.xx");
    private static Pipeline p = jedis.pipelined();
    private static int KEY_COUNT = 10000;
    private static int FIELD_COUNT = 10;

    // One network round trip per command: every hset/expire waits for its reply.
    public void single() {
        for (int i = 0; i < KEY_COUNT; i++) {
            String key = RandomUtils.nextInt(5) + "";
            for (int j = 0; j < FIELD_COUNT; j++) {
                jedis.hset(key, j + "", i + j + "");
                jedis.expire(key, 3600);
            }
        }
    }

    // Commands are queued on the pipeline and flushed to the server every 1000 keys via sync().
    public void batch() {
        int index = 0;
        for (int i = 0; i < KEY_COUNT; i++) {
            String key = RandomUtils.nextInt(5) + "";
            for (int j = 0; j < FIELD_COUNT; j++) {
                p.hset(key, j + "", i + j + "");
                p.expire(key, 3600);
            }
            if (++index % 1000 == 0) {
                p.sync();
            }
        }
        p.sync(); // flush whatever remains in the buffer
    }

    public static void main(String[] args) {
        long start = System.currentTimeMillis();
        RedisEasyTest r = new RedisEasyTest();
        r.single();
        System.out.printf("single use %d sec \n", (System.currentTimeMillis() - start) / 1000);
        start = System.currentTimeMillis();
        r.batch();
        System.out.printf("batch use %d sec \n", (System.currentTimeMillis() - start) / 1000);
    }
}
Output:
single use 30 sec
batch use 0 sec
As the output shows, batch insertion through a pipeline performs very well. single() issues 10000 x 10 x 2 = 200,000 commands, each paying a full network round trip, so it takes about 30 seconds; batch() queues the same commands and flushes them with only a handful of sync() calls, finishing in under a second (the elapsed milliseconds divide down to 0).
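A pipeline can also return data: each queued command hands back a Response object whose value becomes available only after sync() has flushed the buffer. The snippet below is a minimal sketch for illustration, not part of the benchmark above; the host xx.xx.xx.xx, the key "0" (one of the keys the test writes), and the field name "field" are assumed placeholders.

import java.util.Map;
import redis.clients.jedis.Jedis;
import redis.clients.jedis.Pipeline;
import redis.clients.jedis.Response;

public class PipelineReadSketch {
    public static void main(String[] args) {
        Jedis jedis = new Jedis("xx.xx.xx.xx");
        Pipeline p = jedis.pipelined();

        // Queue commands; nothing is sent to Redis until sync() is called.
        Response<Long> added = p.hset("0", "field", "value");
        Response<Map<String, String>> all = p.hgetAll("0");

        p.sync(); // one round trip carries both commands and their replies

        // Calling get() before sync() would throw; after sync() the values are populated.
        System.out.println("hset returned: " + added.get());
        System.out.println("hash contents: " + all.get());

        jedis.close();
    }
}

The periodic sync() every 1000 keys in batch() serves the same purpose as the single sync() here: it bounds the number of commands buffered on the client before the replies are drained.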
--end