
A Data Processing Scheme for High Concurrency


Tags: data, batch processing, high concurrency

Requirements:

There are roughly 5,000 devices, and each one reports its status (such as the longitude and latitude of its current position) to the server once a minute. The server must process the data carried by these report requests and persist it to a database.

The requirement is that simple, but the concurrency the server has to handle is not trivial: 5,000 devices each reporting once a minute works out to 5000 / 60 ≈ 83, i.e. nearly 100 requests per second on average. Faced with this situation, what would you do?

My solution uses an in-memory cache plus JDBC batch inserts. The code is as follows:

package com.jimu.data.servlet;

import org.apache.log4j.Logger;
import com.jimu.data.conn.JimuDataBakConn;

import java.io.IOException;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.util.HashMap;
import java.util.LinkedList;

import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;


public class GpsRecordServlet extends HttpServlet{
	
	private static final long serialVersionUID = 1L;
	// logger
	private static final Logger logger = Logger.getLogger(GpsRecordServlet.class);
	// cache size: flush to the database once this many records have accumulated
	private static final int cacheSize = 2000;
	// switch flag selecting the active cache list (shared state: read/written only
	// through the synchronized accessors below; note that the list operations
	// themselves are not synchronized)
	private static int switchFlag = 1;
	
	// two lists used alternately: when the active list reaches cacheSize, its
	// contents are batch-inserted into the database and the list is cleared
	private static LinkedList<HashMap<String,Object>> cacheList1 = new LinkedList<HashMap<String,Object>>();
	private static LinkedList<HashMap<String,Object>> cacheList2 = new LinkedList<HashMap<String,Object>>();
	
	// example request: 127.0.0.1:8080/jimudata/recordgps.html?tsn=9303ZH201508080107&lng=23.175&lat=113.245&tim=1436756925668
	public void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
		response.setContentType("text/html;charset=UTF-8");
        request.setCharacterEncoding("UTF-8");
        
        String termSn = request.getParameter("tsn");		// device serial number
        String longitude = request.getParameter("lng");		// longitude
        String latitude = request.getParameter("lat");		// latitude
        String gpsTime = request.getParameter("tim");		// GPS timestamp
        int size = 0;
        if(getSwitchFlag() == 1){
        	size = cacheRecord(termSn,longitude,latitude,gpsTime,getCacheList1());
        	if(size >= cacheSize){
        		setSwitchFlag(2);
        		saveCacheList(getCacheList1());
        		// clear the cache
        		getCacheList1().clear();
        	}
        }else if(getSwitchFlag() == 2){
        	size = cacheRecord(termSn,longitude,latitude,gpsTime,getCacheList2());
        	if(size >= cacheSize){
        		setSwitchFlag(1);
        		saveCacheList(getCacheList2());
        		// clear the cache
        		getCacheList2().clear();
        	}
        }
	}
	public void doPost(HttpServletRequest request, HttpServletResponse response)
			throws ServletException, IOException {
		doGet(request, response);
	}
	
	public int cacheRecord(String termSn,String longitude,String latitude,String gpsTime,LinkedList<HashMap<String,Object>> cacheList){
		HashMap<String,Object> itemMap = new HashMap<String, Object>();
		itemMap.put("termSn",termSn);		// key must match the "termSn" lookup in saveCacheList()
		itemMap.put("longitude",longitude);
		itemMap.put("latitude",latitude);
		itemMap.put("gpsTime",gpsTime);
		cacheList.add(itemMap);
		return cacheList.size();
	}
	
	public void saveCacheList(LinkedList<HashMap<String,Object>> cacheList){
		if(cacheList.size() == 0){
			return;
		}
		// data source JimuBus maps to database jimubus; batch-insert the cached records
		Connection conn = null;
		PreparedStatement ps = null;
		try{
			conn = JimuDataBakConn.getConn();
			String sql = " INSERT INTO term_gps_record (term_sn,longitude,latitude,gps_time,create_time) VALUES(?,?,?,?,now()) ";
			ps = conn.prepareStatement(sql);
			// JDBC batch size
			final int batchSize = 500;
			int count = 0;
			for (HashMap<String,Object> itemMap: cacheList) {
				String termSn = itemMap.get("termSn") == null ? "":itemMap.get("termSn").toString();
				ps.setString(1,termSn);
				String longitude = itemMap.get("longitude") == null ? "":itemMap.get("longitude").toString();
				ps.setString(2,longitude);
				String latitude = itemMap.get("latitude") == null ? "":itemMap.get("latitude").toString();
				ps.setString(3,latitude);
				String gpsTime = itemMap.get("gpsTime") == null ? "":itemMap.get("gpsTime").toString();
				ps.setString(4,gpsTime);
				
				ps.addBatch();
			    if(++count % batchSize == 0) {
			        ps.executeBatch();		// execute once batchSize statements have accumulated
			        ps.clearBatch();		// clear the statements accumulated in the PreparedStatement
			    }
			}
			ps.executeBatch(); 				// insert the remaining records
			
		}catch (Exception e) {
			logger.error("batch insert failed", e);
		}finally{
			try{ if(ps != null){ ps.close(); ps = null; } }catch(Exception e){}
			try{ if(conn != null){ conn.close(); conn = null; } }catch(Exception e){}
		}
	}
	
	// on application shutdown, flush any records still in the caches to the database
	public void destroy(){
		logger.info("Tomcat is shutting down; saving cached records!");
		saveCacheList(getCacheList1());
		saveCacheList(getCacheList2());
	}
	
	private synchronized static int getSwitchFlag(){
		return switchFlag;
	}
	
	private synchronized static void setSwitchFlag(int flag){
		switchFlag = flag;
	}
	
	private synchronized static LinkedList<HashMap<String,Object>> getCacheList1(){
		return cacheList1;
	}
	
	private synchronized static LinkedList<HashMap<String,Object>> getCacheList2(){
		return cacheList2;
	}

}
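
For completeness: the example URL in the code comment (/jimudata/recordgps.html) implies a servlet mapping along these lines. This is a hypothetical web.xml fragment, not taken from the original post; the servlet-name is an assumed label.

<servlet>
	<servlet-name>GpsRecordServlet</servlet-name>
	<servlet-class>com.jimu.data.servlet.GpsRecordServlet</servlet-class>
</servlet>
<servlet-mapping>
	<servlet-name>GpsRecordServlet</servlet-name>
	<url-pattern>/recordgps.html</url-pattern>
</servlet-mapping>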
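
One caveat worth noting: LinkedList is not thread-safe, and the synchronized accessors above only guard reads of the shared references, not the add()/clear() calls themselves, so two concurrent requests can still race on the active list; the 2,000-row insert also runs on whichever request thread happens to trip the threshold. Below is a minimal alternative sketch (mine, not part of the original post) that replaces the two manually switched lists with a single java.util.concurrent queue drained by one background thread. GpsRecord, FLUSH_SIZE, and saveBatch() are illustrative names; saveBatch() would reuse the same PreparedStatement batching shown in saveCacheList().

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.Executors;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

public class GpsRecordBuffer {

	// a simple value object instead of HashMap<String,Object>
	public static class GpsRecord {
		final String termSn, longitude, latitude, gpsTime;
		public GpsRecord(String termSn, String longitude, String latitude, String gpsTime){
			this.termSn = termSn; this.longitude = longitude;
			this.latitude = latitude; this.gpsTime = gpsTime;
		}
	}

	private static final BlockingQueue<GpsRecord> QUEUE = new LinkedBlockingQueue<GpsRecord>();
	private static final int FLUSH_SIZE = 2000;

	// called from doGet(); thread-safe and never blocks on the database
	public static void offer(GpsRecord record){
		QUEUE.offer(record);
	}

	// start one background flusher that persists whatever has accumulated
	public static void startFlusher(){
		ScheduledExecutorService ses = Executors.newSingleThreadScheduledExecutor();
		ses.scheduleWithFixedDelay(new Runnable(){
			public void run(){
				List<GpsRecord> batch = new ArrayList<GpsRecord>(FLUSH_SIZE);
				QUEUE.drainTo(batch, FLUSH_SIZE);	// atomic hand-off: no clear() race
				if(!batch.isEmpty()){
					saveBatch(batch);				// same batched INSERT as saveCacheList()
				}
			}
		}, 5, 5, TimeUnit.SECONDS);
	}

	private static void saveBatch(List<GpsRecord> batch){
		// reuse the addBatch()/executeBatch() INSERT from saveCacheList(),
		// reading fields from GpsRecord instead of a HashMap
	}
}

drainTo() moves records from the queue to the local batch atomically, so there is no window in which one thread clears a list another thread is still appending to, and the switchFlag bookkeeping disappears entirely. At roughly 100 requests per second, a 5-second flush interval also keeps each INSERT batch at around 500 rows.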

If anything here falls short, corrections are welcome; and if you have a better solution, feel free to share and discuss it so we can improve this together!

Copyright notice: this is the author's original post and may not be reproduced without the author's permission.


Original article: http://blog.csdn.net/sunlovefly2012/article/details/46873355
