MapReduce [traffic statistics]: summing with a user-defined data type

Keywords: Programming Hadoop Apache Java Mobile

Requirement: from the data file below, compute the total upstream traffic, total downstream traffic, and total traffic consumed by each user (mobile phone number).

1363157985066 	13726230503	00-FD-07-A4-72-B8:CMCC	120.196.100.82	i02.c.aliimg.com		24	27	2481	24681	200
1363157995052 	13826544101	5C-0E-8B-C7-F1-E0:CMCC	120.197.40.4			4	0	264	0	200
1363157991076 	13926435656	20-10-7A-28-CC-0A:CMCC	120.196.100.99			2	4	132	1512	200
1363154400022 	13926251106	5C-0E-8B-8B-B1-50:CMCC	120.197.40.4			4	0	240	0	200
1363157993044 	18211575961	94-71-AC-CD-E6-18:CMCC-EASY	120.196.100.99	iface.qiyi.com	Video website	15	12	1527	2106	200
1363157995074 	84138413	5C-0E-8B-8C-E8-20:7DaysInn	120.197.40.4	122.72.52.12		20	16	4116	1432	200
1363157993055 	13560439658	C4-17-FE-BA-DE-D9:CMCC	120.196.100.99			18	15	1116	954	200
1363157995033 	15920133257	5C-0E-8B-C7-BA-20:CMCC	120.197.40.4	sug.so.360.cn	information safety	20	20	3156	2936	200
1363157983019 	13719199419	68-A1-B7-03-07-B1:CMCC-EASY	120.196.100.82			4	0	240	0	200
1363157984041 	13660577991	5C-0E-8B-92-5C-20:CMCC-EASY	120.197.40.4	s19.cnzz.com	Site statistics	24	9	6960	690	200
1363157973098 	15013685858	5C-0E-8B-C7-F7-90:CMCC	120.197.40.4	rank.ie.sogou.com	Search Engines	28	27	3659	3538	200
1363157986029 	15989002119	E8-99-C4-4E-93-E0:CMCC-EASY	120.196.100.99	www.umeng.com	Site statistics	3	3	1938	180	200
1363157992093 	13560439658	C4-17-FE-BA-DE-D9:CMCC	120.196.100.99			15	9	918	4938	200
1363157986041 	13480253104	5C-0E-8B-C7-FC-80:CMCC-EASY	120.197.40.4			3	3	180	180	200
1363157984040 	13602846565	5C-0E-8B-8B-B6-00:CMCC	120.197.40.4	2052.flash2-http.qq.com	Comprehensive portal	15	12	1938	2910	200
1363157995093 	13922314466	00-FD-07-A2-EC-BA:CMCC	120.196.100.82	img.qfc.cn		12	12	3008	3720	200
1363157982040 	13502468823	5C-0A-5B-6A-0B-D4:CMCC-EASY	120.196.100.99	y0.ifengimg.com	Comprehensive portal	57	102	7335	110349	200
1363157986072 	18320173382	84-25-DB-4F-10-1A:CMCC-EASY	120.196.100.99	input.shouji.sogou.com	Search Engines	21	18	9531	2412	200
1363157990043 	13925057413	00-1F-64-E1-E6-9A:CMCC	120.196.100.55	t3.baidu.com	Search Engines	69	63	11058	48243	200
1363157988072 	13760778710	00-FD-07-A4-7B-08:CMCC	120.196.100.82			2	2	120	120	200
1363157985066 	13726238888	00-FD-07-A4-72-B8:CMCC	120.196.100.82	i02.c.aliimg.com		24	27	2481	24681	200
1363157993055 	13560436666	C4-17-FE-BA-DE-D9:CMCC	120.196.100.99			18	15	1116	954	200

Idea: Map stage: split each line into fields on the tab character, take the mobile phone number (the second field) as the output key, and encapsulate the upstream and downstream traffic into a FlowBean object as the output value.

Key point: how to make a custom type implement Hadoop's serialization interface.

FlowBean: this custom data type must implement Hadoop's serialization interface, Writable.

Writable requires two methods to be implemented (see the FlowBean class below):

readFields(in) -- the deserialization method

write(out) -- the serialization method

Reduce stage: iterate over all the values (FlowBeans) in a group, accumulate their upstream and downstream traffic, and output the mobile phone number as the key and a FlowBean carrying the totals as the value.
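
For example, the number 13560439658 appears in two records above, with (upstream, downstream) traffic of (1116, 954) and (918, 4938). The map stage therefore emits

13560439658 -> FlowBean(1116, 954)
13560439658 -> FlowBean(918, 4938)

and the reduce stage accumulates them into upstream 2034, downstream 5892, total 7926, which is exactly the line for 13560439658 in the final output at the end of this post.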

Code implementation

1.FlowBean 

import org.apache.hadoop.io.Writable;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;

/**
 * Purpose of this class: demonstrate how a custom data type implements Hadoop's serialization interface.
 *  1. The class must keep a public no-arg constructor (Hadoop uses it when deserializing).
 *  2. The order in which fields are written in the write method must match the order in which they are read back in readFields.
 */
public class FlowBean implements Writable {

    private int upFlow;
    private int dFlow;
    private String phone;
    private int amountFlow;

    public int getUpFlow() {
        return upFlow;
    }

    public void setUpFlow(int upFlow) {
        this.upFlow = upFlow;
    }

    public int getdFlow() {
        return dFlow;
    }

    public void setdFlow(int dFlow) {
        this.dFlow = dFlow;
    }

    public int getAmountFlow() {
        return amountFlow;
    }

    public void setAmountFlow(int amountFlow) {
        this.amountFlow = amountFlow;
    }

    public FlowBean() {
    }

    public FlowBean(int upFlow, int dFlow,String phone) {
        this.upFlow = upFlow;
        this.dFlow = dFlow;
        this.phone=phone;
        this.amountFlow=upFlow+dFlow;
    }

    /**
     * Called by Hadoop when it serializes an object of this class.
     * @param dataOutput
     * @throws IOException
     */
    @Override
    public void write(DataOutput dataOutput) throws IOException {
        // The field order here must match the read order in readFields()
        dataOutput.writeInt(upFlow);
        dataOutput.writeUTF(phone);
        dataOutput.writeInt(dFlow);
        dataOutput.writeInt(amountFlow);
    }

    /**
     * Called by Hadoop when it deserializes into an object of this class.
     * @param dataInput
     * @throws IOException
     */
    @Override
    public void readFields(DataInput dataInput) throws IOException {
        // Read the fields in exactly the order they were written in write()
        this.upFlow = dataInput.readInt();
        this.phone = dataInput.readUTF();
        this.dFlow = dataInput.readInt();
        this.amountFlow = dataInput.readInt();
    }

    @Override
    public String toString() {
        return this.upFlow+","+this.dFlow+","+this.amountFlow;
    }
}
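
The write/readFields contract can be checked locally without submitting a job. Below is a minimal round-trip sketch (the FlowBeanRoundTrip class is only an illustration, not part of the job code) that serializes a FlowBean into an in-memory buffer and reads it back, the same way Hadoop does during the shuffle:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

public class FlowBeanRoundTrip {
    public static void main(String[] args) throws IOException {
        // Serialize a bean into an in-memory buffer
        FlowBean original = new FlowBean(2481, 24681, "13726230503");
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        original.write(new DataOutputStream(buffer));

        // Deserialize the way Hadoop does: no-arg constructor, then readFields
        FlowBean copy = new FlowBean();
        copy.readFields(new DataInputStream(new ByteArrayInputStream(buffer.toByteArray())));

        System.out.println(copy);   // prints 2481,24681,27162
    }
}

If the field order in readFields ever drifts away from write, this round trip returns garbage or throws an exception, which is why the two methods must stay in sync.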

2.FlowCountMapper 

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
import java.io.IOException;

public class FlowCountMapper extends Mapper<LongWritable, Text, Text, FlowBean> {


    @Override
    protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
        String line = value.toString();
        String[] fields = line.split("\t");
        // The phone number is the second field of each record
        String phone = fields[1];
        // The URL and site-type columns may be empty, so index the traffic fields from the end of the line:
        // the third- and second-to-last fields are the upstream and downstream traffic
        int upFlow = Integer.parseInt(fields[fields.length - 3]);
        int dFlow = Integer.parseInt(fields[fields.length - 2]);

        context.write(new Text(phone), new FlowBean(upFlow, dFlow, phone));
    }
}

3.FlowCountReduce 

import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;
import java.io.IOException;

public class FlowCountReduce extends Reducer<Text,FlowBean,Text,FlowBean> {
    /**
     *
     * @param key: the mobile phone number
     * @param values: the traffic data from all access records produced by that phone number
     * @param context
     * @throws IOException
     * @throws InterruptedException
     */
    @Override
    protected void reduce(Text key, Iterable<FlowBean> values, Context context) throws IOException, InterruptedException {

        int upSum=0;
        int dSum=0;
        for(FlowBean value:values){
            upSum +=value.getUpFlow();
            dSum +=value.getdFlow();
        }
        context.write(key,new FlowBean(upSum,dSum,key.toString()));
    }
}

4.JobSubmitter

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class JobSubmitter{
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf);
        job.setJarByClass(JobSubmitter.class);
        job.setMapperClass(FlowCountMapper.class);
        job.setReducerClass(FlowCountReduce.class);

        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(FlowBean.class);

        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(FlowBean.class);

        FileInputFormat.setInputPaths(job,new Path("F:\\mrdata\\flow\\input"));
        FileOutputFormat.setOutputPath(job,new Path("F:\\mrdata\\flow\\output"));

        boolean res = job.waitForCompletion(true);
        System.exit(res ? 0:-1);
    }
}
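
Note that FileOutputFormat fails the job if the output directory already exists, so re-running JobSubmitter with the paths above requires removing F:\mrdata\flow\output first. A small helper along these lines (OutputCleaner is only a sketch, not part of the original code) can delete the previous run's results through the Hadoop FileSystem API:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import java.io.IOException;

public class OutputCleaner {
    // Remove the output directory of a previous run so the job can be resubmitted.
    public static void clean(Configuration conf, String dir) throws IOException {
        Path output = new Path(dir);
        FileSystem fs = FileSystem.get(conf);
        if (fs.exists(output)) {
            fs.delete(output, true);   // true = delete recursively
        }
    }
}

Calling OutputCleaner.clean(conf, "F:\\mrdata\\flow\\output") in JobSubmitter.main before FileOutputFormat.setOutputPath would achieve this.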

5. Results of the JobSubmitter run [upstream traffic, downstream traffic, and total traffic per mobile number]

13480253104	180,180,360
13502468823	7335,110349,117684
13560436666	1116,954,2070
13560439658	2034,5892,7926
13602846565	1938,2910,4848
13660577991	6960,690,7650
13719199419	240,0,240
13726230503	2481,24681,27162
13726238888	2481,24681,27162
13760778710	120,120,240
13826544101	264,0,264
13922314466	3008,3720,6728
13925057413	11058,48243,59301
13926251106	240,0,240
13926435656	132,1512,1644
15013685858	3659,3538,7197
15920133257	3156,2936,6092
15989002119	1938,180,2118
18211575961	1527,2106,3633
18320173382	9531,2412,11943
84138413	4116,1432,5548

Posted by alexguz79 on Sun, 03 Nov 2019 08:47:52 -0800