
Erroneous ByteArray returned by Hadoop SequenceFileRecordReader
Tags: hadoop, apache-pig, hdfs, user-defined-functions


I am trying to read binary network packet data stored in an HDFS SequenceFile. The problem seems to lie in the non-printable characters, or in the packet headers.

Currently the data is stored into HDFS via Flume NG (1.4) using a custom plugin (jpcap, with the global tcpdump header). Events are committed per packet. Further down is how I read it back into Pig with a SequenceFileRecordReader.

For now, to keep it simple, I do nothing except read the records from the seq file and write them straight to a file (output.pcap).
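Before involving Pig at all, it can help to check what Flume actually wrote. Below is a minimal standalone sketch (my addition, not from the original post), assuming the SequenceFile keys are LongWritable and the values BytesWritable, that dumps the raw record bytes to a local file:

import java.io.FileOutputStream;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.SequenceFile;

public class SeqFileDump {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        Path path = new Path(args[0]);          // path to the Flume-written SequenceFile
        FileSystem fs = path.getFileSystem(conf);

        LongWritable key = new LongWritable();
        BytesWritable value = new BytesWritable();

        try (FileOutputStream out = new FileOutputStream("output.pcap")) {
            SequenceFile.Reader reader = new SequenceFile.Reader(fs, path, conf);
            try {
                while (reader.next(key, value)) {
                    // write only getLength() bytes; the backing array may be padded
                    out.write(value.getBytes(), 0, value.getLength());
                }
            } finally {
                reader.close();
            }
        }
    }
}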

Excerpt from input.pcap (retrieved from HDFS):

The same chunk excerpted from output.pcap (as written by the Pig UDF):

As you may be able to see, the first hex dump shows 0x01425c4ae2ee, which translates to the timestamp 1384527880942, i.e. Fri, 15 Nov 2013 15:04:40 GMT. The other one shows only nulls until the packet data starts.
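As a quick sanity check (my addition), that value can be fed to java.time directly as epoch milliseconds:

import java.time.Instant;

public class TsCheck {
    public static void main(String[] args) {
        // 0x01425c4ae2ee = 1384527880942 ms since the epoch
        System.out.println(Instant.ofEpochMilli(0x01425c4ae2eeL));
        // prints 2013-11-15T15:04:40.942Z
    }
}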

Hopefully someone can point me in the right direction here, so that I can read out the packet headers, which contain the following (see the sketch after the field list):

c2 ba cd 4f b6 35 0f 00  36 00 00 00 36 00 00 00

1-4b:   Timestamp, 0x4fcdbac2
        calc 0x4fcdbac2 -> 1338882754
        -> date --date='1970-01-01 1338882754 sec GMT'

5-8b:   Microseconds of the timestamp

9-12b:  Packet data size

13-16b: Length of the packet as it was captured on the wire (54b).
        Can be the same as 9-12b, but differs if the snapshot length
        (max packet length) is less than 65536.
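For clarity, here is a small check (my addition, not part of the original post) that reads those 16 header bytes with a little-endian ByteBuffer; the printed values match the calculations above:

import java.nio.ByteBuffer;
import java.nio.ByteOrder;

public class RecordHeaderCheck {
    public static void main(String[] args) {
        // the 16 header bytes quoted above
        byte[] hdr = {
            (byte) 0xc2, (byte) 0xba, (byte) 0xcd, 0x4f, (byte) 0xb6, 0x35, 0x0f, 0x00,
            0x36, 0x00, 0x00, 0x00, 0x36, 0x00, 0x00, 0x00
        };
        ByteBuffer buf = ByteBuffer.wrap(hdr).order(ByteOrder.LITTLE_ENDIAN);
        long tsSec   = buf.getInt() & 0xFFFFFFFFL;  // 0x4fcdbac2 -> 1338882754
        long tsUsec  = buf.getInt() & 0xFFFFFFFFL;  // microseconds part of the timestamp
        int  inclLen = buf.getInt();                // captured length: 54
        int  origLen = buf.getInt();                // original length on the wire: 54
        System.out.printf("%d.%06d  incl=%d  orig=%d%n", tsSec, tsUsec, inclLen, origLen);
    }
}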
Without further ado, here is PcapFileLoader.java:

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.lang.reflect.Type;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.ArrayList;
import java.util.Arrays;

import org.apache.hadoop.io.BooleanWritable;
import org.apache.hadoop.io.ByteWritable;
import org.apache.hadoop.io.DoubleWritable;
import org.apache.hadoop.io.FloatWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableUtils;
import org.apache.hadoop.io.serializer.SerializationFactory;
import org.apache.hadoop.mapreduce.InputFormat;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.SequenceFileRecordReader;
import org.apache.pig.FileInputLoadFunc;
import org.apache.pig.LoadFunc;
import org.apache.pig.backend.BackendException;
import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.PigSplit;
import org.apache.pig.data.DataByteArray;
import org.apache.pig.data.DataType;
import org.apache.pig.data.Tuple;
import org.apache.pig.data.TupleFactory;
import org.krakenapps.pcap.util.ByteOrderConverter;

public class PcapFileLoader extends FileInputLoadFunc {

    private SequenceFileRecordReader<LongWritable, BytesWritable> reader;

    private Writable key;
    private BytesWritable value;

    private ArrayList<Object> mProtoTuple = null;

    protected TupleFactory mTupleFactory = TupleFactory.getInstance();
    protected SerializationFactory serializationFactory;

    protected byte[] currentPacket;

    protected byte keyType = DataType.UNKNOWN;
    protected byte valType = DataType.UNKNOWN;

    public PcapFileLoader() {
        mProtoTuple = new ArrayList<Object>(2);
    }

    protected void setKeyType(Class<?> keyClass) throws BackendException {
        this.keyType |= inferPigDataType(keyClass);
        if (keyType == DataType.ERROR) {
            throw new BackendException("Unable to translate " + key.getClass() + " to a Pig datatype");
        }
    }

    protected void setValueType(Class<?> valueClass) throws BackendException {
        this.valType |= inferPigDataType(valueClass);
        if (valType == DataType.ERROR) {
            throw new BackendException("Unable to translate " + valueClass + " to a Pig datatype");
        }
    }

    @Override
    public Tuple getNext() throws IOException {
        boolean next = false;
        try {
            next = reader.nextKeyValue();
        } catch (InterruptedException e) {
            throw new IOException(e);
        }

        if (!next) {
            return null;
        }

        key = reader.getCurrentKey();
        value = reader.getCurrentValue();

        // note: getBytes() returns the backing array, which may be zero-padded past
        // getLength(); only the first getLength() bytes belong to this record
        currentPacket = value.getBytes();
        if (keyType == DataType.UNKNOWN && key != null) {
            setKeyType(key.getClass());
        }
        if (valType == DataType.UNKNOWN && value != null) {
            setValueType(value.getClass());
        }

        //readPacketHeader();
        ByteBuffer buffer = ByteBuffer.wrap(currentPacket);
        long ts = buffer.getLong();
        ts = ByteOrderConverter.swap(ts);
        System.out.println(ts);

        FileOutputStream file = new FileOutputStream(new File("output.pcap"),true);
        file.write(value.getBytes());
        file.close();

        mProtoTuple.add(translateWritableToPigDataType(key, keyType));
        mProtoTuple.add(translateWritableToPigDataType(value, valType));
        Tuple t = mTupleFactory.newTuple(mProtoTuple);
        mProtoTuple.clear();
        return t;
    }

    protected byte inferPigDataType(Type t) {
        if (t == DataByteArray.class) {
            return DataType.BYTEARRAY;
        } else if (t == BytesWritable.class) {
            return DataType.BYTEARRAY;
        } else if (t == Text.class) {
            return DataType.CHARARRAY;
        } else if (t == IntWritable.class) {
            return DataType.INTEGER;
        } else if (t == LongWritable.class) {
            return DataType.LONG;
        } else if (t == FloatWritable.class) {
            return DataType.FLOAT;
        } else if (t == DoubleWritable.class) {
            return DataType.DOUBLE;
        } else if (t == BooleanWritable.class) {
            return DataType.BOOLEAN;
        } else if (t == ByteWritable.class) {
            return DataType.BYTE;
        } // not doing maps or other complex types for now
        else {
            return DataType.ERROR;
        }
    }

    protected Object translateWritableToPigDataType(Writable w, byte dataType) {
        switch (dataType) {
            case DataType.CHARARRAY:
                return ((Text) w).toString();
            case DataType.BYTEARRAY:
                return (w instanceof BytesWritable ? new DataByteArray(((BytesWritable) w).getBytes()) : w);
            case DataType.BOOLEAN:
                return ((BooleanWritable) w).get();
            case DataType.INTEGER:
                return ((IntWritable) w).get();
            case DataType.LONG:
                return ((LongWritable) w).get();
            case DataType.FLOAT:
                return ((FloatWritable) w).get();
            case DataType.DOUBLE:
                return ((DoubleWritable) w).get();
            case DataType.BYTE:
                return ((ByteWritable) w).get();
        }

        return null;
    }

    @SuppressWarnings("unchecked")
    @Override
    public InputFormat getInputFormat() throws IOException {
        return new SequenceFileInputFormat<LongWritable, BytesWritable>();
    }

    @SuppressWarnings("unchecked")
    @Override
    public void prepareToRead(RecordReader reader, PigSplit split)
            throws IOException {
        this.reader = (SequenceFileRecordReader) reader;
    }

    @Override
    public void setLocation(String location, Job job) throws IOException {
        FileInputFormat.setInputPaths(job, location);
    }
}

Thanks!

It turned out that the problem in this case was in understanding the dataset. Usually (in the solutions I had seen) analysis starts at the IP layer, whereas JPCAP also supplies the Ethernet header, so the IP layer comes second in the chain. In the end I also found some well-hidden but good documentation.
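For illustration only, here is a rough sketch of walking one record in that order, using the standard Ethernet II / IPv4 layouts (my addition, not code from the answer): skip the 16-byte per-packet header described earlier, read the 14-byte Ethernet header, and only then start on the IP layer.

import java.nio.ByteBuffer;
import java.nio.ByteOrder;

public class LayerOrderSketch {
    /** record = one BytesWritable value: 16-byte per-packet header + Ethernet frame. */
    public static void inspect(byte[] record) {
        ByteBuffer buf = ByteBuffer.wrap(record).order(ByteOrder.LITTLE_ENDIAN);

        buf.position(16);                       // skip the per-packet header (ts, usec, lengths)

        byte[] dstMac = new byte[6];            // Ethernet II: dst MAC, src MAC, EtherType
        byte[] srcMac = new byte[6];
        buf.get(dstMac);
        buf.get(srcMac);
        int etherType = ((buf.get() & 0xFF) << 8) | (buf.get() & 0xFF);  // big-endian on the wire

        if (etherType == 0x0800) {              // IPv4 starts only after the Ethernet header
            int b0 = buf.get() & 0xFF;
            int ihl = (b0 & 0x0F) * 4;          // IP header length in bytes
            System.out.printf("IPv4 packet, IP header %d bytes, payload at offset %d%n",
                    ihl, 16 + 14 + ihl);
        } else {
            System.out.printf("non-IPv4 frame, EtherType 0x%04x%n", etherType);
        }
    }
}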

For reference, the Pig script that invokes the loader:

%DEFAULT includepath includes.pig
RUN $includepath;

seq = LOAD 'good.newest.pcap' using PcapFileLoader() as (a: long, b: bytearray);

DUMP seq;
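Presumably includes.pig, pulled in by the RUN statement, takes care of REGISTERing the jar that contains PcapFileLoader; that part is not shown here.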