Java 从结果集写入ORC文件
我正在尝试从结果集写入orc文件。出错 java.lang.ClassCastException:java.lang.String不能强制转换为org.apache.hadoop.hive.common.type.HiveVarchar 我想在没有地图的情况下写作Java 从结果集写入ORC文件,java,hadoop,Java,Hadoop,我正在尝试从结果集写入orc文件。出错 java.lang.ClassCastException:java.lang.String不能强制转换为org.apache.hadoop.hive.common.type.HiveVarchar 我想在没有地图的情况下写作 final static OrcSerde serde = new OrcSerde(); final Writer writer = OrcFile.createWriter(hdfs, OrcPath, conf,
final static OrcSerde serde = new OrcSerde();
final Writer writer = OrcFile.createWriter(hdfs, OrcPath, conf, oi, stripeSize, CompressionKind.SNAPPY, compressBufferSize, rowIndexStride);
final List<Object> record = new LinkedList<>();
while (resultSet.next()) {
for (int column = 1; column <= columnCount; ++column) {
switch (columnTypes[column]) {
case Types.BOOLEAN:
record.add(resultSet.getBoolean(column));
break;
case Types.CHAR:
case Types.VARCHAR:
record.add(resultSet.getString(column));
break;
case Types.SMALLINT:
case Types.INTEGER:
record.add(resultSet.getInt(column));
break;
case Types.BIGINT:
record.add(resultSet.getString(column));
break;
case Types.FLOAT:
case Types.DECIMAL:
case Types.DOUBLE:
record.add(resultSet.getDouble(column));
break;
default:
throw new RuntimeException
}
if (resultSet.wasNull()) {
record.remove(record.size());
record.add(null);
}
}
writer.addRow(record);
}
writer.close();
}
}
final static OrcSerde serde=new OrcSerde();
final Writer Writer=OrcFile.createWriter(hdfs、OrcPath、conf、oi、stripeSize、CompressionKind.SNAPPY、compressBufferSize、rowIndexStride);
最终列表记录=新建LinkedList();
while(resultSet.next()){
for(int column=1;column