[Bug] Writing to a Paimon table with the Hive engine via MR throws an exception
Search before asking
- [X] I searched in the issues and found nothing similar.
Paimon version
0.9
Compute Engine
hive: 2.1-cdh-6.3-1
Minimal reproduce step
create paimon table in beeline
SET hive.metastore.warehouse.dir=/user/hive/warehouse;
CREATE TABLE hive_test_table(
a INT COMMENT 'The a field',
b STRING COMMENT 'The b field'
)
STORED BY 'org.apache.paimon.hive.PaimonStorageHandler';
after that, try insert data
insert into hive_test_table values (3, 'paimon');
What doesn't meet your expectations?
The MapReduce job failed with the error below.
2024-11-17 22:11:46,830 INFO [main] org.apache.hadoop.hive.ql.exec.TableScanOperator: Initializing operator TS[0]
2024-11-17 22:11:46,830 INFO [main] org.apache.hadoop.hive.ql.exec.SelectOperator: Initializing operator SEL[1]
2024-11-17 22:11:47,196 INFO [main] org.apache.hadoop.hive.ql.exec.SelectOperator: SELECT struct<tmp_values_col1:string,tmp_values_col2:string>
2024-11-17 22:11:47,203 INFO [main] org.apache.hadoop.hive.ql.exec.FileSinkOperator: Initializing operator FS[3]
2024-11-17 22:11:47,204 INFO [main] org.apache.hadoop.conf.Configuration.deprecation: mapred.task.id is deprecated. Instead, use mapreduce.task.attempt.id
2024-11-17 22:11:47,621 INFO [main] org.apache.hadoop.hive.ql.exec.FileSinkOperator: Using serializer : org.apache.paimon.hive.PaimonSerDe@6ede46f6 and formatter : org.apache.paimon.hive.mapred.PaimonOutputFormat@66273da0
2024-11-17 22:11:47,621 INFO [main] org.apache.hadoop.conf.Configuration.deprecation: mapred.healthChecker.script.timeout is deprecated. Instead, use mapreduce.tasktracker.healthchecker.script.timeout
2024-11-17 22:11:47,634 INFO [main] org.apache.hadoop.hive.ql.exec.FileSinkOperator: New Final Path: FS hdfs://cdh01.daniel.com:8020/user/hive/warehouse/default.db/_tmp.hive_test_table/000000_0
2024-11-17 22:11:47,781 INFO [main] org.apache.hadoop.hive.ql.exec.FileSinkOperator: FS[3]: records written - 1
2024-11-17 22:11:47,913 INFO [main] org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.zstd]
2024-11-17 22:11:48,249 INFO [main] org.apache.hadoop.hive.ql.exec.MapOperator: MAP[0]: records read - 1
2024-11-17 22:11:48,249 INFO [main] org.apache.hadoop.hive.ql.exec.MapOperator: MAP[0]: Total records read - 1. abort - false
2024-11-17 22:11:48,249 INFO [main] org.apache.hadoop.hive.ql.exec.MapOperator: DESERIALIZE_ERRORS:0, RECORDS_IN:1,
2024-11-17 22:11:48,250 INFO [main] org.apache.hadoop.hive.ql.exec.FileSinkOperator: FS[3]: records written - 1
2024-11-17 22:11:48,250 INFO [main] org.apache.hadoop.hive.ql.exec.FileSinkOperator: TOTAL_TABLE_ROWS_WRITTEN:1, RECORDS_OUT_1_default.hive_test_table:1,
2024-11-17 22:11:48,272 INFO [main] org.apache.hadoop.mapred.Task: Task:attempt_1731851152753_0005_m_000000_0 is done. And is in the process of committing
2024-11-17 22:11:48,276 INFO [main] org.apache.hadoop.mapred.Task: Task attempt_1731851152753_0005_m_000000_0 is allowed to commit now
2024-11-17 22:11:48,357 ERROR [main] org.apache.hadoop.mapred.YarnChild: Error running child : java.lang.UnsatisfiedLinkError: com.github.luben.zstd.ZstdOutputStreamNoFinalizer.recommendedCOutSize()J
at com.github.luben.zstd.ZstdOutputStreamNoFinalizer.recommendedCOutSize(Native Method)
at com.github.luben.zstd.ZstdOutputStreamNoFinalizer.<clinit>(ZstdOutputStreamNoFinalizer.java:30)
at com.github.luben.zstd.RecyclingBufferPool.<clinit>(RecyclingBufferPool.java:17)
at org.apache.paimon.shade.org.apache.parquet.hadoop.codec.ZstandardCodec.createOutputStream(ZstandardCodec.java:107)
at org.apache.paimon.shade.org.apache.parquet.hadoop.codec.ZstandardCodec.createOutputStream(ZstandardCodec.java:100)
at org.apache.paimon.shade.org.apache.parquet.hadoop.CodecFactory$HeapBytesCompressor.compress(CodecFactory.java:176)
at org.apache.paimon.shade.org.apache.parquet.hadoop.ColumnChunkPageWriteStore$ColumnChunkPageWriter.writePage(ColumnChunkPageWriteStore.java:168)
at org.apache.paimon.shade.org.apache.parquet.column.impl.ColumnWriterV1.writePage(ColumnWriterV1.java:59)
at org.apache.paimon.shade.org.apache.parquet.column.impl.ColumnWriterBase.writePage(ColumnWriterBase.java:389)
at org.apache.paimon.shade.org.apache.parquet.column.impl.ColumnWriteStoreBase.flush(ColumnWriteStoreBase.java:186)
at org.apache.paimon.shade.org.apache.parquet.column.impl.ColumnWriteStoreV1.flush(ColumnWriteStoreV1.java:29)
at org.apache.paimon.shade.org.apache.parquet.hadoop.InternalParquetRecordWriter.flushRowGroupToStore(InternalParquetRecordWriter.java:185)
at org.apache.paimon.shade.org.apache.parquet.hadoop.InternalParquetRecordWriter.close(InternalParquetRecordWriter.java:124)
at org.apache.paimon.shade.org.apache.parquet.hadoop.ParquetWriter.close(ParquetWriter.java:112)
at org.apache.paimon.format.parquet.writer.ParquetBulkWriter.close(ParquetBulkWriter.java:52)
at org.apache.paimon.io.SingleFileWriter.close(SingleFileWriter.java:170)
at org.apache.paimon.io.RowDataFileWriter.close(RowDataFileWriter.java:104)
at org.apache.paimon.io.RollingFileWriter.closeCurrentWriter(RollingFileWriter.java:131)
at org.apache.paimon.io.RollingFileWriter.close(RollingFileWriter.java:168)
at org.apache.paimon.append.AppendOnlyWriter$DirectSinkWriter.flush(AppendOnlyWriter.java:418)
at org.apache.paimon.append.AppendOnlyWriter.flush(AppendOnlyWriter.java:219)
at org.apache.paimon.append.AppendOnlyWriter.prepareCommit(AppendOnlyWriter.java:207)
at org.apache.paimon.operation.AbstractFileStoreWrite.prepareCommit(AbstractFileStoreWrite.java:210)
at org.apache.paimon.operation.MemoryFileStoreWrite.prepareCommit(MemoryFileStoreWrite.java:152)
at org.apache.paimon.table.sink.TableWriteImpl.prepareCommit(TableWriteImpl.java:253)
at org.apache.paimon.table.sink.TableWriteImpl.prepareCommit(TableWriteImpl.java:260)
at org.apache.paimon.hive.mapred.PaimonOutputCommitter.commitTask(PaimonOutputCommitter.java:95)
at org.apache.hadoop.mapred.OutputCommitter.commitTask(OutputCommitter.java:343)
at org.apache.hadoop.mapred.Task.commit(Task.java:1341)
at org.apache.hadoop.mapred.Task.done(Task.java:1185)
at org.apache.hadoop.mapred.MapTask.run(MapTask.java:351)
at org.apache.hadoop.mapred.YarnChild$2.run(YarnChild.java:174)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:422)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1875)
at org.apache.hadoop.mapred.YarnChild.main(YarnChild.java:168)
2024-11-17 22:11:48,460 INFO [main] org.apache.hadoop.metrics2.impl.MetricsSystemImpl: Stopping MapTask metrics system...
2024-11-17 22:11:48,460 INFO [main] org.apache.hadoop.metrics2.impl.MetricsSystemImpl: MapTask metrics system stopped.
2024-11-17 22:11:48,461 INFO [main] org.apache.hadoop.metrics2.impl.MetricsSystemImpl: MapTask metrics system shutdown complete.
Anything else?
I verified that my cluster supports zstd compression by running the following MR job:
hadoop jar /opt/cloudera/parcels/CDH/jars/hadoop-mapreduce-examples*.jar wordcount -Dmapreduce.map.output.compress.codec=org.apache.hadoop.io.compress.ZStandardCodec -Dmapreduce.map.output.compress=true -Dmapreduce.output.fileoutputformat.compress=true -Dmapreduce.output.fileoutputformat.compress.codec=org.apache.hadoop.io.compress.ZStandardCodec wcin wcout-zst
The job ran successfully.
a part of that log.
2024-11-17 22:07:43,023 INFO [main] org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
2024-11-17 22:07:43,410 INFO [main] org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 2
2024-11-17 22:07:43,410 INFO [main] org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
2024-11-17 22:07:43,421 INFO [main] org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
2024-11-17 22:07:43,565 INFO [main] org.apache.hadoop.mapred.MapTask: Processing split: hdfs://cdh01.daniel.com:8020/user/hive/wcin/yum.log:0+0
2024-11-17 22:07:43,672 INFO [main] org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 67108860(268435440)
2024-11-17 22:07:43,672 INFO [main] org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 256
2024-11-17 22:07:43,672 INFO [main] org.apache.hadoop.mapred.MapTask: soft limit at 214748368
2024-11-17 22:07:43,672 INFO [main] org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 268435456
2024-11-17 22:07:43,672 INFO [main] org.apache.hadoop.mapred.MapTask: kvstart = 67108860; length = 16777216
2024-11-17 22:07:43,679 INFO [main] org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
2024-11-17 22:07:43,696 INFO [main] org.apache.hadoop.mapred.MapTask: Starting flush of map output
2024-11-17 22:07:43,715 INFO [main] org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.zst]
2024-11-17 22:07:43,739 INFO [main] org.apache.hadoop.mapred.Task: Task:attempt_1731851152753_0004_m_000000_0 is done. And is in the process of committing
2024-11-17 22:07:43,770 INFO [main] org.apache.hadoop.mapred.Task: Task 'attempt_1731851152753_0004_m_000000_0' done.
2024-11-17 22:07:43,778 INFO [main] org.apache.hadoop.mapred.Task: Final Counters for attempt_1731851152753_0004_m_000000_0: Counters: 29
File System Counters
FILE: Number of bytes read=0
FILE: Number of bytes written=220970
FILE: Number of read operations=0
FILE: Number of large read operations=0
FILE: Number of write operations=0
HDFS: Number of bytes read=116
HDFS: Number of bytes written=0
HDFS: Number of read operations=3
HDFS: Number of large read operations=0
HDFS: Number of write operations=0
HDFS: Number of bytes read erasure-coded=0
Map-Reduce Framework
Map input records=0
Map output records=0
Map output bytes=0
Map output materialized bytes=90
Input split bytes=116
Combine input records=0
Combine output records=0
Spilled Records=0
Failed Shuffles=0
Merged Map outputs=0
GC time elapsed (ms)=41
CPU time spent (ms)=430
Physical memory (bytes) snapshot=482156544
Virtual memory (bytes) snapshot=2589884416
Total committed heap usage (bytes)=480247808
Peak Map Physical memory (bytes)=482156544
Peak Map Virtual memory (bytes)=2589884416
File Input Format Counters
Bytes Read=0
2024-11-17 22:07:43,879 INFO [main] org.apache.hadoop.metrics2.impl.MetricsSystemImpl: Stopping MapTask metrics system...
2024-11-17 22:07:43,879 INFO [main] org.apache.hadoop.metrics2.impl.MetricsSystemImpl: MapTask metrics system stopped.
2024-11-17 22:07:43,879 INFO [main] org.apache.hadoop.metrics2.impl.MetricsSystemImpl: MapTask metrics system shutdown complete.
Are you willing to submit a PR?
- [ ] I'm willing to submit a PR!
I encountered the same problem described in https://github.com/apache/paimon/issues/4531, where it was reportedly solved by upgrading the zstd-jni version to zstd-jni-1.5.5-11.
I tried the following steps:
- put zstd-jni-1.5.5-11.jar under the Hive auxlib path.
- restart HiveServer2.
- insert data into the Paimon table.
The problem is still there.
Both versions of zstd-jni are present on the classpath:
java.class.path: /data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002:/etc/hadoop/conf.cloudera.yarn2:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/accessors-smart-1.2.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/netty-3.10.6.Final.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/hadoop-mapreduce-client-uploader-3.0.0-cdh6.3.2.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/woodstox-core-5.0.3.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/jersey-servlet-1.19.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/hadoop-yarn-server-common-3.0.0-cdh6.3.2.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/aopalliance-1.0.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/slf4j-api-1.7.25.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/protobuf-java-2.5.0.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/jersey-json-1.19.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/parquet-common.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/zookeeper.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_14
70/container_1726120453401_1470_01_000002/mr-framework/xz-1.6.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/logredactor-2.0.7.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/parquet-hadoop.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/jul-to-slf4j-1.7.25.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/re2j-1.1.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/jetty-util-9.3.25.v20180904.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/commons-configuration2-2.1.1.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/jackson-core-2.9.9.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/json-simple-1.1.1.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/hadoop-archives-3.0.0-cdh6.3.2.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/kerby-asn1-1.0.0.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/commons-cli-1.2.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/kerb-crypto-1.0.0.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/jetty-webapp-9.3.25.v20180904.jar:/data1/yarn/nm/usercache/hiv
e/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/jackson-mapper-asl-1.9.13-cloudera.1.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/parquet-encoding.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/parquet-thrift.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/parquet-generator.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/netty-handler-4.1.17.Final.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/okhttp-2.7.5.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/audience-annotations-0.5.0.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/hadoop-azure-3.0.0-cdh6.3.2.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/jetty-server-9.3.25.v20180904.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/jettison-1.1.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/objenesis-1.0.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/jackson-jaxrs-base-2.9.9.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/mssql-jdbc-6.2.1.jre7.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_00
0002/mr-framework/commons-lang3-3.7.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/log4j-core-2.8.2.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/stax2-api-3.1.4.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/commons-math3-3.1.1.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/commons-compress-1.18.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/jaxb-impl-2.2.3-1.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/hadoop-mapreduce-client-jobclient-3.0.0-cdh6.3.2-tests.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/commons-net-3.1.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/jersey-core-1.19.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/parquet-pig-bundle.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/avro-1.8.2-cdh6.3.2.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/hadoop-hdfs-client-3.0.0-cdh6.3.2-tests.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/hadoop-archive-logs-3.0.0-cdh6.3.2.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/httpclient-4.5.3.jar:/data1/yar
n/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/kerb-server-1.0.0.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/fst-2.50.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/kerb-admin-1.0.0.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/event-publish-6.3.1-shaded.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/kerby-util-1.0.0.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/commons-codec-1.11.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/parquet-format-sources.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/javax.activation-api-1.2.0.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/hadoop-sls-3.0.0-cdh6.3.2.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/kerby-pkix-1.0.0.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/jcip-annotations-1.0-1.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/bcpkix-jdk15on-1.60.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/javax.servlet-api-3.1.0.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000
002/mr-framework/json-io-2.5.1.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/hadoop-yarn-api-3.0.0-cdh6.3.2.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/jackson-databind-2.9.9.3.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/lz4-java-1.5.0.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/slf4j-log4j12.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/commons-collections-3.2.2.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/gson-2.2.4.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/hadoop-kms-3.0.0-cdh6.3.2.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/zstd-jni-1.3.8-1.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/azure-keyvault-core-0.8.0.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/commons-io-2.6.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/jetty-util-ajax-9.3.25.v20180904.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/hadoop-yarn-applications-distributedshell-3.0.0-cdh6.3.2.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/jetty-http-9.3.25.v20180904.jar:/da
ta1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/hadoop-resourceestimator-3.0.0-cdh6.3.2.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/jersey-guice-1.19.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/guice-servlet-4.0.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/jsp-api-2.1.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/jsr311-api-1.1.1.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/hadoop-aliyun-3.0.0-cdh6.3.2.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/geronimo-jcache_1.0_spec-1.0-alpha-1.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/commons-logging-1.1.3.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/asm-5.0.4.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/jackson-xc-1.9.13.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/hadoop-common-3.0.0-cdh6.3.2.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/kerb-util-1.0.0.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/jackson-core-asl-1.9.13.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/conta
iner_1726120453401_1470_01_000002/mr-framework/hadoop-yarn-registry-3.0.0-cdh6.3.2.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/hadoop-annotations-3.0.0-cdh6.3.2.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/jsch-0.1.54.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/hadoop-streaming-3.0.0-cdh6.3.2.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/hadoop-aws-3.0.0-cdh6.3.2.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/parquet-hadoop-bundle.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/hadoop-mapreduce-client-core-3.0.0-cdh6.3.2.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/hadoop-yarn-server-web-proxy-3.0.0-cdh6.3.2.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/jetty-io-9.3.25.v20180904.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/curator-recipes-2.12.0.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/jaxb-api-2.2.11.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/hadoop-datajoin-3.0.0-cdh6.3.2.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/jersey-server-1.19.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/co
ntainer_1726120453401_1470_01_000002/mr-framework/htrace-core4-4.1.0-incubating.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/jackson-annotations-2.9.9.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/jetty-xml-9.3.25.v20180904.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/gcs-connector-hadoop3-1.9.10-cdh6.3.2-shaded.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/curator-framework-2.12.0.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/zookeeper-3.4.5-cdh6.3.2.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/hadoop-mapreduce-client-hs-plugins-3.0.0-cdh6.3.2.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/hadoop-rumen-3.0.0-cdh6.3.2.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/parquet-cascading3.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/hadoop-yarn-server-tests-3.0.0-cdh6.3.2.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/jdom-1.1.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/spark-2.4.0-cdh6.3.2-yarn-shuffle.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/avro.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/c
ontainer_1726120453401_1470_01_000002/mr-framework/netty-transport-4.1.17.Final.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/jsr305-3.0.0.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/okio-1.6.0.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/parquet-column.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/parquet-format-javadoc.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/metrics-core-3.0.1.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/hadoop-yarn-client-3.0.0-cdh6.3.2.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/kerby-config-1.0.0.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/kerb-core-1.0.0.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/ojalgo-43.0.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/hadoop-hdfs-native-client-3.0.0-cdh6.3.2.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/commons-daemon-1.0.13.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/kafka-clients-2.2.1-cdh6.3.2.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/tt-instrumentation-6.3.1.jar
:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/parquet-protobuf.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/guice-4.0.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/aws-java-sdk-bundle-1.11.271.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/azure-data-lake-store-sdk-2.2.9.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/bcprov-jdk15on-1.60.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/jackson-jaxrs-1.9.13.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/hadoop-hdfs-3.0.0-cdh6.3.2-tests.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/json-smart-2.3.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/jersey-client-1.19.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/hadoop-nfs-3.0.0-cdh6.3.2.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/netty-codec-4.1.17.Final.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/hadoop-mapreduce-client-common-3.0.0-cdh6.3.2.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/kerby-xdr-1.0.0.jar:/data1/yarn/nm/usercache/hive/appcache/application_1
726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/hadoop-hdfs-client-3.0.0-cdh6.3.2.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/paranamer-2.8.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/hadoop-hdfs-httpfs-3.0.0-cdh6.3.2.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/hadoop-mapreduce-client-nativetask-3.0.0-cdh6.3.2.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/hadoop-yarn-common-3.0.0-cdh6.3.2.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/HikariCP-java7-2.4.12.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/hadoop-auth-3.0.0-cdh6.3.2.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/commons-lang-2.6.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/kerb-identity-1.0.0.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/hadoop-mapreduce-client-hs-3.0.0-cdh6.3.2.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/hadoop-hdfs-3.0.0-cdh6.3.2.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/parquet-pig.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/kerb-simplekdc-1.0.0.jar:/data1/yarn/nm/usercache/hive/appcache/application_172612045
3401_1470/container_1726120453401_1470_01_000002/mr-framework/parquet-scala_2.11.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/hadoop-distcp-3.0.0-cdh6.3.2.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/kerb-client-1.0.0.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/netty-common-4.1.17.Final.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/azure-storage-5.4.0.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/parquet-jackson.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/java-util-1.9.0.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/hadoop-yarn-server-nodemanager-3.0.0-cdh6.3.2.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/javax.inject-1.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/snappy-java-1.1.4.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/parquet-avro.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/hadoop-kafka-3.0.0-cdh6.3.2.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/ehcache-3.3.1.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/kerb-common-1.0.0.jar:
/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/parquet-format.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/commons-beanutils-1.9.4.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/parquet-cascading.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/jetty-servlet-9.3.25.v20180904.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/guava-11.0.2.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/hadoop-openstack-3.0.0-cdh6.3.2.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/aliyun-sdk-oss-2.8.3.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/nimbus-jose-jwt-4.41.1.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/hadoop-mapreduce-client-app-3.0.0-cdh6.3.2.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/hadoop-gridmix-3.0.0-cdh6.3.2.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/curator-client-2.12.0.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/jackson-jaxrs-json-provider-2.9.9.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/jetty-security-9.3.25.v20180904.jar:/data1/yarn/nm/usercache
/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/hadoop-hdfs-nfs-3.0.0-cdh6.3.2.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/log4j-1.2.17.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/hadoop-yarn-applications-unmanaged-am-launcher-3.0.0-cdh6.3.2.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/hadoop-mapreduce-client-jobclient-3.0.0-cdh6.3.2.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/netty-codec-http-4.1.17.Final.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/jackson-module-jaxb-annotations-2.9.9.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/hadoop-common-3.0.0-cdh6.3.2-tests.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/hadoop-azure-datalake-3.0.0-cdh6.3.2.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/netty-resolver-4.1.17.Final.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/hadoop-mapreduce-client-shuffle-3.0.0-cdh6.3.2.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/hadoop-extras-3.0.0-cdh6.3.2.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/hadoop-hdfs-native-client-3.0.0-cdh6.3.2-tests.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1
470/container_1726120453401_1470_01_000002/mr-framework/wildfly-openssl-1.0.4.Final.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/hadoop-mapreduce-examples-3.0.0-cdh6.3.2.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/netty-buffer-4.1.17.Final.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/httpcore-4.4.6.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/log4j-api-2.8.2.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/mr-framework/leveldbjni-all-1.8.jar::job.jar/job.jar:job.jar/classes/:job.jar/lib/*:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/job.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/hive-exec-2.1.1-cdh6.3.2-core.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/hive-exec-core.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/paimon-hive-connector-2.1-cdh-6.3-1.0-20241109.002448-47.jar:/data1/yarn/nm/usercache/hive/appcache/application_1726120453401_1470/container_1726120453401_1470_01_000002/zstd-jni-1.5.5-11.jar
Try the following steps:
- delete zstd-jni-1.3.8-1.jar under hive/lib
- put zstd-jni-1.5.5-11.jar under hive/lib
- restart hiveserver2
- insert data to paimon table.
@ChrisKyrie, thank you for the quick answer.
My cluster is CDH 6.3.2, and zstd-jni is not under hive/lib; it is under mr/lib.
I don't think it's a good idea to remove the zstd-jni-1.3.8.jar on all yarn nodemanager nodes.
I'm not sure if this will cause other problems.
The essence of the problem is that zstd-jni-1.3.8-1.jar does not contain the class ZstdOutputStreamNoFinalizer, while Paimon requires zstd-jni-1.5.5-11.jar. I tried to shade zstd-jni (https://github.com/apache/paimon/issues/4531) but failed. I think this is a bug.
Update:
I finally discovered that MR utilizes /user/yarn/mapreduce/mr-framework/3.0.0-cdh6.3.2-mr-framework.tar.gz on HDFS as the location for all dependency jars, so I updated this file to include zstd-jni-1.5.5-11.jar. After making this change, the insert statement executed successfully! Additionally, I ran the original MR job with zstd compression, and it worked perfectly as well.
Perhaps we should consider adding more test cases to ensure that there are no issues with the original MR job. However, for now, everything seems fine to me.
Update:
I have another CDH cluster and followed the same steps there. However, I'm unable to query the result table.
Beeline got the error below:
org.apache.thrift.transport.TTransportException
at org.apache.thrift.transport.TIOStreamTransport.read(TIOStreamTransport.java:132)
at org.apache.thrift.transport.TTransport.readAll(TTransport.java:86)
at org.apache.thrift.transport.TSaslTransport.readLength(TSaslTransport.java:374)
at org.apache.thrift.transport.TSaslTransport.readFrame(TSaslTransport.java:451)
at org.apache.thrift.transport.TSaslTransport.read(TSaslTransport.java:433)
at org.apache.thrift.transport.TSaslClientTransport.read(TSaslClientTransport.java:37)
at org.apache.thrift.transport.TTransport.readAll(TTransport.java:86)
at org.apache.thrift.protocol.TBinaryProtocol.readAll(TBinaryProtocol.java:429)
at org.apache.thrift.protocol.TBinaryProtocol.readI32(TBinaryProtocol.java:318)
at org.apache.thrift.protocol.TBinaryProtocol.readMessageBegin(TBinaryProtocol.java:219)
at org.apache.thrift.TServiceClient.receiveBase(TServiceClient.java:77)
at org.apache.hive.service.rpc.thrift.TCLIService$Client.recv_FetchResults(TCLIService.java:559)
at org.apache.hive.service.rpc.thrift.TCLIService$Client.FetchResults(TCLIService.java:546)
at sun.reflect.GeneratedMethodAccessor2.invoke(Unknown Source)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.hive.jdbc.HiveConnection$SynchronizedHandler.invoke(HiveConnection.java:1415)
at com.sun.proxy.$Proxy19.FetchResults(Unknown Source)
at org.apache.hive.jdbc.HiveQueryResultSet.next(HiveQueryResultSet.java:372)
at org.apache.hive.beeline.BufferedRows.<init>(BufferedRows.java:56)
at org.apache.hive.beeline.IncrementalRowsWithNormalization.<init>(IncrementalRowsWithNormalization.java:50)
at org.apache.hive.beeline.BeeLine.print(BeeLine.java:2258)
at org.apache.hive.beeline.Commands.executeInternal(Commands.java:1005)
at org.apache.hive.beeline.Commands.execute(Commands.java:1180)
at org.apache.hive.beeline.Commands.sql(Commands.java:1109)
at org.apache.hive.beeline.BeeLine.dispatch(BeeLine.java:1331)
at org.apache.hive.beeline.BeeLine.execute(BeeLine.java:1188)
at org.apache.hive.beeline.BeeLine.begin(BeeLine.java:1027)
at org.apache.hive.beeline.BeeLine.mainWithInputRedirection(BeeLine.java:526)
at org.apache.hive.beeline.BeeLine.main(BeeLine.java:508)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.hadoop.util.RunJar.run(RunJar.java:313)
at org.apache.hadoop.util.RunJar.main(RunJar.java:227)
Unknown HS2 problem when communicating with Thrift server.
Error: org.apache.thrift.transport.TTransportException: java.net.SocketException: Broken pipe (Write failed) (state=08S01,code=0)
java.sql.SQLException: org.apache.thrift.transport.TTransportException: java.net.SocketException: Broken pipe (Write failed)
at org.apache.hive.jdbc.HiveStatement.closeStatementIfNeeded(HiveStatement.java:216)
at org.apache.hive.jdbc.HiveStatement.closeClientOperation(HiveStatement.java:221)
at org.apache.hive.jdbc.HiveStatement.close(HiveStatement.java:237)
at org.apache.hive.beeline.Commands.executeInternal(Commands.java:1037)
at org.apache.hive.beeline.Commands.execute(Commands.java:1180)
at org.apache.hive.beeline.Commands.sql(Commands.java:1109)
at org.apache.hive.beeline.BeeLine.dispatch(BeeLine.java:1331)
at org.apache.hive.beeline.BeeLine.execute(BeeLine.java:1188)
at org.apache.hive.beeline.BeeLine.begin(BeeLine.java:1027)
at org.apache.hive.beeline.BeeLine.mainWithInputRedirection(BeeLine.java:526)
at org.apache.hive.beeline.BeeLine.main(BeeLine.java:508)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.hadoop.util.RunJar.run(RunJar.java:313)
at org.apache.hadoop.util.RunJar.main(RunJar.java:227)
Caused by: org.apache.thrift.transport.TTransportException: java.net.SocketException: Broken pipe (Write failed)
at org.apache.thrift.transport.TIOStreamTransport.flush(TIOStreamTransport.java:161)
at org.apache.thrift.transport.TSaslTransport.flush(TSaslTransport.java:499)
at org.apache.thrift.transport.TSaslClientTransport.flush(TSaslClientTransport.java:37)
at org.apache.thrift.TServiceClient.sendBase(TServiceClient.java:73)
at org.apache.thrift.TServiceClient.sendBase(TServiceClient.java:62)
at org.apache.hive.service.rpc.thrift.TCLIService$Client.send_CloseOperation(TCLIService.java:507)
at org.apache.hive.service.rpc.thrift.TCLIService$Client.CloseOperation(TCLIService.java:499)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.hive.jdbc.HiveConnection$SynchronizedHandler.invoke(HiveConnection.java:1415)
at com.sun.proxy.$Proxy19.CloseOperation(Unknown Source)
at org.apache.hive.jdbc.HiveStatement.closeStatementIfNeeded(HiveStatement.java:209)
... 16 more
Caused by: java.net.SocketException: Broken pipe (Write failed)
at java.net.SocketOutputStream.socketWrite0(Native Method)
at java.net.SocketOutputStream.socketWrite(SocketOutputStream.java:111)
at java.net.SocketOutputStream.write(SocketOutputStream.java:155)
at java.io.BufferedOutputStream.flushBuffer(BufferedOutputStream.java:82)
at java.io.BufferedOutputStream.flush(BufferedOutputStream.java:140)
at org.apache.thrift.transport.TIOStreamTransport.flush(TIOStreamTransport.java:159)
... 29 more
and the HiveServer2 log:
2024-11-19 11:41:38,425 INFO org.apache.hadoop.hive.conf.HiveConf: [HiveServer2-Handler-Pool: Thread-983]: Using the default value passed in for log id: 4d95fbd9-4332-45b6-a075-8a86742f4415
2024-11-19 11:41:38,425 INFO org.apache.hadoop.hive.ql.session.SessionState: [HiveServer2-Handler-Pool: Thread-983]: Updating thread name to 4d95fbd9-4332-45b6-a075-8a86742f4415 HiveServer2-Handler-Pool: Thread-983
2024-11-19 11:41:38,427 INFO org.apache.hive.service.cli.operation.OperationManager: [4d95fbd9-4332-45b6-a075-8a86742f4415 HiveServer2-Handler-Pool: Thread-983]: Adding operation: OperationHandle [opType=EXECUTE_STATEMENT, getHandleIdentifier()=9cc0e29b-5930-4062-9ffc-edca9abe9d05]
2024-11-19 11:41:38,434 INFO org.apache.hadoop.hive.ql.Driver: [4d95fbd9-4332-45b6-a075-8a86742f4415 HiveServer2-Handler-Pool: Thread-983]: Compiling command(queryId=hive_20241119114138_675e95c0-97e9-4497-83e6-d32fdac73ad9): select * from hive_test_table
2024-11-19 11:41:38,471 INFO org.apache.sentry.binding.hive.conf.HiveAuthzConf: [4d95fbd9-4332-45b6-a075-8a86742f4415 HiveServer2-Handler-Pool: Thread-983]: DefaultFS: hdfs://nameservice1
2024-11-19 11:41:38,485 WARN org.apache.sentry.binding.hive.conf.HiveAuthzConf: [4d95fbd9-4332-45b6-a075-8a86742f4415 HiveServer2-Handler-Pool: Thread-983]: Using the deprecated config setting hive.sentry.server instead of sentry.hive.server
2024-11-19 11:41:38,485 WARN org.apache.sentry.binding.hive.conf.HiveAuthzConf: [4d95fbd9-4332-45b6-a075-8a86742f4415 HiveServer2-Handler-Pool: Thread-983]: Using the deprecated config setting hive.sentry.provider instead of sentry.hive.provider
2024-11-19 11:41:38,501 INFO hive.metastore: [4d95fbd9-4332-45b6-a075-8a86742f4415 HiveServer2-Handler-Pool: Thread-983]: HMS client filtering is enabled.
2024-11-19 11:41:38,501 INFO hive.metastore: [4d95fbd9-4332-45b6-a075-8a86742f4415 HiveServer2-Handler-Pool: Thread-983]: Trying to connect to metastore with URI thrift://test-xxxxxx:9083
2024-11-19 11:41:38,502 INFO hive.metastore: [4d95fbd9-4332-45b6-a075-8a86742f4415 HiveServer2-Handler-Pool: Thread-983]: Opened a connection to metastore, current connections: 3
2024-11-19 11:41:38,513 INFO hive.metastore: [4d95fbd9-4332-45b6-a075-8a86742f4415 HiveServer2-Handler-Pool: Thread-983]: Connected to metastore.
2024-11-19 11:41:38,698 INFO org.apache.hadoop.hive.ql.parse.SemanticAnalyzer: [4d95fbd9-4332-45b6-a075-8a86742f4415 HiveServer2-Handler-Pool: Thread-983]: Starting Semantic Analysis
2024-11-19 11:41:38,714 INFO org.apache.sentry.binding.hive.conf.HiveAuthzConf: [4d95fbd9-4332-45b6-a075-8a86742f4415 HiveServer2-Handler-Pool: Thread-983]: DefaultFS: hdfs://nameservice1
2024-11-19 11:41:38,727 WARN org.apache.sentry.binding.hive.conf.HiveAuthzConf: [4d95fbd9-4332-45b6-a075-8a86742f4415 HiveServer2-Handler-Pool: Thread-983]: Using the deprecated config setting hive.sentry.server instead of sentry.hive.server
2024-11-19 11:41:38,727 INFO org.apache.hadoop.hive.ql.parse.SemanticAnalyzer: [4d95fbd9-4332-45b6-a075-8a86742f4415 HiveServer2-Handler-Pool: Thread-983]: Completed phase 1 of Semantic Analysis
2024-11-19 11:41:38,727 INFO org.apache.hadoop.hive.ql.parse.SemanticAnalyzer: [4d95fbd9-4332-45b6-a075-8a86742f4415 HiveServer2-Handler-Pool: Thread-983]: Get metadata for source tables
2024-11-19 11:41:38,741 INFO org.apache.hadoop.hive.ql.parse.SemanticAnalyzer: [4d95fbd9-4332-45b6-a075-8a86742f4415 HiveServer2-Handler-Pool: Thread-983]: Get metadata for subqueries
2024-11-19 11:41:38,741 INFO org.apache.hadoop.hive.ql.parse.SemanticAnalyzer: [4d95fbd9-4332-45b6-a075-8a86742f4415 HiveServer2-Handler-Pool: Thread-983]: Get metadata for destination tables
2024-11-19 11:41:38,762 INFO hive.ql.Context: [4d95fbd9-4332-45b6-a075-8a86742f4415 HiveServer2-Handler-Pool: Thread-983]: New scratch dir is hdfs://nameservice1/tmp/hive/hive/4d95fbd9-4332-45b6-a075-8a86742f4415/hive_2024-11-19_11-41-38_454_775893566460184920-35
2024-11-19 11:41:38,762 INFO org.apache.hadoop.hive.ql.parse.SemanticAnalyzer: [4d95fbd9-4332-45b6-a075-8a86742f4415 HiveServer2-Handler-Pool: Thread-983]: Completed getting MetaData in Semantic Analysis
2024-11-19 11:41:38,815 INFO org.apache.hadoop.hive.common.FileUtils: [4d95fbd9-4332-45b6-a075-8a86742f4415 HiveServer2-Handler-Pool: Thread-983]: Creating directory if it doesn't exist: hdfs://nameservice1/tmp/hive/hive/4d95fbd9-4332-45b6-a075-8a86742f4415/hive_2024-11-19_11-41-38_454_775893566460184920-35/-mr-10000/.hive-staging_hive_2024-11-19_11-41-38_454_775893566460184920-35
2024-11-19 11:41:38,909 INFO org.apache.hadoop.hive.ql.ppd.OpProcFactory: [4d95fbd9-4332-45b6-a075-8a86742f4415 HiveServer2-Handler-Pool: Thread-983]: Processing for FS(2)
2024-11-19 11:41:38,909 INFO org.apache.hadoop.hive.ql.ppd.OpProcFactory: [4d95fbd9-4332-45b6-a075-8a86742f4415 HiveServer2-Handler-Pool: Thread-983]: Processing for SEL(1)
2024-11-19 11:41:38,909 INFO org.apache.hadoop.hive.ql.ppd.OpProcFactory: [4d95fbd9-4332-45b6-a075-8a86742f4415 HiveServer2-Handler-Pool: Thread-983]: Processing for TS(0)
2024-11-19 11:41:38,946 INFO org.apache.hadoop.hive.ql.parse.SemanticAnalyzer: [4d95fbd9-4332-45b6-a075-8a86742f4415 HiveServer2-Handler-Pool: Thread-983]: Completed plan generation
2024-11-19 11:41:38,980 WARN org.apache.sentry.binding.hive.conf.HiveAuthzConf: [4d95fbd9-4332-45b6-a075-8a86742f4415 HiveServer2-Handler-Pool: Thread-983]: Using the deprecated config setting hive.sentry.server instead of sentry.hive.server
2024-11-19 11:41:38,980 WARN org.apache.sentry.binding.hive.conf.HiveAuthzConf: [4d95fbd9-4332-45b6-a075-8a86742f4415 HiveServer2-Handler-Pool: Thread-983]: Using the deprecated config setting hive.sentry.provider instead of sentry.hive.provider
2024-11-19 11:41:38,995 INFO org.apache.hadoop.hive.ql.Driver: [4d95fbd9-4332-45b6-a075-8a86742f4415 HiveServer2-Handler-Pool: Thread-983]: Semantic Analysis Completed
2024-11-19 11:41:38,995 INFO org.apache.hadoop.hive.ql.Driver: [4d95fbd9-4332-45b6-a075-8a86742f4415 HiveServer2-Handler-Pool: Thread-983]: Returning Hive schema: Schema(fieldSchemas:[FieldSchema(name:hive_test_table.a, type:int, comment:null), FieldSchema(name:hive_test_table.b, type:string, comment:null)], properties:null)
2024-11-19 11:41:38,996 INFO org.apache.hadoop.hive.ql.exec.TableScanOperator: [4d95fbd9-4332-45b6-a075-8a86742f4415 HiveServer2-Handler-Pool: Thread-983]: Initializing operator TS[0]
2024-11-19 11:41:38,997 INFO org.apache.hadoop.hive.ql.exec.SelectOperator: [4d95fbd9-4332-45b6-a075-8a86742f4415 HiveServer2-Handler-Pool: Thread-983]: Initializing operator SEL[1]
2024-11-19 11:41:38,997 INFO org.apache.hadoop.hive.ql.exec.SelectOperator: [4d95fbd9-4332-45b6-a075-8a86742f4415 HiveServer2-Handler-Pool: Thread-983]: SELECT struct<a:int,b:string>
2024-11-19 11:41:38,997 INFO org.apache.hadoop.hive.ql.exec.ListSinkOperator: [4d95fbd9-4332-45b6-a075-8a86742f4415 HiveServer2-Handler-Pool: Thread-983]: Initializing operator LIST_SINK[3]
2024-11-19 11:41:38,997 INFO org.apache.hadoop.hive.schshim.FairSchedulerShim: [4d95fbd9-4332-45b6-a075-8a86742f4415 HiveServer2-Handler-Pool: Thread-983]: Setting queue name to: 'root.users.hive' for user 'hive'
2024-11-19 11:41:38,997 INFO org.apache.hadoop.hive.ql.Driver: [4d95fbd9-4332-45b6-a075-8a86742f4415 HiveServer2-Handler-Pool: Thread-983]: Completed compiling command(queryId=hive_20241119114138_675e95c0-97e9-4497-83e6-d32fdac73ad9); Time taken: 0.567 seconds
2024-11-19 11:41:38,998 INFO org.apache.hadoop.hive.conf.HiveConf: [4d95fbd9-4332-45b6-a075-8a86742f4415 HiveServer2-Handler-Pool: Thread-983]: Using the default value passed in for log id: 4d95fbd9-4332-45b6-a075-8a86742f4415
2024-11-19 11:41:38,998 INFO org.apache.hadoop.hive.ql.session.SessionState: [4d95fbd9-4332-45b6-a075-8a86742f4415 HiveServer2-Handler-Pool: Thread-983]: Resetting thread name to HiveServer2-Handler-Pool: Thread-983
2024-11-19 11:41:38,999 INFO org.apache.hadoop.hive.ql.lockmgr.DummyTxnManager: [HiveServer2-Background-Pool: Thread-1039]: Creating lock manager of type org.apache.hadoop.hive.ql.lockmgr.zookeeper.ZooKeeperHiveLockManager
2024-11-19 11:41:39,005 INFO org.apache.hadoop.hive.ql.Driver: [HiveServer2-Background-Pool: Thread-1039]: Executing command(queryId=hive_20241119114138_675e95c0-97e9-4497-83e6-d32fdac73ad9): select * from hive_test_table
2024-11-19 11:41:39,007 INFO org.apache.hadoop.hive.ql.Driver: [HiveServer2-Background-Pool: Thread-1039]: Completed executing command(queryId=hive_20241119114138_675e95c0-97e9-4497-83e6-d32fdac73ad9); Time taken: 0.001 seconds
2024-11-19 11:41:39,007 INFO org.apache.hadoop.hive.ql.Driver: [HiveServer2-Background-Pool: Thread-1039]: OK
2024-11-19 11:41:39,013 INFO org.apache.hadoop.hive.conf.HiveConf: [HiveServer2-Handler-Pool: Thread-983]: Using the default value passed in for log id: 4d95fbd9-4332-45b6-a075-8a86742f4415
2024-11-19 11:41:39,013 INFO org.apache.hadoop.hive.ql.session.SessionState: [HiveServer2-Handler-Pool: Thread-983]: Updating thread name to 4d95fbd9-4332-45b6-a075-8a86742f4415 HiveServer2-Handler-Pool: Thread-983
2024-11-19 11:41:39,013 INFO org.apache.hadoop.hive.conf.HiveConf: [4d95fbd9-4332-45b6-a075-8a86742f4415 HiveServer2-Handler-Pool: Thread-983]: Using the default value passed in for log id: 4d95fbd9-4332-45b6-a075-8a86742f4415
2024-11-19 11:41:39,013 INFO org.apache.hadoop.hive.ql.session.SessionState: [4d95fbd9-4332-45b6-a075-8a86742f4415 HiveServer2-Handler-Pool: Thread-983]: Resetting thread name to HiveServer2-Handler-Pool: Thread-983
2024-11-19 11:41:39,015 INFO org.apache.hadoop.hive.conf.HiveConf: [HiveServer2-Handler-Pool: Thread-983]: Using the default value passed in for log id: 4d95fbd9-4332-45b6-a075-8a86742f4415
2024-11-19 11:41:39,015 INFO org.apache.hadoop.hive.ql.session.SessionState: [HiveServer2-Handler-Pool: Thread-983]: Updating thread name to 4d95fbd9-4332-45b6-a075-8a86742f4415 HiveServer2-Handler-Pool: Thread-983
2024-11-19 11:41:39,015 INFO org.apache.hadoop.hive.conf.HiveConf: [4d95fbd9-4332-45b6-a075-8a86742f4415 HiveServer2-Handler-Pool: Thread-983]: Using the default value passed in for log id: 4d95fbd9-4332-45b6-a075-8a86742f4415
2024-11-19 11:41:39,015 INFO org.apache.hadoop.hive.ql.session.SessionState: [4d95fbd9-4332-45b6-a075-8a86742f4415 HiveServer2-Handler-Pool: Thread-983]: Resetting thread name to HiveServer2-Handler-Pool: Thread-983
2024-11-19 11:41:39,016 INFO org.apache.hadoop.hive.conf.HiveConf: [HiveServer2-Handler-Pool: Thread-983]: Using the default value passed in for log id: 4d95fbd9-4332-45b6-a075-8a86742f4415
2024-11-19 11:41:39,016 INFO org.apache.hadoop.hive.ql.session.SessionState: [HiveServer2-Handler-Pool: Thread-983]: Updating thread name to 4d95fbd9-4332-45b6-a075-8a86742f4415 HiveServer2-Handler-Pool: Thread-983
2024-11-19 11:41:39,016 INFO org.apache.hadoop.hive.conf.HiveConf: [4d95fbd9-4332-45b6-a075-8a86742f4415 HiveServer2-Handler-Pool: Thread-983]: Using the default value passed in for log id: 4d95fbd9-4332-45b6-a075-8a86742f4415
2024-11-19 11:41:39,016 INFO org.apache.hadoop.hive.ql.session.SessionState: [4d95fbd9-4332-45b6-a075-8a86742f4415 HiveServer2-Handler-Pool: Thread-983]: Resetting thread name to HiveServer2-Handler-Pool: Thread-983
2024-11-19 11:41:39,018 INFO org.apache.hadoop.hive.conf.HiveConf: [HiveServer2-Handler-Pool: Thread-983]: Using the default value passed in for log id: 4d95fbd9-4332-45b6-a075-8a86742f4415
2024-11-19 11:41:39,018 INFO org.apache.hadoop.hive.ql.session.SessionState: [HiveServer2-Handler-Pool: Thread-983]: Updating thread name to 4d95fbd9-4332-45b6-a075-8a86742f4415 HiveServer2-Handler-Pool: Thread-983
2024-11-19 11:41:48,405 INFO org.apache.hadoop.hive.conf.HiveConf: [4d95fbd9-4332-45b6-a075-8a86742f4415 HiveServer2-Handler-Pool: Thread-983]: Using the default value passed in for log id: 4d95fbd9-4332-45b6-a075-8a86742f4415
2024-11-19 11:41:48,405 INFO org.apache.hadoop.hive.ql.session.SessionState: [4d95fbd9-4332-45b6-a075-8a86742f4415 HiveServer2-Handler-Pool: Thread-983]: Resetting thread name to HiveServer2-Handler-Pool: Thread-983
2024-11-19 11:41:48,405 INFO org.apache.hive.service.cli.thrift.ThriftCLIService: [HiveServer2-Handler-Pool: Thread-983]: Session disconnected without closing properly.
2024-11-19 11:41:48,405 INFO org.apache.hive.service.cli.thrift.ThriftCLIService: [HiveServer2-Handler-Pool: Thread-983]: Closing the session: SessionHandle [4d95fbd9-4332-45b6-a075-8a86742f4415]
2024-11-19 11:41:48,405 INFO org.apache.hive.service.CompositeService: [HiveServer2-Handler-Pool: Thread-983]: Session closed, SessionHandle [4d95fbd9-4332-45b6-a075-8a86742f4415], current sessions:2
2024-11-19 11:41:48,405 INFO org.apache.hadoop.hive.conf.HiveConf: [HiveServer2-Handler-Pool: Thread-983]: Using the default value passed in for log id: 4d95fbd9-4332-45b6-a075-8a86742f4415
2024-11-19 11:41:48,405 INFO org.apache.hadoop.hive.ql.session.SessionState: [HiveServer2-Handler-Pool: Thread-983]: Updating thread name to 4d95fbd9-4332-45b6-a075-8a86742f4415 HiveServer2-Handler-Pool: Thread-983
2024-11-19 11:41:48,405 INFO org.apache.hive.service.cli.operation.OperationManager: [4d95fbd9-4332-45b6-a075-8a86742f4415 HiveServer2-Handler-Pool: Thread-983]: Closing operation: OperationHandle [opType=EXECUTE_STATEMENT, getHandleIdentifier()=9cc0e29b-5930-4062-9ffc-edca9abe9d05]
2024-11-19 11:41:48,537 INFO org.apache.hive.service.cli.session.HiveSessionImpl: [4d95fbd9-4332-45b6-a075-8a86742f4415 HiveServer2-Handler-Pool: Thread-983]: Operation log session directory is deleted: /var/log/hive/operation_logs/4d95fbd9-4332-45b6-a075-8a86742f4415
2024-11-19 11:41:48,537 INFO org.apache.hadoop.hive.conf.HiveConf: [4d95fbd9-4332-45b6-a075-8a86742f4415 HiveServer2-Handler-Pool: Thread-983]: Using the default value passed in for log id: 4d95fbd9-4332-45b6-a075-8a86742f4415
2024-11-19 11:41:48,537 INFO org.apache.hadoop.hive.ql.session.SessionState: [4d95fbd9-4332-45b6-a075-8a86742f4415 HiveServer2-Handler-Pool: Thread-983]: Resetting thread name to HiveServer2-Handler-Pool: Thread-983
2024-11-19 11:41:48,584 INFO org.apache.hadoop.hive.ql.session.SessionState: [HiveServer2-Handler-Pool: Thread-983]: Deleted directory: /tmp/hive/hive/4d95fbd9-4332-45b6-a075-8a86742f4415 on fs with scheme hdfs
2024-11-19 11:41:48,584 INFO org.apache.hadoop.hive.ql.session.SessionState: [HiveServer2-Handler-Pool: Thread-983]: Deleted directory: /tmp/hive/4d95fbd9-4332-45b6-a075-8a86742f4415 on fs with scheme file
2024-11-19 11:41:48,584 INFO hive.metastore: [HiveServer2-Handler-Pool: Thread-983]: Closed a connection to metastore, current connections: 2
It turns out that I set the wrong value for "hive.metastore.warehouse.dir".
I think this is a package conflict issue. Can you please find which lower-version jar contains this class, using the following commands? (Note: jar listings use slash-separated entry names, and `jar -tf` takes one archive at a time, so loop over the jars.)
- cd $HIVE_HOME/lib
- for f in *.jar; do jar -tf "$f" | grep -q 'com/github/luben/zstd/ZstdOutputStreamNoFinalizer' && echo "$f"; done
- delete zstd-jni-*.jar -- or mv zstd-jni-*.jar /tmp/jars/
- shaded the zstd-jni
- restart the Hive Metastore and HiveServer2
- insert data to paimon table.
I would also suggest lowering the log level to DEBUG to get more clues that could help troubleshoot the problem.
@FrommyMind 大佬,你好,请教个问题,我用的hive版本应该和你的差不多 , hive-2.1.1-cdh6.3.1, 请问你有没有遇到以下问题没? 我是在flink-sql-client中建paimon表('metastore.partitioned-table' = 'true' 同步分区数据到hivemetestore),通过flink从kafka读取ogg-json数据,然后写到paimon表的,但在flink运行1-2h的样子之后就会失败,看日志像是flink要修改paimon表分区时hive不支持报错(报错日志如下),如果我修改paimon表设置'metastore.partitioned-table' = 'false'不同步分区数据到hive, flink任务就能正常恢复,创建对应的hive外部表又查不到数据(无报错)
报错内容:java.lang.NoSuchMethodError: org.apache.hadoop.hive.metastore.IMetaStoreClient.alter_partition
具体日志如下:
2025-03-28 11:29:53,518 INFO org.apache.flink.runtime.executiongraph.ExecutionGraph [] - Writer : paimon_test_ogg -> Global Committer : paimon_test_ogg -> end: Writer (1/1) (9097ca9872f9d8c1a563314b90452db1_c16a96e78194357634f51bcfea60f56a_0_0) switched from RUNNING to FAILED on container_1742182751832_102612_01_000004 @ 10.xx.xx.xx (dataPort=28456).
java.lang.NoSuchMethodError: org.apache.hadoop.hive.metastore.IMetaStoreClient.alter_partition(Ljava/lang/String;Ljava/lang/String;Lorg/apache/hadoop/hive/metastore/api/Partition;)V
at org.apache.paimon.hive.HiveMetastoreClient.lambda$alterPartition$5(HiveMetastoreClient.java:140) ~[paimon-flink-1.18-1.0.1.jar:1.0.1]
at org.apache.paimon.client.ClientPool$ClientPoolImpl.lambda$execute$0(ClientPool.java:80) ~[paimon-flink-1.18-1.0.1.jar:1.0.1]
at org.apache.paimon.client.ClientPool$ClientPoolImpl.run(ClientPool.java:68) ~[paimon-flink-1.18-1.0.1.jar:1.0.1]
at org.apache.paimon.client.ClientPool$ClientPoolImpl.execute(ClientPool.java:77) ~[paimon-flink-1.18-1.0.1.jar:1.0.1]
at org.apache.paimon.hive.pool.CachedClientPool.execute(CachedClientPool.java:139) ~[paimon-flink-1.18-1.0.1.jar:1.0.1]
at org.apache.paimon.hive.HiveMetastoreClient.alterPartition(HiveMetastoreClient.java:138) ~[paimon-flink-1.18-1.0.1.jar:1.0.1]
at org.apache.paimon.flink.sink.partition.PartitionStatisticsReporter.report(PartitionStatisticsReporter.java:88) ~[paimon-flink-1.18-1.0.1.jar:1.0.1]
at org.apache.paimon.flink.sink.partition.ReportPartStatsListener.notifyCommittable(ReportPartStatsListener.java:111) ~[paimon-flink-1.18-1.0.1.jar:1.0.1]
at org.apache.paimon.flink.sink.partition.PartitionListeners.notifyCommittable(PartitionListeners.java:42) ~[paimon-flink-1.18-1.0.1.jar:1.0.1]
at org.apache.paimon.flink.sink.StoreCommitter.commit(StoreCommitter.java:113) ~[paimon-flink-1.18-1.0.1.jar:1.0.1]
at org.apache.paimon.flink.sink.CommitterOperator.commitUpToCheckpoint(CommitterOperator.java:210) ~[paimon-flink-1.18-1.0.1.jar:1.0.1]
at org.apache.paimon.flink.sink.CommitterOperator.notifyCheckpointComplete(CommitterOperator.java:187) ~[paimon-flink-1.18-1.0.1.jar:1.0.1]
at org.apache.flink.streaming.runtime.tasks.StreamOperatorWrapper.notifyCheckpointComplete(StreamOperatorWrapper.java:104) ~[flink-dist-1.18.1.jar:1.18.1]
at org.apache.flink.streaming.runtime.tasks.RegularOperatorChain.notifyCheckpointComplete(RegularOperatorChain.java:145) ~[flink-dist-1.18.1.jar:1.18.1]
at org.apache.flink.streaming.runtime.tasks.SubtaskCheckpointCoordinatorImpl.notifyCheckpoint(SubtaskCheckpointCoordinatorImpl.java:468) ~[flink-dist-1.18.1.jar:1.18.1]
at org.apache.flink.streaming.runtime.tasks.SubtaskCheckpointCoordinatorImpl.notifyCheckpointComplete(SubtaskCheckpointCoordinatorImpl.java:401) ~[flink-dist-1.18.1.jar:1.18.1]
at org.apache.flink.streaming.runtime.tasks.StreamTask.notifyCheckpointComplete(StreamTask.java:1449) ~[flink-dist-1.18.1.jar:1.18.1]
at org.apache.flink.streaming.runtime.tasks.StreamTask.lambda$notifyCheckpointCompleteAsync$17(StreamTask.java:1390) ~[flink-dist-1.18.1.jar:1.18.1]
at org.apache.flink.streaming.runtime.tasks.StreamTask.lambda$notifyCheckpointOperation$20(StreamTask.java:1429) ~[flink-dist-1.18.1.jar:1.18.1]
at org.apache.flink.streaming.runtime.tasks.StreamTaskActionExecutor$1.runThrowing(StreamTaskActionExecutor.java:50) ~[flink-dist-1.18.1.jar:1.18.1]
at org.apache.flink.streaming.runtime.tasks.mailbox.Mail.run(Mail.java:90) ~[flink-dist-1.18.1.jar:1.18.1]
at org.apache.flink.streaming.runtime.tasks.mailbox.MailboxProcessor.runMail(MailboxProcessor.java:398) ~[flink-dist-1.18.1.jar:1.18.1]
at org.apache.flink.streaming.runtime.tasks.mailbox.MailboxProcessor.processMailsWhenDefaultActionUnavailable(MailboxProcessor.java:367) ~[flink-dist-1.18.1.jar:1.18.1]
at org.apache.flink.streaming.runtime.tasks.mailbox.MailboxProcessor.processMail(MailboxProcessor.java:352) ~[flink-dist-1.18.1.jar:1.18.1]
at org.apache.flink.streaming.runtime.tasks.mailbox.MailboxProcessor.runMailboxLoop(MailboxProcessor.java:229) ~[flink-dist-1.18.1.jar:1.18.1]
at org.apache.flink.streaming.runtime.tasks.StreamTask.runMailboxLoop(StreamTask.java:858) ~[flink-dist-1.18.1.jar:1.18.1]
at org.apache.flink.streaming.runtime.tasks.StreamTask.invoke(StreamTask.java:807) ~[flink-dist-1.18.1.jar:1.18.1]
at org.apache.flink.runtime.taskmanager.Task.runWithSystemExitMonitoring(Task.java:953) ~[flink-dist-1.18.1.jar:1.18.1]
at org.apache.flink.runtime.taskmanager.Task.restoreAndInvoke(Task.java:932) ~[flink-dist-1.18.1.jar:1.18.1]
at org.apache.flink.runtime.taskmanager.Task.doRun(Task.java:746) ~[flink-dist-1.18.1.jar:1.18.1]
at org.apache.flink.runtime.taskmanager.Task.run(Task.java:562) ~[flink-dist-1.18.1.jar:1.18.1]
at java.lang.Thread.run(Thread.java:748) ~[?:1.8.0_181]
```