Sqoop export from a Hive table to a Teradata table

I am trying to load data from Hive into Teradata. To do this, I created an external table in Hive, inserted values into it, and ran the sqoop export command.

create external table Sample_load
(
name varchar(32),
addr varchar(50)
);

insert into table Sample_load select F_name, Address from data where age > 18;

sqoop export \
  -Dhadoop.security.credential.provider.path=pwd_file_for_connection \
  -Dmapreduce.map.java.opts="-Duser.timezone=GMT" \
  --connect jdbc_url/database=Tera_db,charset=utf8 \
  -m 10 \
  --username uname --password-alias pwd \
  --table Teradata_table \
  --hcatalog-database db_name \
  --hcatalog-table sample_load

but I am getting the following error:

Error: java.lang.RuntimeException: java.lang.reflect.InvocationTargetException
        at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:135)
        at org.apache.hadoop.io.serializer.WritableSerialization$WritableDeserializer.deserialize(WritableSerialization.java:67)
        at org.apache.hadoop.io.serializer.WritableSerialization$WritableDeserializer.deserialize(WritableSerialization.java:42)
        at org.apache.hadoop.mapred.MapTask.getSplitDetails(MapTask.java:378)
        at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:766)
        at org.apache.hadoop.mapred.MapTask.run(MapTask.java:347)
        at org.apache.hadoop.mapred.YarnChild$2.run(YarnChild.java:174)
        at java.security.AccessController.doPrivileged(Native Method)
        at javax.security.auth.Subject.doAs(Subject.java:422)
        at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1730)
        at org.apache.hadoop.mapred.YarnChild.main(YarnChild.java:168)
Caused by: java.lang.reflect.InvocationTargetException
        at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
        at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
        at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
        at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
        at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:133)
        ... 10 more
Caused by: com.teradata.connector.common.exception.ConnectorException: java.lang.ClassNotFoundException: org.apache.hive.hcatalog.mapreduce.HCatSplit
        at java.net.URLClassLoader.findClass(URLClassLoader.java:382)
        at java.lang.ClassLoader.loadClass(ClassLoader.java:418)
        at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:352)
        at java.lang.ClassLoader.loadClass(ClassLoader.java:351)
        at java.lang.Class.forName0(Native Method)
        at java.lang.Class.forName(Class.java:264)
        at com.teradata.connector.hcat.ConnectorCombineFileHCatSplit.<init>(ConnectorCombineFileHCatInputFormat.java:276)
        at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
        at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
        at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
        at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
        at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:133)
        at org.apache.hadoop.io.serializer.WritableSerialization$WritableDeserializer.deserialize(WritableSerialization.java:67)
        at org.apache.hadoop.io.serializer.WritableSerialization$WritableDeserializer.deserialize(WritableSerialization.java:42)
        at org.apache.hadoop.mapred.MapTask.getSplitDetails(MapTask.java:378)
        at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:766)
        at org.apache.hadoop.mapred.MapTask.run(MapTask.java:347)
        at org.apache.hadoop.mapred.YarnChild$2.run(YarnChild.java:174)
        at java.security.AccessController.doPrivileged(Native Method)
        at javax.security.auth.Subject.doAs(Subject.java:422)
        at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1730)
        at org.apache.hadoop.mapred.YarnChild.main(YarnChild.java:168)

        at com.teradata.connector.hcat.ConnectorCombineFileHCatSplit.<init>(ConnectorCombineFileHCatInputFormat.java:281)
        ... 15 more

21/01/05 03:21:01 INFO mapreduce.Job: Task Id : attempt_1608639506866_65714_m_000000_1, Status : FAILED
Error: java.lang.RuntimeException: java.lang.reflect.InvocationTargetException
        at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:135)
        at org.apache.hadoop.io.serializer.WritableSerialization$WritableDeserializer.deserialize(WritableSerialization.java:67)
        at org.apache.hadoop.io.serializer.WritableSerialization$WritableDeserializer.deserialize(WritableSerialization.java:42)
        at org.apache.hadoop.mapred.MapTask.getSplitDetails(MapTask.java:378)
        at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:766)
        at org.apache.hadoop.mapred.MapTask.run(MapTask.java:347)
        at org.apache.hadoop.mapred.YarnChild$2.run(YarnChild.java:174)
        at java.security.AccessController.doPrivileged(Native Method)
        at javax.security.auth.Subject.doAs(Subject.java:422)
        at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1730)
        at org.apache.hadoop.mapred.YarnChild.main(YarnChild.java:168)
Caused by: java.lang.reflect.InvocationTargetException
        at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
        at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
        at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
        at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
        at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:133)
        ... 10 more
Caused by: com.teradata.connector.common.exception.ConnectorException: java.lang.ClassNotFoundException: org.apache.hive.hcatalog.mapreduce.HCatSplit
        at java.net.URLClassLoader.findClass(URLClassLoader.java:382)
        at java.lang.ClassLoader.loadClass(ClassLoader.java:418)
        at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:352)
        at java.lang.ClassLoader.loadClass(ClassLoader.java:351)
        at java.lang.Class.forName0(Native Method)
        at java.lang.Class.forName(Class.java:264)
        at com.teradata.connector.hcat.ConnectorCombineFileHCatSplit.<init>(ConnectorCombineFileHCatInputFormat.java:276)
        at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
        at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
        at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
        at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
        at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:133)
        at org.apache.hadoop.io.serializer.WritableSerialization$WritableDeserializer.deserialize(WritableSerialization.java:67)
        at org.apache.hadoop.io.serializer.WritableSerialization$WritableDeserializer.deserialize(WritableSerialization.java:42)
        at org.apache.hadoop.mapred.MapTask.getSplitDetails(MapTask.java:378)
        at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:766)
        at org.apache.hadoop.mapred.MapTask.run(MapTask.java:347)
        at org.apache.hadoop.mapred.YarnChild$2.run(YarnChild.java:174)
        at java.security.AccessController.doPrivileged(Native Method)
        at javax.security.auth.Subject.doAs(Subject.java:422)
        at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1730)
        at org.apache.hadoop.mapred.YarnChild.main(YarnChild.java:168)

        at com.teradata.connector.hcat.ConnectorCombineFileHCatSplit.<init>(ConnectorCombineFileHCatInputFormat.java:281)
        ... 15 more

21/01/05 03:21:08 INFO mapreduce.Job: Task Id : attempt_1608639506866_65714_m_000000_2, Status : FAILED
Error: java.lang.RuntimeException: java.lang.reflect.InvocationTargetException
        at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:135)
        at org.apache.hadoop.io.serializer.WritableSerialization$WritableDeserializer.deserialize(WritableSerialization.java:67)
        at org.apache.hadoop.io.serializer.WritableSerialization$WritableDeserializer.deserialize(WritableSerialization.java:42)
        at org.apache.hadoop.mapred.MapTask.getSplitDetails(MapTask.java:378)
        at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:766)
        at org.apache.hadoop.mapred.MapTask.run(MapTask.java:347)
        at org.apache.hadoop.mapred.YarnChild$2.run(YarnChild.java:174)
        at java.security.AccessController.doPrivileged(Native Method)
        at javax.security.auth.Subject.doAs(Subject.java:422)
        at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1730)
        at org.apache.hadoop.mapred.YarnChild.main(YarnChild.java:168)
Caused by: java.lang.reflect.InvocationTargetException
        at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
        at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
        at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
        at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
        at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:133)
        ... 10 more
Caused by: com.teradata.connector.common.exception.ConnectorException: java.lang.ClassNotFoundException: org.apache.hive.hcatalog.mapreduce.HCatSplit
        at java.net.URLClassLoader.findClass(URLClassLoader.java:382)
        at java.lang.ClassLoader.loadClass(ClassLoader.java:418)
        at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:352)
        at java.lang.ClassLoader.loadClass(ClassLoader.java:351)
        at java.lang.Class.forName0(Native Method)
        at java.lang.Class.forName(Class.java:264)
        at com.teradata.connector.hcat.ConnectorCombineFileHCatSplit.<init>(ConnectorCombineFileHCatInputFormat.java:276)
        at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
        at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
        at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
        at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
        at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:133)
        at org.apache.hadoop.io.serializer.WritableSerialization$WritableDeserializer.deserialize(WritableSerialization.java:67)
        at org.apache.hadoop.io.serializer.WritableSerialization$WritableDeserializer.deserialize(WritableSerialization.java:42)
        at org.apache.hadoop.mapred.MapTask.getSplitDetails(MapTask.java:378)
        at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:766)
        at org.apache.hadoop.mapred.MapTask.run(MapTask.java:347)
        at org.apache.hadoop.mapred.YarnChild$2.run(YarnChild.java:174)
        at java.security.AccessController.doPrivileged(Native Method)
        at javax.security.auth.Subject.doAs(Subject.java:422)
        at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1730)
        at org.apache.hadoop.mapred.YarnChild.main(YarnChild.java:168)

        at com.teradata.connector.hcat.ConnectorCombineFileHCatSplit.<init>(ConnectorCombineFileHCatInputFormat.java:281)
        ... 15 more

21/01/05 03:21:16 INFO mapreduce.Job:  map 100% reduce 0%
21/01/05 03:21:16 INFO mapreduce.Job: Job job_1608639506866_65714 failed with state FAILED due to: Task failed task_1608639506866_65714_m_000000
Job failed as tasks failed. failedMaps:1 failedReduces:0 killedMaps:0 killedReduces: 0

21/01/05 03:21:16 INFO mapreduce.Job: Counters: 9
        Job Counters
                Failed map tasks=4
                Launched map tasks=4
                Other local map tasks=3
                Data-local map tasks=1
                Total time spent by all maps in occupied slots (ms)=76036
                Total time spent by all reduces in occupied slots (ms)=0
                Total time spent by all map tasks (ms)=19009
                Total vcore-milliseconds taken by all map tasks=19009
                Total megabyte-milliseconds taken by all map tasks=77860864
21/01/05 03:21:16 INFO processor.TeradataOutputProcessor: output postprocessor com.teradata.connector.teradata.processor.TeradataBatchInsertProcessor starts at:  1609834876349
21/01/05 03:21:16 INFO processor.TeradataBatchInsertProcessor: drop stage table "Sample_A01_032038570"
21/01/05 03:21:16 INFO processor.TeradataOutputProcessor: output postprocessor com.teradata.connector.teradata.processor.TeradataBatchInsertProcessor ends at:  1609834876349
21/01/05 03:21:16 INFO processor.TeradataOutputProcessor: the total elapsed time of output postprocessor com.teradata.connector.teradata.processor.TeradataBatchInsertProcessor is: 0s
21/01/05 03:21:16 INFO teradata.TeradataSqoopExportHelper: Teradata export job completed with exit code 1
21/01/05 03:21:16 ERROR tool.ExportTool: Error during export:
Import Job failed
        at org.apache.sqoop.teradata.TeradataConnManager.exportTable(TeradataConnManager.java:550)
        at org.apache.sqoop.tool.ExportTool.exportTable(ExportTool.java:94)
        at org.apache.sqoop.tool.ExportTool.run(ExportTool.java:113)
        at org.apache.sqoop.Sqoop.run(Sqoop.java:151)
        at org.apache.hadoop.util.ToolRunner.run(ToolRunner.java:76)
        at org.apache.sqoop.Sqoop.runSqoop(Sqoop.java:187)
        at org.apache.sqoop.Sqoop.runTool(Sqoop.java:241)
        at org.apache.sqoop.Sqoop.runTool(Sqoop.java:250)
        at org.apache.sqoop.Sqoop.main(Sqoop.java:259)
21/01/05 03:21:16 INFO metastore.HiveMetaStoreClient: Closed a connection to metastore, current connections: 1

When I checked the logs, I could not find a running map process, i.e. the job failed before the map process could even start. Please help me figure out the problem.


person Devi    schedule 05.01.2021
comment
Similar question: stackoverflow.com/q/30100592/2700344   -  person leftjoin    schedule 05.01.2021
comment
If you really need HCatalog, you need to define HCAT_HOME, something like export HCAT_HOME=/usr/local/hive/hcatalog.   -  person Andrew    schedule 05.01.2021
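
The map tasks die with ClassNotFoundException: org.apache.hive.hcatalog.mapreduce.HCatSplit, i.e. the HCatalog jars never reach the task classpath. Below is a minimal sketch of one way to make them available before rerunning the export, following the HCAT_HOME suggestion above. The paths and the jar name are assumptions (they depend on your distribution and Hive version), not the exact layout of this cluster:

# Assumed install locations -- adjust to your distribution's layout.
export HIVE_HOME=/usr/local/hive
export HCAT_HOME=$HIVE_HOME/hcatalog

# Make the HCatalog classes visible to the Sqoop client JVM.
export HADOOP_CLASSPATH=$HADOOP_CLASSPATH:$HCAT_HOME/share/hcatalog/*

# Ship the HCatalog core jar to the map tasks via the generic -libjars option
# (the jar file name/version here is illustrative).
sqoop export \
  -libjars "$HCAT_HOME/share/hcatalog/hive-hcatalog-core.jar" \
  -Dhadoop.security.credential.provider.path=pwd_file_for_connection \
  -Dmapreduce.map.java.opts="-Duser.timezone=GMT" \
  --connect jdbc_url/database=Tera_db,charset=utf8 \
  -m 10 \
  --username uname --password-alias pwd \
  --table Teradata_table \
  --hcatalog-database db_name \
  --hcatalog-table sample_load

The key point is that -libjars (a generic Hadoop option, so it must come right after "sqoop export") distributes the jar to the YARN containers, while HADOOP_CLASSPATH only affects the client-side JVM; whether both are needed depends on how Sqoop and the Teradata connector are installed on the cluster.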