Overview
Sqoop configuration
Configure sqoop-env.sh
vi ./sqoop-env.sh
export HADOOP_COMMON_HOME=${HADOOP_HOME}
export HADOOP_HDFS_HOME=${HADOOP_HOME}/share/hadoop/hdfs
export HADOOP_MAPRED_HOME=${HADOOP_HOME}/share/hadoop/mapreduce
export HADOOP_YARN_HOME=${HADOOP_HOME}/share/hadoop/yarn
export HIVE_HOME=/usr/local/hive
export HBASE_HOME=/usr/local/hbase
export ZOOCFGDIR=/usr/local/zookeeper/conf
export ACCUMULO_HOME=/usr/local/accumulo
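sqoop-env.sh lives in /usr/local/sqoop/conf/ and is sourced each time Sqoop starts, so the exports above take effect on the next run. A quick sanity check (assuming HADOOP_HOME is already exported in the shell):
echo $HADOOP_HOME
sqoop help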
Copy the Hadoop jars
cd /usr/local/sqoop/lib
# \cp bypasses any cp -i alias, so existing jars are overwritten without prompting
\cp /usr/local/hadoop-3.3.1/share/hadoop/client/*.jar ./
\cp /usr/local/hadoop-3.3.1/share/hadoop/tools/lib/*.jar ./
\cp /usr/local/hadoop-3.3.1/share/hadoop/common/lib/*.jar ./
\cp /usr/local/hadoop-3.3.1/share/hadoop/mapreduce/*.jar ./
\cp /usr/local/hadoop-3.3.1/share/hadoop/yarn/*.jar ./
\cp /usr/local/hadoop-3.3.1/share/hadoop/yarn/lib/*.jar ./
\cp /usr/local/hadoop-3.3.1/share/hadoop/yarn/timelineservice/lib/*.jar ./
\cp /usr/local/hadoop-3.3.1/share/hadoop/common/*.jar ./
Sqoop test
Show the version
[hadoop@master ~]$ sqoop version
Warning: /usr/local/sqoop/../hbase does not exist! HBase imports will fail.
Please set $HBASE_HOME to the root of your HBase installation.
Warning: /usr/local/sqoop/../hcatalog does not exist! HCatalog jobs will fail.
Please set $HCAT_HOME to the root of your HCatalog installation.
Warning: /usr/local/sqoop/../accumulo does not exist! Accumulo imports will fail.
Please set $ACCUMULO_HOME to the root of your Accumulo installation.
Warning: /usr/local/sqoop/../zookeeper does not exist! Accumulo imports will fail.
Please set $ZOOKEEPER_HOME to the root of your Zookeeper installation.
2022-08-25 08:10:40,911 INFO sqoop.Sqoop: Running Sqoop version: 1.4.7
Sqoop 1.4.7
git commit id 2328971411f57f0cb683dfb79d19d4d19d185dd8
Compiled by maugli on Thu Dec 21 15:59:58 STD 2017
[hadoop@master ~]$
Sqoop usage
Preparation before testing
In the business database test, add the tables for the classroom exercise (test)
CREATE TABLE test.`test` (
`id` int(10) NOT NULL AUTO_INCREMENT,
`name` varchar(20) COLLATE utf8_bin NOT NULL,
`code` varchar(200) COLLATE utf8_bin NOT NULL,
PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=482 DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
insert into test.`test` (id,name,code) values (100,'limm','100');
CREATE TABLE test.`test_hdfs` (
`id` int(10) NOT NULL AUTO_INCREMENT,
`name` varchar(20) COLLATE utf8_bin NOT NULL,
`code` varchar(200) COLLATE utf8_bin NOT NULL,
PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=482 DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
CREATE TABLE test.`test_hive` (
`id` int(10) NOT NULL AUTO_INCREMENT,
`name` varchar(20) COLLATE utf8_bin NOT NULL,
`code` varchar(200) COLLATE utf8_bin NOT NULL,
PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
In the business database test, add the tables for the reinforcement exercise (class)
# test.`class` comes from the class table collected by the students in the web-scraping exercise; it is stored in the test database at home.hddly.cn:53306
# The tables below receive the data written back
CREATE TABLE test.`class_hdfs` (
`id` int(10) NOT NULL AUTO_INCREMENT,
`name` varchar(20) COLLATE utf8_bin NOT NULL,
`text` varchar(200) COLLATE utf8_bin NOT NULL,
`stud` varchar(100) COLLATE utf8_bin NOT NULL,
PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=482 DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
CREATE TABLE test.`class_hive` (
`id` int(10) NOT NULL AUTO_INCREMENT,
`name` varchar(20) COLLATE utf8_bin NOT NULL,
`text` varchar(200) COLLATE utf8_bin NOT NULL,
`stud` varchar(100) COLLATE utf8_bin NOT NULL,
PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=482 DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
HDFS import/export
Classroom exercise
Import the test table from MySQL into the test directory on HDFS (a sketch follows)
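No command is recorded for this step; a minimal sketch, mirroring the lab script later in this section but pointed at the classroom database (host, port, and target path are assumptions to adapt):
sqoop import --connect "jdbc:mysql://home.hddly.cn:53306/test?useSSL=false&useUnicode=true&characterEncoding=utf-8" --username test --password test --table test --delete-target-dir --target-dir /user/myname/test --fields-terminated-by '\t' -m 1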
Add test data under the test directory on HDFS
Export the test directory on HDFS to the MySQL test_hdfs table (a sketch follows)
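A minimal sketch for this step, again mirroring the lab script below with the classroom host (an assumption to adapt):
sqoop export --connect "jdbc:mysql://home.hddly.cn:53306/test?useSSL=false&useUnicode=true&characterEncoding=utf-8" --username test --password test --table test_hdfs --export-dir /user/myname/test --input-fields-terminated-by '\t' --m 1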
Import the test table from MySQL into the test_hive table of the myname database in Hive
sqoop import --connect "jdbc:mysql://home.hddly.cn:53306/test?useSSL=false&useUnicode=true&characterEncoding=utf-8" --username test --password test --table test --delete-target-dir --target-dir /user/root/sqoop_hive --hive-database myname --fields-terminated-by '\t' --lines-terminated-by '\n' --hive-import --hive-overwrite --hive-table test_hive --hive-drop-import-delims --m 1
Add test data to the test_hive table
Export test_hive from Hive to the MySQL test_hive table (a sketch follows)
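No classroom command is recorded here; a minimal sketch following the lab script below, pointed at the classroom database (host and warehouse path are assumptions; the export-dir must match the Hive table's actual directory, which the lab script shows as test_hive_1):
sqoop export --connect "jdbc:mysql://home.hddly.cn:53306/test?useSSL=false&useUnicode=true&characterEncoding=utf-8" --username test --password test --table test_hive --fields-terminated-by '\001' --lines-terminated-by '\n' --export-dir /user/hive/warehouse/myname.db/test_hive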
Scripts used in the lab
sqoop list-tables \
--connect jdbc:mysql://10.255.10.51:3306/test?useSSL=false \
--username test \
--password test
sqoop import --connect "jdbc:mysql://10.255.10.51:3306/test?useSSL=false&useUnicode=true&characterEncoding=utf-8" --username test --password test --table test --delete-target-dir --target-dir /user/myname/test --fields-terminated-by '\t' -m 1
sqoop export --connect "jdbc:mysql://10.255.10.51:3306/test?useSSL=false&useUnicode=true&characterEncoding=utf-8" --username test --password test --table test_hdfs --export-dir /user/myname/test --input-fields-terminated-by '\t' --m 1
sqoop import --connect "jdbc:mysql://10.255.10.51:3306/test?useSSL=false&useUnicode=true&characterEncoding=utf-8" --username test --password test --table test --delete-target-dir --target-dir /user/root/sqoop_hive --hive-database myname --fields-terminated-by '\t' --lines-terminated-by '\n' --hive-import --hive-overwrite --hive-table test_hive --hive-drop-import-delims --m 1
sqoop export --connect "jdbc:mysql://10.255.10.51:3306/test?useSSL=false&useUnicode=true&characterEncoding=utf-8" --username test --password test --table test_hive --fields-terminated-by '\001' --lines-terminated-by '\n' --export-dir /user/hive/warehouse/myname.db/test_hive_1
Reinforcement exercise
Export class to MySQL
Import the class table from MySQL into the class_hive table of the myname database in Hive
sqoop import --connect "jdbc:mysql://home.hddly.cn:53306/test?useSSL=false&useUnicode=true&characterEncoding=utf-8" --username test --password test --table class --where "stud='张三'" --delete-target-dir --target-dir /user/myname/sqoop_hive/class --hive-database myname --hive-import --hive-overwrite --hive-table class_hive --hive-drop-import-delims --m 1
Export the class_hive table from Hive to the MySQL class_hive table (a sketch follows)
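No command is given for this step; a sketch following the test_hive export pattern above (the warehouse path is an assumption to adjust to the actual Hive table directory):
sqoop export --connect "jdbc:mysql://home.hddly.cn:53306/test?useSSL=false&useUnicode=true&characterEncoding=utf-8" --username test --password test --table class_hive --fields-terminated-by '\001' --lines-terminated-by '\n' --export-dir /user/hive/warehouse/myname.db/class_hive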
Other references
Data transfer between Hive and MongoDB
HDFS import/export
Hive import/export
Import syntax reference
# Note: the trailing # annotations are explanatory only; strip them before running, since text after a line-continuation backslash breaks the command
sqoop import \
--connect "jdbc:mysql://home.hddly.cn:53306/test?useSSL=false&useUnicode=true&characterEncoding=utf-8" \
--username test \
--password test \
--table test \ # table to import
--delete-target-dir \ # delete the staging directory if it already exists
--target-dir /user/hadoop/sqoop_hive \ # staging directory location
--hive-database sqoop_test \ # import into the Hive database sqoop_test, which must be created beforehand; defaults to the default database if not specified
--hive-import \ # import into Hive
--hive-overwrite \ # overwrite the Hive table if it has data: existing rows are cleared before the new ones are written
--m 1 # parallelism
--connect "jdbc:mysql://home.hddly.cn:53306/test?useSSL=false&useUnicode=true&characterEncoding=utf-8" \
--username test \
--password test \
--table test \ # 待导入的表
--delete-target-dir \ # 如果临时目录存在删除
--target-dir /user/hadoop/sqoop_hive \ # 临时目录位置
--hive-database sqoop_test \ # 导入到Hive的sqoop_test数据库,数据库需要预先创建。不指定则默认为default库
--hive-import \ # 导入到Hive
--hive-overwrite \ # 如果Hive表中有数据则覆盖,这会清除表中原有的数据,然后再写入
--m 1 # 并行度
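The heading above covers export as well; a minimal export sketch in the same annotated style (a hedged example, not from the original: table name and directory are placeholders, and the same note about stripping the trailing comments applies):
sqoop export \
--connect "jdbc:mysql://home.hddly.cn:53306/test?useSSL=false&useUnicode=true&characterEncoding=utf-8" \
--username test \
--password test \
--table test_hdfs \ # target MySQL table, created beforehand
--export-dir /user/hadoop/sqoop_hive \ # HDFS directory whose files are exported
--input-fields-terminated-by '\t' \ # field delimiter the data was written with
--m 1 # parallelism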
Data transfer between Hive and Oracle
Connection string
sqoop import --connect jdbc:oracle:thin:@oracle-host:port:orcl --username name --password passwd --hive-import --table tablename
Specify delimiters
sqoop import --connect jdbc:oracle:thin:@oracle-host:port:orcl --username name --password passwd --hive-import --table tablename --fields-terminated-by '\n' --lines-terminated-by ','
--split-by id // split on the id column (an int; say 500 values in total): the min/max range is divided into slices, e.g. where id >= min(id) and id <= 500/2 together with id > 500/2 and id <= max(id), and imported in two passes; use it together with -m, the number of map tasks
-m 2
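Putting the options together, a hedged sketch of a parallel Oracle-to-Hive import (host, port, SID, credentials, and table name are placeholders as above):
sqoop import --connect jdbc:oracle:thin:@oracle-host:port:orcl --username name --password passwd --table tablename --hive-import --split-by id -m 2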
Bulk-copy configs to the slave nodes with scp
scp -r /usr/local/hadoop-3.3.1/etc/hadoop slave1:/usr/local/hadoop-3.3.1/etc/
scp -r /usr/local/hadoop-3.3.1/etc/hadoop slave2:/usr/local/hadoop-3.3.1/etc/
scp -r /usr/local/spark/conf slave1:/usr/local/spark/
scp -r /usr/local/spark/conf slave2:/usr/local/spark/
scp -r /usr/local/hbase/conf slave1:/usr/local/hbase/
scp -r /usr/local/hbase/conf slave2:/usr/local/hbase/
scp -r /usr/local/hive/conf slave1:/usr/local/hive/
scp -r /usr/local/hive/conf slave2:/usr/local/hive/
scp -r /usr/local/zookeeper/conf slave1:/usr/local/zookeeper/
scp -r /usr/local/zookeeper/conf slave2:/usr/local/zookeeper/
Common problems
Fix for an error when running Sqoop:
java.lang.NoClassDefFoundError
Error message
Exception in thread "main" java.lang.NoClassDefFoundError: org/apache/commons/lang/StringUtils
at org.apache.sqoop.tool.BaseSqoopTool.validateHiveOptions(BaseSqoopTool.java:1583)
at org.apache.sqoop.tool.ImportTool.validateOptions(ImportTool.java:1178)
at org.apache.sqoop.Sqoop.run(Sqoop.java:137)
at org.apache.hadoop.util.ToolRunner.run(ToolRunner.java:76)
at org.apache.sqoop.Sqoop.runSqoop(Sqoop.java:183)
at org.apache.sqoop.Sqoop.runTool(Sqoop.java:234)
at org.apache.sqoop.Sqoop.runTool(Sqoop.java:243)
at org.apache.sqoop.Sqoop.main(Sqoop.java:252)
Caused by: java.lang.ClassNotFoundException: org.apache.commons.lang.StringUtils
at java.net.URLClassLoader.findClass(URLClassLoader.java:387)
at java.lang.ClassLoader.loadClass(ClassLoader.java:418)
at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:352)
at java.lang.ClassLoader.loadClass(ClassLoader.java:351)
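The missing class org.apache.commons.lang.StringUtils belongs to commons-lang 2.x, which Sqoop 1.4.7 still uses but Hadoop 3 no longer ships (it carries commons-lang3 instead). A commonly cited fix is to download commons-lang-2.6.jar and drop it into Sqoop's lib directory (the jar's download location below is an assumption):
cp commons-lang-2.6.jar /usr/local/sqoop/lib/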
Sqoop reports an error from HDFS to MySQL
It reports that the table does not exist
Fix: create the table in MySQL
CREATE TABLE test.`help_keyword_from_hdfs` (
`id` int(10) NOT NULL AUTO_INCREMENT,
`name` varchar(20) COLLATE utf8_bin NOT NULL,
`text` varchar(200) COLLATE utf8_bin NOT NULL,
`stud` varchar(100) COLLATE utf8_bin NOT NULL,
PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=482 DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
The run throws an exception
Exception message
2022-10-08 21:25:32,494 ERROR mapreduce.ExportJobBase: Export job failed!
2022-10-08 21:25:32,497 ERROR tool.ExportTool: Error during export:
Export job failed!
at org.apache.sqoop.mapreduce.ExportJobBase.runExport(ExportJobBase.java:445)
at org.apache.sqoop.manager.SqlManager.exportTable(SqlManager.java:931)
at org.apache.sqoop.tool.ExportTool.exportTable(ExportTool.java:80)
at org.apache.sqoop.tool.ExportTool.run(ExportTool.java:99)
at org.apache.sqoop.Sqoop.run(Sqoop.java:147)
at org.apache.hadoop.util.ToolRunner.run(ToolRunner.java:76)
at org.apache.sqoop.Sqoop.runSqoop(Sqoop.java:183)
at org.apache.sqoop.Sqoop.runTool(Sqoop.java:234)
at org.apache.sqoop.Sqoop.runTool(Sqoop.java:243)
at org.apache.sqoop.Sqoop.main(Sqoop.java:252)
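"Export job failed!" is generic; the underlying cause (bad delimiters, type mismatches, a missing column) is in the logs of the failed map task. Assuming YARN log aggregation is enabled, they can be pulled with:
yarn logs -applicationId <application_id>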
MySQL error when running Sqoop from MySQL to HDFS
YARN error when running Sqoop from MySQL to HDFS
Error message
NoClassDefFoundError: org/apache/hadoop/mapreduce/InputFormat
...
ClassNotFoundException: org.apache.hadoop.mapreduce.InputFormat
Fix
[hadoop@master conf]$ find /usr/local/hadoop-3.3.1/ -name hadoop-common*.jar
/usr/local/hadoop-3.3.1/share/hadoop/common/sources/hadoop-common-3.3.1-sources.jar
/usr/local/hadoop-3.3.1/share/hadoop/common/sources/hadoop-common-3.3.1-test-sources.jar
/usr/local/hadoop-3.3.1/share/hadoop/common/hadoop-common-3.3.1.jar
/usr/local/hadoop-3.3.1/share/hadoop/common/hadoop-common-3.3.1-tests.jar
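The find output locates the candidate jars; the implied follow-up (done wholesale in a later step) is to copy the main, non-test jar into Sqoop's lib. Since org.apache.hadoop.mapreduce.InputFormat itself lives in the MapReduce client jars, copying those as well is a reasonable bet:
cp /usr/local/hadoop-3.3.1/share/hadoop/common/hadoop-common-3.3.1.jar /usr/local/sqoop/lib/
cp /usr/local/hadoop-3.3.1/share/hadoop/mapreduce/*.jar /usr/local/sqoop/lib/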
Error message
UnsupportedFileSystemException: No FileSystem for scheme "hdfs"
java.lang.RuntimeException: org.apache.hadoop.fs.UnsupportedFileSystemException: No FileSystem for scheme "hdfs"
...
Caused by: org.apache.hadoop.fs.UnsupportedFileSystemException: No FileSystem for scheme "hdfs"
Fix
cp /usr/local/hadoop-3.3.1/share/hadoop/common/lib/guava-27.0-jre.jar /usr/local/sqoop/lib/
cp /usr/local/hadoop-3.3.1/share/hadoop/common/*.jar /usr/local/sqoop/lib/
cp /usr/local/hadoop-3.3.1/share/hadoop/mapreduce/*.jar /usr/local/sqoop/lib/
cp /usr/local/hadoop-3.3.1/share/hadoop/hdfs/*.jar /usr/local/sqoop/lib/
cp /usr/local/hadoop-3.3.1/share/hadoop/yarn/*.jar /usr/local/sqoop/lib/
Error message
Fix
[hadoop@master conf]$ sudo find / -name *slf4j*.jar
/usr/local/hadoop-3.3.1/share/hadoop/common/lib/slf4j-api-1.7.30.jar
/usr/local/hadoop-3.3.1/share/hadoop/common/lib/slf4j-log4j12-1.7.30.jar
/usr/local/hadoop-3.3.1/share/hadoop/common/lib/jul-to-slf4j-1.7.30.jar
/usr/local/hive/hcatalog/share/webhcat/svr/lib/jul-to-slf4j-1.7.10.jar
/usr/local/spark_bak/jars/jcl-over-slf4j-1.7.30.jar
/usr/local/spark_bak/jars/jul-to-slf4j-1.7.30.jar
/usr/local/spark_bak/jars/slf4j-api-1.7.30.jar
/usr/local/spark/jars/jcl-over-slf4j-1.7.30.jar
/usr/local/spark/jars/jul-to-slf4j-1.7.30.jar
/usr/local/spark/jars/slf4j-api-1.7.30.jar
/usr/local/sqoop/lib/slf4j-api-1.6.1.jar
[hadoop@master conf]$ rm -f /usr/local/sqoop/lib/slf4j*.jar
[hadoop@master conf]$ cp /usr/local/hadoop-3.3.1/share/hadoop/common/lib/slf4j-log4j12-1.7.30.jar /usr/local/sqoop/lib/
cp /usr/local/hadoop-3.3.1/share/hadoop/common/lib/slf4j-api-1.7.30.jar /usr/local/sqoop/lib/
Error message
[2022-08-25 21:01:23.189]Container exited with a non-zero exit code 1. Error file: prelaunch.err.
Last 4096 bytes of prelaunch.err :
Last 4096 bytes of stderr :
Exception: java.lang.NoClassDefFoundError thrown from the UncaughtExceptionHandler in thread "main"
mapreduce.JobSubmissionFiles: Permissions on staging directory /tmp/hadoop-yarn/staging/hadoop/.staging are incorrect: rwxrwxrwx. Fixing permissions to correct value rwx------
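The staging-permission message is self-healing (Hadoop resets the mode itself, as the log says); if it keeps recurring, the directory can also be reset by hand, e.g.:
hdfs dfs -chmod -R 700 /tmp/hadoop-yarn/staging/hadoop/.staging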
Copy all the Hadoop jars over
[hadoop@master lib]$ cp /usr/local/hadoop-3.3.1/share/hadoop/client/*.jar ./
[hadoop@master lib]$ cp /usr/local/hadoop-3.3.1/share/hadoop/tools/lib/*.jar ./
[hadoop@master lib]$ cp /usr/local/hadoop-3.3.1/share/hadoop/common/lib/*.jar ./
[hadoop@master lib]$ cp /usr/local/hadoop-3.3.1/share/hadoop/mapreduce/*.jar ./
[hadoop@master lib]$ cp /usr/local/hadoop-3.3.1/share/hadoop/yarn/*.jar ./
[hadoop@master lib]$ cp /usr/local/hadoop-3.3.1/share/hadoop/yarn/lib/*.jar ./
[hadoop@master lib]$ cp /usr/local/hadoop-3.3.1/share/hadoop/yarn/timelineservice/lib/*.jar ./
[hadoop@master lib]$ cp /usr/local/hadoop-3.3.1/share/hadoop/common/*.jar ./
Errors when running Sqoop from MySQL to Hive
"cleaning up" error
sqoop import --connect jdbc:mysql://192.168.31.11:3306/test?useSSL=false --username test --password test --table test --delete-target-dir --target-dir /user/root/sqoop_hive --hive-database sqoop_test --hive-import --hive-overwrite --m 1
Permission exception
Sqoop reports an error importing MySQL data into Hive:
main ERROR Could not register mbeans java.security.AccessControlException: access denied ("javax.management.MBeanTrustPermission" "register")
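A commonly cited fix is to grant the permission in the JDK's security policy file (the path below assumes JDK 8, where it is $JAVA_HOME/jre/lib/security/java.policy); add:
grant {
  permission javax.management.MBeanTrustPermission "register";
};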
Sqoop connecting to a remote MySQL
Connection exception reported
Run the import script
sqoop import --connect "jdbc:mysql://192.168.31.11:3306/test?useUnicode=true&characterEncoding=utf-8&useSSL=false" --username test --password test --table class --delete-target-dir --target-dir hdfs://master:9864/user/root/class1 --fields-terminated-by '\t' -m 1
Sqoop connecting to a remote HDFS
Cannot write data to the slave DataNodes
Cannot read data from the remote HDFS
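No fix is recorded here; one possibility, when a remote client can reach the NameNode but fails reading from or writing to DataNodes (typical behind NAT or with containerized clusters), is to make the client address DataNodes by hostname and map those hostnames in its /etc/hosts, via the client-side hdfs-site.xml:
<property>
  <name>dfs.client.use.datanode.hostname</name>
  <value>true</value>
</property>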