Install the base image
docker pull debian:9
docker run -it debian:9 /bin/bash # create a container from the image and open a shell in it
docker exec -it e218edb10161 /bin/bash # re-enter the running container later, using its ID
docker commit -m="has update" -a="study" e218edb10161 study/debian
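If you plan to reach the Hadoop and Hive ports from the host later, it helps to name the container and publish those ports when you first create it. A minimal sketch; the container name hadoop-study and the port choices (9870 NameNode UI, 8088 ResourceManager UI, 10000 HiveServer2) are assumptions, adjust as needed.
docker run -it --name hadoop-study -p 9870:9870 -p 8088:8088 -p 10000:10000 debian:9 /bin/bash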
Switch to a faster apt mirror
cp /etc/apt/sources.list /etc/apt/sources.list.bak # back up first
echo " " > /etc/apt/sources.list # empty the file
echo "deb http://mirrors.aliyun.com/debian jessie main" >> /etc/apt/sources.list ;
echo "deb http://mirrors.aliyun.com/debian jessie-updates main" >> /etc/apt/sources.list ;
apt-get clean # clear the package cache
apt-get update # refresh the package index
apt-get -y install vim # -y answers the confirmation prompts automatically
apt-get -y install procps # provides ps
apt-get install -y wget
Install SSH and set up passwordless login
apt-get install ssh
apt-get install pdsh
/etc/init.d/ssh start
ssh-keygen -t rsa -P '' -f ~/.ssh/id_rsa
cat ~/.ssh/id_rsa.pub >> ~/.ssh/authorized_keys
chmod 0600 ~/.ssh/authorized_keys
# test the passwordless login
ssh localhost
Install and configure Java
docker cp ~/Downloads/jdk-8u261-linux-x64.tar.gz ba83e1829124:/home
apt remove openjdk* # remove any preinstalled OpenJDK
tar -zxvf jdk-8u261-linux-x64.tar.gz
mv jdk1.8.0_261/ /usr/local/java
vim /etc/profile
Add the following environment variables at the end of the file:
export JAVA_HOME=/usr/local/java
export JRE_HOME=$JAVA_HOME/jre
export CLASSPATH=.:$JAVA_HOME/lib:$JRE_HOME/lib:$CLASSPATH
export PATH=$JAVA_HOME/bin:$JRE_HOME/bin:$PATH
Save and exit, then apply the changes:
source /etc/profile
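A quick sanity check that the new variables are in effect:
echo $JAVA_HOME # should print /usr/local/java
java -version # should report 1.8.0_261
which java # should resolve to /usr/local/java/bin/java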
Install Hadoop
wget https://mirror.bit.edu.cn/apache/hadoop/common/hadoop-3.2.1/hadoop-3.2.1.tar.gz
tar -zxf hadoop-3.2.1.tar.gz
mv hadoop-3.2.1 /usr/local/hadoop
vim /usr/local/hadoop/etc/hadoop/hadoop-env.sh
# Add the Java path at the spot shown below:
# The java implementation to use. By default, this environment
# variable is REQUIRED on ALL platforms except OS X!
export JAVA_HOME=/usr/local/java
Configure it following the official single-node guide: https://hadoop.apache.org/docs/stable/hadoop-project-dist/hadoop-common/SingleCluster.html
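For reference, the pseudo-distributed setup in that guide comes down to two small XML edits plus formatting and starting HDFS. The sketch below summarizes those steps as given in the guide; it is not a substitute for reading it.
vim /usr/local/hadoop/etc/hadoop/core-site.xml
<configuration>
<property>
<name>fs.defaultFS</name>
<value>hdfs://localhost:9000</value>
</property>
</configuration>
vim /usr/local/hadoop/etc/hadoop/hdfs-site.xml
<configuration>
<property>
<name>dfs.replication</name>
<value>1</value>
</property>
</configuration>
# then, from /usr/local/hadoop, format the namenode and start HDFS/YARN
bin/hdfs namenode -format
sbin/start-dfs.sh
sbin/start-yarn.sh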
Troubleshooting
Attempting to operate on hdfs namenode as root
# Add the following variables at the top of both start-dfs.sh and stop-dfs.sh:
#!/usr/bin/env bash
HDFS_DATANODE_USER=root
HADOOP_SECURE_DN_USER=hdfs
HDFS_NAMENODE_USER=root
HDFS_SECONDARYNAMENODE_USER=root
# start-yarn.sh and stop-yarn.sh also need the following at the top:
#!/usr/bin/env bash
YARN_RESOURCEMANAGER_USER=root
HADOOP_SECURE_DN_USER=yarn
YARN_NODEMANAGER_USER=root
pdsh localhost Connection refused
pdsh@xxx: localhost: connect: Connection refused
# Add this to /etc/profile:
export PDSH_RCMD_TYPE=ssh
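After re-sourcing /etc/profile, a quick check that pdsh now goes over ssh:
source /etc/profile
pdsh -w localhost hostname # should print the hostname instead of refusing the connection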
Install MariaDB
apt-get install -y mysql-server # server (Debian's mysql-server package installs MariaDB)
apt-get install -y mysql-client # client
service mysql start
Download the JDBC driver from https://downloads.mariadb.org/connector-java/
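On Debian the MariaDB root account typically authenticates via the unix socket, so the JDBC login configured in hive-site.xml below needs a database user with a password. A sketch that creates a dedicated metastore database and user; the hive/password credentials are assumptions, and if you use them instead of root, set ConnectionUserName and ConnectionPassword in hive-site.xml to match.
mysql -u root -e "CREATE DATABASE IF NOT EXISTS metastore;"
mysql -u root -e "CREATE USER 'hive'@'localhost' IDENTIFIED BY 'password';"
mysql -u root -e "GRANT ALL PRIVILEGES ON metastore.* TO 'hive'@'localhost'; FLUSH PRIVILEGES;"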
Install Hive
tar -zxf apache-hive-3.1.2-bin.tar.gz
mv apache-hive-3.1.2-bin /usr/local/hive
docker cp Downloads/mariadb-java-client-2.6.2.jar 41602cd4c109:/usr/local/hive/lib
Edit the configuration files
vim /etc/profile
export HIVE_HOME=/usr/local/hive
export HIVE_CONF_DIR=${HIVE_HOME}/conf
source /etc/profile
# Before editing the configuration files, create a few local directories under /opt (any directory will do).
mkdir /opt/hive
mkdir /opt/hive/warehouse
# Create the same directories on HDFS and grant read/write permissions:
hadoop fs -mkdir -p /opt
hadoop fs -mkdir -p /opt/hive/
hadoop fs -mkdir -p /opt/hive/warehouse
hadoop fs -chmod 777 /opt/hive/
hadoop fs -chmod 777 /opt/hive/warehouse
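A quick check that the HDFS directories exist with the expected permissions:
hadoop fs -ls -R /opt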
# Configuration
cp hive-env.sh.template hive-env.sh
# Add to hive-env.sh:
export JAVA_HOME=/usr/local/java
export HADOOP_HOME=/usr/local/hadoop
export HIVE_HOME=/usr/local/hive
source hive-env.sh
Edit hive-site.xml
cp hive-default.xml.template hive-site.xml
vim hive-site.xml
<!-- 指定HDFS中的hive倉庫地址 -->
<property>
<name>hive.metastore.warehouse.dir</name>
<value>/root/hive/warehouse</value>
</property>
<property>
<name>hive.exec.scratchdir</name>
<value>/root/hive</value>
</property>
<!-- 該屬性為空表示嵌入模式或本地模式骆膝,否則為遠程模式 -->
<property>
<name>hive.metastore.uris</name>
<value></value>
</property>
<!-- 指定mysql的連接 -->
<!-- localhost是mysql所在機器的IP這里是本地所以localhost-->
<property>
<name>javax.jdo.option.ConnectionURL</name>
<value>jdbc:mysql://localhost:3306/metastore?createDatabaseIfNotExist=true&useSSL=false</value>
<description>
JDBC connect string for a JDBC metastore.
To use SSL to encrypt/authenticate the connection, provide database-specific SSL flag in the connection URL.
For example, jdbc:postgresql://myhost/db?ssl=true for postgres database.
</description>
</property>
<!-- 指定驅(qū)動類 -->
<property>
<name>javax.jdo.option.ConnectionDriverName</name>
<value>com.mysql.jdbc.Driver</value>
</property>
<!-- 指定用戶名 -->
<property>
<name>javax.jdo.option.ConnectionUserName</name>
<value>root</value>
</property>
<!-- 指定密碼 -->
<property>
<name>javax.jdo.option.ConnectionPassword</name>
<value>password</value>
</property>
<property>
<name>hive.metastore.schema.verification</name>
<value>false</value>
<description>
</description>
</property>
Then, throughout the configuration file, replace every occurrence of
${system:java.io.tmpdir} with /opt/tmp/hive (create the directory if it does not exist and give it 777 permissions), and replace every
${system:user.name} with root; a sed sketch for this follows below.
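Doing those two bulk replacements by hand in a file this size is error-prone; a sed sketch, assuming hive-site.xml lives in /usr/local/hive/conf:
mkdir -p /opt/tmp/hive && chmod 777 /opt/tmp/hive
sed -i 's#${system:java.io.tmpdir}#/opt/tmp/hive#g; s#${system:user.name}#root#g' /usr/local/hive/conf/hive-site.xml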
Initialize the metastore database first
# make sure MariaDB is running before initializing
./schematool -initSchema -dbType mysql
# then enter the Hive CLI:
./hive
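A minimal smoke test, run from the same bin directory; demo_t is just an example table name:
./hive -e "show databases;"
./hive -e "create table demo_t (id int); show tables; drop table demo_t;"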
Troubleshooting
1. java.lang.NoSuchMethodError: com.google.common.base.Preconditions.checkArgument
Check the guava.jar version under share/hadoop/common/lib in the Hadoop install directory and the one under lib in the Hive install directory.
If the two differ, delete the lower version and copy over the higher one (see the sketch after this list); that resolves the error.
2. In hive-site.xml, the character at line 3186, column 96 is invalid; delete the special character.
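For the guava mismatch in item 1, a fix sketch; the exact jar versions are assumptions based on what Hive 3.1.2 (guava 19) and Hadoop 3.2.1 (guava 27) usually ship, so check the actual filenames first:
ls /usr/local/hadoop/share/hadoop/common/lib/guava-*.jar
ls /usr/local/hive/lib/guava-*.jar
rm /usr/local/hive/lib/guava-19.0.jar # assuming Hive's copy is the older one
cp /usr/local/hadoop/share/hadoop/common/lib/guava-27.0-jre.jar /usr/local/hive/lib/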
Start Hive
# With a local metastore, start Hive directly with the hive command
./hive # equivalent to ./hive --service cli
# Start the metastore service; the default port is 9083, or set one with -p
./hive --service metastore -p 9083
# Start hiveserver2
./hive --service hiveserver2
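Once hiveserver2 is listening (default port 10000), you can connect with the bundled beeline client; -n root matches the root user used throughout these notes:
./beeline -u jdbc:hive2://localhost:10000 -n root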