設置ssh免密碼登錄
# Set up passwordless SSH to localhost (required by the Hadoop start scripts).
# If ~/.ssh/id_rsa does not exist yet, generate it first:
#   ssh-keygen -t rsa -P '' -f ~/.ssh/id_rsa
cd ~/.ssh
# Append rather than cp: cp would overwrite any keys already authorized.
cat id_rsa.pub >> authorized_keys
chmod 600 authorized_keys
# Test that no password is required
ssh localhost
使用homebrew安裝hadoop
brew install hadoop
# 安裝成功標識
🍺 /usr/local/Cellar/hadoop/3.3.1: 22,487 files, 1GB
修改配置文件
core-site.xml
vi /usr/local/Cellar/hadoop/3.3.1/libexec/etc/hadoop/core-site.xml
# Add the properties below inside the <configuration> tag
<!-- Base directory for Hadoop's temporary/working files -->
<property>
<name>hadoop.tmp.dir</name>
<value>file:/usr/local/Cellar/hadoop/3.3.1/libexec/tmp</value>
</property>
<!-- Default filesystem URI: the HDFS NameNode RPC endpoint -->
<property>
<name>fs.defaultFS</name>
<value>hdfs://localhost:8020</value>
</property>
hdfs-site.xml
vi /usr/local/Cellar/hadoop/3.3.1/libexec/etc/hadoop/hdfs-site.xml
# Add the properties below inside the <configuration> tag
<!-- Single-node setup: keep only one replica of each block -->
<property>
<name>dfs.replication</name>
<value>1</value>
</property>
<!-- Where the NameNode stores its metadata -->
<property>
<name>dfs.namenode.name.dir</name>
<value>file:/usr/local/Cellar/hadoop/3.3.1/libexec/tmp/dfs/name</value>
</property>
<!-- Where the DataNode stores block data.
     FIX: the property is dfs.datanode.data.dir;
     "dfs.namenode.data.dir" does not exist and is silently ignored. -->
<property>
<name>dfs.datanode.data.dir</name>
<value>file:/usr/local/Cellar/hadoop/3.3.1/libexec/tmp/dfs/data</value>
</property>
<!-- "dfs.permissions" is the deprecated key; Hadoop 3 uses dfs.permissions.enabled -->
<property>
<name>dfs.permissions.enabled</name>
<value>false</value>
</property>
==下面兩個xml是YARN和MapReduce的配置==
yarn-site.xml
vi /usr/local/Cellar/hadoop/3.3.1/libexec/etc/hadoop/yarn-site.xml
# Add the properties below inside the <configuration> tag
<!-- Enable the shuffle service MapReduce needs on the NodeManager -->
<property>
<name>yarn.nodemanager.aux-services</name>
<value>mapreduce_shuffle</value>
</property>
<!-- Environment variables that containers are allowed to inherit -->
<property>
<name>yarn.nodemanager.env-whitelist</name>
<value>JAVA_HOME,HADOOP_COMMON_HOME,HADOOP_HDFS_HOME,HADOOP_CONF_DIR,CLASSPATH_PREPEND_DISTCACHE,HADOOP_YARN_HOME,HADOOP_MAPRED_HOME</value>
</property>
mapred-site.xml
vi /usr/local/Cellar/hadoop/3.3.1/libexec/etc/hadoop/mapred-site.xml
# Add the properties below inside the <configuration> tag
<!-- Run MapReduce jobs on YARN -->
<property>
<name>mapreduce.framework.name</name>
<value>yarn</value>
</property>
<!-- Required on Hadoop 3.x: without this, MR jobs fail with
     "Could not find or load main class ...MRAppMaster"
     (see the official Single Node Cluster setup guide) -->
<property>
<name>mapreduce.application.classpath</name>
<value>$HADOOP_MAPRED_HOME/share/hadoop/mapreduce/*:$HADOOP_MAPRED_HOME/share/hadoop/mapreduce/lib/*</value>
</property>
添加hadoop環境變量
# Add to your shell profile (e.g. ~/.zshrc), then `source` it.
export HADOOP_HOME=/usr/local/Cellar/hadoop/3.3.1/libexec
export HADOOP_COMMON_HOME=$HADOOP_HOME
export PATH="$PATH:$HADOOP_HOME/bin"
第一次安裝需要初始化(僅第一次需要)
# WARNING: formatting wipes existing NameNode metadata — run ONLY on first install.
cd /usr/local/Cellar/hadoop/3.3.1/bin
./hdfs namenode -format
啟動hdfs和yarn
# Start HDFS
cd /usr/local/Cellar/hadoop/3.3.1/sbin
./start-dfs.sh
# Verify it started — run `jps`; expected output looks like:
# jps
6306 SecondaryNameNode
6069 NameNode
6392 Jps
6170 DataNode
# Start YARN
cd /usr/local/Cellar/hadoop/3.3.1/sbin
./start-yarn.sh
hdfs頁面 http://localhost:9870/dfshealth.html#tab-overview
yarn頁面 http://localhost:8088/cluster
如果起不來,指定下java環境變量,參考鏈接
# Pin JAVA_HOME (and friends) inside hadoop-env.sh if the daemons fail to start.
vi /usr/local/Cellar/hadoop/3.3.1/libexec/etc/hadoop/hadoop-env.sh
export JAVA_HOME=/Library/Java/JavaVirtualMachines/adoptopenjdk-8.jdk/Contents/Home
export HADOOP_HOME=/usr/local/Cellar/hadoop/3.3.1/libexec
export HADOOP_OS_TYPE=${HADOOP_OS_TYPE:-$(uname -s)}
# macOS only: blank out the Kerberos realm/kdc/conf JVM properties to avoid
# slow/failed Kerberos lookups on Darwin.
case ${HADOOP_OS_TYPE} in
Darwin*)
export HADOOP_OPTS="${HADOOP_OPTS} -Djava.security.krb5.realm= "
export HADOOP_OPTS="${HADOOP_OPTS} -Djava.security.krb5.kdc= "
export HADOOP_OPTS="${HADOOP_OPTS} -Djava.security.krb5.conf= "
;;
esac
# NOTE(review): DEBUG loggers are very verbose — keep only while troubleshooting.
export HADOOP_ROOT_LOGGER=DEBUG,console
export HADOOP_DAEMON_ROOT_LOGGER=DEBUG,RFA
WordCount驗證
# Run the bundled WordCount example on YARN
hadoop jar /usr/local/Cellar/hadoop/3.3.1/libexec/share/hadoop/mapreduce/hadoop-mapreduce-examples-3.3.1.jar wordcount /input /output
# Last three args: "wordcount" is the example name, /input is the HDFS input
# directory, /output is the HDFS output directory. Results shown below.
注意⚠️:輸出目錄必須是一個不存在的目錄。如果指定一個已有目錄作為hadoop作業的輸出,作業將無法運行。這應該是hadoop的一種安全機制,防止hadoop重寫有用的文件。
最后,查看程序輸出結果及存放位置。在terminal上輸入 hadoop fs -ls /output,可以看到
結果就存放在part-r-00000文件中,在terminal上輸入 hadoop fs -cat /output/part-r-00000
權限問題沒跑通,看下面原文吧
參考 https://blog.csdn.net/pgs1004151212/article/details/104391391
安裝指定版本Flink
官網下載對應版本的flink並解壓 flink-1.14.0-bin-scala_2.11.tgz
復制hadoop的配置文件到flink的lib目錄
# Copy the Hadoop config files into Flink's lib directory so Flink can find them
cd /usr/local/Cellar/hadoop/3.3.1/libexec/etc/hadoop
cp hdfs-site.xml yarn-site.xml core-site.xml /usr/local/develop/flink-1.14.0/lib
添加hadoop和flink的環境變量
# Add to your shell profile (e.g. ~/.zshrc), then `source` it.
export PATH="/usr/local/develop/flink-1.14.0/bin:$PATH"
export HADOOP_HOME=/usr/local/Cellar/hadoop/3.3.1/libexec
export HADOOP_COMMON_HOME=$HADOOP_HOME
export PATH="$PATH:$HADOOP_HOME/bin"
# Use `hadoop classpath` — the approach Flink's YARN docs recommend. It also
# includes the Hadoop conf dirs, unlike globbing every jar under $HADOOP_HOME
# with an unquoted find/xargs pipeline.
export HADOOP_CLASSPATH=$(hadoop classpath)
執(zhí)行WordCount命令
# Submit the example WordCount job to YARN (per-job / yarn-cluster mode)
flink run -m yarn-cluster /usr/local/develop/flink-1.14.0/examples/batch/WordCount.jar
安裝Anaconda
brew search anaconda
# The cask without a version suffix is the latest release
brew install --cask anaconda
# Put anaconda on PATH
echo 'export PATH="/usr/local/anaconda3/bin:$PATH"' >> ~/.zshrc
# Check the installed version — example output follows
conda --version
conda 4.10.1
環境管理
http://www.reibang.com/p/ce99bf9d9008
# List existing environments
conda env list
# Create an environment named "learn"
conda create -n learn
# Activate / deactivate it
conda activate learn
conda deactivate
安裝TensorFlow
# Upgrade pip first, then install TensorFlow into the active environment
pip install --upgrade pip
pip install tensorflow