http://www.easyhadoop.com
# Hadoop automated installation script
#!/bin/sh
if [ $# -lt 3 ]; then
echo "Usage: $0 map.tasks.maximum reduce.tasks.maximum memory child.java.opts"
else
map=$1
reduce=$2
mem=$3
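# Install the compiler toolchain and file-transfer tools, then create the hadoop user/group and data directories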
yum -y install lrzsz gcc gcc-c++ libstdc++-devel
/usr/sbin/groupadd hadoop
/usr/sbin/useradd hadoop -g hadoop
mkdir -p /opt/modules/hadoop/
mkdir -p /opt/data/hadoop1/
chown hadoop:hadoop /opt/data/hadoop1/
echo "-------------config hosts----------------"
wget http://www.easyhadoop.com/hadoop/hosts
cat hosts >> /etc/hosts
echo "----------------env init finish and prepare su hadoop---------------"
HADOOP=/home/hadoop
cd $HADOOP
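# Generate a passwordless RSA key pair for the hadoop user and authorize it for SSH logins to localhost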
mkdir .ssh
ssh-keygen -q -t rsa -N "" -f $HADOOP/.ssh/id_rsa
cd $HADOOP/.ssh/ && cat id_rsa.pub > $HADOOP/.ssh/authorized_keys
chmod go-rwx $HADOOP/.ssh/authorized_keys
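# Download Hadoop 0.20.203.0, Hive, Pig, the Sun JDK and the LZO/lzop packages from easyhadoop.com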
wget http://www.easyhadoop.com/hadoop/hadoop-0.20.203.0.tar.gz
wget http://www.easyhadoop.com/hadoop/hadoop-gpl-packaging-0.2.8-1.x86_64.rpm
wget http://www.easyhadoop.com/hadoop/jdk-6u21-linux-amd64.rpm
wget http://www.easyhadoop.com/hadoop/lrzsz-0.12.20-19.x86_64.rpm
wget http://www.easyhadoop.com/hadoop/lzo-2.04-1.el5.rf.x86_64.rpm
wget http://www.easyhadoop.com/hadoop/lzo-2.06.tar.gz
wget http://www.easyhadoop.com/hadoop/lzop-1.03.tar.gz
wget http://www.easyhadoop.com/hadoop/hive-0.7.1.tar.gz
wget http://www.easyhadoop.com/hadoop/pig.tar.gz
mkdir $HADOOP/hadoop
mv *.tar.gz $HADOOP/hadoop
mv *.rpm $HADOOP/hadoop
cd $HADOOP/hadoop
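# Install the JDK, lrzsz, LZO and hadoop-gpl-packaging RPMs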
rpm -ivh jdk-6u21-linux-amd64.rpm
rpm -ivh lrzsz-0.12.20-19.x86_64.rpm
rpm -ivh lzo-2.04-1.el5.rf.x86_64.rpm
rpm -ivh hadoop-gpl-packaging-0.2.8-1.x86_64.rpm
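# Build LZO 2.06 and lzop 1.03 from source and make the shared library visible in /usr/lib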
tar xzvf lzo-2.06.tar.gz
cd lzo-2.06 && ./configure --enable-shared && make && make install
cp /usr/local/lib/liblzo2.* /usr/lib/
cd ..
tar xzvf lzop-1.03.tar.gz
cd lzop-1.03
./configure && make && make install && cd ..
chown -R hadoop:hadoop /opt/modules/hadoop/
cp hadoop-0.20.203.0.tar.gz /opt/modules/hadoop/
cd /opt/modules/hadoop/ && tar -xzvf hadoop-0.20.203.0.tar.gz
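# Patch mapred-site.xml with the requested map/reduce slot counts and child JVM heap size.
# The sed patterns are anchored at the start of line and expect the bundled config to already
# contain the defaults 6, 2 and -Xmx1536M (presumably preconfigured in the downloaded tarball).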
sed -i "s/^<value>6<\/value>/<value>${map}<\/value>/g" /opt/modules/hadoop/hadoop-0.20.203.0/conf/mapred-site.xml
sed -i "s/^<value>2<\/value>/<value>${reduce}<\/value>/g" /opt/modules/hadoop/hadoop-0.20.203.0/conf/mapred-site.xml
sed -i "s/^<value>-Xmx1536M<\/value>/<value>-Xmx${mem}M<\/value>/g" /opt/modules/hadoop/hadoop-0.20.203.0/conf/mapred-site.xml
mkdir -p /opt/modules/hive/
cp hive-0.7.1.tar.gz /opt/modules/hive/
cd /opt/modules/hive/ && tar -xzvf hive-0.7.1.tar.gz
chown -R hadoop:hadoop /opt/modules/hive/
chown -R hadoop:hadoop /opt/modules/hadoop/
chown -R hadoop:hadoop /home/hadoop/
mkdir -p /opt/modules/pig/
cp pig.tar.gz /opt/modules/pig/
cd /opt/modules/pig/ && tar -xzvf pig.tar.gz
chown -R hadoop:hadoop /opt/modules/pig/
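# Optional: format HDFS and start the daemons as the hadoop user (left commented out)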
#sudo -u hadoop /opt/modules/hadoop/hadoop-0.20.203.0/bin/hadoop namenode -format
#sudo -u hadoop /opt/modules/hadoop/hadoop-0.20.203.0/bin/hadoop-daemon.sh start namenode
#sudo -u hadoop /opt/modules/hadoop/hadoop-0.20.203.0/bin/hadoop-daemon.sh start jobtracker
#sudo -u hadoop /opt/modules/hadoop/hadoop-0.20.203.0/bin/hadoop-daemon.sh start datanode
#sudo -u hadoop /opt/modules/hadoop/hadoop-0.20.203.0/bin/hadoop-daemon.sh start tasktracker
fi
curl -# http://www.easyhadoop.com/setup.html?type=setup
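For example, assuming the script above is saved as hadoop_install.sh (the filename is illustrative) and run as root, a node with 8 map slots, 2 reduce slots and a 1024 MB child JVM heap would be set up with:
sh hadoop_install.sh 8 2 1024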