Ambari卸载shell

阅读更多
#!/bin/bash
# Program:
#    Uninstall Ambari and all HDP components from every host of the cluster.
# History:
#    2014/01/13    -    Ivan    -    2862099249@qq.com    -    First release
PATH=/bin:/sbin:/usr/bin:/usr/sbin:/usr/local/bin:/usr/local/sbin:~/bin
export PATH

# Collect every cluster hostname from /etc/hosts, skipping the first two
# lines (the localhost entries).  awk splits on any run of whitespace, so
# the IP and hostname no longer have to be separated by exactly one space
# (the old `cut -d ' '` parsing imposed that restriction).
hostList=$(tail -n +3 /etc/hosts | awk '{print $2}')
yumReposDir=/etc/yum.repos.d/
alterNativesDir=/etc/alternatives/
pingCount=5
logPre=TDP

# Ask which node runs ambari-server; default to "master" on empty input.
read -r -p "Please input your master hostname: " master
master=${master:-"master"}
ssh "$master" "ambari-server stop"
# Reset (wipe) the ambari database on the master node.
ssh "$master" "ambari-server reset"

# --- data tables (hoisted out of the per-host loop; defined once) ----------

# HDP-related packages, removed one yum transaction each so that a failure
# on one package name does not block the removal of the others.
# 'compat-readline5*' is quoted so the glob expands on the remote host.
hdpPackages=(
    sqoop.noarch lzo-devel.x86_64 hadoop-libhdfs.x86_64 rrdtool.x86_64
    hbase.noarch pig.noarch lzo.x86_64 ambari-log4j.noarch oozie.noarch
    oozie-client.noarch gweb.noarch snappy-devel.x86_64 hcatalog.noarch
    python-rrdtool.x86_64 nagios.x86_64 webhcat-tar-pig.noarch
    snappy.x86_64 libconfuse.x86_64 webhcat-tar-hive.noarch
    ganglia-gmetad.x86_64 extjs.noarch hive.noarch hadoop-lzo.x86_64
    hadoop-lzo-native.x86_64 hadoop-native.x86_64 hadoop-pipes.x86_64
    nagios-plugins.x86_64 hadoop.x86_64 zookeeper.noarch
    hadoop-sbin.x86_64 ganglia-gmond.x86_64 libganglia.x86_64
    perl-rrdtool.x86_64 epel-release.noarch 'compat-readline5*'
    fping.x86_64 perl-Crypt-DES.x86_64 exim.x86_64 ganglia-web.noarch
    perl-Digest-HMAC.noarch perl-Digest-SHA1.x86_64 bigtop-jsvc.x86_64
)

# /etc/alternatives symlinks installed by HDP.
altLinks=(
    hadoop-etc zookeeper-conf hbase-conf hadoop-log hadoop-lib
    hadoop-default oozie-conf hcatalog-conf hive-conf hadoop-man
    sqoop-conf hadoop-confone
)

# Service accounts created by Ambari/HDP.
hdpUsers=(
    nagios hive ambari-qa hbase oozie hcat mapred hdfs rrdcached
    zookeeper sqoop puppet flume tez yarn storm knox kafka falcon
    atlas mahout spark
)

# Leftover configuration, runtime, log, library and temp directories.
hdpDirs=(
    /hadoop
    /etc/hadoop /etc/hbase /etc/hcatalog /etc/hive /etc/ganglia
    /etc/nagios /etc/oozie /etc/sqoop /etc/zookeeper /etc/kafka
    /etc/falcon /etc/yarn /etc/spark /etc/flume /etc/mapred
    /etc/ambari-qa /etc/tez
    /var/run/hadoop /var/run/hbase /var/run/hive /var/run/ganglia
    /var/run/nagios /var/run/oozie /var/run/zookeeper
    /var/run/ambari-metrics-monitor /var/run/ambari-server
    /var/run/hadoop-mapreduce /var/run/hadoop-yarn /var/run/spark
    /var/log/hadoop /var/log/hbase /var/log/hive /var/log/nagios
    /var/log/oozie /var/log/zookeeper /var/log/hadoop-mapreduce
    /var/log/hadoop-yarn /var/log/spark /var/nagios
    /usr/lib/hadoop /usr/lib/hbase /usr/lib/hcatalog /usr/lib/hive
    /usr/lib/oozie /usr/lib/sqoop /usr/lib/zookeeper
    /var/lib/hive /var/lib/ganglia /var/lib/oozie /var/lib/zookeeper
    /var/lib/hadoop-hdfs /var/lib/hadoop-mapreduce /var/lib/hadoop-yarn
    /var/tmp/oozie /tmp/hive /tmp/nagios /tmp/ambari-qa
    /tmp/sqoop-ambari-qa
    /hadoop/oozie /hadoop/zookeeper /hadoop/mapred /hadoop/hdfs
    /tmp/hadoop-hive /tmp/hadoop-nagios /tmp/hadoop-hcat
    /tmp/hadoop-ambari-qa /tmp/hsperfdata_hbase /tmp/hsperfdata_hive
    /tmp/hsperfdata_nagios /tmp/hsperfdata_oozie
    /tmp/hsperfdata_zookeeper /tmp/hsperfdata_mapred
    /tmp/hsperfdata_hdfs /tmp/hsperfdata_hcat /tmp/hsperfdata_ambari-qa
    /tmp/hsperfdata_admin /tmp/hsperfdata_spark
)

for host in $hostList
do
    # Skip hosts that do not answer any ping probe.  Checking ping's exit
    # status catches plain timeouts too; the old
    # `grep 'Unreachable' | wc -l` test only caught explicit ICMP
    # "Destination Host Unreachable" replies and treated silent hosts as up.
    if ! ping -c "$pingCount" "$host" > /dev/null 2>&1; then
        printf '%s======>%s is Unreachable,please check /etc/hosts file\n' \
            "$logPre" "$host"
        continue
    fi

    # printf interprets \n properly; the old `echo "... \n"` (no -e)
    # printed a literal backslash-n.
    printf '%s======>%s deleting...\n\n' "$logPre" "$host"

    # 1.) Remove the hdp.repo, HDP*.repo and ambari.repo yum repo files.
    ssh "$host" "rm -rf $yumReposDir/hdp.repo $yumReposDir/HDP* $yumReposDir/ambari.repo"

    # 2.) Remove every HDP-related package.
    for pkg in "${hdpPackages[@]}"; do
        ssh "$host" "yum remove -y $pkg"
    done

    # 3.) Remove the alternatives symlinks.  Absolute paths are required:
    # the original script issued `ssh host "cd /etc/alternatives"` and then
    # relative `rm` commands, but every ssh invocation starts a fresh remote
    # shell, so the cd never persisted and those rm commands actually ran
    # in the login directory.
    for link in "${altLinks[@]}"; do
        ssh "$host" "rm -rf $alterNativesDir/$link"
    done

    # 4.) Delete the service accounts (-r removes the home directory,
    # -f forces removal even if the user is logged in).
    for user in "${hdpUsers[@]}"; do
        ssh "$host" "userdel -rf $user"
    done

    # 5.) Remove every leftover directory.
    for dir in "${hdpDirs[@]}"; do
        ssh "$host" "rm -rf $dir"
    done

    # 6.) Finally remove the ambari packages themselves, their postgres
    # backend, and all ambari state/log/config directories.
    ssh "$host" "yum remove -y ambari-*"
    ssh "$host" "yum remove -y postgresql"
    ssh "$host" "rm -rf /var/lib/ambari* /var/log/ambari* /etc/ambari*"

    printf '%s======>%s is done!\n\n' "$logPre" "$host"
done
分享到:
评论

相关推荐

    Ambari2.6.*集群完全卸载清除脚本

    Ambari完全卸载脚本,参数加host列表文件,卸载前请确认脚本是否符合本地真实情况

    ambari 集群 卸载

    ambari集群安装后不成功的反安装过程,经过积累的操作记录

    ambri_hdp 完全卸载脚本

    ambari_HDP 完全卸载脚本,包括所有 Ambari 和 HDP 组件及数据库文件,日志文件。工具包由 3 个脚本组成,解压之后不要丢失其中的任何一个文件。使用方法:解压后,进入 uninstall_hdp_ambri 目录,运行: sudo ./...

    Ambari卸载脚本

    执行命令: sh cleanAmbariNew.sh hostfile 其中,hostfile文件内容(机器地址): 可以reboot重启下,防止启用组件端口会被占用 注意:脚本中删除Postgres数据库(重装会造成数据丢失)

    ambari卸载脚本

    安装ambari集群后,若果需要卸载的同学,可以参考下。未必能直接用,但是借鉴是可以的。毕竟环境不同,我这边的环境可以用,你那边的环境未必可行。

    快速移除ambari和hadoop脚本

    通过ambari安装hadoop及组件有时候会失败,卸载清除非常麻烦,通过此脚本可以快速实现用户删除,目录删除,组件卸载,非常好用。

    局域网下离线安装Ambari详细说明

    局域网下离线安装Ambari,卸载旧版本数据库,安装新的Mysql,一些组件的验证,详细叙述过程

    Ambari hdp升级说明

    Apache Ambari 是一个用于管理和监控 Hadoop 生态系统服务的开源工具,它提供了一个用户友好的 Web 界面和 REST API,使得集群的部署、配置、管理和维护变得更加简单。在 HDP(Hortonworks Data Platform)环境中,...

    apache-ambari-2.7.6-src.tar.gz

    Apache Ambari是一款开源的工具,专为简化Hadoop集群的部署、管理和监控而设计。Ambari由Apache软件基金会维护,是大数据生态系统中的重要组件,尤其在HDP(Hortonworks Data Platform)中占据核心地位。这个"apache...

    ambari 2.7.5 安装包下载

    该资源为Ambari2.7.5预编译安装包,资源已放到百度网盘,可以先下载《ambari预编译安装包网盘下载地址.txt》获取网盘地址进行下载,资源内容:ambari-2.7.5.0-centos7.tar.gz 如有其他ambari相关资源需求可私信我

    Ambari 操作指南 .docx

    Apache Ambari 操作指南 摘要: Apache Ambari 是一个基于Web的管理工具,用于监控和管理大规模的分布式系统,如Hadoop集群。Ambari从集群节点和服务收集了大量的信息,并把它们表现为容易使用的,集中化的接口:...

    [Ambari]==Ambari+Hadoop3集成impala3.2、kudu1.10.0

    ambari2.7.5集成HDP3,本身不带impala、kudu 故集成cloudera的impala、kudu安装方式 ambari插件安装方式。 解压放到/var/lib/ambari-server/resources/stacks/HDP/3.1/services/下

    Ambari2.7.4集成Hue4.6.0

    Apache Ambari 是一个用于管理和监控 Hadoop 集群的开源工具,它提供了一个直观的 Web UI 和 REST API。Ambari 2.7.4 版本是该工具的一个稳定版本,支持多种 Hadoop 组件的安装、配置和管理。而 Hue 是一个开源的 ...

    大数据Ambari之flume集成编译好的源码包

    **大数据Ambari之flume集成编译好的源码包** Apache Ambari 是一个用于管理和监控Hadoop集群的开源工具,它提供了直观的Web界面和RESTful API,使得安装、配置、管理Hadoop生态系统变得更加简单。Flume是Apache的一...

    ambari官方安装指南-中文翻译版-程序员人工翻译,非机器翻译.docx

    Ambari 安装指南中文翻译版 Ambari 是一个基于 Apache 的开源集群管理平台,能够帮助用户快速部署、管理和监控 Hadoop 集群。下面是根据给定文件信息生成的相关知识点: Apache Ambari 概述 Apache Ambari 是一个...

    ambari server 添加主机api

    通过ambari server api 添加节点 可以脱离ambari dashboard

    202x年ambari搭建大数据平台安装手册(专业完整版).pdf

    本文档主要介绍了使用 Ambari 搭建大数据平台的安装手册,对于大数据的安装和配置进行了详细的介绍。 大数据平台的基本概念: * 大数据是指无法在一定时间内用传统处理方式处理的巨量数据,包括结构化、非结构化和...

    Ambari搭建Hadoop

    Ambari 搭建 Hadoop 集群环境 Ambari 是一个商业化的平台,用于搭建 Hadoop 集群环境。该平台提供了一个端到端的解决方案,包括安装、操作、修改配置和监控集群中的所有节点。使用 Ambari 的 Web UI 和 REST API,...

Global site tag (gtag.js) - Google Analytics