
Hadoop startup scripts: a distilled walkthrough (Windows .cmd)

 
start-all.cmd
    --->set HADOOP_BIN_PATH=${hadoop_install}\sbin
    --->set HADOOP_LIBEXEC_DIR=${hadoop_install}\libexec
    ===>${hadoop_install}\libexec\hadoop-config.cmd
	--->set HADOOP_COMMON_DIR=share\hadoop\common
	--->set HADOOP_COMMON_LIB_JARS_DIR=share\hadoop\common\lib
	--->set HADOOP_COMMON_LIB_NATIVE=lib\native
	--->set HDFS_DIR=share\hadoop\hdfs
	--->set HDFS_LIB_JARS_DIR=share\hadoop\hdfs\lib
	--->set YARN_DIR=share\hadoop\yarn
	--->set YARN_LIB_JARS_DIR=share\hadoop\yarn\lib
	--->set MAPRED_DIR=share\hadoop\mapreduce
	--->set MAPRED_LIB_JARS_DIR=share\hadoop\mapreduce\lib
	--->check that %HADOOP_HOME%\share\hadoop\common\hadoop-common-*.jar exists
	--->set HADOOP_CONF_DIR=%HADOOP_HOME%\etc\hadoop
	===>call hadoop-env.cmd
			--->set JAVA_HOME=%JAVA_HOME%
			--->set HADOOP_CLASSPATH=%HADOOP_HOME%\contrib\capacity-scheduler\*.jar
			--->set HADOOP_NAMENODE_OPTS=-Dhadoop.security.logger=%HADOOP_SECURITY_LOGGER% -Dhdfs.audit.logger=%HDFS_AUDIT_LOGGER% %HADOOP_NAMENODE_OPTS%
			--->set HADOOP_DATANODE_OPTS=-Dhadoop.security.logger=ERROR,RFAS %HADOOP_DATANODE_OPTS%
			--->set HADOOP_SECONDARYNAMENODE_OPTS=-Dhadoop.security.logger=%HADOOP_SECURITY_LOGGER% -Dhdfs.audit.logger=%HDFS_AUDIT_LOGGER% %HADOOP_SECONDARYNAMENODE_OPTS%
	--->set JAVA=%JAVA_HOME%\bin\java.exe
	--->default the Java max heap to 1000 MB (-Xmx1000m)
	--->set JAVA_HEAP_MAX=-Xmx%HADOOP_HEAPSIZE%m   if HADOOP_HEAPSIZE is defined, it overrides the default above (a short sketch of this logic follows the hadoop-config.cmd section below)
	--->set CLASSPATH=!CLASSPATH!;%HADOOP_COMMON_HOME%\%HADOOP_COMMON_DIR%\*
	--->set the log directory, log file name, and log level
	--->set HADOOP_HDFS_HOME=%HADOOP_HOME%
	--->set CLASSPATH=!CLASSPATH!;%HADOOP_HDFS_HOME%\%HDFS_DIR%
	--->set CLASSPATH=!CLASSPATH!;%HADOOP_HDFS_HOME%\%HDFS_LIB_JARS_DIR%\*
	--->set CLASSPATH=!CLASSPATH!;%HADOOP_HDFS_HOME%\%HDFS_DIR%\*
	--->set CLASSPATH=!CLASSPATH!;%HADOOP_YARN_HOME%\%YARN_DIR%
	--->set CLASSPATH=!CLASSPATH!;%HADOOP_YARN_HOME%\%YARN_LIB_JARS_DIR%\*
	--->set CLASSPATH=!CLASSPATH!;%HADOOP_YARN_HOME%\%YARN_DIR%\*
	--->set CLASSPATH=!CLASSPATH!;%HADOOP_MAPRED_HOME%\%MAPRED_DIR%
	--->set CLASSPATH=!CLASSPATH!;%HADOOP_MAPRED_HOME%\%MAPRED_LIB_JARS_DIR%\*
	--->set CLASSPATH=!CLASSPATH!;%HADOOP_MAPRED_HOME%\%MAPRED_DIR%\*
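
To make the heap and classpath handling above concrete, here is a minimal, hedged sketch of what hadoop-config.cmd amounts to (not the verbatim Apache source; the directory variables are assumed to have been set by the lines above). Site-specific overrides such as HADOOP_HEAPSIZE or the various *_OPTS variables normally belong in etc\hadoop\hadoop-env.cmd, which hadoop-config.cmd calls as shown earlier.

	@rem Sketch only: a 1000 MB default heap that HADOOP_HEAPSIZE overrides.
	@echo off
	setlocal enabledelayedexpansion
	set JAVA_HEAP_MAX=-Xmx1000m
	if defined HADOOP_HEAPSIZE (
	  set JAVA_HEAP_MAX=-Xmx%HADOOP_HEAPSIZE%m
	)
	@rem The classpath grows piece by piece; delayed expansion (!CLASSPATH!)
	@rem keeps the appends working even inside parenthesised blocks.
	set CLASSPATH=%HADOOP_CONF_DIR%
	set CLASSPATH=!CLASSPATH!;%HADOOP_COMMON_HOME%\%HADOOP_COMMON_DIR%\*
	set CLASSPATH=!CLASSPATH!;%HADOOP_HDFS_HOME%\%HDFS_DIR%\*
	echo %JAVA_HEAP_MAX%
	echo !CLASSPATH!
	endlocal
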
    ===>sbin\start-dfs.cmd --config %HADOOP_CONF_DIR%
	--->set HADOOP_BIN_PATH=${hadoop_install}\sbin
	--->set DEFAULT_LIBEXEC_DIR=%HADOOP_BIN_PATH%\..\libexec
	===>call hdfs-config.cmd
		--->set HADOOP_BIN_PATH=%~dp0
		--->set DEFAULT_LIBEXEC_DIR=%HADOOP_BIN_PATH%\..\libexec
		===>call %HADOOP_LIBEXEC_DIR%\hadoop-config.cmd %*
					===>... see above

	--->start "Apache Hadoop Distribution" hadoop namenode
		===>hadoop.cmd
		--->set HADOOP_BIN_PATH=%~dp0
		--->call :updatepath %HADOOP_BIN_PATH%
		  set path_to_add=%*
		  set current_path_comparable=%path%
		  set current_path_comparable=%current_path_comparable: =_%
		  set current_path_comparable=%current_path_comparable:(=_%
		  set current_path_comparable=%current_path_comparable:)=_%
		  set path_to_add_comparable=%path_to_add%
		  set path_to_add_comparable=%path_to_add_comparable: =_%
		  set path_to_add_comparable=%path_to_add_comparable:(=_%
		  set path_to_add_comparable=%path_to_add_comparable:)=_%
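
The substitutions above exist so that PATH entries containing spaces or parentheses (for example C:\Program Files (x86)\...) can be compared without confusing cmd.exe; :updatepath then adds %HADOOP_BIN_PATH% to PATH only when it is not already present. A hedged, simplified sketch of that idea (findstr stands in here for the original's token-by-token comparison, and the example path is a placeholder):

	@echo off
	@rem Placeholder path; in hadoop.cmd this is %HADOOP_BIN_PATH%.
	set "path_to_add=C:\hadoop\sbin"
	set "current_path_comparable=%PATH: =_%"
	set "current_path_comparable=%current_path_comparable:(=_%"
	set "current_path_comparable=%current_path_comparable:)=_%"
	set "path_to_add_comparable=%path_to_add: =_%"
	set "path_to_add_comparable=%path_to_add_comparable:(=_%"
	set "path_to_add_comparable=%path_to_add_comparable:)=_%"
	echo %current_path_comparable% | findstr /i /c:"%path_to_add_comparable%" >nul
	if errorlevel 1 set "PATH=%path_to_add%;%PATH%"
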

		--->set DEFAULT_LIBEXEC_DIR=%HADOOP_BIN_PATH%\..\libexec
		--->set HADOOP_LIBEXEC_DIR=%DEFAULT_LIBEXEC_DIR%
		===>call %HADOOP_LIBEXEC_DIR%\hadoop-config.cmd %*
					===>... see above
		--->set hadoop-command=%1
		--->set hadoop-command-arguments=%_arguments%
		//define the set of HDFS commands
		--->set hdfscommands=namenode secondarynamenode datanode dfs dfsadmin fsck balancer fetchdt oiv dfsgroups //the core HDFS commands delegated to hdfs.cmd
		===>call %HADOOP_HDFS_HOME%\bin\hdfs.cmd
			--->set HADOOP_BIN_PATH=%~dp0
			--->set HADOOP_BIN_PATH=%HADOOP_BIN_PATH:~0,-1%
			--->set HADOOP_LIBEXEC_DIR=%DEFAULT_LIBEXEC_DIR%
			===>call %HADOOP_LIBEXEC_DIR%\hdfs-config.cmd %*  see above
			===>call %HADOOP_CONF_DIR%\hadoop-env.cmd   see above
			--->set hdfs-command=%1
			--->call :make_command_arguments %*
			--->	set hdfs-command-arguments=%_hdfsarguments%
			--->if the command is not recognised, goto print_usage
			--->set hdfscommands=dfs namenode secondarynamenode journalnode zkfc datanode dfsadmin haadmin fsck balancer jmxget oiv oev fetchdt getconf groups snapshotDiff lsSnapshottableDir cacheadmin mover storagepolicies classpath
			--->call :%hdfs-command%
				:namenode
				  set CLASS=org.apache.hadoop.hdfs.server.namenode.NameNode
				  set HADOOP_OPTS=%HADOOP_OPTS% %HADOOP_NAMENODE_OPTS%
				  goto :eof

				:journalnode
				  set CLASS=org.apache.hadoop.hdfs.qjournal.server.JournalNode
				  set HADOOP_OPTS=%HADOOP_OPTS% %HADOOP_JOURNALNODE_OPTS%
				  goto :eof

				:zkfc
				  set CLASS=org.apache.hadoop.hdfs.tools.DFSZKFailoverController
				  set HADOOP_OPTS=%HADOOP_OPTS% %HADOOP_ZKFC_OPTS%
				  goto :eof

				:secondarynamenode
				  set CLASS=org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode
				  set HADOOP_OPTS=%HADOOP_OPTS% %HADOOP_SECONDARYNAMENODE_OPTS%
				  goto :eof

				:datanode
				  set CLASS=org.apache.hadoop.hdfs.server.datanode.DataNode
				  set HADOOP_OPTS=%HADOOP_OPTS% -server %HADOOP_DATANODE_OPTS%
				  goto :eof

				:dfs
				  set CLASS=org.apache.hadoop.fs.FsShell
				  set HADOOP_OPTS=%HADOOP_OPTS% %HADOOP_CLIENT_OPTS%
				  goto :eof

				:dfsadmin
				  set CLASS=org.apache.hadoop.hdfs.tools.DFSAdmin
				  set HADOOP_OPTS=%HADOOP_OPTS% %HADOOP_CLIENT_OPTS%
				  goto :eof

				:haadmin
				  set CLASS=org.apache.hadoop.hdfs.tools.DFSHAAdmin
				  set CLASSPATH=%CLASSPATH%;%TOOL_PATH%
				  set HADOOP_OPTS=%HADOOP_OPTS% %HADOOP_CLIENT_OPTS%
				  goto :eof

				:fsck
				  set CLASS=org.apache.hadoop.hdfs.tools.DFSck
				  set HADOOP_OPTS=%HADOOP_OPTS% %HADOOP_CLIENT_OPTS%
				  goto :eof

				:balancer
				  set CLASS=org.apache.hadoop.hdfs.server.balancer.Balancer
				  set HADOOP_OPTS=%HADOOP_OPTS% %HADOOP_BALANCER_OPTS%
				  goto :eof

				:jmxget
				  set CLASS=org.apache.hadoop.hdfs.tools.JMXGet
				  goto :eof

				:classpath
				  set CLASS=org.apache.hadoop.util.Classpath
				  goto :eof

				:oiv
				  set CLASS=org.apache.hadoop.hdfs.tools.offlineImageViewer.OfflineImageViewerPB
				  goto :eof

				:oev
				  set CLASS=org.apache.hadoop.hdfs.tools.offlineEditsViewer.OfflineEditsViewer
				  goto :eof

				:fetchdt
				  set CLASS=org.apache.hadoop.hdfs.tools.DelegationTokenFetcher
				  goto :eof

				:getconf
				  set CLASS=org.apache.hadoop.hdfs.tools.GetConf
				  goto :eof

				:groups
				  set CLASS=org.apache.hadoop.hdfs.tools.GetGroups
				  goto :eof

				:snapshotDiff
				  set CLASS=org.apache.hadoop.hdfs.tools.snapshot.SnapshotDiff
				  goto :eof

				:lsSnapshottableDir
				  set CLASS=org.apache.hadoop.hdfs.tools.snapshot.LsSnapshottableDir
				  goto :eof

				:cacheadmin
				  set CLASS=org.apache.hadoop.hdfs.tools.CacheAdmin
				  goto :eof

				:mover
				  set CLASS=org.apache.hadoop.hdfs.server.mover.Mover
				  set HADOOP_OPTS=%HADOOP_OPTS% %HADOOP_MOVER_OPTS%
				  goto :eof

				:storagepolicies
				  set CLASS=org.apache.hadoop.hdfs.tools.StoragePolicyAdmin
				  goto :eof
			--->set java_arguments=%JAVA_HEAP_MAX% %HADOOP_OPTS% -classpath %CLASSPATH% %CLASS% %hdfs-command-arguments%
			// %JAVA% resolves to %JAVA_HOME%\bin\java.exe
			--->call %JAVA% %java_arguments%
		--->goto:eof
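
The whole hdfs.cmd branch above boils down to a common batch idiom: call :%hdfs-command% jumps to the label named after the command, the label only records the main class (plus any extra opts), and goto :eof returns so that the single java invocation at the end can launch the selected class. A minimal, self-contained sketch of that dispatch pattern (only two labels shown, not the full script):

	@echo off
	setlocal
	set hdfs-command=%1
	if "%hdfs-command%"=="" set hdfs-command=namenode
	call :%hdfs-command%
	@rem %%...%% prints the variable names literally, since this sketch defines neither.
	echo would run: java %%HADOOP_OPTS%% -classpath %%CLASSPATH%% %CLASS%
	goto :eof

	:namenode
	  set CLASS=org.apache.hadoop.hdfs.server.namenode.NameNode
	  goto :eof

	:datanode
	  set CLASS=org.apache.hadoop.hdfs.server.datanode.DataNode
	  goto :eof
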
		//define the set of MapReduce commands
		--->set mapredcommands=pipes job queue mrgroups mradmin jobtracker tasktracker	//the core mapred commands delegated to mapred.cmd
		===>call %HADOOP_MAPRED_HOME%\bin\mapred.cmd %*
			--->set HADOOP_BIN_PATH=%~dp0
			--->set HADOOP_BIN_PATH=%HADOOP_BIN_PATH:~0,-1%
			--->set HADOOP_LIBEXEC_DIR=%DEFAULT_LIBEXEC_DIR%
			===>call %DEFAULT_LIBEXEC_DIR%\mapred-config.cmd %*
				--->set HADOOP_BIN_PATH=%~dp0
				--->set HADOOP_BIN_PATH=%HADOOP_BIN_PATH:~0,-1%
				--->set DEFAULT_LIBEXEC_DIR=%HADOOP_BIN_PATH%\..\libexec
				--->call %HADOOP_LIBEXEC_DIR%\hadoop-config.cmd %* see above

			===>call %MAPRED_CONF_DIR%\mapred-env.cmd
				--->set HADOOP_JOB_HISTORYSERVER_HEAPSIZE=1000
				--->set HADOOP_MAPRED_ROOT_LOGGER=%HADOOP_LOGLEVEL%,RFA

			--->call :make_command_arguments %*
			--->set JAVA_HEAP_SIZE=-Xmx%MAPRED_HEAPSIZE%m
			--->set CLASSPATH=%HADOOP_CONF_DIR%;%MAPRED_CONF_DIR%;%CLASSPATH%
			--->set CLASSPATH=%CLASSPATH%;%HADOOP_MAPRED_HOME%\build\classes
			--->set CLASSPATH=%CLASSPATH%;%HADOOP_MAPRED_HOME%\build
			--->set CLASSPATH=%CLASSPATH%;%HADOOP_MAPRED_HOME%\build\test\classes
			--->set CLASSPATH=%CLASSPATH%;%HADOOP_MAPRED_HOME%\build\tools
			--->set CLASSPATH=%CLASSPATH%;%HADOOP_YARN_HOME%\%YARN_DIR%\*
			--->set CLASSPATH=%CLASSPATH%;%HADOOP_MAPRED_HOME%\%MAPRED_LIB_JARS_DIR%\*
			--->set CLASSPATH=%CLASSPATH%;%HADOOP_MAPRED_HOME%\modules\*
			--->call :%mapred-command% %mapred-command-arguments%
				
				:classpath
				  set CLASS=org.apache.hadoop.util.Classpath 
				  goto :eof

				:job
				  set CLASS=org.apache.hadoop.mapred.JobClient
				  set HADOOP_OPTS=%HADOOP_OPTS% %HADOOP_CLIENT_OPTS%
				  goto :eof

				:queue
				  set CLASS=org.apache.hadoop.mapred.JobQueueClient
				  set HADOOP_OPTS=%HADOOP_OPTS% %HADOOP_CLIENT_OPTS%
				  goto :eof

				:sampler
				  set CLASS=org.apache.hadoop.mapred.lib.InputSampler
				  set HADOOP_OPTS=%HADOOP_OPTS% %HADOOP_CLIENT_OPTS%
				  goto :eof

				:historyserver
				  set CLASS=org.apache.hadoop.mapreduce.v2.hs.JobHistoryServer
				  set HADOOP_OPTS=%HADOOP_OPTS% -Dmapred.jobsummary.logger=%HADOOP_JHS_LOGGER% %HADOOP_JOB_HISTORYSERVER_OPTS%
				  if defined HADOOP_JOB_HISTORYSERVER_HEAPSIZE (
				    set JAVA_HEAP_MAX=-Xmx%HADOOP_JOB_HISTORYSERVER_HEAPSIZE%m
				  )
				  goto :eof

				:distcp
				  set CLASS=org.apache.hadoop.tools.DistCp
				  set CLASSPATH=%CLASSPATH%;%TOOL_PATH%
				  set HADOOP_OPTS=%HADOOP_OPTS% %HADOOP_CLIENT_OPTS%
				  goto :eof

				:archive
				  set CLASS=org.apache.hadoop.tools.HadoopArchives
				  set CLASSPATH=%CLASSPATH%;%TOOL_PATH%
				  set HADOOP_OPTS=%HADOOP_OPTS% %HADOOP_CLIENT_OPTS%
				  goto :eof

				:hsadmin
				  set CLASS=org.apache.hadoop.mapreduce.v2.hs.client.HSAdmin
				  set HADOOP_OPTS=%HADOOP_OPTS% %HADOOP_CLIENT_OPTS%
				  goto :eof

				:pipes
				  goto not_supported

				:mradmin
				  goto not_supported

				:jobtracker
				  goto not_supported

				:tasktracker
				  goto not_supported

				:groups
				  goto not_supported
			--->set java_arguments=%JAVA_HEAP_MAX% %HADOOP_OPTS% -classpath %CLASSPATH% %CLASS% %mapred-command-arguments%
			--->call %JAVA% %java_arguments%
		--->goto:eof
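
hadoop.cmd, hdfs.cmd, mapred.cmd and yarn.cmd all use a :make_command_arguments helper to split the command word from the arguments that get passed through to the JVM. A hedged sketch of the essential idea (simplified: the real routine also skips a leading --config <dir> pair):

	@echo off
	setlocal enabledelayedexpansion
	set the-command=%1
	set _arguments=
	:collect
	shift
	if "%~1"=="" goto done
	set _arguments=!_arguments! %1
	goto collect
	:done
	echo command: %the-command%
	echo arguments:%_arguments%
	endlocal
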
		 //commands that are neither hdfs commands nor mapred commands fall through to here
		---> set corecommands=fs version jar checknative distcp daemonlog archive classpath credential key  //the core command set: fs (file-system shell), version (print the version), jar (run a class from a jar), checknative (check the native libraries), distcp (distributed copy), archive (create Hadoop archives)
		--->call :%hadoop-command%   //anything not in the core set is instead treated as a fully qualified class name:
			set CLASS=%hadoop-command%
			:fs
			  set CLASS=org.apache.hadoop.fs.FsShell
			  goto :eof
			:jar
			  set CLASS=org.apache.hadoop.util.RunJar
			  goto :eof
			:checknative
			  set CLASS=org.apache.hadoop.util.NativeLibraryChecker
			  goto :eof
			:distcp
			  set CLASS=org.apache.hadoop.tools.DistCp
			  set CLASSPATH=%CLASSPATH%;%TOOL_PATH%
			  goto :eof
			:daemonlog
			  set CLASS=org.apache.hadoop.log.LogLevel
			  goto :eof
			:archive
			  set CLASS=org.apache.hadoop.tools.HadoopArchives
			  set CLASSPATH=%CLASSPATH%;%TOOL_PATH%
			  goto :eof
			:classpath
			  set CLASS=org.apache.hadoop.util.Classpath
			  goto :eof
			:credential
			  set CLASS=org.apache.hadoop.security.alias.CredentialShell
			  goto :eof
			:key
			  set CLASS=org.apache.hadoop.crypto.key.KeyShell
			  goto :eof
		--->set path=%PATH%;%HADOOP_BIN_PATH%
		--->set HADOOP_OPTS=%HADOOP_OPTS% %HADOOP_CLIENT_OPTS%
		--->set HADOOP_OPTS=%HADOOP_OPTS% -Dhadoop.security.logger=%HADOOP_SECURITY_LOGGER%
		//invoke the Java program (java.exe)
		--->call %JAVA% %JAVA_HEAP_MAX% %HADOOP_OPTS% -classpath %CLASSPATH% %CLASS% %hadoop-command-arguments%
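
In practice this means the same hadoop.cmd front end serves both the built-in tools and arbitrary user classes. Illustrative invocations (the jar path and the example job class are placeholders, not taken from the original scripts):

	:: core command "fs" -> org.apache.hadoop.fs.FsShell
	hadoop.cmd fs -ls /

	:: core command "jar" -> org.apache.hadoop.util.RunJar
	hadoop.cmd jar C:\jobs\wordcount.jar WordCount /in /out

	:: not a core command -> the token itself is used as the class to run
	hadoop.cmd org.apache.hadoop.examples.WordCount /in /out
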
	===>start "Apache Hadoop Distribution" hadoop.cmd datanode 参考以前	
	//yarn框架的启动脚本
    ===>sbin\start-yarn.cmd --config %HADOOP_CONF_DIR%
	--->set HADOOP_BIN_PATH=%~dp0
	--->set HADOOP_BIN_PATH=%HADOOP_BIN_PATH:~0,-1%
	--->set HADOOP_LIBEXEC_DIR=%DEFAULT_LIBEXEC_DIR%
	===>call %HADOOP_LIBEXEC_DIR%\yarn-config.cmd %*
		--->set HADOOP_BIN_PATH=%~dp0
		--->set HADOOP_BIN_PATH=%HADOOP_BIN_PATH:~0,-1%
		--->set HADOOP_LIBEXEC_DIR=%DEFAULT_LIBEXEC_DIR%
		===>call %HADOOP_LIBEXEC_DIR%\hadoop-config.cmd %* see above
		--->set YARN_CONF_DIR=%2
		--->set YARN_CONF_DIR=%HADOOP_CONF_DIR%
		--->set YARN_SLAVES=%YARN_CONF_DIR%\%2
	--->set YARN_CONF_DIR=%HADOOP_YARN_HOME%\conf
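
The three assignments above are not unconditional: they correspond to how yarn-config.cmd resolves the configuration directory: an explicit --config argument wins, otherwise HADOOP_CONF_DIR is reused, otherwise %HADOOP_YARN_HOME%\conf is the fallback (the --hosts flag fills YARN_SLAVES). A hedged, simplified sketch of that precedence:

	@echo off
	@rem Assume YARN_CONF_DIR may already have been set from a --config argument.
	if not defined YARN_CONF_DIR (
	  if defined HADOOP_CONF_DIR (
	    set YARN_CONF_DIR=%HADOOP_CONF_DIR%
	  ) else (
	    set YARN_CONF_DIR=%HADOOP_YARN_HOME%\conf
	  )
	)
	echo YARN_CONF_DIR=%YARN_CONF_DIR%
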
	===>start "Apache Hadoop Distribution" yarn.cmd resourcemanager
		--->set HADOOP_BIN_PATH=%~dp0
		--->set HADOOP_BIN_PATH=%HADOOP_BIN_PATH:~0,-1%
		--->set HADOOP_LIBEXEC_DIR=%DEFAULT_LIBEXEC_DIR%
		===>call %HADOOP_LIBEXEC_DIR%\hadoop-config.cmd %* see above
		===>call %YARN_CONF_DIR%\yarn-env.cmd
			--->set HADOOP_YARN_USER=%yarn%
			--->set JAVA_HEAP_MAX=-Xmx%YARN_HEAPSIZE%m
			--->set YARN_LOG_DIR=%HADOOP_YARN_HOME%\logs
			--->set YARN_LOGFILE=yarn.log
			--->set YARN_POLICYFILE=hadoop-policy.xml
			--->set YARN_ROOT_LOGGER=%HADOOP_LOGLEVEL%,console
			--->set YARN_OPTS=%YARN_OPTS% -Dhadoop.log.dir=%YARN_LOG_DIR%
			--->set YARN_OPTS=%YARN_OPTS% -Dyarn.log.dir=%YARN_LOG_DIR%
			--->set YARN_OPTS=%YARN_OPTS% -Dhadoop.log.file=%YARN_LOGFILE%
			--->set YARN_OPTS=%YARN_OPTS% -Dyarn.log.file=%YARN_LOGFILE%
			--->set YARN_OPTS=%YARN_OPTS% -Dyarn.home.dir=%HADOOP_YARN_HOME%
			--->set YARN_OPTS=%YARN_OPTS% -Dyarn.id.str=%YARN_IDENT_STRING%
			--->set YARN_OPTS=%YARN_OPTS% -Dhadoop.home.dir=%HADOOP_YARN_HOME%
			--->set YARN_OPTS=%YARN_OPTS% -Dhadoop.root.logger=%YARN_ROOT_LOGGER%
			--->set YARN_OPTS=%YARN_OPTS% -Dyarn.root.logger=%YARN_ROOT_LOGGER%
			--->set YARN_OPTS=%YARN_OPTS% -Djava.library.path=%JAVA_LIBRARY_PATH%
			--->set YARN_OPTS=%YARN_OPTS% -Dyarn.policy.file=%YARN_POLICYFILE%
		--->set yarn-command=%1
		--->call :make_command_arguments %*   //collects everything after the command word into the argument list
			set yarn-command-arguments=%_yarnarguments%
		--->set JAVA_HEAP_MAX=-Xmx%YARN_HEAPSIZE%m
		--->set CLASSPATH=%HADOOP_CONF_DIR%;%YARN_CONF_DIR%;%CLASSPATH%  
		--->set CLASSPATH=%CLASSPATH%;%HADOOP_YARN_HOME%\%YARN_DIR%\*       i.e. share\hadoop\yarn
		--->set CLASSPATH=%CLASSPATH%;%HADOOP_YARN_HOME%\%YARN_LIB_JARS_DIR%\*       i.e. share\hadoop\yarn\lib
		--->set yarncommands=resourcemanager nodemanager proxyserver rmadmin version jar ^
			application applicationattempt cluster container node queue logs daemonlog historyserver ^
			timelineserver classpath
				:classpath
				  set CLASS=org.apache.hadoop.util.Classpath 
				  goto :eof

				:rmadmin
				  set CLASS=org.apache.hadoop.yarn.client.cli.RMAdminCLI
				  set YARN_OPTS=%YARN_OPTS% %YARN_CLIENT_OPTS%
				  goto :eof

				:application
				  set CLASS=org.apache.hadoop.yarn.client.cli.ApplicationCLI
				  set YARN_OPTS=%YARN_OPTS% %YARN_CLIENT_OPTS%
				  set yarn-command-arguments=%yarn-command% %yarn-command-arguments%
				  goto :eof

				:applicationattempt
				  set CLASS=org.apache.hadoop.yarn.client.cli.ApplicationCLI
				  set YARN_OPTS=%YARN_OPTS% %YARN_CLIENT_OPTS%
				  set yarn-command-arguments=%yarn-command% %yarn-command-arguments%
				  goto :eof

				:cluster
				  set CLASS=org.apache.hadoop.yarn.client.cli.ClusterCLI
				  set YARN_OPTS=%YARN_OPTS% %YARN_CLIENT_OPTS%
				  goto :eof

				:container
				  set CLASS=org.apache.hadoop.yarn.client.cli.ApplicationCLI
				  set YARN_OPTS=%YARN_OPTS% %YARN_CLIENT_OPTS%
				  set yarn-command-arguments=%yarn-command% %yarn-command-arguments%
				  goto :eof  

				:node
				  set CLASS=org.apache.hadoop.yarn.client.cli.NodeCLI
				  set YARN_OPTS=%YARN_OPTS% %YARN_CLIENT_OPTS%
				  goto :eof

				:queue
				  set CLASS=org.apache.hadoop.yarn.client.cli.QueueCLI
				  set YARN_OPTS=%YARN_OPTS% %YARN_CLIENT_OPTS%
				  goto :eof

				:resourcemanager
				  set CLASSPATH=%CLASSPATH%;%YARN_CONF_DIR%\rm-config\log4j.properties
				  set CLASS=org.apache.hadoop.yarn.server.resourcemanager.ResourceManager
				  set YARN_OPTS=%YARN_OPTS% %YARN_RESOURCEMANAGER_OPTS%
				  if defined YARN_RESOURCEMANAGER_HEAPSIZE (
				    set JAVA_HEAP_MAX=-Xmx%YARN_RESOURCEMANAGER_HEAPSIZE%m
				  )
				  goto :eof

				:historyserver
				  @echo DEPRECATED: Use of this command to start the timeline server is deprecated. 1>&2
				  @echo Instead use the timelineserver command for it. 1>&2
				  set CLASSPATH=%CLASSPATH%;%YARN_CONF_DIR%\ahs-config\log4j.properties
				  set CLASS=org.apache.hadoop.yarn.server.applicationhistoryservice.ApplicationHistoryServer
				  set YARN_OPTS=%YARN_OPTS% %HADOOP_HISTORYSERVER_OPTS%
				  if defined YARN_HISTORYSERVER_HEAPSIZE (
				    set JAVA_HEAP_MAX=-Xmx%YARN_HISTORYSERVER_HEAPSIZE%m
				  )
				  goto :eof

				:timelineserver
				  set CLASSPATH=%CLASSPATH%;%YARN_CONF_DIR%\timelineserver-config\log4j.properties
				  set CLASS=org.apache.hadoop.yarn.server.applicationhistoryservice.ApplicationHistoryServer
				  set YARN_OPTS=%YARN_OPTS% %HADOOP_TIMELINESERVER_OPTS%
				  if defined YARN_TIMELINESERVER_HEAPSIZE (
				    set JAVA_HEAP_MAX=-Xmx%YARN_TIMELINESERVER_HEAPSIZE%m
				  )
				  goto :eof

				:nodemanager
				  set CLASSPATH=%CLASSPATH%;%YARN_CONF_DIR%\nm-config\log4j.properties
				  set CLASS=org.apache.hadoop.yarn.server.nodemanager.NodeManager
				  set YARN_OPTS=%YARN_OPTS% -server %HADOOP_NODEMANAGER_OPTS%
				  if defined YARN_NODEMANAGER_HEAPSIZE (
				    set JAVA_HEAP_MAX=-Xmx%YARN_NODEMANAGER_HEAPSIZE%m
				  )
				  goto :eof

				:proxyserver
				  set CLASS=org.apache.hadoop.yarn.server.webproxy.WebAppProxyServer
				  set YARN_OPTS=%YARN_OPTS% %HADOOP_PROXYSERVER_OPTS%
				  if defined YARN_PROXYSERVER_HEAPSIZE (
				    set JAVA_HEAP_MAX=-Xmx%YARN_PROXYSERVER_HEAPSIZE%m
				  )
				  goto :eof

				:version
				  set CLASS=org.apache.hadoop.util.VersionInfo
				  set YARN_OPTS=%YARN_OPTS% %YARN_CLIENT_OPTS%
				  goto :eof

				:jar
				  set CLASS=org.apache.hadoop.util.RunJar
				  set YARN_OPTS=%YARN_OPTS% %YARN_CLIENT_OPTS%
				  goto :eof

				:logs
				  set CLASS=org.apache.hadoop.yarn.client.cli.LogsCLI
				  set YARN_OPTS=%YARN_OPTS% %YARN_CLIENT_OPTS%
				  goto :eof

				:daemonlog
				  set CLASS=org.apache.hadoop.log.LogLevel
				  set YARN_OPTS=%YARN_OPTS% %YARN_CLIENT_OPTS%
				  goto :eof
		--->set java_arguments=%JAVA_HEAP_MAX% %YARN_OPTS% -classpath %CLASSPATH% %CLASS% %yarn-command-arguments%
		--->call %JAVA% %java_arguments%  //invoke java.exe
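
As with hadoop.cmd, a single yarn.cmd entry point covers both daemons and client tools; the first argument alone decides which label, and therefore which class, runs. Illustrative invocations, all using commands from the yarncommands list above:

	:: daemon: dispatches to :resourcemanager -> ResourceManager
	yarn.cmd resourcemanager

	:: client tool: dispatches to :application -> ApplicationCLI
	yarn.cmd application -list

	:: prints the effective classpath via org.apache.hadoop.util.Classpath
	yarn.cmd classpath
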
 
	===>start "Apache Hadoop Distribution" yarn.cmd nodemanager 参考以前
	===>@rem start "Apache Hadoop Distribution" yarn.cmd proxyserver 参考以前