@@ -65,6 +65,18 @@ param(
     [Switch]
     $skipNamenodeFormat = $false
     )
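+# Builds a comma-separated list of file URIs from the comma-separated path list
+# returned by (Get-AppendedPath $value $adjunct).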
+function CreateURIMultiplePath($value,$adjunct)
+{
+    $dirs = (Get-AppendedPath $value $adjunct).split(',')
+    foreach ($dir in $dirs)
+    {
+        $result_dir += ConvertToFileURI($dir)
+        $result_dir += ","
+    }
+    return $result_dir.Substring(0,$result_dir.Length-1)
+}
 
 function Main( $scriptDir )
 {
@@ -290,10 +302,10 @@ function Main( $scriptDir )
     $NMAndMRLogDir = Join-Path (${ENV:HDP_DATA_DIR}.Split(",") | Select -first 1).Trim() "$shortUsername/logs"
 
     $hdfsConfigs = @{
-        "dfs.namenode.checkpoint.dir" = ConvertToFileURI(Get-AppendedPath $ENV:HDFS_DATA_DIR "snn");
-        "dfs.namenode.checkpoint.edits.dir" = ConvertToFileURI(Get-AppendedPath $ENV:HDFS_DATA_DIR "snn");
-        "dfs.namenode.name.dir" = ConvertToFileURI(Get-AppendedPath $ENV:HDFS_DATA_DIR "nn");
-        "dfs.datanode.data.dir" = ConvertToFileURI(Get-AppendedPath $ENV:HDFS_DATA_DIR "dn");
+        "dfs.namenode.checkpoint.dir" = CreateURIMultiplePath $ENV:HDFS_DATA_DIR "snn";
+        "dfs.namenode.checkpoint.edits.dir" = CreateURIMultiplePath $ENV:HDFS_DATA_DIR "snn";
+        "dfs.namenode.name.dir" = CreateURIMultiplePath $ENV:HDFS_DATA_DIR "nn";
+        "dfs.datanode.data.dir" = CreateURIMultiplePath $ENV:HDFS_DATA_DIR "dn";
         "dfs.replication" = "$replicationfactor";
         "dfs.hosts" = "${hadoopInstallDir}\etc\hadoop\dfs.include";
         "dfs.hosts.exclude" = "${hadoopInstallDir}\etc\hadoop\dfs.exclude";