### ~/hadoop-3.0.0/etc/hadoop/core-site.xml
fs.defaultFS
hdfs://node-master:9000
hadoop.http.staticuser.user
engdados
### ~/hadoop-3.0.0/etc/hadoop/hdfs-site.xml
dfs.namenode.name.dir
/home/engdados/data/nameNode
dfs.datanode.data.dir
/home/engdados/data/dataNode
dfs.replication
2
### ~/hadoop-3.0.0/etc/hadoop/yarn-site.xml
yarn.resourcemanager.hostname
node-master
yarn.nodemanager.aux-services
mapreduce_shuffle
yarn.nodemanager.resource.memory-mb
1536
yarn.scheduler.maximum-allocation-mb
1536
yarn.scheduler.minimum-allocation-mb
128
yarn.nodemanager.vmem-check-enabled
false
yarn.nodemanager.aux-services.mapreduce.shuffle.class
org.apache.hadoop.mapred.ShuffleHandler
yarn.scheduler.capacity.maximum-am-resource-percent
0.95
yarn.application.classpath
$HADOOP_CONF_DIR,$HADOOP_COMMON_HOME/share/hadoop/common/*,$HADOOP_COMMON_HOME/share/hadoop/common/lib/*,$HADOOP_HDFS_HOME/share/hadoop/hdfs/*,$HADOOP_HDFS_HOME/share/hadoop/hdfs/lib/*,$HADOOP_CONF_DIR/*,$HADOOP_YARN_HOME/share/hadoop/yarn/*,$HADOOP_YARN_HOME/share/hadoop/yarn/lib/*
### ~/hadoop-3.0.0/etc/hadoop/mapred-site.xml
mapreduce.framework.name
yarn
yarn.app.mapreduce.am.resource.mb
512
mapreduce.map.memory.mb
256
mapreduce.reduce.memory.mb
256
mapreduce.application.classpath
$HADOOP_MAPRED_HOME/share/hadoop/mapreduce/*,$HADOOP_MAPRED_HOME/share/hadoop/mapreduce/lib/*
yarn.app.mapreduce.am.env
HADOOP_MAPRED_HOME=${HADOOP_HOME}
mapreduce.map.env
HADOOP_MAPRED_HOME=${HADOOP_HOME}
mapreduce.reduce.env
HADOOP_MAPRED_HOME=${HADOOP_HOME}
### ~/hadoop-3.0.0/etc/hadoop/workers
node-master
node1
node2
### ~/hadoop-3.0.0/etc/hadoop/hadoop-env.sh
# Hadoop's launcher scripts need an explicit JAVA_HOME when sourced.
# NOTE(review): this points at the JRE of a 32-bit (i386) JDK 8 install —
# confirm the same JVM path exists on every node.
JAVA_HOME=/usr/lib/jvm/java-8-openjdk-i386/jre
export JAVA_HOME

# Append JVM flags rather than overwrite: silence -XX warning printouts and
# prefer the IPv4 stack (keeps daemons off IPv6 on dual-stack hosts).
HADOOP_OPTS="$HADOOP_OPTS -XX:-PrintWarnings -Djava.net.preferIPv4Stack=true"
export HADOOP_OPTS
### ~/.bashrc
# Hadoop/Spark environment. Every variable here is read by the Hadoop and
# Spark launcher scripts, which run as CHILD processes of the login shell —
# without `export` they never see these values (hadoop/spark commands then
# fail with "JAVA_HOME is not set" / "HADOOP_HOME not found").
export HADOOP_HOME=/home/engdados/hadoop-3.0.0
export HADOOP_COMMON_HOME=$HADOOP_HOME
export HADOOP_HDFS_HOME=$HADOOP_HOME
export HADOOP_MAPRED_HOME=$HADOOP_HOME
export HADOOP_YARN_HOME=$HADOOP_HOME
export HADOOP_CONF_DIR=$HADOOP_HOME/etc/hadoop
export HADOOP_COMMON_LIB_NATIVE_DIR=$HADOOP_HOME/lib/native
# Deliberately overwrites (not appends) any prior HADOOP_OPTS; hadoop-env.sh
# appends its own flags to this value when the daemons start.
export HADOOP_OPTS="-Djava.library.path=$HADOOP_COMMON_LIB_NATIVE_DIR"
# NOTE(review): JRE of a 32-bit (i386) JDK 8 — keep in sync with the
# JAVA_HOME set in hadoop-env.sh and verify it exists on every node.
export JAVA_HOME=/usr/lib/jvm/java-8-openjdk-i386/jre
export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$HADOOP_COMMON_LIB_NATIVE_DIR"
export SPARK_HOME=$HADOOP_HOME/spark
export PATH="$PATH:$HADOOP_HOME/sbin:$HADOOP_HOME/bin:$HADOOP_HOME/spark/sbin:$HADOOP_HOME/spark/bin"
### ~/hadoop-3.0.0/spark/conf/spark-defaults.conf
spark.master yarn
spark.driver.memory 512m
spark.executor.memory 512m
spark.eventLog.enabled true
spark.eventLog.dir hdfs://node-master:9000/spark-logs
spark.history.provider org.apache.spark.deploy.history.FsHistoryProvider
spark.history.fs.logDirectory hdfs://node-master:9000/spark-logs
spark.history.fs.update.interval 10s
spark.history.ui.port 18080
spark.yarn.stagingDir hdfs://node-master:9000/user/engdados
### ~/hadoop-3.0.0/spark/conf/spark-env.sh
# Point Spark at the cluster's Hadoop configuration directory so that
# spark.master=yarn and the hdfs:// URIs in spark-defaults.conf resolve.
HADOOP_CONF_DIR="${HADOOP_HOME}/etc/hadoop"
export HADOOP_CONF_DIR