--- conf/hadoop-env.sh.orig	2013-07-23 07:26:38.000000000 +0900
+++ conf/hadoop-env.sh	2013-08-18 23:31:06.000000000 +0900
@@ -6,13 +6,18 @@
 # remote nodes.
 
 # The java implementation to use.  Required.
-# export JAVA_HOME=/usr/lib/j2sdk1.5-sun
+export JAVA_HOME=@java_home@
+
+# Force UTF-8 file encoding for the Hadoop JVMs. Note: the Hadoop 1.x
+# launch scripts read HADOOP_OPTS, not JAVA_OPTS, so the option must be
+# appended here to take effect.
+export HADOOP_OPTS="-Dfile.encoding=UTF-8 $HADOOP_OPTS"
 
 # Extra Java CLASSPATH elements.  Optional.
 # export HADOOP_CLASSPATH=
 
 # The maximum amount of heap to use, in MB. Default is 1000.
-# export HADOOP_HEAPSIZE=2000
+export HADOOP_HEAPSIZE=2000
 
 # Extra Java runtime options.  Empty by default.
 # export HADOOP_OPTS=-server
@@ -31,7 +36,7 @@
 # export HADOOP_SSH_OPTS="-o ConnectTimeout=1 -o SendEnv=HADOOP_CONF_DIR"
 
 # Where log files are stored.  $HADOOP_HOME/logs by default.
-# export HADOOP_LOG_DIR=${HADOOP_HOME}/logs
+export HADOOP_LOG_DIR=@hadoop_log_dir@
 
 # File naming remote slave hosts.  $HADOOP_HOME/conf/slaves by default.
 # export HADOOP_SLAVES=${HADOOP_HOME}/conf/slaves
@@ -48,7 +53,7 @@
 # NOTE: this should be set to a directory that can only be written to by 
 #       the users that are going to run the hadoop daemons.  Otherwise there is
 #       the potential for a symlink attack.
-# export HADOOP_PID_DIR=/var/hadoop/pids
+export HADOOP_PID_DIR=@hadoop_pid_dir@
 
 # A string representing this instance of hadoop. $USER by default.
 # export HADOOP_IDENT_STRING=$USER

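The @java_home@, @hadoop_log_dir@, and @hadoop_pid_dir@ tokens are template
placeholders that something outside this diff is expected to fill in. A minimal
sketch of one way to apply the patch and substitute them, assuming plain
sed-style substitution at install time (the patch file name and the example
paths below are assumptions, not part of the diff):

    # Apply the diff to the stock config (the patch file name is hypothetical).
    patch conf/hadoop-env.sh < hadoop-env.sh.patch

    # Fill in the placeholders in place (GNU sed). The paths are example
    # values for a typical Linux install; adjust them per host.
    sed -i \
        -e 's|@java_home@|/usr/lib/jvm/java-6-openjdk|' \
        -e 's|@hadoop_log_dir@|/var/log/hadoop|' \
        -e 's|@hadoop_pid_dir@|/var/run/hadoop|' \
        conf/hadoop-env.sh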