I am trying to set hadoop in single-cluster node. And I need to create tables in hive and hbase inorder to handle the tables using c#.
I have cygwin,hadoop-1.2.1 and hive-1.1.0 on windows 7 32bit.
Running hadoop, it gives "Warning: $HADOOP_HOME is deprecated." still it works!!
But when starting hive (bin/hive) on cygwin gives message "Missing Hive CLI Jar" and exit.
My configuration settings:
hadoop-env.sh
export JAVA_HOME="C:\\Java"
# Extra Java CLASSPATH elements. Optional.
# export HADOOP_CLASSPATH=
export HADOOP_CLASSPATH=$HIVE_HOME/conf:$HIVE_HOME/lib
# The maximum amount of heap to use, in MB. Default is 1000.
# export HADOOP_HEAPSIZE=2000
# Extra Java runtime options. Empty by default.
# export HADOOP_OPTS=-server
# Command specific options appended to HADOOP_OPTS when specified
export HADOOP_NAMENODE_OPTS="-Dcom.sun.management.jmxremote $HADOOP_NAMENODE_OPTS"
export HADOOP_SECONDARYNAMENODE_OPTS="-Dcom.sun.management.jmxremote $HADOOP_SECONDARYNAMENODE_OPTS"
export HADOOP_DATANODE_OPTS="-Dcom.sun.management.jmxremote $HADOOP_DATANODE_OPTS"
export HADOOP_BALANCER_OPTS="-Dcom.sun.management.jmxremote $HADOOP_BALANCER_OPTS"
export HADOOP_JOBTRACKER_OPTS="-Dcom.sun.management.jmxremote $HADOOP_JOBTRACKER_OPTS"
/etc/bash.bashrc
#environment variables
export JAVA_HOME='c:\Java'
export CYGWIN_HOME='c:\cygwin'
export HADOOP_HOME='c:\cygwin\home\Administrator\hadoop-1.2.1'
export HADOOP_MAPRED_HOME=$HADOOP_HOME
export HADOOP_COMMON_HOME=$HADOOP_HOME
export HADOOP_HDFS_HOME=$HADOOP_HOME
export YARN_HOME=$HADOOP_HOME
export HADOOP_COMMON_LIB=$HADOOP_HOME/lib
export HIVE_HOME='c:\cygwin\home\Administrator\hadoop-1.2.1\apache-hive-1.1.0-bin'
export DERBY_HOME='c:\cygwin\home\Administrator\hadoop-1.2.1\db-derby-10.4.2.0-bin'
export SQOOP_HOME='c:\cygwin\home\Administrator\hadoop-1.2.1\sqoop-1.4.5.bin__hadoop-1.0.0'
export CLASSPATH=$CLASSPATH:C:\cygwin\home\Administrator\hadoop-1.2.1\lib\*:.
export CLASSPATH=$CLASSPATH:C:\cygwin\home\Administrator\hadoop-1.2.1\apache-hive-1.1.0-bin\lib\*:.
export PATH=$JAVA_HOME\\bin:$CYGWIN_HOME\\bin:$HADOOP_HOME\\bin:$HIVE_HOME\\bin:$SQOOP_HOME\\bin:$DERBY_HOME\\bin
hive-env.sh
# Set HADOOP_HOME to point to a specific hadoop install directory
# HADOOP_HOME=${bin}/../../hadoop
export HADOOP_HOME='C:\cygwin\home\Administrator\hadoop-1.2.1'
# Hive Configuration Directory can be controlled by:
export HIVE_CONF_DIR='c:\cygwin\home\Administrator\hadoop-1.2.1\apache-hive-1.1.0-bin\conf'
# Folder containing extra ibraries required for hive compilation/execution can be controlled by:
# export HIVE_AUX_JARS_PATH=
mapred-site.xml
<configuration>
<property>
<name>mapred.job.tracker</name>
<value>localhost:50001</value>
</property>
</configuration>
core-site.xml
<configuration>
<property>
<name>fs.default.name</name>
<value>hdfs://localhost:50000</value>
</property>
<property>
<name>hadoop.tmp.dir</name>
<value>/cygwin/home/Administrator/hadoop/</value>
<description>A base for other temporary directories.</description>
</property>
</configuration>
hdfs-site.xml
<configuration>
<property>
<name>dfs.data.dir</name>
<value>/home/Administrator/hadoop/datanode</value>
</property>
<property>
<name>dfs.name.dir</name>
<value>/home/Administrator/hadoop/namenode</value>
</property>
<property>
<name>dfs.permissions</name>
<value>false</value>
<description>
If "true", enable permission checking in HDFS.
If "false", permission checking is turned off, but all other behaviour is unchanged. Switching from one parameter value to the other does not change the mode, owner, or group of files or directories.
</description>
</property>
<property>
<name>dfs.replication</name>
<value>1</value>
<description>Default block replication.
The actual number of replications can be specified when the file is created. The default is used if replication is not specified in create time.
</description>
</property>
</configuration>
Is there any changes to be made to the settings and what else to be done? Please help me a way out.
Thanks in advance