My favorites | Sign in
Project Home Downloads Wiki Issues Source
Repository:
Checkout   Browse   Changes   Clones  
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
#!/bin/bash
# hdenv - configure the current shell for talking to a Hadoop cluster.
# Intended to be sourced (so the exports/aliases land in the caller's
# shell), but also tolerates direct execution.
#
# Tunables (honored if already set in the environment):
#   HADOOP_VERSION  Hadoop client version            (default: 0.20.1)
#   HADOOP_HOME     Hadoop installation directory
#   HD_LOCAL_CONF   dir holding clusters.csv / lastcluster.env (default: ~/.hdconf)
#   WEB_LAUNCH_CMD  command used to open web UIs     (default: open)

# Detect sourcing robustly: BASH_SOURCE differs from $0 whenever this file
# is sourced (the old '$0 = -bash' check only caught interactive bash).
sourced=0
if [ "${BASH_SOURCE[0]}" != "$0" ]; then
  sourced=1
fi

# ${VAR:-default} keeps any pre-set (non-empty) value, like the old
# '[ -z $VAR ]' guards, but is safe against word-splitting.
export HADOOP_VERSION="${HADOOP_VERSION:-0.20.1}"
export HADOOP_HOME="${HADOOP_HOME:-/Users/jduprey/Dropbox/Java/Hadoop/hadoop-${HADOOP_VERSION}}"
export HD_LOCAL_CONF="${HD_LOCAL_CONF:-$HOME/.hdconf}"
export WEB_LAUNCH_CMD="${WEB_LAUNCH_CMD:-open}"

export PATH="$HADOOP_HOME/bin:$PATH"

# returnOrExit STATUS
# Bail out with STATUS: 'exit' when this file was executed as a script,
# 'return' when it was sourced (so the caller's shell survives).
# NOTE: 'return' here only leaves this *function*; top-level callers
# follow it with a bare 'return' of their own to leave the sourced script.
returnOrExit()
{
  if [ "$sourced" = "0" ]; then
    exit "$1"
  else
    return "$1"
  fi
}

# Resolve which cluster to use: explicit argument, else the last cluster
# used (persisted in lastcluster.env), else "-l" to list available clusters.
clustername="$1"
if [ "$clustername" = "" ]; then
  if [ -f "$HD_LOCAL_CONF/lastcluster.env" ]; then
    . "$HD_LOCAL_CONF/lastcluster.env"   # sets hdname and friends
  else
    clustername="-l"   # no history: fall back to listing cluster names
  fi
  if [ "$hdname" != "" ]; then
    clustername="$hdname"
  fi
fi

# Sanity-check the local configuration before reading it.
if [ ! -d "$HD_LOCAL_CONF" ]; then
  echo "ERROR: $HD_LOCAL_CONF directory does not exist. Create it."
  #mkdir $HD_LOCAL_CONF
  returnOrExit 1
  return 1   # reached only when sourced: leave the sourced script too
fi
if [ ! -f "$HD_LOCAL_CONF/clusters.csv" ]; then
  echo "ERROR: Clusters configuration does not exist. Create it."
  #echo "#cluster name, tracker host:port, namenode host:port, hadoop user '-' for not set, hadoop group '-' for not set" > $HD_LOCAL_CONF/clusters.csv
  # Stop here: without clusters.csv the scan loop below can only fail.
  returnOrExit 1
  return 1
fi

# Scan clusters.csv; each non-comment line is:
#   name,tracker,namenode,user,group,webtracker,webhdfs
# On a name match, export that cluster's fields and stop.  With the
# special name "-l", print every cluster instead.
while IFS=, read -r hdname hdtracker hdnamenode hduser hdgroup hdwebtracker hdwebhdfs
do
  # Skip comment lines.
  if [ "${hdname:0:1}" = "#" ]; then continue; fi
  if [ "$clustername" = "$hdname" ]; then
    export hdname hdtracker hdnamenode hduser hdgroup hdwebtracker hdwebhdfs
    break
  fi
  if [ "$clustername" = "-l" ]; then
    echo "$hdname: "
    echo " tracker: $hdtracker "
    echo " web: $hdwebtracker "
    echo " namenode: $hdnamenode "
    echo " web: $hdwebhdfs "
    echo " user: $hduser "
    echo " group: $hdgroup"
  fi
done < "$HD_LOCAL_CONF/clusters.csv"

# "-l" (explicit or fallback) only lists clusters; stop here.
if [ "$clustername" = "-l" ]; then
  echo "Pick a valid cluster name from the list."
  returnOrExit 1
  return 1
fi

# Refuse to continue without a matching cluster definition.
if [ "$hdname" = "" ]; then
  echo "Pick a valid cluster name."
  returnOrExit 1
  return 1
fi

# Remember the chosen cluster so the next invocation with no argument
# reuses it.  One grouped redirect replaces the old echo-per-line appends;
# the leading blank line matches the previous file format.
if [ "$hdname" != "" ]; then
  {
    echo
    echo "export hdname=$hdname"
    echo "export hdtracker=$hdtracker"
    echo "export hdwebtracker=$hdwebtracker"
    echo "export hdwebhdfs=$hdwebhdfs"
    echo "export hdnamenode=$hdnamenode"
    echo "export hduser=$hduser"
    echo "export hdgroup=$hdgroup"
  } > "$HD_LOCAL_CONF/lastcluster.env"
fi

# Report the selection and build the option string shared by the hd* tools.
echo "Hadoop client $HADOOP_VERSION, using cluster: $hdname"

# Pass an explicit user/group to Hadoop unless the cluster entry left the
# user unset ('-' means "not set" in clusters.csv).
export hdusergroup=""
if [ -n "$hduser" ] && [ "$hduser" != "-" ]; then
  export hdusergroup="-Dhadoop.job.ugi=$hduser,$hdgroup"
fi

# hdnamenode must already carry its protocol prefix (file:/// or hdfs://).
export hdopts="$hdusergroup -Dfs.default.name=$hdnamenode -Dmapred.job.tracker=$hdtracker"

# Convenience aliases.  hdcluster/hdweb/hdwebf capture the current
# cluster's values at definition time; hdwhich expands at use time.
alias hdalias="alias | grep hd"
alias hdenv="hdenv.sh"
alias hdjar="hdjar.sh"
alias hdfs="hdfs.sh"
alias hdjob="hdjob.sh"
alias hdcluster="echo $hdname"
alias hdclusters="hdenv -l"
alias hdwhich="echo \$hdname:=\> tracker: \$hdtracker, namenode: \$hdnamenode, user: \$hduser, group: \$hdgroup"
alias hdweb="$WEB_LAUNCH_CMD $hdwebtracker"
alias hdwebf="$WEB_LAUNCH_CMD $hdwebhdfs"

###############################################################################
## Bash Autocompletion for HDFS
# hdfs(1) completion
# taken from: http://blog.rapleaf.com/dev/2009/11/17/command-line-auto-completion-for-hadoop-dfs-commands/
# have CMD
# Set the global variable 'have' to "yes" when CMD is resolvable (also
# searching the sbin directories); otherwise leave 'have' unset.
have()
{
  unset -v have
  PATH="$PATH:/sbin:/usr/sbin:/usr/local/sbin" type "$1" &>/dev/null &&
  have="yes"
}
# Install completion for the hdfs alias only when hadoop is available.
have hadoop &&
_hdfs()
{
  local cur prev

  COMPREPLY=()
  cur=${COMP_WORDS[COMP_CWORD]}
  prev=${COMP_WORDS[COMP_CWORD-1]}

  # First word after "hdfs": offer the dfs subcommands.
  if [[ "$prev" == hdfs ]]; then
    COMPREPLY=( $( compgen -W '-ls -lsr -du -dus -count -mv -cp -rm \
      -rmr -expunge -put -copyFromLocal -moveToLocal -mkdir -setrep \
      -touchz -test -stat -tail -chmod -chown -chgrp -help' -- $cur ) )
  fi

  # Path-taking subcommands: complete against paths reported by hdfs -ls.
  # BUGFIX: stderr was sent to '2>-', which creates a file literally named
  # "-" in the cwd; 2>/dev/null is the correct way to discard it.
  if [[ "$prev" == -ls ]] || [[ "$prev" == -lsr ]] || \
     [[ "$prev" == -du ]] || [[ "$prev" == -dus ]] || \
     [[ "$prev" == -cat ]] || [[ "$prev" == -mkdir ]] || \
     [[ "$prev" == -put ]] || [[ "$prev" == -rm ]] || \
     [[ "$prev" == -rmr ]] || [[ "$prev" == -tail ]] || \
     [[ "$prev" == -cp ]]; then
    if [[ -z "$cur" ]]; then
      COMPREPLY=( $( compgen -W "$( hdfs -ls / 2>/dev/null | grep -v '^Found' | awk '{print $8}' )" -- "$cur" ) )
    elif [[ $( echo "$cur" | grep '/$' ) ]]; then
      COMPREPLY=( $( compgen -W "$( hdfs -ls "$cur" 2>/dev/null | grep -v '^Found' | awk '{print $8}' )" -- "$cur" ) )
    else
      COMPREPLY=( $( compgen -W "$( hdfs -ls "$cur"* 2>/dev/null | grep -v '^Found' | awk '{print $8}' )" -- "$cur" ) )
    fi
  fi
} &&
complete -F _hdfs hdfs
unset have


Change log

1594ad0f9f92 by john.duprey <john.duprey> on May 12, 2011   Diff
hdenv now expects cluster definitions in
clusters.csv to include the protocol in
the namenode - e.g. file:/// or hdfs://.
Go to: 
Sign in to write a code review

Older revisions

eb04362fff0e by john.duprey <john.duprey> on Feb 8, 2011   Diff
Updated how hdnamenode and hdfs are
set up.  Now assumes hdnamenode has an
hdfs:// or file:// prefix.  CAUTION:
Requires a change to the clusters.csv
hdnamenode field.
d9f67e0bc0d1 by John Duprey <John.Duprey> on Jan 19, 2011   Diff
Removed quotes around "hadoop.job.ugi"
value which seemed to cause problems.
d1870a976e5d by John Duprey <John.Duprey> on Aug 5, 2010   Diff
Added option to kill job on control C
in doJob.sh.  Creating supporting
hdjob.sh command.
All revisions of this file

File info

Size: 4903 bytes, 167 lines
Powered by Google Project Hosting