1#!/bin/sh
2
3tar -zxvf hpcc-1.5.0.tar.gz
4cd hpcc-1.5.0
5
6# Find MPI To Use
7if [ ! "X$MPI_PATH" = "X" ] && [ -d $MPI_PATH ] && [ -d $MPI_INCLUDE ] && [ -x $MPI_CC ] && [ -e $MPI_LIBS ]
8then
9	# PRE-SET MPI
10	echo "Using pre-set environment variables."
11elif [ -d /usr/lib/openmpi/include ]
12then
13	# OpenMPI On Ubuntu
14	MPI_PATH=/usr/lib/openmpi
15	MPI_INCLUDE=/usr/lib/openmpi/include
16	MPI_LIBS=/usr/lib/openmpi/lib/libmpi.so
17	MPI_CC=/usr/bin/mpicc.openmpi
18	MPI_VERSION=`$MPI_CC -showme:version 2>&1 | grep MPI | cut -d "(" -f1  | cut -d ":" -f2`
19elif [ -d /usr/lib/x86_64-linux-gnu/openmpi/ ] && [ -d /usr/include/openmpi/ ]
20then
21        # OpenMPI On Debian
22        MPI_PATH=/usr/lib/x86_64-linux-gnu/openmpi
23        MPI_INCLUDE=/usr/include/openmpi/
24        MPI_LIBS=/usr/lib/x86_64-linux-gnu/openmpi/lib/libmpi.so
25        MPI_CC=/usr/bin/mpicc
26        MPI_VERSION=`$MPI_CC -showme:version 2>&1 | grep MPI | cut -d "(" -f1  | cut -d ":" -f2`
27elif [ -d /usr/lib/x86_64-linux-gnu/openmpi/ ] && [ -d /usr/lib/x86_64-linux-gnu/openmpi/include/ ]
28then
29        # OpenMPI On Newer Ubuntu
30        MPI_PATH=/usr/lib/x86_64-linux-gnu/openmpi
31        MPI_INCLUDE=/usr/lib/x86_64-linux-gnu/openmpi/include/
32        MPI_LIBS=/usr/lib/x86_64-linux-gnu/openmpi/lib/libmpi.so
33        MPI_CC=/usr/bin/mpicc
34        MPI_VERSION=`$MPI_CC -showme:version 2>&1 | grep MPI | cut -d "(" -f1  | cut -d ":" -f2`
35elif [ -d /usr/lib/aarch64-linux-gnu/openmpi/ ] && [ -d /usr/lib/aarch64-linux-gnu/openmpi/include/ ]
36then
37        # OpenMPI On Newer Ubuntu AArch64
38        MPI_PATH=/usr/lib/aarch64-linux-gnu/openmpi
39        MPI_INCLUDE=/usr/lib/aarch64-linux-gnu/openmpi/include/
40        MPI_LIBS=/usr/lib/aarch64-linux-gnu/openmpi/lib/libmpi.so
41        MPI_CC=/usr/bin/mpicc
42        MPI_VERSION=`$MPI_CC -showme:version 2>&1 | grep MPI | cut -d "(" -f1  | cut -d ":" -f2`
43elif [ -d /usr/lib64/openmpi ] && [ -x /usr/bin/mpicc ]
44then
45	# OpenMPI On Clear Linux
46	MPI_PATH=/usr/lib64/openmpi
47	MPI_INCLUDE=/usr/include/
48	MPI_LIBS=/usr/lib64/libmpi.so
49	MPI_CC=/usr/bin/mpicc
50	MPI_VERSION=`$MPI_CC -showme:version 2>&1 | grep MPI | cut -d "(" -f1  | cut -d ":" -f2`
51elif [ -d /usr/lib64/openmpi ] && [ -x /usr/lib64/openmpi/bin/mpicc ]
52then
53	# OpenMPI On RHEL
54	MPI_PATH=/usr/lib64/openmpi
55	MPI_INCLUDE=/usr/include/openmpi-x86_64/
56	MPI_LIBS=/usr/lib64/openmpi/lib/libmpi.so
57	MPI_CC=/usr/lib64/openmpi/bin/mpicc
58	MPI_VERSION=`$MPI_CC -showme:version 2>&1 | grep MPI | cut -d "(" -f1  | cut -d ":" -f2`
59elif [ -d /usr/lib/mpich/include ] && [ -x /usr/bin/mpicc.mpich ]
60then
61	# MPICH
62	MPI_PATH=/usr/lib/mpich
63	MPI_INCLUDE=/usr/lib/mpich/include
64	MPI_LIBS=/usr/lib/mpich/lib/libmpich.so
65	MPI_CC=/usr/bin/mpicc.mpich
66	MPI_VERSION=`$MPI_CC -v 2>&1 | grep "MPICH version"`
67elif [ -d /usr/lib/mpich/include ]
68then
69	# MPICH
70	MPI_PATH=/usr/lib/mpich
71	MPI_INCLUDE=/usr/lib/mpich/include
72	MPI_LIBS=/usr/lib/libmpich.so.1.0
73	MPI_CC=/usr/lib/mpich/bin/mpicc.mpich
74	MPI_VERSION=`$MPI_CC -v 2>&1 | grep "MPICH version"`
75elif [ -d /usr/include/mpich2 ]
76then
77	# MPICH2
78	MPI_PATH=/usr/include/mpich2
79	MPI_INCLUDE=/usr/include/mpich2
80	MPI_LIBS=/usr/lib/mpich2/lib/libmpich.so
81	MPI_CC=/usr/bin/mpicc.mpich2
82	MPI_VERSION=`$MPI_CC -v 2>&1 | grep "MPICH2 version"`
83elif [ -d /usr/include/mpich2-x86_64 ]
84then
85	# MPICH2
86	MPI_PATH=/usr/include/mpich2-x86_64
87	MPI_INCLUDE=/usr/include/mpich2-x86_64
88	MPI_LIBS=/usr/lib64/mpich2/lib/libmpich.so
89	MPI_CC=/usr/bin/mpicc
90	MPI_VERSION=`$MPI_CC -v 2>&1 | grep "MPICH2 version"`
91fi
92
93# Find Linear Algebra Package To Use
94if [ ! "X$LA_PATH" = "X" ] && [ -d $LA_PATH ] && [ -d $LA_INCLUDE ] && [ -e $LA_LIBS ]
95then
96	# PRE-SET MPI
97	echo "Using pre-set environment variables."
98elif [ -d /usr/lib/libblas ]
99then
100	# libblas
101	LA_PATH=/usr/lib
102	LA_INCLUDE=/usr/include
103	LA_LIBS="-lblas"
104	LA_VERSION="BLAS"
105elif [ -d /usr/lib/openblas-base ]
106then
107	# OpenBLAS
108	LA_PATH=/usr/lib/openblas-base
109	LA_INCLUDE=/usr/include
110	LA_LIBS=/usr/lib/openblas-base/libopenblas.so.0
111	LA_VERSION="OpenBLAS"
112elif [ -d /usr/lib/atlas-base ]
113then
114	# ATLAS
115	LA_PATH=/usr/lib/atlas-base
116	LA_INCLUDE=/usr/include
117	LA_LIBS="-llapack -lf77blas -lcblas -latlas"
118	LA_VERSION="ATLAS"
119elif [ -d /usr/lib64/atlas ]
120then
121	# ATLAS
122	LA_PATH=/usr/lib64/atlas
123	LA_INCLUDE=/usr/include
124	LA_LIBS="-L$LA_PATH -lblas"
125	LA_VERSION="ATLAS"
126elif [ -d /usr/lib/x86_64-linux-gnu/atlas ]
127then
128	# ATLAS on Ubuntu
129	LA_PATH=/usr/lib/x86_64-linux-gnu/atlas
130	LA_INCLUDE=/usr/include/x86_64-linux-gnu/
131	LA_LIBS="-L$LA_PATH -lblas"
132	LA_VERSION="ATLAS"
133elif [ -d /usr/lib/x86_64-linux-gnu/blas ]
134then
135	# OpenBLAS on Ubuntu
136	LA_PATH=/usr/lib/x86_64-linux-gnu/blas
137	LA_INCLUDE=/usr/include/x86_64-linux-gnu/
138	LA_LIBS="-L$LA_PATH -lblas"
139	LA_VERSION="OpenBLAS"
140fi
141
142if [ ! "X$MPI_VERSION" = "X" ]
143then
144	VERSION_INFO=$MPI_VERSION
145	if [ ! "X$LA_VERSION" = "X" ]
146	then
147		VERSION_INFO="$LA_VERSION + $VERSION_INFO"
148	fi
149
150	echo $VERSION_INFO > ~/install-footnote
151fi
152
153if [ "X$CFLAGS_OVERRIDE" = "X" ]
154then
155          CFLAGS="$CFLAGS"
156else
157          CFLAGS="$CFLAGS_OVERRIDE"
158fi
159
160if [ "X$MPI_LD" = "X" ]
161then
162	MPI_LD=$MPI_CC
163fi
164
165# Make.pts generation
166echo "
167SHELL        = /bin/sh
168CD           = cd
169CP           = cp
170LN_S         = ln -s
171MKDIR        = mkdir
172RM           = /bin/rm -f
173TOUCH        = touch
174ARCH         = \$(arch)
175TOPdir       = ../../..
176INCdir       = \$(TOPdir)/include
177BINdir       = \$(TOPdir)/bin/\$(ARCH)
178LIBdir       = \$(TOPdir)/lib/\$(ARCH)
179HPLlib       = \$(LIBdir)/libhpl.a
180
181# MPI
182
183MPdir        = $MPI_PATH
184MPinc        = -I$MPI_INCLUDE
185MPlib        = $MPI_LIBS
186
187# BLAS or VSIPL
188
189LAdir        = $LA_PATH
190LAinc        = -I$LA_INCLUDE
191LAlib        = $LA_LIBS
192
193# F77 / C interface
194
195F2CDEFS      =
196
197# HPL includes / libraries / specifics
198
199HPL_INCLUDES = -I\$(INCdir) -I\$(INCdir)/\$(ARCH) \$(LAinc) \$(MPinc)
200HPL_LIBS     = \$(HPLlib) \$(LAlib) \$(MPlib) -lm
201#HPL_OPTS     = -DHPL_CALL_CBLAS
202HPL_DEFS     = \$(F2CDEFS) \$(HPL_OPTS) \$(HPL_INCLUDES)
203CC           = $MPI_CC
204CCNOOPT      = \$(HPL_DEFS)
205CCFLAGS      = \$(HPL_DEFS) -fomit-frame-pointer $CFLAGS -funroll-loops
206LINKER       = $MPI_LD
207LINKFLAGS    = $LDFLAGS
208ARCHIVER     = ar
209ARFLAGS      = r
210RANLIB       = echo
211" > hpl/Make.pts
212
213cd hpl/
214make arch=pts
215cd ..
216make arch=pts
217echo $? > ~/install-exit-status
218
219cd ~
220echo "#!/bin/sh
221cd hpcc-1.5.0
222
223if [ \"X\$MPI_NUM_THREADS\" = \"X\" ]
224then
225	MPI_NUM_THREADS=\$NUM_CPU_PHYSICAL_CORES
226fi
227
228if [ ! \"X\$HOSTFILE\" = \"X\" ] && [ -f \$HOSTFILE ]
229then
230	HOSTFILE=\"--hostfile \$HOSTFILE\"
231elif [ -f /etc/hostfile ]
232then
233	HOSTFILE=\"--hostfile /etc/hostfile\"
234fi
235
236# HPL.dat generation
237# http://pic.dhe.ibm.com/infocenter/lnxinfo/v3r0m0/index.jsp?topic=%2Fliaai.hpctune%2Fbaselinehpcc_gccatlas.htm
238
239PQ=0
240P=\$(echo \"scale=0;sqrt(\$MPI_NUM_THREADS)\" |bc -l)
241Q=\$P
242PQ=\$((\$P*\$Q))
243
244while [ \$PQ -ne \$MPI_NUM_THREADS ]; do
245    Q=\$((\$MPI_NUM_THREADS/\$P))
246    PQ=\$((\$P*\$Q))
247    if [ \$PQ -ne \$MPI_NUM_THREADS ] && [ \$P -gt 1 ]; then P=\$((\$P-1)); fi
248done
249
250if [ \"X\$N\" = \"X\" ] || [ \"X\$NB\" = \"X\" ]
251then
252	# SYS_MEMORY * about .62% of that, go from MB to bytes and divide by 8
253	N=\$(echo \"scale=0;sqrt(\${SYS_MEMORY}*0.62*1048576/8)\" |bc -l)
254	NB=\$((256 - 256 % \$MPI_NUM_THREADS))
255	N=\$((\$N - \$N % \$NB))
256fi
257
258echo \"HPLinpack benchmark input file
259Innovative Computing Laboratory, University of Tennessee
260HPL.out      output file name (if any)
2616            device out (6=stdout,7=stderr,file)
2621            # of problems sizes (N)
263\$N
2641            # of NBs
265\$NB          NBs
2660            PMAP process mapping (0=Row-,1=Column-major)
2671            # of process grids (P x Q)
268\$P           Ps
269\$Q           Qs
27016.0         threshold
2711            # of panel fact
2722            PFACTs (0=left, 1=Crout, 2=Right)
2731            # of recursive stopping criterium
2744            NBMINs (>= 1)
2751            # of panels in recursion
2762            NDIVs
2771            # of recursive panel fact.
2782            RFACTs (0=left, 1=Crout, 2=Right)
2791            # of broadcast
2801            BCASTs (0=1rg,1=1rM,2=2rg,3=2rM,4=Lng,5=LnM)
2811            # of lookahead depth
2820            DEPTHs (>=0)
2831            SWAP (0=bin-exch,1=long,2=mix)
28464           swapping threshold
2850            L1 in (0=transposed,1=no-transposed) form
2860            U  in (0=transposed,1=no-transposed) form
2871            Equilibration (0=no,1=yes)
2888            memory alignment in double (> 0)
289##### This line (no. 32) is ignored (it serves as a separator). ######
2900                      		Number of additional problem sizes for PTRANS
2911200 10000 30000        	values of N
2920                       	number of additional blocking sizes for PTRANS
29340 9 8 13 13 20 16 32 64       	values of NB
294\" > HPL.dat
295cp HPL.dat hpccinf.txt
296
297PATH=\$PATH:$MPI_PATH/bin
298LD_PRELOAD=$MPI_LIBS mpirun --allow-run-as-root -np \$MPI_NUM_THREADS \$HOSTFILE ./hpcc
299echo \$? > ~/test-exit-status
300
301cat hpccoutf.txt > \$LOG_FILE" > hpcc
302chmod +x hpcc
303