#!/bin/sh
# Install/build script for HPCC (HPC Challenge) 1.5.0.
#
# Steps:
#   1. Extract the bundled source tarball.
#   2. Detect an MPI stack (Open MPI / MPICH / MPICH2 across common distro
#      layouts), honoring pre-set MPI_* environment variables.
#   3. Detect a linear-algebra package (BLAS / OpenBLAS / ATLAS), honoring
#      pre-set LA_* environment variables.
#   4. Generate the HPL arch file (hpl/Make.pts) and build with arch=pts.
#   5. Emit a ~/hpcc runner script that generates HPL.dat and launches mpirun.
#
# Overridable environment: MPI_PATH, MPI_INCLUDE, MPI_LIBS, MPI_CC, MPI_LD,
# LA_PATH, LA_INCLUDE, LA_LIBS, CFLAGS_OVERRIDE, LDFLAGS.
# Outputs: ~/install-footnote, ~/install-exit-status, ~/hpcc.

tar -zxvf hpcc-1.5.0.tar.gz
# Fail fast if extraction did not produce the source tree; previously the
# script carried on and ran the build in whatever directory it was in.
cd hpcc-1.5.0 || exit 1

# Find MPI To Use
#
# All test operands are quoted: with the old unquoted form, an unset variable
# made e.g. "[ -d $MPI_INCLUDE ]" collapse to the one-argument test "[ -d ]",
# which is TRUE, so a half-configured environment was accepted as pre-set.
if [ ! "X$MPI_PATH" = "X" ] && [ -d "$MPI_PATH" ] && [ -d "$MPI_INCLUDE" ] && [ -x "$MPI_CC" ] && [ -e "$MPI_LIBS" ]
then
	# PRE-SET MPI
	echo "Using pre-set environment variables."
elif [ -d /usr/lib/openmpi/include ]
then
	# OpenMPI On Ubuntu
	MPI_PATH=/usr/lib/openmpi
	MPI_INCLUDE=/usr/lib/openmpi/include
	MPI_LIBS=/usr/lib/openmpi/lib/libmpi.so
	MPI_CC=/usr/bin/mpicc.openmpi
	MPI_VERSION=$($MPI_CC -showme:version 2>&1 | grep MPI | cut -d "(" -f1 | cut -d ":" -f2)
elif [ -d /usr/lib/x86_64-linux-gnu/openmpi/ ] && [ -d /usr/include/openmpi/ ]
then
	# OpenMPI On Debian
	MPI_PATH=/usr/lib/x86_64-linux-gnu/openmpi
	MPI_INCLUDE=/usr/include/openmpi/
	MPI_LIBS=/usr/lib/x86_64-linux-gnu/openmpi/lib/libmpi.so
	MPI_CC=/usr/bin/mpicc
	MPI_VERSION=$($MPI_CC -showme:version 2>&1 | grep MPI | cut -d "(" -f1 | cut -d ":" -f2)
elif [ -d /usr/lib/x86_64-linux-gnu/openmpi/ ] && [ -d /usr/lib/x86_64-linux-gnu/openmpi/include/ ]
then
	# OpenMPI On Newer Ubuntu
	MPI_PATH=/usr/lib/x86_64-linux-gnu/openmpi
	MPI_INCLUDE=/usr/lib/x86_64-linux-gnu/openmpi/include/
	MPI_LIBS=/usr/lib/x86_64-linux-gnu/openmpi/lib/libmpi.so
	MPI_CC=/usr/bin/mpicc
	MPI_VERSION=$($MPI_CC -showme:version 2>&1 | grep MPI | cut -d "(" -f1 | cut -d ":" -f2)
elif [ -d /usr/lib64/openmpi ] && [ -x /usr/bin/mpicc ]
then
	# OpenMPI On Clear Linux
	MPI_PATH=/usr/lib64/openmpi
	MPI_INCLUDE=/usr/include/
	MPI_LIBS=/usr/lib64/libmpi.so
	MPI_CC=/usr/bin/mpicc
	MPI_VERSION=$($MPI_CC -showme:version 2>&1 | grep MPI | cut -d "(" -f1 | cut -d ":" -f2)
elif [ -d /usr/lib64/openmpi ] && [ -x /usr/lib64/openmpi/bin/mpicc ]
then
	# OpenMPI On RHEL
	MPI_PATH=/usr/lib64/openmpi
	MPI_INCLUDE=/usr/include/openmpi-x86_64/
	MPI_LIBS=/usr/lib64/openmpi/lib/libmpi.so
	MPI_CC=/usr/lib64/openmpi/bin/mpicc
	MPI_VERSION=$($MPI_CC -showme:version 2>&1 | grep MPI | cut -d "(" -f1 | cut -d ":" -f2)
elif [ -d /usr/lib/mpich/include ] && [ -x /usr/bin/mpicc.mpich ]
then
	# MPICH
	MPI_PATH=/usr/lib/mpich
	MPI_INCLUDE=/usr/lib/mpich/include
	MPI_LIBS=/usr/lib/mpich/lib/libmpich.so
	MPI_CC=/usr/bin/mpicc.mpich
	MPI_VERSION=$($MPI_CC -v 2>&1 | grep "MPICH version")
elif [ -d /usr/lib/mpich/include ]
then
	# MPICH (older layout; compiler wrapper lives under the MPICH prefix)
	MPI_PATH=/usr/lib/mpich
	MPI_INCLUDE=/usr/lib/mpich/include
	MPI_LIBS=/usr/lib/libmpich.so.1.0
	MPI_CC=/usr/lib/mpich/bin/mpicc.mpich
	MPI_VERSION=$($MPI_CC -v 2>&1 | grep "MPICH version")
elif [ -d /usr/include/mpich2 ]
then
	# MPICH2
	MPI_PATH=/usr/include/mpich2
	MPI_INCLUDE=/usr/include/mpich2
	MPI_LIBS=/usr/lib/mpich2/lib/libmpich.so
	MPI_CC=/usr/bin/mpicc.mpich2
	MPI_VERSION=$($MPI_CC -v 2>&1 | grep "MPICH2 version")
elif [ -d /usr/include/mpich2-x86_64 ]
then
	# MPICH2 (x86_64 multilib layout)
	MPI_PATH=/usr/include/mpich2-x86_64
	MPI_INCLUDE=/usr/include/mpich2-x86_64
	MPI_LIBS=/usr/lib64/mpich2/lib/libmpich.so
	MPI_CC=/usr/bin/mpicc
	MPI_VERSION=$($MPI_CC -v 2>&1 | grep "MPICH2 version")
fi

# Find Linear Algebra Package To Use
# Same quoting fix as the MPI probe above: unset LA_INCLUDE/LA_LIBS must not
# satisfy the one-argument test form.
if [ ! "X$LA_PATH" = "X" ] && [ -d "$LA_PATH" ] && [ -d "$LA_INCLUDE" ] && [ -e "$LA_LIBS" ]
then
	# PRE-SET LINEAR ALGEBRA PACKAGE
	echo "Using pre-set environment variables."
elif [ -d /usr/lib/libblas ]
then
	# libblas
	LA_PATH=/usr/lib
	LA_INCLUDE=/usr/include
	LA_LIBS="-lblas"
	LA_VERSION="BLAS"
elif [ -d /usr/lib/openblas-base ]
then
	# OpenBLAS
	LA_PATH=/usr/lib/openblas-base
	LA_INCLUDE=/usr/include
	LA_LIBS=/usr/lib/openblas-base/libopenblas.so.0
	LA_VERSION="OpenBLAS"
elif [ -d /usr/lib/atlas-base ]
then
	# ATLAS
	LA_PATH=/usr/lib/atlas-base
	LA_INCLUDE=/usr/include
	LA_LIBS="-llapack -lf77blas -lcblas -latlas"
	LA_VERSION="ATLAS"
elif [ -d /usr/lib64/atlas ]
then
	# ATLAS
	LA_PATH=/usr/lib64/atlas
	LA_INCLUDE=/usr/include
	LA_LIBS="-L$LA_PATH -lblas"
	LA_VERSION="ATLAS"
elif [ -d /usr/lib/x86_64-linux-gnu/atlas ]
then
	# ATLAS on Ubuntu
	LA_PATH=/usr/lib/x86_64-linux-gnu/atlas
	LA_INCLUDE=/usr/include/x86_64-linux-gnu/
	LA_LIBS="-L$LA_PATH -lblas"
	LA_VERSION="ATLAS"
elif [ -d /usr/lib/x86_64-linux-gnu/blas ]
then
	# OpenBLAS on Ubuntu
	LA_PATH=/usr/lib/x86_64-linux-gnu/blas
	LA_INCLUDE=/usr/include/x86_64-linux-gnu/
	LA_LIBS="-L$LA_PATH -lblas"
	LA_VERSION="OpenBLAS"
fi

# Record the detected stack(s) for the test-suite footnote.
if [ ! "X$MPI_VERSION" = "X" ]
then
	VERSION_INFO=$MPI_VERSION
	if [ ! "X$LA_VERSION" = "X" ]
	then
		VERSION_INFO="$LA_VERSION + $VERSION_INFO"
	fi

	# Quoted so multi-word version strings keep their exact spacing
	# (the old unquoted echo word-split and glob-expanded the value).
	echo "$VERSION_INFO" > ~/install-footnote
fi

# Allow CFLAGS to be overridden wholesale.
if [ ! "X$CFLAGS_OVERRIDE" = "X" ]
then
	CFLAGS="$CFLAGS_OVERRIDE"
fi

# Default the linker driver to the MPI compiler wrapper.
if [ "X$MPI_LD" = "X" ]
then
	MPI_LD=$MPI_CC
fi

# Make.pts generation
# Unquoted here-doc delimiter: $MPI_*/$LA_*/$CFLAGS expand now, while the
# escaped \$(...) forms stay literal for make to expand at build time.
cat > hpl/Make.pts <<EOF

SHELL = /bin/sh
CD = cd
CP = cp
LN_S = ln -s
MKDIR = mkdir
RM = /bin/rm -f
TOUCH = touch
ARCH = \$(arch)
TOPdir = ../../..
INCdir = \$(TOPdir)/include
BINdir = \$(TOPdir)/bin/\$(ARCH)
LIBdir = \$(TOPdir)/lib/\$(ARCH)
HPLlib = \$(LIBdir)/libhpl.a

# MPI

MPdir = $MPI_PATH
MPinc = -I$MPI_INCLUDE
MPlib = $MPI_LIBS

# BLAS or VSIPL

LAdir = $LA_PATH
LAinc = -I$LA_INCLUDE
LAlib = $LA_LIBS

# F77 / C interface

F2CDEFS =

# HPL includes / libraries / specifics

HPL_INCLUDES = -I\$(INCdir) -I\$(INCdir)/\$(ARCH) \$(LAinc) \$(MPinc)
HPL_LIBS = \$(HPLlib) \$(LAlib) \$(MPlib) -lm
#HPL_OPTS = -DHPL_CALL_CBLAS
HPL_DEFS = \$(F2CDEFS) \$(HPL_OPTS) \$(HPL_INCLUDES)
CC = $MPI_CC
CCNOOPT = \$(HPL_DEFS)
CCFLAGS = \$(HPL_DEFS) -fomit-frame-pointer $CFLAGS -funroll-loops
LINKER = $MPI_LD
LINKFLAGS = $LDFLAGS
ARCHIVER = ar
ARFLAGS = r
RANLIB = echo

EOF

# Build the HPL support library first, then HPCC itself; the exit status of
# the final make is what the test suite checks.
cd hpl/ || exit 1
make arch=pts
cd ..
make arch=pts
echo $? > ~/install-exit-status

cd ~ || exit 1
# Generate the ~/hpcc runner script. Unquoted here-doc delimiter: $MPI_PATH
# and $MPI_LIBS are baked in now; every \$ below stays literal so those
# variables are evaluated when the benchmark runs, not when it installs.
cat > hpcc <<EOF
#!/bin/sh
cd hpcc-1.5.0

if [ "X\$MPI_NUM_THREADS" = "X" ]
then
	MPI_NUM_THREADS=\$NUM_CPU_PHYSICAL_CORES
fi

if [ ! "X\$HOSTFILE" = "X" ] && [ -f \$HOSTFILE ]
then
	HOSTFILE="--hostfile \$HOSTFILE"
elif [ -f /etc/hostfile ]
then
	HOSTFILE="--hostfile /etc/hostfile"
fi

# HPL.dat generation
# http://pic.dhe.ibm.com/infocenter/lnxinfo/v3r0m0/index.jsp?topic=%2Fliaai.hpctune%2Fbaselinehpcc_gccatlas.htm

# Factor MPI_NUM_THREADS into the most square P x Q process grid.
PQ=0
P=\$(echo "scale=0;sqrt(\$MPI_NUM_THREADS)" |bc -l)
Q=\$P
PQ=\$((\$P*\$Q))

while [ \$PQ -ne \$MPI_NUM_THREADS ]; do
	Q=\$((\$MPI_NUM_THREADS/\$P))
	PQ=\$((\$P*\$Q))
	if [ \$PQ -ne \$MPI_NUM_THREADS ] && [ \$P -gt 1 ]; then P=\$((\$P-1)); fi
done

if [ "X\$N" = "X" ] || [ "X\$NB" = "X" ]
then
	# SYS_MEMORY * about .62% of that, go from MB to bytes and divide by 8
	# NOTE(review): NB becomes 0 when MPI_NUM_THREADS > 256, which would
	# make the following modulo divide by zero — confirm intended range.
	N=\$(echo "scale=0;sqrt(\${SYS_MEMORY}*0.62*1048576/8)" |bc -l)
	NB=\$((256 - 256 % \$MPI_NUM_THREADS))
	N=\$((\$N - \$N % \$NB))
fi

echo "HPLinpack benchmark input file
Innovative Computing Laboratory, University of Tennessee
HPL.out output file name (if any)
6 device out (6=stdout,7=stderr,file)
1 # of problems sizes (N)
\$N
1 # of NBs
\$NB NBs
0 PMAP process mapping (0=Row-,1=Column-major)
1 # of process grids (P x Q)
\$P Ps
\$Q Qs
16.0 threshold
1 # of panel fact
2 PFACTs (0=left, 1=Crout, 2=Right)
1 # of recursive stopping criterium
4 NBMINs (>= 1)
1 # of panels in recursion
2 NDIVs
1 # of recursive panel fact.
2 RFACTs (0=left, 1=Crout, 2=Right)
1 # of broadcast
1 BCASTs (0=1rg,1=1rM,2=2rg,3=2rM,4=Lng,5=LnM)
1 # of lookahead depth
0 DEPTHs (>=0)
1 SWAP (0=bin-exch,1=long,2=mix)
64 swapping threshold
0 L1 in (0=transposed,1=no-transposed) form
0 U in (0=transposed,1=no-transposed) form
1 Equilibration (0=no,1=yes)
8 memory alignment in double (> 0)
##### This line (no. 32) is ignored (it serves as a separator). ######
0 Number of additional problem sizes for PTRANS
1200 10000 30000 values of N
0 number of additional blocking sizes for PTRANS
40 9 8 13 13 20 16 32 64 values of NB
" > HPL.dat
cp HPL.dat hpccinf.txt

PATH=\$PATH:$MPI_PATH/bin
LD_PRELOAD=$MPI_LIBS mpirun --allow-run-as-root -np \$MPI_NUM_THREADS \$HOSTFILE ./hpcc
echo \$? > ~/test-exit-status

cat hpccoutf.txt > \$LOG_FILE
EOF
chmod +x hpcc