Commit 35dcc69a authored by Philip Carns's avatar Philip Carns

Merge remote-tracking branch 'origin/master' into dev-modular

Conflicts:
	ChangeLog
	darshan-runtime/Makefile.in
	darshan-runtime/configure
	darshan-runtime/configure.in
	darshan-util/configure
	darshan-util/configure.in
parents 72380aad be28e23f
......@@ -5,7 +5,7 @@ Darshan Release Change Log
Darshan-3.0.0-pre1
=============
darshan-2.3.1-pre2
darshan-2.3.1
=============
* added documentation and example configuration files for using the -profile
or $MPICC_PROFILE hooks to add instrumentation to MPICH-based MPI
......@@ -31,12 +31,17 @@ darshan-2.3.1-pre2
- label I/O vs. meta time more clearly
- include unique file meta time in agg_perf_by_slowest calculation
* added regression test script framework in darshan-test/regression/
- workstation-static and workstation-dynamic test environments supported
- currently supported platforms include:
- Linux with static linking and generated compiler wrappers
- Linux with static linking and profiler configuration files
- Linux with dynamic linking and LD_PRELOAD
- Blue Gene/Q with static linking and profiler configuration files
* update darshan-gen-fortran.pl and darshan-gen-cxx.pl to support new library
naming conventions in MPICH 3.1.1 and higher
* update documentation to reflect known issues with some versions of MPICH
* modify darshan-runtime so that link-time instrumentation options are only used
when statically linking via Libs.private. (reported by Kalyana Chadalavada)
* Cray platforms: modify darshan-runtime so that link-time instrumentation
options are only used when statically linking via Libs.private.
(reported by Kalyana Chadalavada)
darshan-2.3.0
=============
......
......@@ -92,6 +92,9 @@ endif
# install -m 755 share/mpi-profile/darshan-cc.conf $(datarootdir)/mpi-profile/darshan-cc.conf
# install -m 755 share/mpi-profile/darshan-cxx.conf $(datarootdir)/mpi-profile/darshan-cxx.conf
# install -m 755 share/mpi-profile/darshan-f.conf $(datarootdir)/mpi-profile/darshan-f.conf
# install -m 755 share/mpi-profile/darshan-bg-cc.conf $(datarootdir)/mpi-profile/darshan-bg-cc.conf
# install -m 755 share/mpi-profile/darshan-bg-cxx.conf $(datarootdir)/mpi-profile/darshan-bg-cxx.conf
# install -m 755 share/mpi-profile/darshan-bg-f.conf $(datarootdir)/mpi-profile/darshan-bg-f.conf
# install -d $(libdir)/pkgconfig
# install -m 644 lib/pkgconfig/darshan-runtime.pc $(libdir)/pkgconfig/darshan-runtime.pc
......
......@@ -4274,7 +4274,7 @@ DARSHAN_VERSION="3.0.0-pre1"
ac_config_files="$ac_config_files Makefile darshan-mk-log-dirs.pl darshan-gen-cc.pl darshan-gen-cxx.pl darshan-gen-fortran.pl darshan-config share/craype-1.x/darshan-module share/craype-2.x/darshan-module lib/pkgconfig/darshan-runtime.pc share/mpi-profile/darshan-cc.conf share/mpi-profile/darshan-cxx.conf share/mpi-profile/darshan-f.conf"
ac_config_files="$ac_config_files Makefile darshan-mk-log-dirs.pl darshan-gen-cc.pl darshan-gen-cxx.pl darshan-gen-fortran.pl darshan-config share/craype-1.x/darshan-module share/craype-2.x/darshan-module lib/pkgconfig/darshan-runtime.pc share/mpi-profile/darshan-cc.conf share/mpi-profile/darshan-cxx.conf share/mpi-profile/darshan-f.conf share/mpi-profile/darshan-bg-cc.conf share/mpi-profile/darshan-bg-cxx.conf share/mpi-profile/darshan-bg-f.conf"
cat >confcache <<\_ACEOF
# This file is a shell script that caches the results of configure
......@@ -4980,6 +4980,9 @@ do
"share/mpi-profile/darshan-cc.conf") CONFIG_FILES="$CONFIG_FILES share/mpi-profile/darshan-cc.conf" ;;
"share/mpi-profile/darshan-cxx.conf") CONFIG_FILES="$CONFIG_FILES share/mpi-profile/darshan-cxx.conf" ;;
"share/mpi-profile/darshan-f.conf") CONFIG_FILES="$CONFIG_FILES share/mpi-profile/darshan-f.conf" ;;
"share/mpi-profile/darshan-bg-cc.conf") CONFIG_FILES="$CONFIG_FILES share/mpi-profile/darshan-bg-cc.conf" ;;
"share/mpi-profile/darshan-bg-cxx.conf") CONFIG_FILES="$CONFIG_FILES share/mpi-profile/darshan-bg-cxx.conf" ;;
"share/mpi-profile/darshan-bg-f.conf") CONFIG_FILES="$CONFIG_FILES share/mpi-profile/darshan-bg-f.conf" ;;
*) as_fn_error $? "invalid argument: \`$ac_config_target'" "$LINENO" 5;;
esac
......
......@@ -317,4 +317,7 @@ lib/pkgconfig/darshan-runtime.pc
share/mpi-profile/darshan-cc.conf
share/mpi-profile/darshan-cxx.conf
share/mpi-profile/darshan-f.conf
share/mpi-profile/darshan-bg-cc.conf
share/mpi-profile/darshan-bg-cxx.conf
share/mpi-profile/darshan-bg-f.conf
)
......@@ -151,21 +151,9 @@ hooks for existing MPI compiler scripts. Once this is done, Darshan
instrumentation is transparent; you simply compile applications using
the darshan-enabled MPI compiler scripts.
=== Using customized compiler wrapper scripts
For MPICH-based MPI libraries, such as MPICH1, MPICH2, or MVAPICH, custom
wrapper scripts can be generated automatically. The following example
illustrates how to produce wrappers for C, C++, and Fortran compilers:
----
darshan-gen-cc.pl `which mpicc` --output mpicc.darshan
darshan-gen-cxx.pl `which mpicxx` --output mpicxx.darshan
darshan-gen-fortran.pl `which mpif77` --output mpif77.darshan
darshan-gen-fortran.pl `which mpif90` --output mpif90.darshan
----
=== Using a profile configuration
[[static-prof]]
The MPICH MPI implementation supports the specification of a profiling library
configuration, which can then be used to insert Darshan instrumentation without
modifying the existing MPI compiler script. Example profiling configuration
......@@ -196,6 +184,21 @@ mpif77 -profile=$DARSHAN_PREFIX/share/mpi-profile/darshan-f <args>
mpif90 -profile=$DARSHAN_PREFIX/share/mpi-profile/darshan-f <args>
----
=== Using customized compiler wrapper scripts
[[static-wrapper]]
For MPICH-based MPI libraries, such as MPICH1, MPICH2, or MVAPICH,
custom wrapper scripts can be generated to automatically include Darshan
instrumentation. The following example illustrates how to produce
wrappers for C, C++, and Fortran compilers:
----
darshan-gen-cc.pl `which mpicc` --output mpicc.darshan
darshan-gen-cxx.pl `which mpicxx` --output mpicxx.darshan
darshan-gen-fortran.pl `which mpif77` --output mpif77.darshan
darshan-gen-fortran.pl `which mpif90` --output mpif90.darshan
----
=== Other configurations
Please see the Cray recipe in this document for instructions on
......@@ -282,10 +285,21 @@ than the login node. The `--host` argument is used to force cross-compilation
of Darshan. The `CC` variable is set to point to a stock MPI compiler.
====
Once Darshan has been installed, use the `darshan-gen-*.pl` scripts as
described earlier in this document to produce darshan-enabled MPI compilers.
This method has been widely used and tested with both the GNU and IBM XL
compilers.
Once Darshan has been installed, you can use one of the static
instrumentation methods described earlier in this document. If you
use the profiling configuration file method, then please note that the
Darshan installation includes profiling configuration files that have been
adapted specifically for the Blue Gene environment. Set the following
environment variables to enable them, and then use your normal compiler
scripts. This method is compatible with both GNU and IBM compilers.
Blue Gene profiling configuration example:
----
export MPICC_PROFILE=$DARSHAN_PREFIX/share/mpi-profile/darshan-bg-cc
export MPICXX_PROFILE=$DARSHAN_PREFIX/share/mpi-profile/darshan-bg-cxx
export MPICF77_PROFILE=$DARSHAN_PREFIX/share/mpi-profile/darshan-bg-f
export MPICF90_PROFILE=$DARSHAN_PREFIX/share/mpi-profile/darshan-bg-f
----
=== Cray platforms (XE, XC, or similar)
......
#!/bin/bash
# Example Darshan profiling configuration file for MPICH.
#
# Sourced by an MPICH compiler wrapper via -profile= or $MPICC_PROFILE.
# For static links it exports the PROFILE_PRELIB/PROFILE_POSTLIB hooks
# that MPICH uses to splice Darshan into the link line.

DARSHAN_PREFIX=@prefix@

# Check to see if the user explicitly specified dynamic linking options.
# NOTE(review): $allargs is assumed to be populated by the enclosing MPICH
# compiler wrapper -- confirm against the wrapper implementation.
echo "$CFLAGS" | grep -q -- "-dynamic"
dyncheck1=$?
echo "$LDFLAGS" | grep -q -- "-dynamic"
dyncheck2=$?
echo "${allargs[@]}" | grep -q -- "-dynamic"
dyncheck3=$?

# Skip profiling if we know that dynamic linking is in use. See the
# LD_PRELOAD method instead in that case.
if [ "$dyncheck1" -ne 0 ] && [ "$dyncheck2" -ne 0 ] && [ "$dyncheck3" -ne 0 ]; then
    # Libraries (and paths) to include before the MPI library
    export PROFILE_PRELIB=$("$DARSHAN_PREFIX"/bin/darshan-config --pre-ld-flags)
    # Libraries to include after the MPI library
    export PROFILE_POSTLIB=$("$DARSHAN_PREFIX"/bin/darshan-config --post-ld-flags)
fi

# C preprocessor arguments for any include files. For example, to add
# /usr/local/myprof/include to the include path and the library libmyprof.a
# in /usr/local/myprof/lib to the link step, you could create a conf file
# containing the lines:
# PROFILE_INCPATHS
#!/bin/bash
# Example Darshan profiling configuration file for MPICH (Blue Gene C++).
#
# Sourced by an MPICH compiler wrapper via -profile= or $MPICXX_PROFILE.
# For static links it exports the PROFILE_PRELIB/PROFILE_POSTLIB hooks
# that MPICH uses to splice Darshan into the link line.

DARSHAN_PREFIX=@prefix@

# Check to see if the user explicitly specified dynamic linking options.
# NOTE(review): $allargs is assumed to be populated by the enclosing MPICH
# compiler wrapper -- confirm against the wrapper implementation.
echo "$CFLAGS" | grep -q -- "-dynamic"
dyncheck1=$?
echo "$LDFLAGS" | grep -q -- "-dynamic"
dyncheck2=$?
echo "${allargs[@]}" | grep -q -- "-dynamic"
dyncheck3=$?

# Skip profiling if we know that dynamic linking is in use. See the
# LD_PRELOAD method instead in that case.
if [ "$dyncheck1" -ne 0 ] && [ "$dyncheck2" -ne 0 ] && [ "$dyncheck3" -ne 0 ]; then
    # NOTE: there is something fragile here. We are relying on the BG mpicxx
    # script to set a variable called $cxxlibs to the name of the libmpichcxx
    # variant that we need in order to intercept pmpi calls.
    # Libraries (and paths) to include before the MPI library
    export PROFILE_PRELIB="$cxxlibs $("$DARSHAN_PREFIX"/bin/darshan-config --pre-ld-flags)"
    # Libraries to include after the MPI library
    export PROFILE_POSTLIB=$("$DARSHAN_PREFIX"/bin/darshan-config --post-ld-flags)
fi

# C preprocessor arguments for any include files. For example, to add
# /usr/local/myprof/include to the include path and the library libmyprof.a
# in /usr/local/myprof/lib to the link step, you could create a conf file
# containing the lines:
# PROFILE_INCPATHS
#!/bin/bash
# Example Darshan profiling configuration file for MPICH (Blue Gene Fortran).
#
# Sourced by an MPICH compiler wrapper via -profile= or $MPIF77_PROFILE /
# $MPIF90_PROFILE. For static links it exports the PROFILE_PRELIB /
# PROFILE_POSTLIB hooks that MPICH uses to splice Darshan into the link line.

DARSHAN_PREFIX=@prefix@

# Check to see if the user explicitly specified dynamic linking options.
# NOTE(review): $allargs is assumed to be populated by the enclosing MPICH
# compiler wrapper -- confirm against the wrapper implementation.
echo "$CFLAGS" | grep -q -- "-dynamic"
dyncheck1=$?
echo "$LDFLAGS" | grep -q -- "-dynamic"
dyncheck2=$?
echo "${allargs[@]}" | grep -q -- "-dynamic"
dyncheck3=$?

# Try to detect the name of the libmpichf77 or libmpichf90 library needed
# for PMPI interception; these may be named with different suffixes to
# match the compiler version. (\S is a GNU grep extension.)
D_MPI_PRELIB=$(echo "$LIBS" | grep -o -- '-lmpichf\S\+-\S\+')

# Skip profiling if we know that dynamic linking is in use. See the
# LD_PRELOAD method instead in that case.
if [ "$dyncheck1" -ne 0 ] && [ "$dyncheck2" -ne 0 ] && [ "$dyncheck3" -ne 0 ]; then
    # Libraries (and paths) to include before the MPI library
    export PROFILE_PRELIB="$D_MPI_PRELIB $("$DARSHAN_PREFIX"/bin/darshan-config --pre-ld-flags)"
    # Libraries to include after the MPI library
    export PROFILE_POSTLIB=$("$DARSHAN_PREFIX"/bin/darshan-config --post-ld-flags)
fi

# C preprocessor arguments for any include files. For example, to add
# /usr/local/myprof/include to the include path and the library libmyprof.a
# in /usr/local/myprof/lib to the link step, you could create a conf file
# containing the lines:
# PROFILE_INCPATHS
#!/bin/bash
# Example Darshan profiling configuration file for MPICH (Blue Gene C).
#
# Sourced by an MPICH compiler wrapper via -profile= or $MPICC_PROFILE.
# For static links it exports the PROFILE_PRELIB/PROFILE_POSTLIB hooks
# that MPICH uses to splice Darshan into the link line.
#
# NOTE(review): the original text assigned DARSHAN_PREFIX twice and exported
# PROFILE_PRELIB/PROFILE_POSTLIB twice (apparent merge artifact); the
# redundant duplicates have been removed.

DARSHAN_PREFIX=@prefix@

# Check to see if the user explicitly specified dynamic linking options.
# NOTE(review): $allargs is assumed to be populated by the enclosing MPICH
# compiler wrapper -- confirm against the wrapper implementation.
echo "$CFLAGS" | grep -q -- "-dynamic"
dyncheck1=$?
echo "$LDFLAGS" | grep -q -- "-dynamic"
dyncheck2=$?
echo "${allargs[@]}" | grep -q -- "-dynamic"
dyncheck3=$?

# Skip profiling if we know that dynamic linking is in use. See the
# LD_PRELOAD method instead in that case.
if [ "$dyncheck1" -ne 0 ] && [ "$dyncheck2" -ne 0 ] && [ "$dyncheck3" -ne 0 ]; then
    # Libraries (and paths) to include before the MPI library
    export PROFILE_PRELIB=$("$DARSHAN_PREFIX"/bin/darshan-config --pre-ld-flags)
    # Libraries to include after the MPI library
    export PROFILE_POSTLIB=$("$DARSHAN_PREFIX"/bin/darshan-config --post-ld-flags)
fi

# C preprocessor arguments for any include files. For example, to add
# /usr/local/myprof/include to the include path and the library libmyprof.a
# in /usr/local/myprof/lib to the link step.
......
......@@ -6,15 +6,28 @@
# Darshan profiling configuration fragment for the MPICH C++ wrapper.
# NOTE(review): the original text set PROFILE_PRELIB/PROFILE_POSTLIB both
# unconditionally and again inside the dynamic-link guard (apparent merge
# artifact); only the guarded version is kept, matching the ChangeLog entry
# that link-time options apply to static linking only.
DARSHAN_PREFIX=@prefix@
# Set by configure: 1 selects the legacy libmpichcxx name (the C++ binding
# library was renamed in MPICH 3.1.1 and higher).
MPICH_LIB_OLD=@MPICH_LIB_OLD@

# Check to see if the user explicitly specified dynamic linking options.
echo "$CFLAGS" | grep -q -- "-dynamic"
dyncheck1=$?
echo "$LDFLAGS" | grep -q -- "-dynamic"
dyncheck2=$?
echo "${allargs[@]}" | grep -q -- "-dynamic"
dyncheck3=$?

# Skip profiling if we know that dynamic linking is in use. See the
# LD_PRELOAD method instead in that case.
if [ "$dyncheck1" -ne 0 ] && [ "$dyncheck2" -ne 0 ] && [ "$dyncheck3" -ne 0 ]; then
    # Libraries (and paths) to include before the MPI library
    if [ "$MPICH_LIB_OLD" -eq 1 ]; then
        export PROFILE_PRELIB="-lmpichcxx $("$DARSHAN_PREFIX"/bin/darshan-config --pre-ld-flags)"
    else
        export PROFILE_PRELIB="-lmpicxx $("$DARSHAN_PREFIX"/bin/darshan-config --pre-ld-flags)"
    fi
    # Libraries to include after the MPI library
    export PROFILE_POSTLIB=$("$DARSHAN_PREFIX"/bin/darshan-config --post-ld-flags)
fi

# C preprocessor arguments for any include files. For example, to add
# /usr/local/myprof/include to the include path and the library libmyprof.a
# in /usr/local/myprof/lib to the link step.
......
......@@ -6,15 +6,28 @@
# Darshan profiling configuration fragment for the MPICH Fortran wrappers.
# NOTE(review): the original text set PROFILE_PRELIB/PROFILE_POSTLIB both
# unconditionally and again inside the dynamic-link guard (apparent merge
# artifact); only the guarded version is kept, matching the ChangeLog entry
# that link-time options apply to static linking only.
DARSHAN_PREFIX=@prefix@
# Set by configure: 1 selects the legacy libfmpich name (the Fortran binding
# library was renamed in MPICH 3.1.1 and higher).
MPICH_LIB_OLD=@MPICH_LIB_OLD@

# Check to see if the user explicitly specified dynamic linking options.
echo "$CFLAGS" | grep -q -- "-dynamic"
dyncheck1=$?
echo "$LDFLAGS" | grep -q -- "-dynamic"
dyncheck2=$?
echo "${allargs[@]}" | grep -q -- "-dynamic"
dyncheck3=$?

# Skip profiling if we know that dynamic linking is in use. See the
# LD_PRELOAD method instead in that case.
if [ "$dyncheck1" -ne 0 ] && [ "$dyncheck2" -ne 0 ] && [ "$dyncheck3" -ne 0 ]; then
    # Libraries (and paths) to include before the MPI library
    if [ "$MPICH_LIB_OLD" -eq 1 ]; then
        export PROFILE_PRELIB="-lfmpich $("$DARSHAN_PREFIX"/bin/darshan-config --pre-ld-flags)"
    else
        export PROFILE_PRELIB="-lmpifort $("$DARSHAN_PREFIX"/bin/darshan-config --pre-ld-flags)"
    fi
    # Libraries to include after the MPI library
    export PROFILE_POSTLIB=$("$DARSHAN_PREFIX"/bin/darshan-config --post-ld-flags)
fi

# C preprocessor arguments for any include files. For example, to add
# /usr/local/myprof/include to the include path and the library libmyprof.a
# in /usr/local/myprof/lib to the link step.
......
......@@ -24,6 +24,9 @@ Notes on how to release a new version of Darshan
git tag -a darshan-2.3.1-pre2 -m 'Darshan 2.3.1-pre2'
git push origin darshan-2.3.1-pre2
- TESTING
- manually trigger jenkins test
- run regression tests on BG platform (see
darshan-test/regression/README.BG.txt)
- export the tag and tar gzip it
- easiest method is to do a fresh checkout and remove the .git
subdirectory
......
See README.txt for general instructions. This file contains notes for testing on the Blue Gene platform
(more specifically: cetus.alcf.anl.gov). This example assumes that you are using the MPICH profile conf
method to add instrumentation.
To run regression tests:
- compile and install both darshan-runtime and darshan-util in the same directory
examples:
# darshan runtime
../configure --with-mem-align=16 --with-log-path=/projects/SSSPPg/carns/darshan-logs --prefix=/home/carns/working/darshan/install-cetus --with-jobid-env=COBALT_JOBID --with-zlib=/soft/libraries/alcf/current/gcc/ZLIB --host=powerpc-bgp-linux CC=/bgsys/drivers/V1R2M2/ppc64/comm/bin/gcc/mpicc
make install
# darshan util
../configure --prefix=/home/carns/working/darshan/install
make install
- start a screen session by running "screen"
note: this is suggested because the tests may take a while to complete depending on scheduler
availability
- within the screen session, set your path to point to a stock set of MPI compiler scripts
export PATH=/bgsys/drivers/V1R2M2/ppc64/comm/bin/gcc:$PATH
- run regression tests
./run-all.sh /home/carns/working/darshan/install-cetus /projects/SSSPPg/carns/darshan-test bg-profile-conf
note: the f90 test is expected to fail due to a known problem in the profiling interface for the
F90 MPICH implementation on Mira.
#!/bin/bash
#
# Environment setup for the "bg-profile-conf" regression test platform.
#
# Every platform env script must export the following:
#   DARSHAN_CC / DARSHAN_CXX / DARSHAN_F77 / DARSHAN_F90
#       commands used to compile C / C++ / Fortran77 / Fortran90 programs
#   DARSHAN_RUNJOB
#       command that executes a job and waits for its completion
#
# A platform script may also load modules, set LD_PRELOAD, or generate
# wrapper scripts as needed by its environment.
#
# This particular platform assumes that mpicc and its language variants are
# already in the PATH and produce static executables by default; Darshan
# instrumentation is injected through the MPICH profiling-configuration
# environment variables set below.
#
# Job launch is the most involved piece: DARSHAN_RUNJOB points at a helper
# that submits a Cobalt job, waits for it to finish, and checks its status.

# Stock MPI compiler wrappers.
export DARSHAN_CC=mpicc
export DARSHAN_CXX=mpicxx
export DARSHAN_F77=mpif77
export DARSHAN_F90=mpif90

# Profiling configuration files shipped with the Darshan installation.
export MPICC_PROFILE=$DARSHAN_PATH/share/mpi-profile/darshan-bg-cc
export MPICXX_PROFILE=$DARSHAN_PATH/share/mpi-profile/darshan-bg-cxx
export MPIF77_PROFILE=$DARSHAN_PATH/share/mpi-profile/darshan-bg-f
export MPIF90_PROFILE=$DARSHAN_PATH/share/mpi-profile/darshan-bg-f

# Submit-and-wait job launcher.
export DARSHAN_RUNJOB="bg-profile-conf/runjob.sh"
#!/bin/bash
# Submit a Cobalt job that runs "$@", wait for it to finish, and exit 0/1
# according to the job's recorded exit status.
#
# Expects in the environment (set by the regression harness):
#   DARSHAN_LOGFILE, DARSHAN_DEFAULT_NPROCS, DARSHAN_TMP
# NOTE(review): the -A SSSPPg account, --mode c16, and 10-minute wall time
# are hardcoded for the ALCF Blue Gene test setup -- confirm before reuse.

# submit job and get job id
jobid=`qsub --env DARSHAN_LOGFILE=$DARSHAN_LOGFILE --mode c16 --proccount $DARSHAN_DEFAULT_NPROCS -A SSSPPg -t 10 -n 1 --output $DARSHAN_TMP/$$-tmp.out --error $DARSHAN_TMP/$$-tmp.err --debuglog $DARSHAN_TMP/$$-tmp.debuglog "$@"`
if [ $? -ne 0 ]; then
echo "Error: failed to qsub $@"
exit 1
fi
# Seed values so the polling loop below runs at least once.
output="foo"
rc=0
# loop as long as qstat succeeds and shows information about job
while [ -n "$output" -a "$rc" -eq 0 ]; do
sleep 5
output=`qstat $jobid`
rc=$?
done
# look for return code
# The job's status is recovered from Cobalt's debuglog rather than from
# qsub/qstat; "exit code of 0" is the success marker in that log.
grep "exit code of 0" $DARSHAN_TMP/$$-tmp.debuglog >& /dev/null
if [ $? -ne 0 ]; then
exit 1
else
exit 0
fi
......@@ -45,14 +45,21 @@ fi
# set up environment for tests according to platform
source $DARSHAN_TESTDIR/$DARSHAN_PLATFORM/env.sh
# Run every test case for the selected platform, tallying failures rather
# than aborting on the first one, so a complete report is produced.
# NOTE(review): the original text contained an unconditional "exit 1" before
# the failure_count increment and an "exit 0" before the summary (apparent
# merge artifact); both made the tally unreachable and have been removed.
failure_count=0

for i in "$DARSHAN_TESTDIR"/test-cases/*.sh; do
    # Skip the literal pattern when the glob matches nothing.
    [ -e "$i" ] || continue
    echo "Running ${i}..."
    "$i"
    if [ $? -ne 0 ]; then
        echo "Error: failed to execute test case $i"
        failure_count=$((failure_count+1))
    fi
    echo Done.
done

# Report the aggregate result.
if [ "$failure_count" -eq 0 ]; then
    exit 0
else
    echo $failure_count tests failed
    exit 1
fi
......@@ -14,7 +14,7 @@ if [ $? -ne 0 ]; then
fi
# execute
$DARSHAN_RUNJOB $DARSHAN_TMP/${PROG} -f $DARSHAN_TMP/${PROG}.tmp.dat
$DARSHAN_RUNJOB $DARSHAN_TMP/${PROG}
if [ $? -ne 0 ]; then
echo "Error: failed to execute ${PROG}" 1>&2
exit 1
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment