Elsa Gonsiorowski / codes / Commits
Commit a0521d0e, authored Nov 08, 2016 by Matthieu Dorier
added support for Cortex, need to test
Parent: 42ae0345
Changes: 7 files
Makefile.am

@@ -55,4 +55,17 @@ if USE_DUMPI
 AM_CPPFLAGS += ${DUMPI_CFLAGS} -DUSE_DUMPI=1
 src_libcodes_la_SOURCES += src/workload/methods/codes-dumpi-trace-nw-wrkld.c
 LDADD += ${DUMPI_LIBS}
+if USE_CORTEX
+if USE_PYTHON
+if USE_BOOST
+AM_CPPFLAGS += ${CORTEX_CFLAGS} -DENABLE_CORTEX=1
+LDADD += ${CORTEX_LIBS}
+AM_CPPFLAGS += ${PYTHON_CFLAGS}
+LDADD += ${PYTHON_LIBS}
+AM_CPPFLAGS += ${BOOST_CFLAGS}
+LDADD += ${BOOST_LIBS}
+endif
+endif
+endif
 endif
codes/codes-workload.h

@@ -69,6 +69,10 @@ struct recorder_params
 struct dumpi_trace_params {
     char file_name[MAX_NAME_LENGTH_WKLD];
     int num_net_traces;
+#ifdef ENABLE_CORTEX
+    char cortex_script[MAX_NAME_LENGTH_WKLD];
+    char cortex_class[MAX_NAME_LENGTH_WKLD];
+#endif
 };

 struct checkpoint_wrkld_params
configure.ac

@@ -121,6 +121,51 @@ else
     AM_CONDITIONAL(USE_DUMPI, false)
 fi

+# check for Cortex
+AC_ARG_WITH([cortex],[AS_HELP_STRING([--with-cortex@<:@=DIR@:>@],
+                    [location of Cortex installation])])
+if test "x${with_cortex}" != "x" ; then
+    AC_CHECK_FILES([${with_cortex}/lib/libcortex.a ${with_cortex}/lib/libcortex-python.a],
+                   AM_CONDITIONAL(USE_CORTEX, true),
+                   AC_MSG_ERROR(Could not find Cortex libraries libcortex.a and/or libcortex-python.a))
+    CORTEX_CFLAGS="-I${with_cortex}/include"
+    CORTEX_LIBS="-L${with_cortex}/lib/ -lcortex-python -lcortex -lstdc++"
+    AC_SUBST(CORTEX_LIBS)
+    AC_SUBST(CORTEX_CFLAGS)
+else
+    AM_CONDITIONAL(USE_CORTEX, false)
+fi
+
+# check for Python
+AC_ARG_WITH([python],[AS_HELP_STRING([--with-python@<:@=DIR@:>@],
+                    [location of Python 2.7 installation])])
+if test "x${with_python}" != "x" ; then
+    AC_CHECK_FILE([${with_python}/lib/libpython2.7.a],
+                  AM_CONDITIONAL(USE_PYTHON, true),
+                  AC_MSG_ERROR(Could not find Python library))
+    PYTHON_CFLAGS="-I${with_python}/include"
+    PYTHON_LIBS="-L${with_python}/lib/ -lpython2.7"
+    AC_SUBST(PYTHON_LIBS)
+    AC_SUBST(PYTHON_CFLAGS)
+else
+    AM_CONDITIONAL(USE_PYTHON, false)
+fi
+
+# check for Boost Python
+AC_ARG_WITH([boost],[AS_HELP_STRING([--with-boost@<:@=DIR@:>@],
+                    [location of Boost Python installation])])
+if test "x${with_boost}" != "x" ; then
+    AC_CHECK_FILE([${with_boost}/lib/libboost_python.a],
+                  AM_CONDITIONAL(USE_BOOST, true),
+                  AC_MSG_ERROR(Could not find Boost Python library libboost_python.a))
+    BOOST_CFLAGS="-I${with_boost}/include"
+    BOOST_LIBS="-L${with_boost}/lib -lboost_python"
+    AC_SUBST(BOOST_LIBS)
+    AC_SUBST(BOOST_CFLAGS)
+else
+    AM_CONDITIONAL(USE_BOOST, false)
+fi
+
 dnl ======================================================================
 dnl Try harder to be valgrind safe
 dnl ======================================================================
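The three new switches mirror the existing DUMPI check, so a Cortex-enabled build passes all of them to configure. A minimal sketch of such an invocation, under the assumption that a --with-dumpi flag exists alongside the USE_DUMPI conditional seen in Makefile.am; every installation prefix below is a hypothetical placeholder:

# Hypothetical prefixes; substitute the real installation paths.
# Each --with-* flag points at a prefix whose lib/ directory contains the
# static library that configure.ac checks for (libcortex.a and
# libcortex-python.a, libpython2.7.a, libboost_python.a).
./configure --with-dumpi=/path/to/dumpi \
            --with-cortex=/path/to/cortex \
            --with-python=/path/to/python2.7 \
            --with-boost=/path/to/boost

If any of the checked library files is missing, configure aborts with the corresponding AC_MSG_ERROR message rather than silently disabling the feature.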
maint/codes.pc.in

@@ -8,11 +8,17 @@ darshan_libs=@DARSHAN_LIBS@
 darshan_cflags=@DARSHAN_CFLAGS@
 dumpi_cflags=@DUMPI_CFLAGS@
 dumpi_libs=@DUMPI_LIBS@
+cortex_cflags=@CORTEX_CFLAGS@
+cortex_libs=@CORTEX_LIBS@
+python_cflags=@PYTHON_CFLAGS@
+python_libs=@PYTHON_LIBS@
+boost_cflags=@BOOST_CFLAGS@
+boost_libs=@BOOST_LIBS@

 Name: codes-base
 Description: Base functionality for CODES storage simulation
 Version: @PACKAGE_VERSION@
 URL: http://trac.mcs.anl.gov/projects/CODES
 Requires:
-Libs: -L${libdir} -lcodes ${ross_libs} ${darshan_libs} ${dumpi_libs}
-Cflags: -I${includedir} ${ross_cflags} ${darshan_cflags} ${dumpi_cflags}
+Libs: -L${libdir} -lcodes ${ross_libs} ${darshan_libs} ${dumpi_libs} ${cortex_libs}
+Cflags: -I${includedir} ${ross_cflags} ${darshan_cflags} ${dumpi_cflags} ${cortex_cflags}
src/network-workloads/model-net-mpi-replay.c

@@ -51,6 +51,12 @@ struct codes_jobmap_ctx *jobmap_ctx;
 struct codes_jobmap_params_list jobmap_p;
 /* Xu's additions end */

+/* Variables for Cortex Support */
+/* Matthieu's additions start */
+static char cortex_file[512];
+static char cortex_class[512];
+/* Matthieu's additions end */
+
 typedef struct nw_state nw_state;
 typedef struct nw_message nw_message;
 typedef int32_t dumpi_req_id;

@@ -1137,7 +1143,7 @@ void nw_test_init(nw_state* s, tw_lp* lp)
        return;
    }
-   if(strcmp(workload_type, "dumpi") == 0){
+   if(strcmp(workload_type, "dumpi") == 0 || strcmp(workload_type, "cortex") == 0){
       strcpy(params_d.file_name, file_name_of_job[lid.job]);
       params_d.num_net_traces = num_traces_of_job[lid.job];
       params = (char*)&params_d;

@@ -1147,6 +1153,13 @@ void nw_test_init(nw_state* s, tw_lp* lp)
           //       s->app_id, s->local_rank, lp->gid);
    }
+#ifdef ENABLE_CORTEX
+   if(strcmp(workload_type, "cortex") == 0) {
+       strcpy(params_d.cortex_script, cortex_file);
+       strcpy(params_d.cortex_class, cortex_class);
+   }
+#endif
+
    wrkld_id = codes_workload_load("dumpi-trace-workload", params, s->app_id, s->local_rank);

    double overhead;

@@ -1515,6 +1528,10 @@ const tw_optdef app_opt [] =
    TWOPT_CHAR("lp-io-dir", lp_io_dir, "Where to place io output (unspecified -> no output"),
    TWOPT_UINT("lp-io-use-suffix", lp_io_use_suffix, "Whether to append uniq suffix to lp-io directory (default 0)"),
    TWOPT_CHAR("offset_file", offset_file, "offset file name"),
+#ifdef ENABLE_CORTEX
+   TWOPT_CHAR("cortex-file", cortex_file, "Python file (without .py) containing the CoRtEx translation class"),
+   TWOPT_CHAR("cortex-class", cortex_class, "Python class implementing the CoRtEx translator"),
+#endif
    TWOPT_END()
 };

@@ -1555,8 +1572,14 @@ int main( int argc, char** argv )
   {
    if(tw_ismaster())
        printf("Usage: mpirun -np n ./modelnet-mpi-replay --sync=1/3"
-               " --workload_type=dumpi --workload_conf_file=prefix-workload-file-name"
-               " --alloc_file=alloc-file-name -- config-file-name\n"
+               " --workload_type=dumpi"
+               " --workload_conf_file=prefix-workload-file-name"
+               " --alloc_file=alloc-file-name"
+#ifdef ENABLE_CORTEX
+               " --cortex-file=cortex-file-name"
+               " --cortex-class=cortex-class-name"
+#endif
+               " -- config-file-name\n"
                "See model-net/doc/README.dragonfly.txt and model-net/doc/README.torus.txt"
                " for instructions on how to run the models with network traces ");
    tw_end();
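With ENABLE_CORTEX compiled in, the replay command line therefore gains two options, and the strcmp change above also accepts "cortex" as a workload type. A minimal sketch of an invocation under those assumptions; every file and class name below is a hypothetical placeholder:

# Placeholder names throughout.  --cortex-file is the Python module name
# without the .py extension; --cortex-class is the translator class inside it.
mpirun -np 8 ./modelnet-mpi-replay --sync=1 \
    --workload_type=cortex \
    --workload_conf_file=prefix-workload-file-name \
    --alloc_file=alloc-file-name \
    --cortex-file=my_translator \
    --cortex-class=MyTranslator \
    -- config-file-name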
src/networks/model-net/dragonfly-custom.C

@@ -473,7 +473,7 @@ static void dragonfly_read_config(const char * anno, dragonfly_param *params){
    int myRank;
    MPI_Comm_rank(MPI_COMM_WORLD, &myRank);

-   rc = configuration_get_value_int(&config, "PARAMS", "local_vc_size", anno, &p->local_vc_size);
+   int rc = configuration_get_value_int(&config, "PARAMS", "local_vc_size", anno, &p->local_vc_size);
    if(rc) {
        p->local_vc_size = 1024;
        fprintf(stderr, "Buffer size of local channels not specified, setting to %d\n", p->local_vc_size);
src/workload/methods/codes-dumpi-trace-nw-wrkld.c

@@ -15,6 +15,20 @@
 #include "codes/codes-workload.h"
 #include "codes/quickhash.h"

+#if ENABLE_CORTEX
+#include <cortex/cortex.h>
+#include <cortex/cortex-python.h>
+#define PROFILE_TYPE cortex_dumpi_profile*
+#define UNDUMPI_OPEN cortex_undumpi_open
+#define DUMPI_START_STREAM_READ cortex_dumpi_start_stream_read
+#define UNDUMPI_CLOSE cortex_undumpi_close
+#else
+#define PROFILE_TYPE dumpi_profile*
+#define UNDUMPI_OPEN undumpi_open
+#define DUMPI_START_STREAM_READ dumpi_start_stream_read
+#define UNDUMPI_CLOSE undumpi_close
+#endif
+
 #define MAX_LENGTH 512
 #define MAX_OPERATIONS 32768
 #define DUMPI_IGNORE_DELAY 100

@@ -550,7 +564,6 @@ static int hash_rank_compare(void *key, struct qhash_head *link)
    tmp = qhash_entry(link, rank_mpi_context, hash_link);
    if(tmp->my_rank == in->rank && tmp->my_app_id == in->app)
        return 1;
    return 0;
 }

@@ -558,7 +571,10 @@ int dumpi_trace_nw_workload_load(const char* params, int app_id, int rank)
 {
    libundumpi_callbacks callbacks;
    libundumpi_cbpair callarr[DUMPI_END_OF_STREAM];
-   dumpi_profile* profile;
+#ifdef ENABLE_CORTEX
+   libundumpi_cbpair transarr[DUMPI_END_OF_STREAM];
+#endif
+   PROFILE_TYPE profile;
    dumpi_trace_params* dumpi_params = (dumpi_trace_params*)params;
    char file_name[MAX_LENGTH];

@@ -589,7 +605,11 @@ int dumpi_trace_nw_workload_load(const char* params, int app_id, int rank)
        sprintf(file_name, "%s0%d.bin", dumpi_params->file_name, rank);
    else
        sprintf(file_name, "%s%d.bin", dumpi_params->file_name, rank);
-   profile = undumpi_open(file_name);
+#ifdef ENABLE_CORTEX
+   profile = cortex_undumpi_open(file_name, app_id, dumpi_params->num_net_traces, rank);
+#else
+   profile = undumpi_open(file_name);
+#endif
    if(NULL == profile) {
        printf("Error: unable to open DUMPI trace: %s", file_name);
        exit(-1);

@@ -597,6 +617,9 @@ int dumpi_trace_nw_workload_load(const char* params, int app_id, int rank)
    memset(&callbacks, 0, sizeof(libundumpi_callbacks));
    memset(&callarr, 0, sizeof(libundumpi_cbpair)*DUMPI_END_OF_STREAM);
+#ifdef ENABLE_CORTEX
+   memset(&transarr, 0, sizeof(libundumpi_cbpair)*DUMPI_END_OF_STREAM);
+#endif

    /* handle MPI function calls */
    callbacks.on_init = handleDUMPIInit;

@@ -662,19 +685,31 @@ int dumpi_trace_nw_workload_load(const char* params, int app_id, int rank)
    libundumpi_populate_callbacks(&callbacks, callarr);
-   dumpi_start_stream_read(profile);
+#ifdef ENABLE_CORTEX
+   libundumpi_populate_callbacks(CORTEX_PYTHON_TRANSLATION, transarr);
+#endif
+   DUMPI_START_STREAM_READ(profile);
    //dumpi_header* trace_header = undumpi_read_header(profile);
    //dumpi_free_header(trace_header);

+#ifdef ENABLE_CORTEX
+   cortex_python_set_module(dumpi_params->cortex_script, dumpi_params->cortex_class);
+#endif
+
    int finalize_reached = 0;
    int active = 1;
    int num_calls = 0;
    while(active && !finalize_reached)
    {
       num_calls++;
-      active = undumpi_read_single_call(profile, callarr, (void*)my_ctx, &finalize_reached);
+#ifdef ENABLE_CORTEX
+      active = cortex_undumpi_read_single_call(profile, callarr, transarr, (void*)my_ctx, &finalize_reached);
+#else
+      active = undumpi_read_single_call(profile, callarr, (void*)my_ctx, &finalize_reached);
+#endif
    }
-   undumpi_close(profile);
+   UNDUMPI_CLOSE(profile);
    dumpi_finalize_mpi_op_data(my_ctx->dumpi_mpi_array);
    /* add this rank context to hash table */
    rank_mpi_compare cmp;