Optimised global reduce for labelList

commit 52e99137eb
parent b7b1edcf95
Author: Hrvoje Jasak
Date:   2016-10-08 19:05:14 +01:00

3 changed files with 106 additions and 170 deletions
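In short, the commit swaps a staged master/slave send-receive reduction of label lists for a single collective MPI_Allreduce, and adds exact-match List<label> overloads so that the collective path is actually selected. Below is a minimal standalone sketch of the same pattern in plain MPI, with std::vector standing in for labelList (illustrative only, not the foam-extend sources):

// Minimal standalone sketch of the pattern this commit adopts: one
// collective MPI_Allreduce call instead of a hand-rolled
// send/receive loop.  Build with an MPI wrapper, e.g. mpicxx.
#include <mpi.h>
#include <vector>
#include <iostream>

int main(int argc, char* argv[])
{
    MPI_Init(&argc, &argv);

    int rank;
    MPI_Comm_rank(MPI_COMM_WORLD, &rank);

    // Each rank contributes its own values; after the call every rank
    // holds the element-wise sum across all ranks.
    std::vector<int> values = {rank, 2*rank, 3*rank};

    // MPI forbids aliasing the send and receive buffers, hence the
    // copy, mirroring the labelList send(Value) copy in the commit.
    std::vector<int> send(values);

    MPI_Allreduce
    (
        send.data(),
        values.data(),
        static_cast<int>(values.size()),
        MPI_INT,
        MPI_SUM,
        MPI_COMM_WORLD
    );

    if (rank == 0)
    {
        std::cout << "sum[0] = " << values[0] << std::endl;
    }

    MPI_Finalize();

    return 0;
}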

File 1 of 3

@@ -250,8 +250,8 @@ void Foam::reduce
 void Foam::reduce
 (
-    UList<label>& Value,
-    const sumOp<UList<label> >& bop,
+    List<label>& Value,
+    const sumOp<List<label> >& bop,
     const int tag,
     const label comm
 )
 {
@@ -264,7 +264,89 @@ void Foam::reduce
         error::printStack(Pout);
     }
 
-    allReduce(*Value.begin(), Value.size(), MPI_LABEL, MPI_SUM, bop, tag, comm);
+    // Make a copy of the Value in the send buffer so that Value can be
+    // used for receive.  HJ, 8/Oct/2016
+    labelList send(Value);
+
+    int MPISize = Value.size();
+
+    MPI_Allreduce
+    (
+        send.begin(),
+        Value.begin(),
+        MPISize,
+        MPI_LABEL,
+        MPI_SUM,
+        PstreamGlobals::MPICommunicators_[comm]
+    );
 }
 
+
+void Foam::reduce
+(
+    List<label>& Value,
+    const minOp<List<label> >& bop,
+    const int tag,
+    const label comm
+)
+{
+    if (Pstream::warnComm != -1 && comm != Pstream::warnComm)
+    {
+        Pout<< "** reducing:" << Value << " with comm:" << comm
+            << " warnComm:" << Pstream::warnComm
+            << endl;
+        error::printStack(Pout);
+    }
+
+    // Make a copy of the Value in the send buffer so that Value can be
+    // used for receive.  HJ, 8/Oct/2016
+    labelList send(Value);
+
+    int MPISize = Value.size();
+
+    MPI_Allreduce
+    (
+        send.begin(),
+        Value.begin(),
+        MPISize,
+        MPI_LABEL,
+        MPI_MIN,
+        PstreamGlobals::MPICommunicators_[comm]
+    );
+}
+
+
+void Foam::reduce
+(
+    List<label>& Value,
+    const maxOp<List<label> >& bop,
+    const int tag,
+    const label comm
+)
+{
+    if (Pstream::warnComm != -1 && comm != Pstream::warnComm)
+    {
+        Pout<< "** reducing:" << Value << " with comm:" << comm
+            << " warnComm:" << Pstream::warnComm
+            << endl;
+        error::printStack(Pout);
+    }
+
+    // Make a copy of the Value in the send buffer so that Value can be
+    // used for receive.  HJ, 8/Oct/2016
+    labelList send(Value);
+
+    int MPISize = Value.size();
+
+    MPI_Allreduce
+    (
+        send.begin(),
+        Value.begin(),
+        MPISize,
+        MPI_LABEL,
+        MPI_MAX,
+        PstreamGlobals::MPICommunicators_[comm]
+    );
+}
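With the three specialisations above, callers can reduce an entire labelList in one collective call. A hedged usage sketch, assuming a running parallel foam-extend application with Pstream initialised; countCellsPerZone is a hypothetical helper, not a function from the commit:

// Hypothetical caller; assumes Pstream is initialised (parallel run).
#include "PstreamReduceOps.H"
#include "labelList.H"

using namespace Foam;

void countCellsPerZone(labelList& nCellsPerZone)
{
    // ... each processor fills its local per-zone counts ...

    // labelList is List<label>, so this resolves to the new
    // sumOp<List<label> > specialisation: one MPI_Allreduce over
    // the whole list.
    reduce(nCellsPerZone, sumOp<labelList>());
}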

File 2 of 3

@@ -239,28 +239,29 @@ void reduce
 // Insist there are specialisations for the common reductions of
-// lists of labels
+// lists of labels.  Note: template function specialisation must be the
+// exact match on argument types.  HJ, 8/Oct/2016
 void reduce
 (
-    UList<label>& Value,
-    const sumOp<UList<label> >& bop,
+    List<label>& Value,
+    const sumOp<List<label> >& bop,
     const int tag = Pstream::msgType(),
     const label comm = Pstream::worldComm
 );
 
 void reduce
 (
-    UList<label>& Value,
-    const minOp<UList<label> >& bop,
+    List<label>& Value,
+    const minOp<List<label> >& bop,
     const int tag = Pstream::msgType(),
     const label comm = Pstream::worldComm
 );
 
 void reduce
 (
-    UList<label>& Value,
-    const maxOp<UList<label> >& bop,
+    List<label>& Value,
+    const maxOp<List<label> >& bop,
     const int tag = Pstream::msgType(),
     const label comm = Pstream::worldComm
 );
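The "exact match" note is the heart of the fix: a non-template overload taking UList<label>& loses to the generic reduce function template when the caller passes a List<label>, because the template instantiates to an exact match while the overload needs a derived-to-base conversion. A self-contained sketch of that resolution with stand-in types (illustrative names, not the Foam headers):

// Why the old UList overloads were never selected: overload resolution
// prefers a template that matches List<int> exactly over a non-template
// overload that needs a derived-to-base conversion to UList<int>.
#include <iostream>

template<class T> struct UList {};                   // stand-in for Foam::UList
template<class T> struct List : public UList<T> {};  // stand-in for Foam::List

// Generic reduction; instantiates to an exact match for any T
template<class T>
void reduce(T&) { std::cout << "generic template\n"; }

// Old fast path: requires a List -> UList conversion
void reduce(UList<int>&) { std::cout << "UList overload\n"; }

// New fast path, exact match on the argument type, as in this commit
void reduce(List<int>&) { std::cout << "List overload\n"; }

int main()
{
    List<int> values;
    reduce(values);   // prints "List overload"; remove that overload and
                      // the generic template wins over the UList one
    return 0;
}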

File 3 of 3

@@ -44,169 +44,22 @@ void Foam::allReduce
         return;
     }
 
-//    if (Pstream::nProcs(comm) <= Pstream::nProcsSimpleSum)
-//    {
-//        if (Pstream::master(comm))
-//        {
-//            for
-//            (
-//                int slave = Pstream::firstSlave();
-//                slave <= Pstream::lastSlave(comm);
-//                slave++
-//            )
-//            {
-//                Type value;
-
-//                if
-//                (
-//                    MPI_Recv
-//                    (
-//                        &value,
-//                        MPICount,
-//                        MPIType,
-//                        slave,
-//                        tag,
-//                        PstreamGlobals::MPICommunicators_[comm],
-//                        MPI_STATUS_IGNORE
-//                    )
-//                )
-//                {
-//                    FatalErrorIn
-//                    (
-//                        "void Foam::allReduce\n"
-//                        "(\n"
-//                        "    Type&,\n"
-//                        "    int,\n"
-//                        "    MPI_Datatype,\n"
-//                        "    MPI_Op,\n"
-//                        "    const BinaryOp&,\n"
-//                        "    const int\n"
-//                        ")\n"
-//                    )   << "MPI_Recv failed"
-//                        << Foam::abort(FatalError);
-//                }
-
-//                Value = bop(Value, value);
-//            }
-//        }
-//        else
-//        {
-//            if
-//            (
-//                MPI_Send
-//                (
-//                    &Value,
-//                    MPICount,
-//                    MPIType,
-//                    Pstream::masterNo(),
-//                    tag,
-//                    PstreamGlobals::MPICommunicators_[comm]
-//                )
-//            )
-//            {
-//                FatalErrorIn
-//                (
-//                    "void Foam::allReduce\n"
-//                    "(\n"
-//                    "    Type&,\n"
-//                    "    int,\n"
-//                    "    MPI_Datatype,\n"
-//                    "    MPI_Op,\n"
-//                    "    const BinaryOp&,\n"
-//                    "    const int\n"
-//                    ")\n"
-//                )   << "MPI_Send failed"
-//                    << Foam::abort(FatalError);
-//            }
-//        }
-
-//        if (Pstream::master(comm))
-//        {
-//            for
-//            (
-//                int slave = Pstream::firstSlave();
-//                slave <= Pstream::lastSlave(comm);
-//                slave++
-//            )
-//            {
-//                if
-//                (
-//                    MPI_Send
-//                    (
-//                        &Value,
-//                        MPICount,
-//                        MPIType,
-//                        slave,
-//                        tag,
-//                        PstreamGlobals::MPICommunicators_[comm]
-//                    )
-//                )
-//                {
-//                    FatalErrorIn
-//                    (
-//                        "void Foam::allReduce\n"
-//                        "(\n"
-//                        "    Type&,\n"
-//                        "    int,\n"
-//                        "    MPI_Datatype,\n"
-//                        "    MPI_Op,\n"
-//                        "    const BinaryOp&,\n"
-//                        "    const int\n"
-//                        ")\n"
-//                    )   << "MPI_Send failed"
-//                        << Foam::abort(FatalError);
-//                }
-//            }
-//        }
-//        else
-//        {
-//            if
-//            (
-//                MPI_Recv
-//                (
-//                    &Value,
-//                    MPICount,
-//                    MPIType,
-//                    Pstream::masterNo(),
-//                    tag,
-//                    PstreamGlobals::MPICommunicators_[comm],
-//                    MPI_STATUS_IGNORE
-//                )
-//            )
-//            {
-//                FatalErrorIn
-//                (
-//                    "void Foam::allReduce\n"
-//                    "(\n"
-//                    "    Type&,\n"
-//                    "    int,\n"
-//                    "    MPI_Datatype,\n"
-//                    "    MPI_Op,\n"
-//                    "    const BinaryOp&,\n"
-//                    "    const int\n"
-//                    ")\n"
-//                )   << "MPI_Recv failed"
-//                    << Foam::abort(FatalError);
-//            }
-//        }
-//    }
-//    else
-//    {
-        Type sum;
-
-        MPI_Allreduce
-        (
-            &Value,
-            &sum,
-            MPICount,
-            MPIType,
-            MPIOp,
-            PstreamGlobals::MPICommunicators_[comm]
-        );
-
-        Value = sum;
-//    }
+    // Removed send-receive loop: use Allreduce instead.
+    // HJ, 8/Oct/2016
+    Type sum;
+
+    MPI_Allreduce
+    (
+        &Value,
+        &sum,
+        MPICount,
+        MPIType,
+        MPIOp,
+        PstreamGlobals::MPICommunicators_[comm]
+    );
+
+    Value = sum;
 }
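The temporary sum above exists because MPI_Allreduce must not be given aliasing send and receive buffers. The standard alternative, which this commit does not use, is MPI_IN_PLACE, letting the destination double as the source; a minimal sketch under that assumption (allReduceInPlace is an illustrative name, not foam-extend API):

// In-place variant of the same collective.  MPI_IN_PLACE is standard MPI.
#include <mpi.h>

template<class Type>
void allReduceInPlace
(
    Type& Value,
    int MPICount,
    MPI_Datatype MPIType,
    MPI_Op MPIOp,
    MPI_Comm comm
)
{
    // Value is both input and output; no temporary or copy required.
    MPI_Allreduce(MPI_IN_PLACE, &Value, MPICount, MPIType, MPIOp, comm);
}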