ESI Comms optimisation: send to neighbours in reverse order

This commit is contained in:
Hrvoje Jasak 2016-09-19 15:42:09 +01:00
parent e73480637b
commit 358c854945
4 changed files with 60 additions and 24 deletions

View file

@ -67,22 +67,43 @@ const Foam::NamedEnum<Foam::Pstream::commsTypes, 3>
void Foam::Pstream::setParRun(const label nProcs)
{
    // Switch into (pseudo-)parallel mode and rebuild the world
    // communicator for the requested number of processors.
    // nProcs == 0 selects a serial run on a single-rank communicator.
    if (nProcs == 0)
    {
        // Serial run: no parallel communication
        parRun_ = false;

        // Redo worldComm communicator (created at static initialisation)
        // as a single-process communicator; no rank validation needed
        freeCommunicator(Pstream::worldComm);
        label comm = allocateCommunicator(-1, labelList(1, label(0)), false);

        // The re-allocated communicator must reuse the worldComm slot;
        // anything else indicates corrupted communicator book-keeping
        if (comm != Pstream::worldComm)
        {
            FatalErrorIn("Pstream::setParRun(const label)")
                << "problem : comm:" << comm
                << " Pstream::worldComm:" << Pstream::worldComm
                << Foam::exit(FatalError);
        }

        // No processor prefix on serial output streams
        Pout.prefix() = "";
        Perr.prefix() = "";
    }
    else
    {
        parRun_ = true;

        // Redo worldComm communicator (created at static initialisation)
        // spanning all nProcs ranks, with rank validation enabled
        freeCommunicator(Pstream::worldComm);
        label comm = allocateCommunicator(-1, identity(nProcs), true);

        // The re-allocated communicator must reuse the worldComm slot;
        // anything else indicates corrupted communicator book-keeping
        if (comm != Pstream::worldComm)
        {
            FatalErrorIn("Pstream::setParRun(const label)")
                << "problem : comm:" << comm
                << " Pstream::worldComm:" << Pstream::worldComm
                << Foam::exit(FatalError);
        }

        // Tag parallel output streams with the local processor number
        Pout.prefix() = '[' + name(myProcNo()) + "] ";
        Perr.prefix() = '[' + name(myProcNo()) + "] ";
    }
}

View file

@ -222,8 +222,11 @@ void Pstream::combineScatter
}
}
// Send to my downstairs neighbours
forAll (myComm.below(), belowI)
// Send to my downstairs neighbours. Note reverse order (compared to
// receiving). This is to make sure to send to the critical path
// (only when using a tree schedule!) first.
// This is ESI Comms optimisation, v16.06. HJ, 19/Sep/2016
forAllReverse (myComm.below(), belowI)
{
label belowID = myComm.below()[belowI];
@ -461,8 +464,11 @@ void Pstream::listCombineScatter
}
}
// Send to my downstairs neighbours
forAll (myComm.below(), belowI)
// Send to my downstairs neighbours. Note reverse order (compared to
// receiving). This is to make sure to send to the critical path
// (only when using a tree schedule!) first.
// This is ESI Comms optimisation, v16.06. HJ, 19/Sep/2016
forAllReverse (myComm.below(), belowI)
{
label belowID = myComm.below()[belowI];
@ -662,8 +668,11 @@ void Pstream::mapCombineScatter
}
}
// Send to my downstairs neighbours
forAll (myComm.below(), belowI)
// Send to my downstairs neighbours. Note reverse order (compared to
// receiving). This is to make sure to send to the critical path
// (only when using a tree schedule!) first.
// This is ESI Comms optimisation, v16.06. HJ, 19/Sep/2016
forAllReverse (myComm.below(), belowI)
{
label belowID = myComm.below()[belowI];

View file

@ -183,8 +183,11 @@ void Pstream::scatter
}
}
// Send to my downstairs neighbours
forAll (myComm.below(), belowI)
// Send to my downstairs neighbours. Note reverse order (compared to
// receiving). This is to make sure to send to the critical path
// (only when using a tree schedule!) first.
// This is ESI Comms optimisation, v16.06. HJ, 19/Sep/2016
forAllReverse (myComm.below(), belowI)
{
if (contiguous<T>())
{

View file

@ -290,8 +290,11 @@ void Pstream::scatterList
}
}
// Send to my downstairs neighbours
forAll (myComm.below(), belowI)
// Send to my downstairs neighbours. Note reverse order (compared to
// receiving). This is to make sure to send to the critical path
// (only when using a tree schedule!) first.
// This is ESI Comms optimisation, v16.06. HJ, 19/Sep/2016
forAllReverse (myComm.below(), belowI)
{
label belowID = myComm.below()[belowI];
const labelList& notBelowLeaves = comms[belowID].allNotBelow();