Replace tabs by 4 spaces in applications/solvers/solidMechanics

Author: Henrik Rusche, 2015-05-17 17:11:30 +02:00
parent 82a0e1e7df
commit b46695ce1e
123 changed files with 1883 additions and 1883 deletions

View file

@ -16,9 +16,9 @@ if(iCorr == 0)
scalar sumMagB = gSum(magSqr(b));
if(sumMagB < SMALL)
{
//Warning << "Aitken under-relaxation: denominator less then SMALL"
// << endl;
sumMagB += SMALL;
}
aitkenTheta = -aitkenTheta*

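The guard above protects the denominator of an Aitken delta-squared (dynamic relaxation) update before the relaxation factor is recomputed. As a reference, here is a minimal, framework-free sketch of that update in plain C++, assuming the standard Aitken form; the names rOld, rNew and theta are illustrative and not taken from the solver.

    #include <cstddef>
    #include <vector>

    // Standard Aitken dynamic-relaxation update (sketch). rOld/rNew are the previous
    // and current residual vectors, theta is the current relaxation factor. The SMALL
    // guard mirrors the one in the hunk above and avoids division by a near-zero norm.
    double aitkenUpdate(const std::vector<double>& rOld,
                        const std::vector<double>& rNew,
                        double theta,
                        double SMALL = 1e-15)
    {
        double num = 0.0;
        double den = 0.0;
        for (std::size_t i = 0; i < rOld.size(); ++i)
        {
            const double d = rNew[i] - rOld[i];   // residual difference
            num += rOld[i]*d;
            den += d*d;                           // |rNew - rOld|^2, i.e. the sumMagB role
        }
        if (den < SMALL)
        {
            den += SMALL;                         // same guard as in the hunk above
        }
        return -theta*num/den;                    // theta_{k+1} = -theta_k (rOld . d)/|d|^2
    }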
View file

@ -10,10 +10,10 @@ if(divSigmaExpMethod == "standard")
{
divSigmaExp = fvc::div
(
muf*(mesh.Sf() & fvc::interpolate(gradU.T()))
+ lambdaf*(mesh.Sf() & I*fvc::interpolate(tr(gradU)))
- (muf + lambdaf)*(mesh.Sf() & fvc::interpolate(gradU))
);
}
else if(divSigmaExpMethod == "decompose")
{
@ -24,13 +24,13 @@ if(divSigmaExpMethod == "standard")
divSigmaExp = fvc::div
(
mesh.magSf()
*(
- (muf + lambdaf)*(snGradU&(I - n*n))
+ lambdaf*tr(shearGradU&(I - n*n))*n
+ muf*(shearGradU&n)
)
);
}
else if(divSigmaExpMethod == "expLaplacian")
{
@ -38,10 +38,10 @@ if(divSigmaExpMethod == "standard")
- fvc::laplacian(mu + lambda, U, "laplacian(DU,U)")
+ fvc::div
(
mu*gradU.T()
+ lambda*(I*tr(gradU)),
"div(sigma)"
);
}
else
{

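For context on the "standard" branch above: with Hooke's law sigma = mu*(grad U + grad U^T) + lambda*tr(grad U)*I (standard notation, not taken from the commit), the stress divergence splits into a part treated implicitly as a Laplacian and the explicit remainder assembled here:

    \[
    \nabla\cdot\boldsymbol{\sigma}
      = \underbrace{\nabla\cdot\bigl[(2\mu+\lambda)\,\nabla\mathbf{U}\bigr]}_{\text{implicit Laplacian term}}
      + \underbrace{\nabla\cdot\bigl[\mu\,\nabla\mathbf{U}^{T}
          + \lambda\,\operatorname{tr}(\nabla\mathbf{U})\,\mathbf{I}
          - (\mu+\lambda)\,\nabla\mathbf{U}\bigr]}_{\texttt{divSigmaExp}\ \text{(explicit)}}
    \]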
View file

@ -6,14 +6,14 @@
forAll(mesh.boundary(), patchi)
{
netForce +=
sum(
mesh.Sf().boundaryField()[patchi]
&
(
2*mu.boundaryField()[patchi]*symm(gradU.boundaryField()[patchi])
+ lambda*tr(gradU.boundaryField()[patchi])*I
)
);
}
forceResidual = mag(netForce);
}

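In symbols, the loop above accumulates the net traction force over all boundary patches, and the residual is its magnitude (small-strain Hooke's law, exactly as written in the code):

    \[
    \mathbf{F}_{\text{net}} = \sum_{\text{patches}}\ \sum_{\text{faces}}
      \mathbf{S}_f \cdot \bigl[\,2\mu\,\operatorname{symm}(\nabla\mathbf{U})
      + \lambda\,\operatorname{tr}(\nabla\mathbf{U})\,\mathbf{I}\,\bigr],
    \qquad
    \texttt{forceResidual} = \lvert\mathbf{F}_{\text{net}}\rvert .
    \]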
View file

@ -28,14 +28,14 @@
// forAll(traction.boundaryField(), patchi)
// {
// if (mesh.boundary()[patchi].type() == "cohesive")
// {
// forAll(traction.boundaryField()[patchi], facei)
// {
// Pout << "face " << facei << " with traction magnitude "
// << mag(traction.boundaryField()[patchi][facei])/1e6 << " MPa and traction "
// << traction.boundaryField()[patchi][facei]/1e6 << " MPa" << endl;
// }
// }
// }
}

View file

@ -7,40 +7,40 @@
{
if (isA<solidCohesiveFvPatchVectorField>(U.boundaryField()[patchI]))
{
cohesivePatchID = patchI;
cohesivePatchUPtr =
&refCast<solidCohesiveFvPatchVectorField>
(
U.boundaryField()[cohesivePatchID]
);
break;
}
else if (isA<solidCohesiveFixedModeMixFvPatchVectorField>(U.boundaryField()[patchI]))
{
cohesivePatchID = patchI;
cohesivePatchUFixedModePtr =
&refCast<solidCohesiveFixedModeMixFvPatchVectorField>
(
U.boundaryField()[cohesivePatchID]
);
break;
}
}
if(cohesivePatchID == -1)
{
FatalErrorIn(args.executable())
<< "Can't find cohesiveLawFvPatch" << nl
<< "One of the boundary patches in " << U.name() << ".boundaryField() "
<< "should be of type " << solidCohesiveFvPatchVectorField::typeName
<< "or " << solidCohesiveFixedModeMixFvPatchVectorField::typeName
<< abort(FatalError);
}
// solidCohesiveFvPatchVectorField& cohesivePatchU =
// refCast<solidCohesiveFvPatchVectorField>
// (
// U.boundaryField()[cohesivePatchID]
// );
// philipc: I have moved cohesive stuff to constitutiveModel
@ -66,82 +66,82 @@
// limit crack to specified boxes
{
const dictionary& stressControl =
mesh.solutionDict().subDict("solidMechanics");
List<boundBox> userBoxes(stressControl.lookup("crackLimitingBoxes"));
const surfaceVectorField& Cf = mesh.Cf();
forAll(cohesiveZone.internalField(), faceI)
{
bool faceInsideBox = false;
forAll(userBoxes, boxi)
{
if(userBoxes[boxi].contains(Cf.internalField()[faceI])) faceInsideBox = true;
}
if(faceInsideBox)
{
cohesiveZone.internalField()[faceI] = 1.0;
}
}
forAll(cohesiveZone.boundaryField(), patchI)
{
// cracks may go along proc boundaries
if(mesh.boundaryMesh()[patchI].type() == processorPolyPatch::typeName)
{
forAll(cohesiveZone.boundaryField()[patchI], faceI)
{
bool faceInsideBox = false;
forAll(userBoxes, boxi)
{
if(userBoxes[boxi].contains(Cf.boundaryField()[patchI][faceI])) faceInsideBox = true;
}
if(faceInsideBox)
{
cohesiveZone.boundaryField()[patchI][faceI] = 1.0;
}
}
}
}
Info << "\nThere are " << gSum(cohesiveZone.internalField()) << " potential internal crack faces" << nl << endl;
Info << "\nThere are " << gSum(cohesiveZone.boundaryField())/2 << " potential coupled boundary crack faces" << nl << endl;
// write field for visualisation
volScalarField cohesiveZoneVol
(
IOobject
(
"cohesiveZoneVol",
runTime.timeName(),
mesh,
IOobject::NO_READ,
IOobject::AUTO_WRITE
),
mesh,
dimensionedScalar("zero", dimless, 0.0)
);
forAll(cohesiveZone.internalField(), facei)
{
if(cohesiveZone.internalField()[facei])
{
cohesiveZoneVol.internalField()[mesh.owner()[facei]] = 1.0;
cohesiveZoneVol.internalField()[mesh.neighbour()[facei]] = 1.0;
}
}
forAll(cohesiveZone.boundaryField(), patchi)
{
forAll(cohesiveZone.boundaryField()[patchi], facei)
{
if(cohesiveZone.boundaryField()[patchi][facei] > 0.0)
{
cohesiveZoneVol.boundaryField()[patchi][facei] = 1.0;
}
}
}
Info << "Writing cohesiveZone field" << endl;
cohesiveZoneVol.write();
}

View file

@ -35,8 +35,8 @@
IOobject::NO_READ,
IOobject::NO_WRITE
),
mesh,
dimensionedVector("zero", dimless, vector::zero)
);
volVectorField V
@ -122,7 +122,7 @@
IOobject::NO_WRITE
),
mesh,
dimensionedVector("zero", dimLength, vector::zero)
);
// aitken relaxation factor
scalar aitkenInitialRes = 1.0;
@ -140,5 +140,5 @@ scalar aitkenTheta = 0.1;
// IOobject::AUTO_WRITE
// ),
// mesh,
// dimensionedVector("zero", dimless, vector::zero)
// );

View file

@ -4,14 +4,14 @@ label historyPatchID = mesh.boundaryMesh().findPatchID(historyPatchName);
if(historyPatchID == -1)
{
Warning << "history patch " << historyPatchName
<< " not found. Force-displacement will not be written"
<< endl;
}
else if(Pstream::master())
{
Info << "Force-displacement for patch " << historyPatchName
<< " will be written to forceDisp.dat"
<< endl;
word hisDirName("history");
mkDir(hisDirName);
filePtr = new OFstream(hisDirName/historyPatchName+"forceDisp.dat");

View file

@ -4,6 +4,6 @@ Info << "Selecting divSigmaExp calculation method " << divSigmaExpMethod << end
if(divSigmaExpMethod != "standard" && divSigmaExpMethod != "surface" && divSigmaExpMethod != "decompose" && divSigmaExpMethod != "laplacian")
{
FatalError << "divSigmaExp method " << divSigmaExpMethod << " not found!" << nl
<< "valid methods are:\nstandard\nsurface\ndecompose\nlaplacian"
<< exit(FatalError);
}

View file

@ -15,16 +15,16 @@ if (dynamicTimeStep && runTime.value() > dynamicTimeStepActivation)
scalar newDeltaT = deltaTmin;
if (newDeltaT/runTime.deltaT().value() < 0.5)
{
newDeltaT = 0.5*runTime.deltaT().value();
Info << "Reducing time step" << nl;
}
runTime.setDeltaT(newDeltaT);
}
Pout << "Current time step size: "
<< runTime.deltaT().value() << " s" << endl;
scalar maxDT = runTime.deltaT().value();

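The hunk above encodes a simple step-limiting rule: the time step may move towards deltaTmin, but it is never reduced by more than a factor of two in a single step. A standalone sketch of that rule in plain C++ (illustrative names, not the solver's API):

    // Returns the next time step: aim for deltaTmin, but never halve more than once per step.
    double limitDeltaT(double deltaTmin, double currentDeltaT)
    {
        double newDeltaT = deltaTmin;
        if (newDeltaT/currentDeltaT < 0.5)
        {
            newDeltaT = 0.5*currentDeltaT;   // corresponds to the "Reducing time step" branch
        }
        return newDeltaT;
    }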
View file

@ -100,7 +100,7 @@ nCoupledFacesToBreak = 0;
}
// Check if maximum is present on more then one processors
label procID = Pstream::nProcs();
if (procHasFaceToBreak)
{
procID = Pstream::myProcNo();
@ -125,46 +125,46 @@ nCoupledFacesToBreak = 0;
if (mesh.boundary()[patchI].coupled())
{
// scalarField pEffTraction =
// cohesiveZone.boundaryField()[patchI] *
// mag(traction.boundaryField()[patchI]);
// scalarField pEffTractionFraction = pEffTraction/sigmaMax.boundaryField()[patchI];
scalarField pNormalTraction =
cohesiveZone.boundaryField()[patchI] *
( n.boundaryField()[patchI] & traction.boundaryField()[patchI] );
pNormalTraction = max(pNormalTraction, scalar(0)); // only consider tensile tractions
scalarField pShearTraction =
cohesiveZone.boundaryField()[patchI] *
mag( (I - Foam::sqr(n.boundaryField()[patchI])) & traction.boundaryField()[patchI] );
// the traction fraction is monitored to decide which faces to break:
// ie (tN/tNC)^2 + (tS/tSC)^2 >1 to crack a face
const scalarField& pSigmaMax = sigmaMax.boundaryField()[patchI];
const scalarField& pTauMax = tauMax.boundaryField()[patchI];
scalarField pEffTractionFraction(pNormalTraction.size(), 0.0);
if(cohesivePatchUPtr)
{
pEffTractionFraction =
(pNormalTraction/pSigmaMax)*(pNormalTraction/pSigmaMax) + (pShearTraction/pTauMax)*(pShearTraction/pTauMax);
}
else
{
// solidCohesiveFixedModeMix only uses sigmaMax
pEffTractionFraction =
(pNormalTraction/pSigmaMax)*(pNormalTraction/pSigmaMax) + (pShearTraction/pSigmaMax)*(pShearTraction/pSigmaMax);
}
label start = mesh.boundaryMesh()[patchI].start();
forAll(pEffTractionFraction, faceI)
{
if (pEffTractionFraction[faceI] > maxEffTractionFraction)
{
maxEffTractionFraction = pEffTractionFraction[faceI];
}
if (pEffTractionFraction[faceI] > 1.0)
{
coupledFacesToBreakList.insert(start + faceI);
coupledFacesToBreakEffTractionFractionList.insert
@ -260,7 +260,7 @@ nCoupledFacesToBreak = 0;
if (nCoupledFacesToBreak)
{
label patchID =
mesh.boundaryMesh().whichPatch(coupledFaceToBreakIndex);
label start = mesh.boundaryMesh()[patchID].start();
label localIndex = coupledFaceToBreakIndex - start;
@ -325,31 +325,31 @@ nCoupledFacesToBreak = 0;
faceToBreakNormal = n.internalField()[faceToBreakIndex];
// Scale broken face traction
faceToBreakSigmaMax = sigmaMaxI[faceToBreakIndex];
faceToBreakTauMax = tauMaxI[faceToBreakIndex];
scalar normalTrac = faceToBreakNormal & faceToBreakTraction;
normalTrac = max(normalTrac, 0.0);
scalar shearTrac = mag( (I - sqr(faceToBreakNormal)) & faceToBreakTraction );
scalar scaleFactor = 1;
if(cohesivePatchUPtr)
{
scaleFactor =
::sqrt(1 / (
(normalTrac/faceToBreakSigmaMax)*(normalTrac/faceToBreakSigmaMax)
+ (shearTrac/faceToBreakTauMax)*(shearTrac/faceToBreakTauMax)
) );
}
else
{
// solidCohesiveFixedModeMix only uses sigmaMax
scaleFactor =
::sqrt(1 / (
(normalTrac/faceToBreakSigmaMax)*(normalTrac/faceToBreakSigmaMax)
+ (shearTrac/faceToBreakSigmaMax)*(shearTrac/faceToBreakSigmaMax)
) );
}
faceToBreakTraction *= scaleFactor;
topoChange = true;
}
@ -364,29 +364,29 @@ nCoupledFacesToBreak = 0;
faceToBreakNormal = n.boundaryField()[patchID][localIndex];
// Scale broken face traction
faceToBreakSigmaMax = sigmaMax.boundaryField()[patchID][localIndex];
faceToBreakTauMax = tauMax.boundaryField()[patchID][localIndex];
scalar normalTrac = faceToBreakNormal & faceToBreakTraction;
normalTrac = max(normalTrac, 0.0);
scalar shearTrac = mag( (I - sqr(faceToBreakNormal)) & faceToBreakTraction );
scalar scaleFactor = 1;
if(cohesivePatchUPtr)
{
scaleFactor =
::sqrt(1 / (
(normalTrac/faceToBreakSigmaMax)*(normalTrac/faceToBreakSigmaMax)
+ (shearTrac/faceToBreakTauMax)*(shearTrac/faceToBreakTauMax)
) );
}
else
{
// solidCohesiveFixedModeMix only uses sigmaMax
scaleFactor =
::sqrt(1 / (
(normalTrac/faceToBreakSigmaMax)*(normalTrac/faceToBreakSigmaMax)
+ (shearTrac/faceToBreakSigmaMax)*(shearTrac/faceToBreakSigmaMax)
) );
}
faceToBreakTraction *= scaleFactor;
@ -422,20 +422,20 @@ nCoupledFacesToBreak = 0;
muf = fvc::interpolate(mu);
lambdaf = fvc::interpolate(lambda);
// we need to modify propertiess after cracking otherwise momentum equation is wrong
// but solidInterface seems to hold some information about old mesh
// so we will delete it and make another
// we could probably add a public clearout function
// create new solidInterface
//Pout << "Creating new solidInterface" << endl;
//delete solidInterfacePtr;
//solidInterfacePtr = new solidInterface(mesh, rheology);
// delete demand driven data as the mesh has changed
if(rheology.solidInterfaceActive())
{
rheology.solInterface().clearOut();
solidInterfacePtr->modifyProperties(muf, lambdaf);
}
// Local crack displacement
vectorField UpI =
@ -447,21 +447,21 @@ nCoupledFacesToBreak = 0;
vectorField globalUpI = mesh.globalCrackField(UpI);
vectorField globalOldUpI = mesh.globalCrackField(oldUpI);
// mu and lambda field on new crack faces must be updated
scalarField muPI = mu.boundaryField()[cohesivePatchID].patchInternalField();
scalarField lambdaPI = lambda.boundaryField()[cohesivePatchID].patchInternalField();
scalarField globalMuPI = mesh.globalCrackField(muPI);
scalarField globalLambdaPI = mesh.globalCrackField(lambdaPI);
// cohesivePatchU.size()
int cohesivePatchSize(cohesivePatchUPtr ? cohesivePatchUPtr->size() : cohesivePatchUFixedModePtr->size());
// Initialise U for new cohesive face
const labelList& gcfa = mesh.globalCrackFaceAddressing();
label globalIndex = mesh.localCrackStart();
// for (label i=0; i<cohesivePatchU.size(); i++)
for (label i=0; i<cohesivePatchSize; i++)
{
label oldFaceIndex = faceMap[start+i];
// If new face
@ -480,10 +480,10 @@ nCoupledFacesToBreak = 0;
+ globalOldUpI[gcfa[globalIndex]]
);
// initialise mu and lambda on new faces
// set new face value to value of internal cell
muf.boundaryField()[cohesivePatchID][i] = globalMuPI[globalIndex];
lambdaf.boundaryField()[cohesivePatchID][i] = globalLambdaPI[globalIndex];
globalIndex++;
}
@ -494,24 +494,24 @@ nCoupledFacesToBreak = 0;
}
// we must calculate grad using interface
// U at the interface has not been calculated yet as interface.correct()
// has not been called yet
// not really a problem as gradU is correct in second outer iteration
// as long as this does not cause convergence problems for the first iterations.
// we should be able to calculate the interface displacements without
// having to call interface.correct()
// todo: add calculateInterfaceU() function
// interface grad uses Gauss, we need least squares
//gradU = solidInterfacePtr->grad(U);
gradU = fvc::grad(U); // leastSquaresSolidInterface grad scheme
//snGradU = fvc::snGrad(U);
# include "calculateTraction.H"
//if (nFacesToBreak || nCoupledFacesToBreak) mesh.write(); traction.write();
// Initialise initiation traction for new cohesive patch face
// for (label i=0; i<cohesivePatchU.size(); i++)
for (label i=0; i<cohesivePatchSize; i++)
{
label oldFaceIndex = faceMap[start+i];
@ -529,46 +529,46 @@ nCoupledFacesToBreak = 0;
if ((n0&faceToBreakNormal) > SMALL)
{
traction.boundaryField()[cohesivePatchID][i] =
faceToBreakTraction;
traction.oldTime().boundaryField()[cohesivePatchID][i] =
faceToBreakTraction;
if(cohesivePatchUPtr)
{
cohesivePatchUPtr->traction()[i] = faceToBreakTraction;
}
else
{
cohesivePatchUFixedModePtr->traction()[i] = faceToBreakTraction;
cohesivePatchUFixedModePtr->initiationTraction()[i] = faceToBreakTraction;
}
}
else
{
traction.boundaryField()[cohesivePatchID][i] =
-faceToBreakTraction;
traction.oldTime().boundaryField()[cohesivePatchID][i] =
-faceToBreakTraction;
//cohesivePatchU.traction()[i] = -faceToBreakTraction;
if(cohesivePatchUPtr)
{
cohesivePatchUPtr->traction()[i] = -faceToBreakTraction;
}
else
{
cohesivePatchUFixedModePtr->traction()[i] = -faceToBreakTraction;
cohesivePatchUFixedModePtr->initiationTraction()[i] = -faceToBreakTraction;
}
}
}
}
// hmmnn we only need a reference for very small groups of cells
// turn off for now
//# include "updateReference.H"
}
}

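Two pieces of the face-breaking logic above can be stated compactly: a face is flagged for cracking when (tN/sigmaMax)^2 + (tS/tauMax)^2 > 1, and the traction handed to a freshly broken face is scaled back onto that envelope with alpha = 1/sqrt((tN/sigmaMax)^2 + (tS/tauMax)^2); the solidCohesiveFixedModeMix branch simply reuses sigmaMax in place of tauMax. A minimal plain-C++ sketch of both quantities (illustrative names, not the solver's):

    #include <algorithm>
    #include <cmath>

    struct MixedMode
    {
        double effTractionFraction;   // (tN/sigmaMax)^2 + (tS/tauMax)^2; break the face if > 1
        double scaleFactor;           // alpha that pulls the broken-face traction back onto the envelope
    };

    MixedMode mixedModeCheck(double tN, double tS, double sigmaMax, double tauMax)
    {
        tN = std::max(tN, 0.0);       // only tensile normal traction contributes
        const double f = (tN/sigmaMax)*(tN/sigmaMax) + (tS/tauMax)*(tS/tauMax);
        const double alpha = (f > 0.0) ? 1.0/std::sqrt(f) : 1.0;   // only applied when f > 1
        return {f, alpha};
    }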
View file

@ -13,11 +13,11 @@
// with a processor boundary
//if (U.boundaryField()[patchI].fixesValue())
if (
U.boundaryField()[patchI].fixesValue()
||
mesh.boundaryMesh()[patchI].type()
== processorPolyPatch::typeName
)
{
const unallocLabelList& curFaceCells =
mesh.boundary()[patchI].faceCells();

View file

@ -88,12 +88,12 @@ if (runTime.outputTime() || topoChange)
(
IOobject
(
"damageAndCracks",
runTime.timeName(),
mesh,
IOobject::NO_READ,
IOobject::AUTO_WRITE
),
mesh,
dimensionedScalar("zero", dimless, 0.0),
calculatedFvPatchVectorField::typeName
@ -102,12 +102,12 @@ if (runTime.outputTime() || topoChange)
(
IOobject
(
"GI",
runTime.timeName(),
mesh,
IOobject::NO_READ,
IOobject::AUTO_WRITE
),
mesh,
dimensionedScalar("zero", dimless, 0.0),
calculatedFvPatchVectorField::typeName
@ -116,30 +116,30 @@ if (runTime.outputTime() || topoChange)
(
IOobject
(
"GII",
runTime.timeName(),
mesh,
IOobject::NO_READ,
IOobject::AUTO_WRITE
),
mesh,
dimensionedScalar("zero", dimless, 0.0),
calculatedFvPatchVectorField::typeName
);
forAll(U.boundaryField(), patchi)
{
// if(U.boundaryField()[patchi].type() == cohesiveLawMultiMatFvPatchVectorField::typeName)
if(U.boundaryField()[patchi].type() == solidCohesiveFvPatchVectorField::typeName)
{
// cohesiveLawMultiMatFvPatchVectorField& Upatch =
// refCast<cohesiveLawMultiMatFvPatchVectorField>(U.boundaryField()[patchi]);
solidCohesiveFvPatchVectorField& Upatch =
refCast<solidCohesiveFvPatchVectorField>(U.boundaryField()[patchi]);
GI.boundaryField()[patchi] = Upatch.GI();
GII.boundaryField()[patchi] = Upatch.GII();
damageAndCracks.boundaryField()[patchi] = Upatch.crackingAndDamage();
}
}
volScalarField GTotal("GTotal", GI + GII);
GTotal.write();

View file

@ -2,7 +2,7 @@
if(historyPatchID != -1)
{
Info << "Writing disp and force of patch "<<historyPatchName<<" to file"
<< endl;
//- for small strain or moving mesh
vector force = gSum(mesh.boundary()[historyPatchID].Sf() & sigma.boundaryField()[historyPatchID]);
@ -19,17 +19,17 @@ if(historyPatchID != -1)
//- be dotted with the surface normal to give the actual traction/force
//- you cannot just take the component of the sigma tensor
//scalar forcePatchIntegrateMethod = gSum(
// mesh.magSf().boundaryField()[historyPatchID]
// *sigma.boundaryField()[historyPatchID].component(symmTensor::XY)
// );
vector avDisp = gAverage(U.boundaryField()[historyPatchID]);
//- write to file
if(Pstream::master())
{
OFstream& forceDispFile = *filePtr;
forceDispFile << avDisp.x() << " " << avDisp.y() << " " << avDisp.z() << " "
<< force.x() << " " << force.y() << " " << force.z() << endl;
}
}

View file

@ -16,9 +16,9 @@ if(iCorr == 0)
scalar sumMagB = gSum(magSqr(b));
if(sumMagB < SMALL)
{
//Warning << "Aitken under-relaxation: denominator less then SMALL"
// << endl;
sumMagB += SMALL;
}
aitkenTheta = -aitkenTheta*

View file

@ -10,10 +10,10 @@ if(divDSigmaExpMethod == "standard")
{
divDSigmaExp = fvc::div
(
muf*(mesh.Sf() & fvc::interpolate(gradDU.T()))
+ lambdaf*(mesh.Sf() & I*fvc::interpolate(tr(gradDU)))
- (muf + lambdaf)*(mesh.Sf() & fvc::interpolate(gradDU))
);
}
else if(divDSigmaExpMethod == "decompose")
{
@ -24,13 +24,13 @@ if(divDSigmaExpMethod == "standard")
divDSigmaExp = fvc::div
(
mesh.magSf()
*(
- (muf + lambdaf)*(snGradDU&(I - n*n))
+ lambdaf*tr(shearGradDU&(I - n*n))*n
+ muf*(shearGradDU&n)
)
);
}
else if(divDSigmaExpMethod == "expLaplacian")
{
@ -38,10 +38,10 @@ if(divDSigmaExpMethod == "standard")
- fvc::laplacian(mu + lambda, DU, "laplacian(DDU,DU)")
+ fvc::div
(
mu*gradDU.T()
+ lambda*(I*tr(gradDU)),
"div(sigma)"
);
}
else
{

View file

@ -6,14 +6,14 @@
forAll(mesh.boundary(), patchi)
{
netForce +=
sum(
mesh.Sf().boundaryField()[patchi]
&
(
2*mu.boundaryField()[patchi]*symm(gradU.boundaryField()[patchi])
+ lambda*tr(gradU.boundaryField()[patchi])*I
)
);
}
forceResidual = mag(netForce);
}

View file

@ -28,14 +28,14 @@
// forAll(traction.boundaryField(), patchi)
// {
// if (mesh.boundary()[patchi].type() == "cohesive")
// {
// forAll(traction.boundaryField()[patchi], facei)
// {
// Pout << "face " << facei << " with traction magnitude "
// << mag(traction.boundaryField()[patchi][facei])/1e6 << " MPa and traction "
// << traction.boundaryField()[patchi][facei]/1e6 << " MPa" << endl;
// }
// }
// }
}

View file

@ -7,40 +7,40 @@
{
if (isA<solidCohesiveFvPatchVectorField>(DU.boundaryField()[patchI]))
{
cohesivePatchID = patchI;
cohesivePatchDUPtr =
&refCast<solidCohesiveFvPatchVectorField>
(
DU.boundaryField()[cohesivePatchID]
);
break;
}
else if (isA<solidCohesiveFixedModeMixFvPatchVectorField>(DU.boundaryField()[patchI]))
{
cohesivePatchID = patchI;
cohesivePatchDUFixedModePtr =
&refCast<solidCohesiveFixedModeMixFvPatchVectorField>
(
DU.boundaryField()[cohesivePatchID]
);
break;
}
}
if(cohesivePatchID == -1)
{
FatalErrorIn(args.executable())
<< "Can't find cohesiveLawFvPatch" << nl
<< "One of the boundary patches in " << DU.name() << ".boundaryField() "
<< "should be of type " << solidCohesiveFvPatchVectorField::typeName
<< "or " << solidCohesiveFixedModeMixFvPatchVectorField::typeName
<< abort(FatalError);
}
// solidCohesiveFvPatchVectorField& cohesivePatchDU =
// refCast<solidCohesiveFvPatchVectorField>
// (
// DU.boundaryField()[cohesivePatchID]
// );
// philipc: I have moved cohesive stuff to constitutiveModel
@ -66,64 +66,64 @@
// limit crack to specified boxes
{
const dictionary& stressControl =
mesh.solutionDict().subDict("solidMechanics");
List<boundBox> userBoxes(stressControl.lookup("crackLimitingBoxes"));
const surfaceVectorField& Cf = mesh.Cf();
//int numPossibleCrackFaces = 0;
forAll(cohesiveZone.internalField(), faceI)
{
bool faceInsideBox = false;
forAll(userBoxes, boxi)
{
if(userBoxes[boxi].contains(Cf.internalField()[faceI])) faceInsideBox = true;
}
if(faceInsideBox)
{
cohesiveZone.internalField()[faceI] = 1.0;
//numPossibleCrackFaces++;
}
}
//reduce(numPossibleCrackFaces, sumOp<int>());
forAll(cohesiveZone.boundaryField(), patchI)
{
// cracks may go along proc boundaries
if(mesh.boundaryMesh()[patchI].type() == processorPolyPatch::typeName)
{
forAll(cohesiveZone.boundaryField()[patchI], faceI)
{
bool faceInsideBox = false;
forAll(userBoxes, boxi)
{
if(userBoxes[boxi].contains(Cf.boundaryField()[patchI][faceI])) faceInsideBox = true;
}
if(faceInsideBox)
{
cohesiveZone.boundaryField()[patchI][faceI] = 1.0;
}
}
// numPossibleCrackFaces += int(sum(cohesiveZone.boundaryField()[patchI]));
// philipc multiMat cracks not working on proc boundaries yet... disable for now
// found the problem: solidInterface needs to know about mesh changes so
// I make a new one each time there is a crack
// int numProcFaces = int(sum(cohesiveZone.boundaryField()[patchI]));
// if(numProcFaces > 0)
// {
// cohesiveZone.boundaryField()[patchI] = 0.0;
// Warning << "Processor boundary cracking is "
// << "disabled because it is not working yet for multi-materials." << nl
// << "There are " << numProcFaces << " possible cracks "
// << "faces on processor boundary " << mesh.boundary()[patchI].name()
// << ", which are not allowed to crack." << endl;
// }
}
}
// Info << "\nNumber of possible cracking faces is " << numPossibleCrackFaces << endl;
Info << "\nThere are " << gSum(cohesiveZone.internalField()) << " potential internal crack faces" << nl << endl;
@ -131,36 +131,36 @@
// write field for visualisation
volScalarField cohesiveZoneVol
(
IOobject
(
"cohesiveZoneVol",
runTime.timeName(),
mesh,
IOobject::NO_READ,
IOobject::AUTO_WRITE
),
mesh,
dimensionedScalar("zero", dimless, 0.0)
);
forAll(cohesiveZone.internalField(), facei)
{
if(cohesiveZone.internalField()[facei])
{
cohesiveZoneVol.internalField()[mesh.owner()[facei]] = 1.0;
cohesiveZoneVol.internalField()[mesh.neighbour()[facei]] = 1.0;
}
}
forAll(cohesiveZone.boundaryField(), patchi)
{
forAll(cohesiveZone.boundaryField()[patchi], facei)
{
if(cohesiveZone.boundaryField()[patchi][facei])
{
cohesiveZoneVol.boundaryField()[patchi][facei] = 1.0;
}
}
}
Info << "Writing cohesiveZone field" << endl;
cohesiveZoneVol.write();
}

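The crack-limiting boxes above reduce to a point-in-box test on each face centre: a face may only become a cohesive (crack) face if its centre lies inside at least one user-supplied axis-aligned box. A self-contained sketch of that filter in plain C++ (types and names are illustrative, not foam-extend's):

    #include <array>
    #include <vector>

    struct Box
    {
        std::array<double, 3> min, max;

        bool contains(const std::array<double, 3>& p) const
        {
            for (int d = 0; d < 3; ++d)
            {
                if (p[d] < min[d] || p[d] > max[d]) return false;
            }
            return true;
        }
    };

    // True if the face centre lies inside any of the user-supplied crack-limiting boxes.
    bool insideAnyBox(const std::vector<Box>& boxes, const std::array<double, 3>& faceCentre)
    {
        for (const Box& b : boxes)
        {
            if (b.contains(faceCentre)) return true;
        }
        return false;
    }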
View file

@ -36,8 +36,8 @@
IOobject::NO_READ,
IOobject::NO_WRITE
),
mesh,
dimensionedVector("zero", dimless, vector::zero)
);
Info<< "Creating field U\n" << endl;
@ -59,12 +59,12 @@
(
IOobject
(
"DEpsilon",
runTime.timeName(),
mesh,
IOobject::READ_IF_PRESENT,
IOobject::AUTO_WRITE
),
mesh,
dimensionedSymmTensor("zero", dimless, symmTensor::zero)
);
@ -155,7 +155,7 @@ constitutiveModel rheology(sigma, DU);
IOobject::NO_WRITE
),
mesh,
dimensionedVector("zero", dimLength, vector::zero)
);
// aitken relaxation factor
scalar aitkenInitialRes = 1.0;

View file

@ -4,14 +4,14 @@ label historyPatchID = mesh.boundaryMesh().findPatchID(historyPatchName);
if(historyPatchID == -1)
{
Warning << "history patch " << historyPatchName
<< " not found. Force-displacement will not be written"
<< endl;
}
else if(Pstream::master())
{
Info << "Force-displacement for patch " << historyPatchName
<< " will be written to forceDisp.dat"
<< endl;
filePtr = new OFstream("forceDisp.dat");
OFstream& forceDispFile = *filePtr;
forceDispFile << "#Disp(mm)\tForce(N)" << endl;

View file

@ -4,6 +4,6 @@ Info << "Selecting divDSigmaExp calculation method " << divDSigmaExpMethod << e
if(divDSigmaExpMethod != "standard" && divDSigmaExpMethod != "surface" && divDSigmaExpMethod != "decompose" && divDSigmaExpMethod != "laplacian")
{
FatalError << "divDSigmaExp method " << divDSigmaExpMethod << " not found!" << nl
<< "valid methods are:\nstandard\nsurface\ndecompose\nlaplacian"
<< exit(FatalError);
}

View file

@ -15,16 +15,16 @@ if (dynamicTimeStep)
scalar newDeltaT = deltaTmin;
if (newDeltaT/runTime.deltaT().value() < 0.5)
{
newDeltaT = 0.5*runTime.deltaT().value();
Info << "Reducing time step" << nl;
}
runTime.setDeltaT(newDeltaT);
}
Pout << "Current time step size: "
<< runTime.deltaT().value() << " s" << endl;
scalar maxDT = runTime.deltaT().value();

View file

@ -138,39 +138,39 @@ nCoupledFacesToBreak = 0;
if (mesh.boundary()[patchI].coupled())
{
// scalarField pEffTraction =
// cohesiveZone.boundaryField()[patchI] *
// mag(traction.boundaryField()[patchI]);
// scalarField pEffTractionFraction = pEffTraction/sigmaMax.boundaryField()[patchI];
scalarField pNormalTraction =
cohesiveZone.boundaryField()[patchI]*
( n.boundaryField()[patchI] & traction.boundaryField()[patchI] );
// only consider tensile tractions
pNormalTraction = max(pNormalTraction, scalar(0));
scalarField pShearTraction =
cohesiveZone.boundaryField()[patchI] *
mag( (I - Foam::sqr(n.boundaryField()[patchI])) & traction.boundaryField()[patchI] );
// the traction fraction is monitored to decide which faces to break:
// ie (tN/tNC)^2 + (tS/tSC)^2 >1 to crack a face
const scalarField& pSigmaMax = sigmaMax.boundaryField()[patchI];
const scalarField& pTauMax = tauMax.boundaryField()[patchI];
// scalarField pEffTractionFraction =
// (pNormalTraction/pSigmaMax)*(pNormalTraction/pSigmaMax) + (pShearTraction/pTauMax)*(pShearTraction/pTauMax);
scalarField pEffTractionFraction(pNormalTraction.size(), 0.0);
if(cohesivePatchDUPtr)
{
pEffTractionFraction =
(pNormalTraction/pSigmaMax)*(pNormalTraction/pSigmaMax) + (pShearTraction/pTauMax)*(pShearTraction/pTauMax);
}
else
{
// solidCohesiveFixedModeMix only uses sigmaMax
pEffTractionFraction =
(pNormalTraction/pSigmaMax)*(pNormalTraction/pSigmaMax) + (pShearTraction/pSigmaMax)*(pShearTraction/pSigmaMax);
}
label start = mesh.boundaryMesh()[patchI].start();
@ -182,9 +182,9 @@ nCoupledFacesToBreak = 0;
maxEffTractionFraction = pEffTractionFraction[faceI];
}
if (pEffTractionFraction[faceI] > 1.0)
{
//Pout << "coupled face to break " << faceI << endl;
coupledFacesToBreakList.insert(start + faceI);
coupledFacesToBreakEffTractionFractionList.insert
(
@ -347,31 +347,31 @@ nCoupledFacesToBreak = 0;
faceToBreakNormal = n.internalField()[faceToBreakIndex];
// Scale broken face traction
// The scale factor is derived by solving the following eqn for alpha:
// (alpha*tN/tNC)^2 + (alpha*tS/tSC)^2 = 1
faceToBreakSigmaMax = sigmaMaxI[faceToBreakIndex];
faceToBreakTauMax = tauMaxI[faceToBreakIndex];
scalar normalTrac = faceToBreakNormal & faceToBreakTraction;
normalTrac = max(normalTrac, 0.0);
scalar shearTrac = mag( (I - sqr(faceToBreakNormal)) & faceToBreakTraction );
scalar scaleFactor = 1;
if(cohesivePatchDUPtr)
{
scaleFactor =
::sqrt(1 / (
(normalTrac/faceToBreakSigmaMax)*(normalTrac/faceToBreakSigmaMax)
+ (shearTrac/faceToBreakTauMax)*(shearTrac/faceToBreakTauMax)
) );
}
else
{
// solidCohesiveFixedModeMix only uses sigmaMax
scaleFactor =
::sqrt(1 / (
(normalTrac/faceToBreakSigmaMax)*(normalTrac/faceToBreakSigmaMax)
+ (shearTrac/faceToBreakSigmaMax)*(shearTrac/faceToBreakSigmaMax)
) );
}
faceToBreakTraction *= scaleFactor;
@ -388,15 +388,15 @@ nCoupledFacesToBreak = 0;
faceToBreakNormal = n.boundaryField()[patchID][localIndex];
// Scale broken face traction
faceToBreakSigmaMax = sigmaMax.boundaryField()[patchID][localIndex];
faceToBreakTauMax = tauMax.boundaryField()[patchID][localIndex];
scalar normalTrac = faceToBreakNormal & faceToBreakTraction;
normalTrac = max(normalTrac, scalar(0));
scalar shearTrac = mag( (I - sqr(faceToBreakNormal)) & faceToBreakTraction );
scalar scaleFactor = 1;
if(cohesivePatchDUPtr)
{
scaleFactor =
Foam::sqrt
(
1/
@ -407,10 +407,10 @@ nCoupledFacesToBreak = 0;
)
);
}
else
{
// solidCohesiveFixedModeMix only uses sigmaMax
scaleFactor =
Foam::sqrt
(
1/
@ -447,7 +447,7 @@ nCoupledFacesToBreak = 0;
Pout << "Coupled face to break: " << coupledFaceToBreak << endl;
mesh.setBreak(faceToBreak, faceToBreakFlip, coupledFaceToBreak);
mesh.update();
const labelList& faceMap = mesh.topoChangeMap().faceMap();
label start = mesh.boundaryMesh()[cohesivePatchID].start();
@ -457,19 +457,19 @@ nCoupledFacesToBreak = 0;
muf = fvc::interpolate(mu);
lambdaf = fvc::interpolate(lambda);
// we need to modify propertiess after cracking otherwise momentum equation is wrong
// but solidInterface seems to hold some information about old mesh
// so we will delete it and make another
// we could probably add a public clearout function
// create new solidInterface
if(rheology.solidInterfaceActive())
{
rheology.solInterface().clearOut();
solidInterfacePtr->modifyProperties(muf, lambdaf);
}
// All values on the new crack faces get set to zero
// so we must manually correct them
const vectorField DUpI =
DU.boundaryField()[cohesivePatchID].patchInternalField();
const vectorField oldDUpI =
@ -492,8 +492,8 @@ nCoupledFacesToBreak = 0;
const scalarField globalMuPI = mesh.globalCrackField(muPI);
const scalarField globalLambdaPI = mesh.globalCrackField(lambdaPI);
// cohesivePatchU.size()
int cohesivePatchSize(cohesivePatchDUPtr ? cohesivePatchDUPtr->size() : cohesivePatchDUFixedModePtr->size());
// Initialise fields for new cohesive face
const labelList& gcfa = mesh.globalCrackFaceAddressing();
@ -506,9 +506,9 @@ nCoupledFacesToBreak = 0;
// If new face
if (oldFaceIndex == faceToBreakIndex)
{
// set to average of old cell centres
// hmnnn it would be better to interpolate
// using weights... OK for now: future work
DU.boundaryField()[cohesivePatchID][i] =
0.5
*(
@ -540,10 +540,10 @@ nCoupledFacesToBreak = 0;
+ globalsigmapI[gcfa[globalIndex]]
);
// initialise mu and lambda on new faces
// set new face value to value of internal cell
muf.boundaryField()[cohesivePatchID][i] = globalMuPI[globalIndex];
lambdaf.boundaryField()[cohesivePatchID][i] = globalLambdaPI[globalIndex];
globalIndex++;
}
@ -554,24 +554,24 @@ nCoupledFacesToBreak = 0;
}
// we must calculate grad using interface
// DU at the interface has not been calculated yet as interface.correct()
// has not been called yet
// not really a problem as gradDU is correct in second outer iteration
// as long as this does not cause convergence problems for the first iterations.
// we should be able to calculate the interface displacements without
// having to call interface.correct()
// todo: add calculateInterfaceDU() function
// interface grad uses Gauss, we need least squares
gradDU = fvc::grad(DU); // leastSquaresSolidInterface grad scheme
//gradDU = solidInterfacePtr->grad(DU);
//snGradDU = fvc::snGrad(DU);
# include "calculateTraction.H"
//if (nFacesToBreak || nCoupledFacesToBreak) mesh.write(); traction.write();
// Initialise initiation traction for new cohesive patch face
// we also need to update the traction_ field in the crack boundary condition
// this is because it cannot set itself during mapping.
//for (label i=0; i<cohesivePatchDU.size(); i++)
for (label i=0; i<cohesivePatchSize; i++)
{
@ -591,49 +591,49 @@ nCoupledFacesToBreak = 0;
if ((n0&faceToBreakNormal) > SMALL)
{
traction.boundaryField()[cohesivePatchID][i] =
faceToBreakTraction;
traction.oldTime().boundaryField()[cohesivePatchID][i] =
faceToBreakTraction;
// this seems to slow convergence in some simple test cases...
// but surely it should be better update it
//cohesivePatchDU.traction()[i] = faceToBreakTraction;
if(cohesivePatchDUPtr)
{
cohesivePatchDUPtr->traction()[i] = faceToBreakTraction;
}
else
{
cohesivePatchDUFixedModePtr->traction()[i] = faceToBreakTraction;
cohesivePatchDUFixedModePtr->initiationTraction()[i] = faceToBreakTraction;
}
}
else
{
traction.boundaryField()[cohesivePatchID][i] =
-faceToBreakTraction;
traction.oldTime().boundaryField()[cohesivePatchID][i] =
-faceToBreakTraction;
//cohesivePatchDU.traction()[i] = -faceToBreakTraction;
if(cohesivePatchDUPtr)
{
cohesivePatchDUPtr->traction()[i] = -faceToBreakTraction;
}
else
{
cohesivePatchDUFixedModePtr->traction()[i] = -faceToBreakTraction;
cohesivePatchDUFixedModePtr->initiationTraction()[i] = -faceToBreakTraction;
}
}
}
}
// hmmnn we only need a reference for very small groups of cells
// turn off for now
//# include "updateReference.H"
}
}

View file

@ -13,11 +13,11 @@
// with a processor boundary
//if (U.boundaryField()[patchI].fixesValue())
if (
U.boundaryField()[patchI].fixesValue()
||
mesh.boundaryMesh()[patchI].type()
== processorPolyPatch::typeName
)
{
const unallocLabelList& curFaceCells =
mesh.boundary()[patchI].faceCells();

View file

@ -88,23 +88,23 @@ if (runTime.outputTime() || topoChange)
// (
// IOobject
// (
// "tractionBoundary",
// runTime.timeName(),
// mesh,
// IOobject::NO_READ,
// IOobject::AUTO_WRITE
// ),
// mesh,
// dimensionedVector("zero", dimForce/dimArea, vector::zero)
// );
// surfaceVectorField n = mesh.Sf()/mesh.magSf();
// forAll(tractionBoundary.boundaryField(), patchi)
// {
// if(mesh.boundaryMesh()[patchi].type() != processorPolyPatch::typeName)
// {
// tractionBoundary.boundaryField()[patchi] =
// n.boundaryField()[patchi] & sigma.boundaryField()[patchi];
// }
// }
@ -113,12 +113,12 @@ if (runTime.outputTime() || topoChange)
(
IOobject
(
"damageAndCracks",
runTime.timeName(),
mesh,
IOobject::NO_READ,
IOobject::AUTO_WRITE
),
mesh,
dimensionedScalar("zero", dimless, 0.0),
calculatedFvPatchVectorField::typeName
@ -127,12 +127,12 @@ if (runTime.outputTime() || topoChange)
(
IOobject
(
"GI",
runTime.timeName(),
mesh,
IOobject::NO_READ,
IOobject::AUTO_WRITE
),
mesh,
dimensionedScalar("zero", dimless, 0.0),
calculatedFvPatchVectorField::typeName
@ -141,30 +141,30 @@ if (runTime.outputTime() || topoChange)
(
IOobject
(
"GII",
runTime.timeName(),
mesh,
IOobject::NO_READ,
IOobject::AUTO_WRITE
),
mesh,
dimensionedScalar("zero", dimless, 0.0),
calculatedFvPatchVectorField::typeName
);
forAll(DU.boundaryField(), patchi)
{
// if(DU.boundaryField()[patchi].type() == cohesiveLawMultiMatFvPatchVectorField::typeName)
if(DU.boundaryField()[patchi].type() == solidCohesiveFvPatchVectorField::typeName)
{
// cohesiveLawMultiMatFvPatchVectorField& DUpatch =
// refCast<cohesiveLawMultiMatFvPatchVectorField>(DU.boundaryField()[patchi]);
solidCohesiveFvPatchVectorField& DUpatch =
refCast<solidCohesiveFvPatchVectorField>(DU.boundaryField()[patchi]);
GI.boundaryField()[patchi] = DUpatch.GI();
GII.boundaryField()[patchi] = DUpatch.GII();
damageAndCracks.boundaryField()[patchi] = DUpatch.crackingAndDamage();
}
}
//Info << "done" << endl;

View file

@ -2,16 +2,16 @@
if(historyPatchID != -1)
{
Info << "Found patch "<<historyPatchName<<", writing y force and displacement to file"
<< endl;
//- calculate force in specified direction on topClamp patch
vector direction(0, 1, 0);
//- for small strain or moving mesh
scalar force = gSum(
direction &
(mesh.boundary()[historyPatchID].Sf() & sigma.boundaryField()[historyPatchID])
);
//- for large strain total lagrangian
// tensorField F = I + gradU.boundaryField()[historyPatchID];
@ -25,16 +25,16 @@ if(historyPatchID != -1)
//- be dotted with the surface normal to give the actual traction/force
//- you cannot just take the component of the sigma tensor
//scalar forcePatchIntegrateMethod = gSum(
// mesh.magSf().boundaryField()[historyPatchID]
// *sigma.boundaryField()[historyPatchID].component(symmTensor::XY)
// );
scalar disp = max(U.boundaryField()[historyPatchID].component(vector::Y));
//- write to file
if(Pstream::master())
{
OFstream& forceDispFile = *filePtr;
forceDispFile << disp << "\t" << force << endl;
}
}

View file

@ -13,7 +13,7 @@ else if(divDSigmaExpMethod == "surface")
muf*(mesh.Sf() & fvc::interpolate(gradDU.T()))
+ lambdaf*(mesh.Sf() & I*fvc::interpolate(tr(gradDU)))
- (muf + lambdaf)*(mesh.Sf() & fvc::interpolate(gradDU))
);
}
else if(divDSigmaExpMethod == "decompose")
{

View file

@ -17,6 +17,6 @@
{
FatalErrorIn(args.executable())
<< "divSigmaExp method " << divDSigmaExpMethod << " not found! "
<< "valid methods are:\nstandard\nsurface\ndecompose\nlaplacian"
<< exit(FatalError);
}

View file

@ -8,17 +8,17 @@ if(leftPatchID == -1)
//- calculate force in x direction on leftClamp patch
scalar leftForce = gSum(
vector(1, 0, 0) &
(mesh.boundary()[leftPatchID].Sf() & sigma.boundaryField()[leftPatchID])
);
//- patchIntegrate utility integrates it this way but this is worng because the sigma tensor should
//- be dotted with the surface normal to give the actual traction/force
//- you cannot just take the component of the sigma tensor
//scalar leftForcePatchIntegrateMethod = gSum(
// mesh.magSf().boundaryField()[leftPatchID]
// *sigma.boundaryField()[leftPatchID].component(symmTensor::XY)
// );
// mesh.magSf().boundaryField()[leftPatchID]
// *sigma.boundaryField()[leftPatchID].component(symmTensor::XY)
// );
vector gaugeU1 = vector::zero;
vector gaugeU2 = vector::zero;

View file

@ -24,8 +24,8 @@
IOobject::READ_IF_PRESENT,
IOobject::AUTO_WRITE
),
mesh,
dimensionedVector("zero", dimLength, vector::zero)
mesh,
dimensionedVector("zero", dimLength, vector::zero)
);
volTensorField gradU
@ -38,8 +38,8 @@
IOobject::READ_IF_PRESENT,
IOobject::AUTO_WRITE
),
mesh,
dimensionedTensor("zero", dimless, tensor::zero)
mesh,
dimensionedTensor("zero", dimless, tensor::zero)
);
//- increment of Green finite strain tensor
@ -53,8 +53,8 @@
IOobject::READ_IF_PRESENT,
IOobject::AUTO_WRITE
),
mesh,
dimensionedSymmTensor("zero", dimless, symmTensor::zero)
mesh,
dimensionedSymmTensor("zero", dimless, symmTensor::zero)
);
//- Green strain tensor
@ -68,8 +68,8 @@
IOobject::READ_IF_PRESENT,
IOobject::AUTO_WRITE
),
mesh,
dimensionedSymmTensor("zero", dimless, symmTensor::zero)
mesh,
dimensionedSymmTensor("zero", dimless, symmTensor::zero)
);
@ -99,8 +99,8 @@
IOobject::READ_IF_PRESENT,
IOobject::AUTO_WRITE
),
mesh,
dimensionedSymmTensor("zero", dimForce/dimArea, symmTensor::zero)
mesh,
dimensionedSymmTensor("zero", dimForce/dimArea, symmTensor::zero)
);
constitutiveModel rheology(sigma, DU);

View file

@ -4,33 +4,33 @@ if (runTime.outputTime())
(
IOobject
(
"epsilonEq",
runTime.timeName(),
mesh,
IOobject::NO_READ,
IOobject::AUTO_WRITE
),
"epsilonEq",
runTime.timeName(),
mesh,
IOobject::NO_READ,
IOobject::AUTO_WRITE
),
sqrt((2.0/3.0)*magSqr(dev(epsilon)))
);
Info<< "Max epsilonEq = " << max(epsilonEq).value()
<< endl;
<< endl;
volScalarField sigmaEq
(
IOobject
(
"sigmaEq",
runTime.timeName(),
mesh,
IOobject::NO_READ,
IOobject::AUTO_WRITE
),
"sigmaEq",
runTime.timeName(),
mesh,
IOobject::NO_READ,
IOobject::AUTO_WRITE
),
sqrt((3.0/2.0)*magSqr(dev(sigma)))
);
Info<< "Max sigmaEq = " << max(sigmaEq).value()
<< endl;
<< endl;
//- Calculate Cauchy stress
volTensorField F = I + gradU;
@ -43,12 +43,12 @@ if (runTime.outputTime())
(
IOobject
(
"sigmaCauchy",
runTime.timeName(),
mesh,
IOobject::NO_READ,
IOobject::AUTO_WRITE
),
"sigmaCauchy",
runTime.timeName(),
mesh,
IOobject::NO_READ,
IOobject::AUTO_WRITE
),
(1/J) * symm(F.T() & sigma & F)
);
@ -57,29 +57,29 @@ if (runTime.outputTime())
(
IOobject
(
"sigmaCauchyEq",
runTime.timeName(),
mesh,
IOobject::NO_READ,
IOobject::AUTO_WRITE
),
"sigmaCauchyEq",
runTime.timeName(),
mesh,
IOobject::NO_READ,
IOobject::AUTO_WRITE
),
sqrt((3.0/2.0)*magSqr(dev(sigmaCauchy)))
);
Info<< "Max sigmaCauchyEq = " << max(sigmaCauchyEq).value()
<< endl;
<< endl;
volTensorField Finv = inv(F);
volSymmTensorField epsilonAlmansi
(
IOobject
(
"epsilonAlmansi",
runTime.timeName(),
mesh,
IOobject::NO_READ,
IOobject::AUTO_WRITE
),
"epsilonAlmansi",
runTime.timeName(),
mesh,
IOobject::NO_READ,
IOobject::AUTO_WRITE
),
symm(Finv & epsilon & Finv.T())
);
@ -88,21 +88,21 @@ if (runTime.outputTime())
(
IOobject
(
"tractionCauchy",
runTime.timeName(),
mesh,
IOobject::NO_READ,
IOobject::AUTO_WRITE
),
"tractionCauchy",
runTime.timeName(),
mesh,
IOobject::NO_READ,
IOobject::AUTO_WRITE
),
mesh,
dimensionedVector("zero", dimForce/dimArea, vector::zero)
);
forAll(traction.boundaryField(), patchi)
{
tensorField Fpatch = I + gradU.boundaryField()[patchi];
tensorField Fpatch = I + gradU.boundaryField()[patchi];
traction.boundaryField()[patchi] =
n.boundaryField()[patchi] & (sigma.boundaryField()[patchi] & Fpatch);
traction.boundaryField()[patchi] =
n.boundaryField()[patchi] & (sigma.boundaryField()[patchi] & Fpatch);
}
runTime.write();
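
In symbols, the fields written in this hunk are (with F = I + gradU and J = det F; under the foam-extend convention (grad U)_ij = dU_j/dx_i, I + gradU is the transpose of the textbook deformation gradient, which would explain the F.T() & sigma & F and Finv & epsilon & Finv.T() orderings above):

\[
\varepsilon_{\mathrm{eq}}=\sqrt{\tfrac{2}{3}\,\mathrm{dev}\,\boldsymbol\varepsilon:\mathrm{dev}\,\boldsymbol\varepsilon},\qquad
\sigma_{\mathrm{eq}}=\sqrt{\tfrac{3}{2}\,\mathrm{dev}\,\boldsymbol\sigma:\mathrm{dev}\,\boldsymbol\sigma},
\]
\[
\boldsymbol\sigma_{\mathrm{Cauchy}}=\frac{1}{J}\,\mathbf F\,\boldsymbol\sigma\,\mathbf F^{\mathrm T},\qquad
\mathbf e_{\mathrm{Almansi}}=\mathbf F^{-\mathrm T}\,\mathbf E\,\mathbf F^{-1},
\]

which is the usual push-forward if sigma is read as a second Piola-Kirchhoff stress and epsilon as the Green strain of this total Lagrangian formulation.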

View file

@ -2,32 +2,32 @@
forAll(mesh.boundary(), patchID)
{
if(U.boundaryField()[patchID].type()
== solidDirectionMixedFvPatchVectorField::typeName
)
{
solidDirectionMixedFvPatchVectorField& loadingPatch =
refCast<solidDirectionMixedFvPatchVectorField>
(
U.boundaryField()[patchID]
);
== solidDirectionMixedFvPatchVectorField::typeName
)
{
solidDirectionMixedFvPatchVectorField& loadingPatch =
refCast<solidDirectionMixedFvPatchVectorField>
(
U.boundaryField()[patchID]
);
tensorField Finv = inv(I + gradU);
vectorField newN = Finv & n.boundaryField()[patchID];
newN /= mag(newN);
loadingPatch.valueFraction() = sqr(newN);
tensorField Finv = inv(I + gradU);
vectorField newN = Finv & n.boundaryField()[patchID];
newN /= mag(newN);
loadingPatch.valueFraction() = sqr(newN);
//- set gradient
loadingPatch.refGrad() =
(
//Traction
( (mu.boundaryField()[patchID] + lambda.boundaryField()[patchID]) * (n.boundaryField()[patchID] & gradU.boundaryField()[patchID]) )
- ( mu.boundaryField()[patchID] * (n.boundaryField()[patchID] & gradU.boundaryField()[patchID].T()) )
- ( mu.boundaryField()[patchID] * ( n.boundaryField()[patchID] & (gradU.boundaryField()[patchID] & gradU.boundaryField()[patchID].T()) ) )
- ( lambda.boundaryField()[patchID] * tr(gradU.boundaryField()[patchID]) * n.boundaryField()[patchID] )
- ( 0.5 * lambda.boundaryField()[patchID] * tr(gradU.boundaryField()[patchID] & gradU.boundaryField()[patchID].T()) * n.boundaryField()[patchID] )
)
/
(2.0*mu.boundaryField()[patchID] + lambda.boundaryField()[patchID]);
}
//- set gradient
loadingPatch.refGrad() =
(
//Traction
( (mu.boundaryField()[patchID] + lambda.boundaryField()[patchID]) * (n.boundaryField()[patchID] & gradU.boundaryField()[patchID]) )
- ( mu.boundaryField()[patchID] * (n.boundaryField()[patchID] & gradU.boundaryField()[patchID].T()) )
- ( mu.boundaryField()[patchID] * ( n.boundaryField()[patchID] & (gradU.boundaryField()[patchID] & gradU.boundaryField()[patchID].T()) ) )
- ( lambda.boundaryField()[patchID] * tr(gradU.boundaryField()[patchID]) * n.boundaryField()[patchID] )
- ( 0.5 * lambda.boundaryField()[patchID] * tr(gradU.boundaryField()[patchID] & gradU.boundaryField()[patchID].T()) * n.boundaryField()[patchID] )
)
/
(2.0*mu.boundaryField()[patchID] + lambda.boundaryField()[patchID]);
}
}
}
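
Read in symbols, and assuming the companion momentum equation carries an implicit laplacian of diffusivity 2*mu + lambda (not visible in this hunk), the block above rotates the constrained direction to the deformed normal and hands the remaining stress terms to the boundary gradient. The valueFraction is built from

\[
\mathbf n_{\mathrm{def}}=\frac{\mathbf F^{-\mathrm T}\mathbf N}{\lvert\mathbf F^{-\mathrm T}\mathbf N\rvert},\qquad
\text{valueFraction}=\mathbf n_{\mathrm{def}}\otimes\mathbf n_{\mathrm{def}},
\]

consistent with Nanson's relation for the deformed-configuration normal, while refGrad collects the listed small- and large-strain stress terms scaled by 1/(2\mu+\lambda), so that the implicit snGrad contribution plus this explicit remainder reproduces the full traction in the constrained direction; a reading of the intent rather than a derivation.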

View file

@ -11,11 +11,11 @@ if(solidInterfaceCorr)
// if(divDSigmaExpMethod != "surface" && divDSigmaExpMethod != "decompose")
// {
// FatalError << "divDSigmaExp must be decompose or surface when solidInterface is on"
// << exit(FatalError);
// << exit(FatalError);
// }
// if(divDSigmaLargeStrainExpMethod != "surface")
// {
// FatalError << "divDSigmaLargeStrainExp must be surface when solidInterface is on"
// << exit(FatalError);
// << exit(FatalError);
// }
}

View file

@ -51,6 +51,6 @@ if(min(J.internalField()) > 0)
else
{
FatalErrorIn(args.executable())
<< "Negative Jacobian"
<< exit(FatalError);
<< "Negative Jacobian"
<< exit(FatalError);
}

View file

@ -4,33 +4,33 @@ if (runTime.outputTime())
(
IOobject
(
"epsilonEq",
runTime.timeName(),
mesh,
IOobject::NO_READ,
IOobject::AUTO_WRITE
),
"epsilonEq",
runTime.timeName(),
mesh,
IOobject::NO_READ,
IOobject::AUTO_WRITE
),
sqrt((2.0/3.0)*magSqr(dev(epsilon)))
);
Info<< "Max epsilonEq = " << max(epsilonEq).value()
<< endl;
<< endl;
volScalarField sigmaEq
(
IOobject
(
"sigmaEq",
runTime.timeName(),
mesh,
IOobject::NO_READ,
IOobject::AUTO_WRITE
),
"sigmaEq",
runTime.timeName(),
mesh,
IOobject::NO_READ,
IOobject::AUTO_WRITE
),
sqrt((3.0/2.0)*magSqr(dev(sigma)))
);
Info<< "Max sigmaEq = " << max(sigmaEq).value()
<< endl;
<< endl;
//- Calculate Cauchy stress
volTensorField F = I + gradU;
@ -43,12 +43,12 @@ if (runTime.outputTime())
(
IOobject
(
"sigmaCauchy",
runTime.timeName(),
mesh,
IOobject::NO_READ,
IOobject::AUTO_WRITE
),
"sigmaCauchy",
runTime.timeName(),
mesh,
IOobject::NO_READ,
IOobject::AUTO_WRITE
),
(1/J) * symm(F.T() & sigma & F)
);
@ -57,17 +57,17 @@ if (runTime.outputTime())
(
IOobject
(
"sigmaCauchyEq",
runTime.timeName(),
mesh,
IOobject::NO_READ,
IOobject::AUTO_WRITE
),
"sigmaCauchyEq",
runTime.timeName(),
mesh,
IOobject::NO_READ,
IOobject::AUTO_WRITE
),
sqrt((3.0/2.0)*magSqr(dev(sigmaCauchy)))
);
Info<< "Max sigmaCauchyEq = " << max(sigmaCauchyEq).value()
<< endl;
<< endl;
volTensorField Finv = inv(F);
@ -75,12 +75,12 @@ if (runTime.outputTime())
(
IOobject
(
"epsilonAlmansi",
runTime.timeName(),
mesh,
IOobject::NO_READ,
IOobject::AUTO_WRITE
),
"epsilonAlmansi",
runTime.timeName(),
mesh,
IOobject::NO_READ,
IOobject::AUTO_WRITE
),
symm(Finv & epsilon & Finv.T())
);
@ -88,22 +88,22 @@ if (runTime.outputTime())
// (
// IOobject
// (
// "traction",
// runTime.timeName(),
// mesh,
// IOobject::NO_READ,
// IOobject::AUTO_WRITE
// ),
// "traction",
// runTime.timeName(),
// mesh,
// IOobject::NO_READ,
// IOobject::AUTO_WRITE
// ),
// mesh,
// dimensionedVector("zero", dimForce/dimArea, vector::zero),
// calculatedFvPatchVectorField::typeName
// );
// forAll(traction.boundaryField(), patchi)
// {
// const tensorField& Fbinv = Finv.boundaryField()[patchi];
// vectorField nCurrent = Fbinv & n.boundaryField()[patchi];
// traction.boundaryField()[patchi] =
// nCurrent & sigmaCauchy.boundaryField()[patchi];
// const tensorField& Fbinv = Finv.boundaryField()[patchi];
// vectorField nCurrent = Fbinv & n.boundaryField()[patchi];
// traction.boundaryField()[patchi] =
// nCurrent & sigmaCauchy.boundaryField()[patchi];
// }
// //- write boundary forces
@ -113,53 +113,53 @@ if (runTime.outputTime())
// Info << nl;
// forAll(mesh.boundary(), patchi)
// {
// Info << "Patch " << mesh.boundary()[patchi].name() << endl;
// const tensorField& Fb = F.boundaryField()[patchi];
// vectorField totalForce = mesh.Sf().boundaryField()[patchi] & (sigma.boundaryField()[patchi] & Fb);
// //vectorField totalForce2 = Sf.boundaryField()[patchi] & (sigmaCauchy.boundaryField()[patchi]);
// Info << "Patch " << mesh.boundary()[patchi].name() << endl;
// const tensorField& Fb = F.boundaryField()[patchi];
// vectorField totalForce = mesh.Sf().boundaryField()[patchi] & (sigma.boundaryField()[patchi] & Fb);
// //vectorField totalForce2 = Sf.boundaryField()[patchi] & (sigmaCauchy.boundaryField()[patchi]);
// vector force = sum( totalForce );
// //vector force2 = sum( totalForce2 );
// Info << "\ttotal force is " << force << " N" << endl;
// //Info << "\ttotal force2 is " << force2 << " N" << endl;
// vector force = sum( totalForce );
// //vector force2 = sum( totalForce2 );
// Info << "\ttotal force is " << force << " N" << endl;
// //Info << "\ttotal force2 is " << force2 << " N" << endl;
// const tensorField& Fbinv = Finv.boundaryField()[patchi];
// vectorField nCurrent = Fbinv & n.boundaryField()[patchi];
// nCurrent /= mag(nCurrent);
// scalar normalForce = sum( nCurrent & totalForce );
// Info << "\tnormal force is " << normalForce << " N" << endl;
// scalar shearForce = mag(sum( (I - sqr(nCurrent)) & totalForce ));
// Info << "\tshear force is " << shearForce << " N" << endl;
// const tensorField& Fbinv = Finv.boundaryField()[patchi];
// vectorField nCurrent = Fbinv & n.boundaryField()[patchi];
// nCurrent /= mag(nCurrent);
// scalar normalForce = sum( nCurrent & totalForce );
// Info << "\tnormal force is " << normalForce << " N" << endl;
// scalar shearForce = mag(sum( (I - sqr(nCurrent)) & totalForce ));
// Info << "\tshear force is " << shearForce << " N" << endl;
//if(mesh.boundary()[patchi].name() == "right")
//{
//const vectorField& nOrig = n.boundaryField()[patchi];
//Info << "\tNormal force on right is " << (nCurrent & totalForce) << nl << endl;
//Info << "\tShear force on right is " << ((I - sqr(nCurrent)) & totalForce) << nl << endl;
//Info << "\tpatch gradient is " << U.boundaryField()[patchi].snGrad() << endl;
//Info << "\tpatch gradient (norm) is " << (nCurrent & U.boundaryField()[patchi].snGrad()) << endl;
//Info << "\tpatch gradient (shear) is " << ((I - sqr(nCurrent)) & U.boundaryField()[patchi].snGrad()) << endl;
//Info << "\tpatch Almansi (normal) is " << (nCurrent & (nCurrent & epsilonAlmansi.boundaryField()[patchi])) << endl;
//Info << "\tpatch Almansi (shear) is " << ( (I - sqr(nCurrent)) & (nCurrent & epsilonAlmansi.boundaryField()[patchi])) << endl;
//Info << "\tpatch Green (normal) is " << (nOrig & (nOrig & epsilon.boundaryField()[patchi])) << endl;
//Info << "\tpatch Green (shear) is " << ( (I - sqr(nOrig)) & (nOrig & epsilon.boundaryField()[patchi])) << endl;
//Info << "\tpatch Cauchy stress (normal) is " << (nCurrent & (nCurrent & sigmaCauchy.boundaryField()[patchi])) << endl;
//}
//if(mesh.boundary()[patchi].name() == "right")
//{
//const vectorField& nOrig = n.boundaryField()[patchi];
//Info << "\tNormal force on right is " << (nCurrent & totalForce) << nl << endl;
//Info << "\tShear force on right is " << ((I - sqr(nCurrent)) & totalForce) << nl << endl;
//Info << "\tpatch gradient is " << U.boundaryField()[patchi].snGrad() << endl;
//Info << "\tpatch gradient (norm) is " << (nCurrent & U.boundaryField()[patchi].snGrad()) << endl;
//Info << "\tpatch gradient (shear) is " << ((I - sqr(nCurrent)) & U.boundaryField()[patchi].snGrad()) << endl;
//Info << "\tpatch Almansi (normal) is " << (nCurrent & (nCurrent & epsilonAlmansi.boundaryField()[patchi])) << endl;
//Info << "\tpatch Almansi (shear) is " << ( (I - sqr(nCurrent)) & (nCurrent & epsilonAlmansi.boundaryField()[patchi])) << endl;
//Info << "\tpatch Green (normal) is " << (nOrig & (nOrig & epsilon.boundaryField()[patchi])) << endl;
//Info << "\tpatch Green (shear) is " << ( (I - sqr(nOrig)) & (nOrig & epsilon.boundaryField()[patchi])) << endl;
//Info << "\tpatch Cauchy stress (normal) is " << (nCurrent & (nCurrent & sigmaCauchy.boundaryField()[patchi])) << endl;
//}
// if(mesh.boundary()[patchi].type() != "empty")
// {
// if(mesh.boundary()[patchi].type() != "empty")
// {
// vector Sf0 = Sf.boundaryField()[patchi][0];
// symmTensor sigma0 = sigmaCauchy.boundaryField()[patchi][0];
// Info << "sigmab[0] is " << sigma0 << nl
// << "Sfb is " << Sf0 << nl
// << "force is " << (Sf.boundaryField()[patchi][0]&sigma.boundaryField()[patchi][0]) << nl
// << "Sfx*sigmaxx " << (Sf0[vector::X]*sigma0[symmTensor::XX]) <<nl
// << "Sfx*sigmaxx " << (Sf0[vector::X]*sigma0[symmTensor::XX]) <<nl
// << "Sfy*sigmaxy " << (Sf0[vector::Y]*sigma0[symmTensor::XY]) << nl
// << "Sfx*sigmayx " << (Sf0[vector::X]*sigma0[symmTensor::XY]) << nl
// << "Sfy*sigmayy " << (Sf0[vector::Y]*sigma0[symmTensor::YY]) << nl
// << endl;
// }
// Info << endl;
// }
// Info << endl;
// }
runTime.write();
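
The commented diagnostics above split the total patch force into normal and shear parts with respect to the deformed normal (nCurrent, obtained from Finv & n and normalised, again consistent with the deformed normal being proportional to F^{-T} N). With t_f the per-face force:

\[
\mathbf t_f=\mathbf S_f\cdot\big(\boldsymbol\sigma_f\cdot\mathbf F_f\big),\qquad
F_n=\sum_f\mathbf n_{\mathrm{def},f}\cdot\mathbf t_f,\qquad
F_s=\Big\lvert\sum_f\big(\mathbf I-\mathbf n_{\mathrm{def},f}\otimes\mathbf n_{\mathrm{def},f}\big)\cdot\mathbf t_f\Big\rvert .
\]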

View file

@ -10,10 +10,10 @@ if(divDSigmaExpMethod == "standard")
{
divDSigmaExp = fvc::div
(
muf*(mesh.Sf() & fvc::interpolate(gradDU.T()))
+ lambdaf*(mesh.Sf() & I*fvc::interpolate(tr(gradDU)))
- (muf + lambdaf)*(mesh.Sf() & fvc::interpolate(gradDU))
);
muf*(mesh.Sf() & fvc::interpolate(gradDU.T()))
+ lambdaf*(mesh.Sf() & I*fvc::interpolate(tr(gradDU)))
- (muf + lambdaf)*(mesh.Sf() & fvc::interpolate(gradDU))
);
}
else if(divDSigmaExpMethod == "decompose")
{
@ -22,13 +22,13 @@ if(divDSigmaExpMethod == "standard")
divDSigmaExp = fvc::div
(
mesh.magSf()
*(
- (muf + lambdaf)*(fvc::snGrad(DU)&(I - n*n))
+ lambdaf*tr(shearGradDU&(I - n*n))*n
+ muf*(shearGradDU&n)
)
);
mesh.magSf()
*(
- (muf + lambdaf)*(fvc::snGrad(DU)&(I - n*n))
+ lambdaf*tr(shearGradDU&(I - n*n))*n
+ muf*(shearGradDU&n)
)
);
}
else if(divDSigmaExpMethod == "laplacian")
{
@ -36,10 +36,10 @@ if(divDSigmaExpMethod == "standard")
- fvc::laplacian(mu + lambda, DU, "laplacian(DDU,DU)")
+ fvc::div
(
mu*gradDU.T()
+ lambda*(I*tr(gradDU)),
"div(sigma)"
);
mu*gradDU.T()
+ lambda*(I*tr(gradDU)),
"div(sigma)"
);
}
else
{

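For the record, the three branches above assemble the same explicit stress divergence in different ways. Assuming the companion momentum equation carries an implicit laplacian with diffusivity 2*mu + lambda (usual in these segregated solvers, but not shown in this hunk), the standard form is

\[
\nabla\cdot\boldsymbol\sigma^{\mathrm{exp}}
=\nabla\cdot\Big[\mu\,(\nabla\Delta\mathbf U)^{\mathrm T}
+\lambda\,\mathrm{tr}(\nabla\Delta\mathbf U)\,\mathbf I
-(\mu+\lambda)\,\nabla\Delta\mathbf U\Big],
\]

so that implicit plus explicit add up to the full Hookean increment, the divergence of mu grad(DU) + mu grad(DU)^T + lambda tr(grad(DU)) I. The decompose branch evaluates the same face flux split into normal and tangential parts of the surface gradient, and the laplacian branch instead subtracts the divergence of (mu + lambda) grad(DU) explicitly.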
View file

@ -18,10 +18,10 @@ if(divDSigmaLargeStrainExpMethod == "standard")
divDSigmaLargeStrainExp =
fvc::div
(
muf * (mesh.Sf() & fvc::interpolate(gradDU & gradDU.T()))
+ 0.5*lambdaf * (mesh.Sf() & (fvc::interpolate(gradDU && gradDU)*I))
+ (mesh.Sf() & fvc::interpolate( (sigma + DSigma) & gradDU ))
);
muf * (mesh.Sf() & fvc::interpolate(gradDU & gradDU.T()))
+ 0.5*lambdaf * (mesh.Sf() & (fvc::interpolate(gradDU && gradDU)*I))
+ (mesh.Sf() & fvc::interpolate( (sigma + DSigma) & gradDU ))
);
}
else
{

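Written out, the tensor whose face flux the standard branch above divergences is

\[
\boldsymbol\sigma^{\mathrm{exp}}_{\mathrm{NL}}
=\mu\,\nabla\Delta\mathbf U\,(\nabla\Delta\mathbf U)^{\mathrm T}
+\tfrac{1}{2}\,\lambda\,\big(\nabla\Delta\mathbf U:\nabla\Delta\mathbf U\big)\,\mathbf I
+\big(\boldsymbol\sigma+\Delta\boldsymbol\sigma\big)\cdot\nabla\Delta\mathbf U,
\]

i.e. the quadratic part of the strain increment contracted with the Lamé moduli plus the geometric term involving the current stress, interpolated to the faces and dotted with S_f exactly as coded.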
View file

@ -30,9 +30,9 @@ FieldField<Field, vector> extraVecs(ptc.size());
// extraVecs.hook(new vectorField(curFaces.size())); //- no hook function
extraVecs.set
(
pointI,
new vectorField(curFaces.size())
);
pointI,
new vectorField(curFaces.size())
);
vectorField& curExtraVectors = extraVecs[pointI];
@ -42,30 +42,30 @@ FieldField<Field, vector> extraVecs(ptc.size());
// Go through all the faces
forAll (curFaces, faceI)
{
if (!mesh.isInternalFace(curFaces[faceI]))
{
// This is a boundary face. If not in the empty patch
// or coupled calculate the extrapolation vector
label patchID =
mesh.boundaryMesh().whichPatch(curFaces[faceI]);
{
if (!mesh.isInternalFace(curFaces[faceI]))
{
// This is a boundary face. If not in the empty patch
// or coupled calculate the extrapolation vector
label patchID =
mesh.boundaryMesh().whichPatch(curFaces[faceI]);
if
(
!isA<emptyFvPatch>(bm[patchID])
&& !bm[patchID].coupled()
)
{
// Found a face for extrapolation
curExtraVectors[nFacesAroundPoint] =
pointLoc
- centres.boundaryField()[patchID]
[bm[patchID].patch().whichFace(curFaces[faceI])];
if
(
!isA<emptyFvPatch>(bm[patchID])
&& !bm[patchID].coupled()
)
{
// Found a face for extrapolation
curExtraVectors[nFacesAroundPoint] =
pointLoc
- centres.boundaryField()[patchID]
[bm[patchID].patch().whichFace(curFaces[faceI])];
nFacesAroundPoint++;
}
}
}
nFacesAroundPoint++;
}
}
}
curExtraVectors.setSize(nFacesAroundPoint);
}

View file

@ -35,9 +35,9 @@ FieldField<Field, scalar> w(ptc.size());
//w.hook(new scalarField(curFaces.size())); //philipc no hook function
w.set
(
pointI,
new scalarField(curFaces.size())
);
pointI,
new scalarField(curFaces.size())
);
scalarField& curWeights = w[pointI];
@ -47,38 +47,38 @@ FieldField<Field, scalar> w(ptc.size());
// Go through all the faces
forAll (curFaces, faceI)
{
if (!mesh.isInternalFace(curFaces[faceI]))
{
// This is a boundary face. If not in the empty patch
// or coupled calculate the extrapolation vector
label patchID =
mesh.boundaryMesh().whichPatch(curFaces[faceI]);
{
if (!mesh.isInternalFace(curFaces[faceI]))
{
// This is a boundary face. If not in the empty patch
// or coupled calculate the extrapolation vector
label patchID =
mesh.boundaryMesh().whichPatch(curFaces[faceI]);
if
(
!isA<emptyFvPatch>(bm[patchID])
&& !(
bm[patchID].coupled()
//&& Pstream::parRun()
//&& !mesh.parallelData().cyclicParallel()
)
)
{
curWeights[nFacesAroundPoint] =
1.0/mag
(
pointLoc
- centres.boundaryField()[patchID]
[
bm[patchID].patch().whichFace(curFaces[faceI])
]
);
if
(
!isA<emptyFvPatch>(bm[patchID])
&& !(
bm[patchID].coupled()
//&& Pstream::parRun()
//&& !mesh.parallelData().cyclicParallel()
)
)
{
curWeights[nFacesAroundPoint] =
1.0/mag
(
pointLoc
- centres.boundaryField()[patchID]
[
bm[patchID].patch().whichFace(curFaces[faceI])
]
);
nFacesAroundPoint++;
}
}
}
nFacesAroundPoint++;
}
}
}
// Reset the sizes of the local weights
curWeights.setSize(nFacesAroundPoint);

View file

@ -91,32 +91,32 @@
//- explicit terms in the momentum equation
volVectorField divDSigmaExp
(
IOobject
(
"divDSigmaExp",
runTime.timeName(),
mesh,
IOobject::NO_READ,
IOobject::NO_WRITE
),
mesh,
dimensionedVector("zero", dimensionSet(1, -2, -2, 0, 0, 0, 0), vector::zero)
);
(
IOobject
(
"divDSigmaExp",
runTime.timeName(),
mesh,
IOobject::NO_READ,
IOobject::NO_WRITE
),
mesh,
dimensionedVector("zero", dimensionSet(1, -2, -2, 0, 0, 0, 0), vector::zero)
);
volVectorField divDSigmaLargeStrainExp
(
IOobject
(
"divDSigmaLargeStrainExp",
runTime.timeName(),
mesh,
IOobject::NO_READ,
IOobject::NO_WRITE
),
mesh,
dimensionedVector("zero", dimensionSet(1, -2, -2, 0, 0, 0, 0), vector::zero)
);
(
IOobject
(
"divDSigmaLargeStrainExp",
runTime.timeName(),
mesh,
IOobject::NO_READ,
IOobject::NO_WRITE
),
mesh,
dimensionedVector("zero", dimensionSet(1, -2, -2, 0, 0, 0, 0), vector::zero)
);
constitutiveModel rheology(sigma, DU);

View file

@ -10,20 +10,20 @@ solidInterface* solidInterfacePtr(NULL);
if(solidInterfaceCorr)
{
Info << "Creating solid interface correction" << endl;
solidInterfacePtr = new solidInterface(mesh, rheology);
solidInterfacePtr->modifyProperties(muf, lambdaf);
Info << "Creating solid interface correction" << endl;
solidInterfacePtr = new solidInterface(mesh, rheology);
solidInterfacePtr->modifyProperties(muf, lambdaf);
//- solidInterface needs muf and lambdaf to be used for divDSigmaExp
if(divDSigmaExpMethod != "surface" && divDSigmaExpMethod != "decompose")
{
FatalError << "divDSigmaExp must be decompose or surface when solidInterface is on"
<< exit(FatalError);
}
if(divDSigmaLargeStrainExpMethod == "surface")
{
FatalError << "divDSigmaLargeStrainExp must be surface when solidInterface is on"
<< exit(FatalError);
}
//- solidInterface needs muf and lambdaf to be used for divDSigmaExp
if(divDSigmaExpMethod != "surface" && divDSigmaExpMethod != "decompose")
{
FatalError << "divDSigmaExp must be decompose or surface when solidInterface is on"
<< exit(FatalError);
}
if(divDSigmaLargeStrainExpMethod == "surface")
{
FatalError << "divDSigmaLargeStrainExp must be surface when solidInterface is on"
<< exit(FatalError);
}
}
}

View file

@ -10,13 +10,13 @@ if(solidInterfaceCorr)
//- solidInterface needs muf and lambdaf to be used for divDSigmaExp
if(divDSigmaExpMethod != "surface" && divDSigmaExpMethod != "decompose")
{
FatalError << "divDSigmaExp must be decompose or surface when solidInterface is on"
<< exit(FatalError);
FatalError << "divDSigmaExp must be decompose or surface when solidInterface is on"
<< exit(FatalError);
}
if(divDSigmaLargeStrainExpMethod != "surface")
{
FatalError << "divDSigmaLargeStrainExp must be surface when solidInterface is on"
<< exit(FatalError);
FatalError << "divDSigmaLargeStrainExp must be surface when solidInterface is on"
<< exit(FatalError);
}
}

View file

@ -14,10 +14,10 @@ forAll (bm, patchI)
(
!isA<emptyFvPatch>(bm[patchI])
&& !(
bm[patchI].coupled()
//&& Pstream::parRun()
//&& !mesh.parallelData().cyclicParallel()
)
bm[patchI].coupled()
//&& Pstream::parRun()
//&& !mesh.parallelData().cyclicParallel()
)
)
{
const labelList& bp = bm[patchI].patch().boundaryPoints();
@ -25,9 +25,9 @@ forAll (bm, patchI)
const labelList& meshPoints = bm[patchI].patch().meshPoints();
forAll (bp, pointI)
{
pointsCorrectionMap.insert(meshPoints[bp[pointI]]);
}
{
pointsCorrectionMap.insert(meshPoints[bp[pointI]]);
}
}
}

View file

@ -10,7 +10,7 @@ else
{
FatalErrorIn(args.executable())
<< "move mesh method " << moveMeshMethod << " not recognised" << nl
<< "available methods are:" << nl
<< "inverseDistance" << nl
<< "leastSquares" << exit(FatalError);
<< "available methods are:" << nl
<< "inverseDistance" << nl
<< "leastSquares" << exit(FatalError);
}

View file

@ -21,10 +21,10 @@
(
IOobject
(
"pointDU",
runTime.timeName(),
mesh
),
"pointDU",
runTime.timeName(),
mesh
),
pMesh,
dimensionedVector("zero", dimLength, vector::zero),
types
@ -48,7 +48,7 @@
forAll (pointDUI, pointI)
{
newPoints[pointI] += pointDUI[pointI];
newPoints[pointI] += pointDUI[pointI];
}
twoDPointCorrector twoDCorrector(mesh);
@ -60,6 +60,6 @@
// else
// {
// FatalErrorIn(args.executable())
// << "Negative Jacobian"
// << exit(FatalError);
// << "Negative Jacobian"
// << exit(FatalError);
// }

View file

@ -67,31 +67,31 @@ forAll (ptc, pointI)
forAll (curFaces, faceI)
{
if (!mesh.isInternalFace(curFaces[faceI]))
{
// This is a boundary face. If not in the empty patch
// or coupled calculate the extrapolation vector
label patchID =
mesh.boundaryMesh().whichPatch(curFaces[faceI]);
{
// This is a boundary face. If not in the empty patch
// or coupled calculate the extrapolation vector
label patchID =
mesh.boundaryMesh().whichPatch(curFaces[faceI]);
if
(
!isA<emptyFvPatch>(mesh.boundary()[patchID])
&& !mesh.boundary()[patchID].coupled()
)
{
label faceInPatchID =
bm[patchID].patch().whichFace(curFaces[faceI]);
if
(
!isA<emptyFvPatch>(mesh.boundary()[patchID])
&& !mesh.boundary()[patchID].coupled()
)
{
label faceInPatchID =
bm[patchID].patch().whichFace(curFaces[faceI]);
pfCorr[curPoint] +=
w[pointI][fI]*
(
extraVecs[pointI][fI]
& gradDU.boundaryField()[patchID][faceInPatchID]
);
pfCorr[curPoint] +=
w[pointI][fI]*
(
extraVecs[pointI][fI]
& gradDU.boundaryField()[patchID][faceInPatchID]
);
fI++;
}
}
fI++;
}
}
}
}
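
Taken together with the weights loop earlier in this interpolation, the boundary correction applied above is an inverse-distance weighted extrapolation from the surrounding non-empty, non-coupled boundary faces to the boundary point p:

\[
\mathbf d_f=\mathbf x_p-\mathbf x_f,\qquad
w_f=\frac{1}{\lvert\mathbf d_f\rvert},\qquad
\mathrm{pfCorr}_p \mathrel{+}= \sum_f w_f\,\big(\mathbf d_f\cdot(\nabla\Delta\mathbf U)_f\big),
\]

with d_f the extrapolation vectors built above; any normalisation of the weights is not part of these hunks.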

View file

@ -4,33 +4,33 @@ if (runTime.outputTime())
(
IOobject
(
"epsilonEq",
runTime.timeName(),
mesh,
IOobject::NO_READ,
IOobject::AUTO_WRITE
),
"epsilonEq",
runTime.timeName(),
mesh,
IOobject::NO_READ,
IOobject::AUTO_WRITE
),
sqrt((2.0/3.0)*magSqr(dev(epsilon)))
);
Info<< "Max epsilonEq = " << max(epsilonEq).value()
<< endl;
<< endl;
volScalarField sigmaEq
(
IOobject
(
"sigmaEq",
runTime.timeName(),
mesh,
IOobject::NO_READ,
IOobject::AUTO_WRITE
),
"sigmaEq",
runTime.timeName(),
mesh,
IOobject::NO_READ,
IOobject::AUTO_WRITE
),
sqrt((3.0/2.0)*magSqr(dev(sigma)))
);
Info<< "Max sigmaEq = " << max(sigmaEq).value()
<< endl;
<< endl;
runTime.write();
}

View file

@ -16,9 +16,9 @@ if(iCorr == 0)
scalar sumMagB = gSum(magSqr(b));
if(sumMagB < SMALL)
{
        //Warning << "Aitken under-relaxation: denominator less than SMALL"
// << endl;
sumMagB += SMALL;
        //Warning << "Aitken under-relaxation: denominator less than SMALL"
// << endl;
sumMagB += SMALL;
}
aitkenTheta = -aitkenTheta*

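The guard above only protects the denominator; the statement it truncates presumably continues as the usual dynamic Aitken update, with r_k the unrelaxed correction of outer iteration k and b = r_k - r_{k-1} the field whose squared magnitude is summed into sumMagB:

\[
\theta_{k+1}=-\,\theta_k\,\frac{\mathbf r_{k-1}\cdot\big(\mathbf r_k-\mathbf r_{k-1}\big)}{\lVert\mathbf r_k-\mathbf r_{k-1}\rVert^{2}},\qquad
\mathbf U_{k+1}=\mathbf U_k+\theta_{k+1}\,\mathbf r_k .
\]

A minimal, self-contained sketch of the same scheme on a scalar fixed point (illustrative only: the variable names mirror the loop above, and SMALL, the loop bounds and the test problem x = cos(x) are arbitrary choices, not taken from the solver):

    // Illustrative only: dynamic Aitken under-relaxation on the scalar fixed
    // point x = cos(x). The structure (correction r, previous correction rPrev,
    // guarded denominator, theta update) mirrors the solver loop above; the
    // names, SMALL value and tolerances are arbitrary, not taken from foam-extend.
    #include <cmath>
    #include <cstdio>

    int main()
    {
        double x = 0.0;          // current iterate (plays the role of U)
        double theta = 0.1;      // initial relaxation factor
        double rPrev = 0.0;      // unrelaxed correction of the previous iteration
        const double SMALL = 1.0e-15;

        for (int iCorr = 0; iCorr < 50; ++iCorr)
        {
            double r = std::cos(x) - x;              // unrelaxed correction

            if (iCorr > 0)
            {
                double b = r - rPrev;                // change in the correction
                double sumMagB = b*b;
                if (sumMagB < SMALL)
                {
                    sumMagB += SMALL;                // same guard as above
                }
                theta = -theta*(rPrev*b)/sumMagB;    // dynamic Aitken update
            }

            x += theta*r;                            // relaxed update
            rPrev = r;

            std::printf("iter %2d  x = %.12f  |r| = %.3e\n", iCorr, x, std::fabs(r));

            if (std::fabs(r) < 1.0e-12)
            {
                break;
            }
        }

        return 0;
    }

Run as is, it settles on x = 0.739085... within a handful of outer iterations, which is the behaviour the guard and update above aim for on the displacement field.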
View file

@ -8,11 +8,11 @@ if(divSigmaExpMethod == "standard")
//- this form seems to have the best convergence
divSigmaExp =
fvc::div(mesh.magSf()*
(
(n&(Cf && fvc::interpolate(symm(gradU))))
- (n&(Kf & fvc::interpolate(gradU)))
)
);
(
(n&(Cf && fvc::interpolate(symm(gradU))))
- (n&(Kf & fvc::interpolate(gradU)))
)
);
}
else if(divSigmaExpMethod == "laplacian")
{

View file

@ -6,14 +6,14 @@
forAll(mesh.boundary(), patchi)
{
netForce +=
sum(
mesh.Sf().boundaryField()[patchi]
&
(
2*mu.boundaryField()[patchi]*symm(gradU.boundaryField()[patchi])
+ lambda*tr(gradU.boundaryField()[patchi])*I
)
);
sum(
mesh.Sf().boundaryField()[patchi]
&
(
2*mu.boundaryField()[patchi]*symm(gradU.boundaryField()[patchi])
+ lambda*tr(gradU.boundaryField()[patchi])*I
)
);
}
forceResidual = mag(netForce);
}
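
As a convergence check, the residual above is the magnitude of the net boundary force under Hooke's law; for a static case without body forces, momentum balance div(sigma) = 0 implies that the traction integrated over the closed boundary vanishes, so this quantity should tend to zero as the outer iterations converge:

\[
\boldsymbol\sigma=2\mu\,\mathrm{symm}(\nabla\mathbf U)+\lambda\,\mathrm{tr}(\nabla\mathbf U)\,\mathbf I,\qquad
\mathrm{forceResidual}=\Big\lvert\sum_{\mathrm{patches}}\;\sum_f\mathbf S_f\cdot\boldsymbol\sigma_f\Big\rvert .
\]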

View file

@ -28,14 +28,14 @@
// forAll(traction.boundaryField(), patchi)
// {
// if (mesh.boundary()[patchi].type() == "cohesive")
// {
// forAll(traction.boundaryField()[patchi], facei)
// {
// Pout << "face " << facei << " with traction magnitude "
// << mag(traction.boundaryField()[patchi][facei])/1e6 << " MPa and traction "
// << traction.boundaryField()[patchi][facei]/1e6 << " MPa" << endl;
// }
// }
// if (mesh.boundary()[patchi].type() == "cohesive")
// {
// forAll(traction.boundaryField()[patchi], facei)
// {
// Pout << "face " << facei << " with traction magnitude "
// << mag(traction.boundaryField()[patchi][facei])/1e6 << " MPa and traction "
// << traction.boundaryField()[patchi][facei]/1e6 << " MPa" << endl;
// }
// }
// }
}

View file

@ -7,40 +7,40 @@
{
if (isA<solidCohesiveFvPatchVectorField>(U.boundaryField()[patchI]))
{
cohesivePatchID = patchI;
cohesivePatchUPtr =
&refCast<solidCohesiveFvPatchVectorField>
(
U.boundaryField()[cohesivePatchID]
);
break;
cohesivePatchID = patchI;
cohesivePatchUPtr =
&refCast<solidCohesiveFvPatchVectorField>
(
U.boundaryField()[cohesivePatchID]
);
break;
}
else if (isA<solidCohesiveFixedModeMixFvPatchVectorField>(U.boundaryField()[patchI]))
{
cohesivePatchID = patchI;
cohesivePatchUFixedModePtr =
&refCast<solidCohesiveFixedModeMixFvPatchVectorField>
(
U.boundaryField()[cohesivePatchID]
);
break;
}
{
cohesivePatchID = patchI;
cohesivePatchUFixedModePtr =
&refCast<solidCohesiveFixedModeMixFvPatchVectorField>
(
U.boundaryField()[cohesivePatchID]
);
break;
}
}
if(cohesivePatchID == -1)
{
FatalErrorIn(args.executable())
<< "Can't find cohesiveLawFvPatch" << nl
<< "One of the boundary patches in " << U.name() << ".boundaryField() "
<< "should be of type " << solidCohesiveFvPatchVectorField::typeName
        << " or " << solidCohesiveFixedModeMixFvPatchVectorField::typeName
<< abort(FatalError);
<< "One of the boundary patches in " << U.name() << ".boundaryField() "
<< "should be of type " << solidCohesiveFvPatchVectorField::typeName
        << " or " << solidCohesiveFixedModeMixFvPatchVectorField::typeName
<< abort(FatalError);
}
// solidCohesiveFvPatchVectorField& cohesivePatchU =
// refCast<solidCohesiveFvPatchVectorField>
// (
// U.boundaryField()[cohesivePatchID]
// refCast<solidCohesiveFvPatchVectorField>
// (
// U.boundaryField()[cohesivePatchID]
// );
// philipc: I have moved cohesive stuff to constitutiveModel
@ -65,82 +65,82 @@
// limit crack to specified boxes
{
const dictionary& stressControl =
mesh.solutionDict().subDict("solidMechanics");
mesh.solutionDict().subDict("solidMechanics");
List<boundBox> userBoxes(stressControl.lookup("crackLimitingBoxes"));
const surfaceVectorField& Cf = mesh.Cf();
forAll(cohesiveZone.internalField(), faceI)
{
bool faceInsideBox = false;
{
bool faceInsideBox = false;
forAll(userBoxes, boxi)
{
if(userBoxes[boxi].contains(Cf.internalField()[faceI])) faceInsideBox = true;
}
forAll(userBoxes, boxi)
{
if(userBoxes[boxi].contains(Cf.internalField()[faceI])) faceInsideBox = true;
}
if(faceInsideBox)
{
cohesiveZone.internalField()[faceI] = 1.0;
}
}
if(faceInsideBox)
{
cohesiveZone.internalField()[faceI] = 1.0;
}
}
forAll(cohesiveZone.boundaryField(), patchI)
{
// cracks may go along proc boundaries
if(mesh.boundaryMesh()[patchI].type() == processorPolyPatch::typeName)
{
forAll(cohesiveZone.boundaryField()[patchI], faceI)
{
bool faceInsideBox = false;
{
// cracks may go along proc boundaries
if(mesh.boundaryMesh()[patchI].type() == processorPolyPatch::typeName)
{
forAll(cohesiveZone.boundaryField()[patchI], faceI)
{
bool faceInsideBox = false;
forAll(userBoxes, boxi)
{
if(userBoxes[boxi].contains(Cf.boundaryField()[patchI][faceI])) faceInsideBox = true;
}
forAll(userBoxes, boxi)
{
if(userBoxes[boxi].contains(Cf.boundaryField()[patchI][faceI])) faceInsideBox = true;
}
if(faceInsideBox)
{
cohesiveZone.boundaryField()[patchI][faceI] = 1.0;
}
}
}
}
if(faceInsideBox)
{
cohesiveZone.boundaryField()[patchI][faceI] = 1.0;
}
}
}
}
Info << "\nThere are " << gSum(cohesiveZone.internalField()) << " potential internal crack faces" << nl << endl;
Info << "\nThere are " << gSum(cohesiveZone.boundaryField())/2 << " potential coupled boundary crack faces" << nl << endl;
// write field for visualisation
volScalarField cohesiveZoneVol
(
IOobject
(
"cohesiveZoneVol",
runTime.timeName(),
mesh,
IOobject::NO_READ,
IOobject::AUTO_WRITE
),
mesh,
dimensionedScalar("zero", dimless, 0.0)
);
(
IOobject
(
"cohesiveZoneVol",
runTime.timeName(),
mesh,
IOobject::NO_READ,
IOobject::AUTO_WRITE
),
mesh,
dimensionedScalar("zero", dimless, 0.0)
);
forAll(cohesiveZone.internalField(), facei)
{
if(cohesiveZone.internalField()[facei])
{
cohesiveZoneVol.internalField()[mesh.owner()[facei]] = 1.0;
cohesiveZoneVol.internalField()[mesh.neighbour()[facei]] = 1.0;
}
}
{
if(cohesiveZone.internalField()[facei])
{
cohesiveZoneVol.internalField()[mesh.owner()[facei]] = 1.0;
cohesiveZoneVol.internalField()[mesh.neighbour()[facei]] = 1.0;
}
}
forAll(cohesiveZone.boundaryField(), patchi)
{
forAll(cohesiveZone.boundaryField()[patchi], facei)
{
if(cohesiveZone.boundaryField()[patchi][facei] > 0.0)
{
cohesiveZoneVol.boundaryField()[patchi][facei] = 1.0;
}
}
}
{
forAll(cohesiveZone.boundaryField()[patchi], facei)
{
if(cohesiveZone.boundaryField()[patchi][facei] > 0.0)
{
cohesiveZoneVol.boundaryField()[patchi][facei] = 1.0;
}
}
}
Info << "Writing cohesiveZone field" << endl;
cohesiveZoneVol.write();
}

View file

@ -35,8 +35,8 @@
IOobject::NO_READ,
IOobject::NO_WRITE
),
mesh,
dimensionedVector("zero", dimless, vector::zero)
mesh,
dimensionedVector("zero", dimless, vector::zero)
);
volVectorField V
@ -122,7 +122,7 @@
IOobject::NO_WRITE
),
mesh,
dimensionedVector("zero", dimLength, vector::zero)
dimensionedVector("zero", dimLength, vector::zero)
);
// aitken relaxation factor
scalar aitkenInitialRes = 1.0;
@ -139,5 +139,5 @@ scalar aitkenTheta = 0.1;
// IOobject::AUTO_WRITE
// ),
// mesh,
// dimensionedVector("zero", dimless, vector::zero)
// dimensionedVector("zero", dimless, vector::zero)
// );

View file

@ -4,14 +4,14 @@ label historyPatchID = mesh.boundaryMesh().findPatchID(historyPatchName);
if(historyPatchID == -1)
{
Warning << "history patch " << historyPatchName
<< " not found. Force-displacement will not be written"
<< endl;
<< " not found. Force-displacement will not be written"
<< endl;
}
else if(Pstream::master())
{
Info << "Force-displacement for patch " << historyPatchName
<< " will be written to forceDisp.dat"
<< endl;
<< " will be written to forceDisp.dat"
<< endl;
word hisDirName("history");
mkDir(hisDirName);
filePtr = new OFstream(hisDirName/historyPatchName+"forceDisp.dat");

View file

@ -5,6 +5,6 @@ Info << "Selecting divSigmaExp calculation method " << divSigmaExpMethod << end
if(divSigmaExpMethod != "standard" && divSigmaExpMethod != "surface" && divSigmaExpMethod != "laplacian")
{
FatalError << "divSigmaExp method " << divSigmaExpMethod << " not found!" << nl
<< "valid methods are:\nstandard\nsurface\nlaplacian"
<< exit(FatalError);
<< "valid methods are:\nstandard\nsurface\nlaplacian"
<< exit(FatalError);
}

View file

@ -14,5 +14,5 @@ Switch relaxEqn(stressControl.lookup("relaxEqn"));
if(relaxEqn && solidInterfaceCorr)
{
FatalError << "relaxEqn and solidInterface may not be used concurrently"
<< exit(FatalError);
<< exit(FatalError);
}

View file

@ -15,16 +15,16 @@ if (dynamicTimeStep)
scalar newDeltaT = deltaTmin;
if (newDeltaT/runTime.deltaT().value() < 0.5)
{
{
newDeltaT = 0.5*runTime.deltaT().value();
Info << "Reducing time step" << nl;
}
}
runTime.setDeltaT(newDeltaT);
}
Pout << "Current time step size: "
<< runTime.deltaT().value() << " s" << endl;
<< runTime.deltaT().value() << " s" << endl;
scalar maxDT = runTime.deltaT().value();

View file

@ -13,11 +13,11 @@
// with a processor boundary
//if (U.boundaryField()[patchI].fixesValue())
if (
U.boundaryField()[patchI].fixesValue()
||
mesh.boundaryMesh()[patchI].type()
== processorPolyPatch::typeName
)
U.boundaryField()[patchI].fixesValue()
||
mesh.boundaryMesh()[patchI].type()
== processorPolyPatch::typeName
)
{
const unallocLabelList& curFaceCells =
mesh.boundary()[patchI].faceCells();

View file

@ -88,12 +88,12 @@ if (runTime.outputTime() || topoChange)
(
IOobject
(
"damageAndCracks",
runTime.timeName(),
mesh,
IOobject::NO_READ,
IOobject::AUTO_WRITE
),
"damageAndCracks",
runTime.timeName(),
mesh,
IOobject::NO_READ,
IOobject::AUTO_WRITE
),
mesh,
dimensionedScalar("zero", dimless, 0.0),
calculatedFvPatchVectorField::typeName
@ -102,12 +102,12 @@ if (runTime.outputTime() || topoChange)
(
IOobject
(
"GI",
runTime.timeName(),
mesh,
IOobject::NO_READ,
IOobject::AUTO_WRITE
),
"GI",
runTime.timeName(),
mesh,
IOobject::NO_READ,
IOobject::AUTO_WRITE
),
mesh,
dimensionedScalar("zero", dimless, 0.0),
calculatedFvPatchVectorField::typeName
@ -116,30 +116,30 @@ if (runTime.outputTime() || topoChange)
(
IOobject
(
"GII",
runTime.timeName(),
mesh,
IOobject::NO_READ,
IOobject::AUTO_WRITE
),
"GII",
runTime.timeName(),
mesh,
IOobject::NO_READ,
IOobject::AUTO_WRITE
),
mesh,
dimensionedScalar("zero", dimless, 0.0),
calculatedFvPatchVectorField::typeName
);
forAll(U.boundaryField(), patchi)
{
// if(U.boundaryField()[patchi].type() == cohesiveLawMultiMatFvPatchVectorField::typeName)
if(U.boundaryField()[patchi].type() == solidCohesiveFvPatchVectorField::typeName)
{
// cohesiveLawMultiMatFvPatchVectorField& Upatch =
// refCast<cohesiveLawMultiMatFvPatchVectorField>(U.boundaryField()[patchi]);
solidCohesiveFvPatchVectorField& Upatch =
refCast<solidCohesiveFvPatchVectorField>(U.boundaryField()[patchi]);
// if(U.boundaryField()[patchi].type() == cohesiveLawMultiMatFvPatchVectorField::typeName)
if(U.boundaryField()[patchi].type() == solidCohesiveFvPatchVectorField::typeName)
{
// cohesiveLawMultiMatFvPatchVectorField& Upatch =
// refCast<cohesiveLawMultiMatFvPatchVectorField>(U.boundaryField()[patchi]);
solidCohesiveFvPatchVectorField& Upatch =
refCast<solidCohesiveFvPatchVectorField>(U.boundaryField()[patchi]);
GI.boundaryField()[patchi] = Upatch.GI();
GII.boundaryField()[patchi] = Upatch.GII();
damageAndCracks.boundaryField()[patchi] = Upatch.crackingAndDamage();
}
GI.boundaryField()[patchi] = Upatch.GI();
GII.boundaryField()[patchi] = Upatch.GII();
damageAndCracks.boundaryField()[patchi] = Upatch.crackingAndDamage();
}
}
volScalarField GTotal("GTotal", GI + GII);
GTotal.write();

View file

@ -2,7 +2,7 @@
if(historyPatchID != -1)
{
Info << "Found patch "<<historyPatchName<<", writing y force and displacement to file"
<< endl;
<< endl;
//- for small strain or moving mesh
vector force = gSum(mesh.boundary()[historyPatchID].Sf() & sigma.boundaryField()[historyPatchID]);
@ -12,8 +12,8 @@ if(historyPatchID != -1)
//- write to file
if(Pstream::master())
{
OFstream& forceDispFile = *filePtr;
forceDispFile << avDisp.x() << " " << avDisp.y() << " " << avDisp.z() << " "
<< force.x() << " " << force.y() << " " << force.z() << endl;
OFstream& forceDispFile = *filePtr;
forceDispFile << avDisp.x() << " " << avDisp.y() << " " << avDisp.z() << " "
<< force.x() << " " << force.y() << " " << force.z() << endl;
}
}

View file

@ -5,23 +5,23 @@ if(rheology.planeStress())
forAll(gradDU.internalField(), celli)
{
gradDU.internalField()[celli][tensor::ZZ] =
((-C.internalField()[celli][symmTensor4thOrder::XXZZ]*DEpsilon.internalField()[celli][symmTensor::XX]
- C.internalField()[celli][symmTensor4thOrder::YYZZ]*DEpsilon.internalField()[celli][symmTensor::YY])
gradDU.internalField()[celli][tensor::ZZ] =
((-C.internalField()[celli][symmTensor4thOrder::XXZZ]*DEpsilon.internalField()[celli][symmTensor::XX]
- C.internalField()[celli][symmTensor4thOrder::YYZZ]*DEpsilon.internalField()[celli][symmTensor::YY])
/
C.internalField()[celli][symmTensor4thOrder::ZZZZ])
-higherTerms.internalField()[celli];
C.internalField()[celli][symmTensor4thOrder::ZZZZ])
-higherTerms.internalField()[celli];
}
forAll(gradDU.boundaryField(), patchi)
{
forAll(gradDU.boundaryField()[patchi], facei)
{
gradDU.boundaryField()[patchi][facei][tensor::ZZ] =
((-C.boundaryField()[patchi][facei][symmTensor4thOrder::XXZZ]*DEpsilon.boundaryField()[patchi][facei][symmTensor::XX]
- C.boundaryField()[patchi][facei][symmTensor4thOrder::YYZZ]*DEpsilon.boundaryField()[patchi][facei][symmTensor::YY])
forAll(gradDU.boundaryField()[patchi], facei)
{
gradDU.boundaryField()[patchi][facei][tensor::ZZ] =
((-C.boundaryField()[patchi][facei][symmTensor4thOrder::XXZZ]*DEpsilon.boundaryField()[patchi][facei][symmTensor::XX]
- C.boundaryField()[patchi][facei][symmTensor4thOrder::YYZZ]*DEpsilon.boundaryField()[patchi][facei][symmTensor::YY])
/
C.boundaryField()[patchi][facei][symmTensor4thOrder::ZZZZ])
- higherTerms.boundaryField()[patchi][facei];
}
C.boundaryField()[patchi][facei][symmTensor4thOrder::ZZZZ])
- higherTerms.boundaryField()[patchi][facei];
}
}
}
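
The plane-stress correction above enforces a zero zz stress increment through the zz row of the fourth-order stiffness: setting Delta sigma_zz = C_zzxx Delta eps_xx + C_zzyy Delta eps_yy + C_zzzz Delta eps_zz = 0 and using the minor symmetry C_zzxx = C_xxzz gives

\[
\frac{\partial\,\Delta U_z}{\partial z}
=\frac{-\,C_{xxzz}\,\Delta\varepsilon_{xx}-C_{yyzz}\,\Delta\varepsilon_{yy}}{C_{zzzz}}
\;-\;\text{higherTerms},
\]

using the small-strain identity between the zz strain increment and the zz displacement gradient; the separately assembled higherTerms field (built outside this hunk) collects whatever the linear relation misses.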

View file

@ -111,5 +111,5 @@
// if(rheology.planeStress())
// {
// Info << nl << "Plane stress is set to yes -> the zz stress will be zero" << nl << endl;
// Info << nl << "Plane stress is set to yes -> the zz stress will be zero" << nl << endl;
// }

View file

@ -51,6 +51,6 @@
// else
// {
// FatalErrorIn(args.executable())
// << "Negative Jacobian"
// << exit(FatalError);
// << "Negative Jacobian"
// << exit(FatalError);
// }

View file

@ -18,8 +18,8 @@
if(min(J.internalField()) < 0)
{
FatalErrorIn(args.executable())
<< "Negative Jacobian - a cell volume has become negative!"
<< exit(FatalError);
<< "Negative Jacobian - a cell volume has become negative!"
<< exit(FatalError);
}
rho = rho/J;
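
The check and update above are mass conservation in the deformed configuration: with J the Jacobian (determinant of the deformation gradient) formed upstream of this hunk, a negative J means a cell has inverted, and otherwise the density is rescaled so that rho*J stays constant:

\[
J=\det\mathbf F,\qquad \rho\;\leftarrow\;\frac{\rho}{J}.
\]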

View file

@ -6,51 +6,51 @@ if (runTime.outputTime())
(
IOobject
(
"epsilonEq",
runTime.timeName(),
mesh,
IOobject::NO_READ,
IOobject::AUTO_WRITE
),
"epsilonEq",
runTime.timeName(),
mesh,
IOobject::NO_READ,
IOobject::AUTO_WRITE
),
sqrt((2.0/3.0)*magSqr(dev(epsilon)))
);
Info<< "Max epsilonEq = " << max(epsilonEq).value()
<< endl;
<< endl;
volScalarField sigmaEq
(
IOobject
(
"sigmaEq",
runTime.timeName(),
mesh,
IOobject::NO_READ,
IOobject::AUTO_WRITE
),
"sigmaEq",
runTime.timeName(),
mesh,
IOobject::NO_READ,
IOobject::AUTO_WRITE
),
sqrt((3.0/2.0)*magSqr(dev(sigma)))
);
Info<< "Max sigmaEq = " << max(sigmaEq).value()
<< endl;
<< endl;
// volVectorField traction
// (
// IOobject
// (
// "traction",
// runTime.timeName(),
// mesh,
// IOobject::NO_READ,
// IOobject::AUTO_WRITE
// ),
// "traction",
// runTime.timeName(),
// mesh,
// IOobject::NO_READ,
// IOobject::AUTO_WRITE
// ),
// mesh,
// dimensionedVector("zero", dimForce/dimArea, vector::zero)
// );
// forAll(mesh.boundary(), patchi)
// {
// traction.boundaryField()[patchi] =
// n.boundaryField()[patchi] & sigma.boundaryField()[patchi];
// traction.boundaryField()[patchi] =
// n.boundaryField()[patchi] & sigma.boundaryField()[patchi];
// }
// //- patch forces
@ -60,7 +60,7 @@ if (runTime.outputTime())
// vectorField totalForce = mesh.Sf().boundaryField()[patchi] & sigma.boundaryField()[patchi];
// vector force = sum( totalForce );
// Info << "\ttotal force is " << force << " N" << endl;
// tensorField F = I + gradDU.boundaryField()[patchi];
// tensorField F = I + gradDU.boundaryField()[patchi];
// tensorField Finv = inv(F);
// scalar normalForce = sum( n.boundaryField()[patchi] & totalForce );
// Info << "\tnormal force is " << normalForce << " N" << endl;

View file

@ -3,21 +3,21 @@ if(divSigmaExpMethod == "standard")
//- calculating the full gradient has good convergence and no high freq oscillations
divSigmaExp =
fvc::div(
(C && symm(gradU))
- (K & gradU),
"div(sigma)"
);
(C && symm(gradU))
- (K & gradU),
"div(sigma)"
);
}
else if(divSigmaExpMethod == "surface")
{
//- this form seems to have the best convergence
divSigmaExp =
fvc::div(mesh.magSf()*
(
(n&(Cf && fvc::interpolate(symm(gradU))))
- (n&(Kf & fvc::interpolate(gradU)))
)
);
(
(n&(Cf && fvc::interpolate(symm(gradU))))
- (n&(Kf & fvc::interpolate(gradU)))
)
);
}
else if(divSigmaExpMethod == "laplacian")
{

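In tensor form the explicit term assembled above is

\[
\nabla\cdot\boldsymbol\sigma^{\mathrm{exp}}
=\nabla\cdot\big[\mathbf C:\mathrm{symm}(\nabla\mathbf U)\;-\;\mathbf K\cdot\nabla\mathbf U\big],
\]

so that adding an implicit term in div(K . grad U) on the matrix side (an assumption about the companion momentum equation suggested by the K subtraction, not shown here) recovers the full orthotropic Hooke's law div(C : epsilon); the surface branch evaluates the same flux from face-interpolated gradients dotted with the face normals.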
View file

@ -4,14 +4,14 @@ label historyPatchID = mesh.boundaryMesh().findPatchID(historyPatchName);
if(historyPatchID == -1)
{
Warning << "history patch " << historyPatchName
<< " not found. Force-displacement will not be written"
<< endl;
<< " not found. Force-displacement will not be written"
<< endl;
}
else if(Pstream::master())
{
Info << "Force-displacement for patch " << historyPatchName
<< " will be written to forceDisp.dat"
<< endl;
<< " will be written to forceDisp.dat"
<< endl;
word hisDirName("history");
mkDir(hisDirName);
filePtr = new OFstream(hisDirName/historyPatchName+"forceDisp.dat");

View file

@ -4,6 +4,6 @@ Info << "Calculation of divSigmaExp Method: " << divSigmaExpMethod << endl;
if(divSigmaExpMethod != "standard" && divSigmaExpMethod != "surface" && divSigmaExpMethod != "laplacian")
{
FatalError << "divSigmaExp method " << divSigmaExpMethod << " not found!" << nl
<< "valid methods are:\nstandard\nsurface\nlaplacian"
<< exit(FatalError);
<< "valid methods are:\nstandard\nsurface\nlaplacian"
<< exit(FatalError);
}

View file

@ -1,13 +1,13 @@
//- set gradU.zz() for plane stress
if(rheology.planeStress())
{
forAll(gradU.internalField(), celli)
{
gradU.internalField()[celli].zz() =
(-C.internalField()[celli].xxzz()*epsilon.internalField()[celli].xx()
- C.internalField()[celli].yyzz()*epsilon.internalField()[celli].yy())
/
C.internalField()[celli].zzzz();
}
gradU.correctBoundaryConditions();
}
//- set gradU.zz() for plane stress
if(rheology.planeStress())
{
forAll(gradU.internalField(), celli)
{
gradU.internalField()[celli].zz() =
(-C.internalField()[celli].xxzz()*epsilon.internalField()[celli].xx()
- C.internalField()[celli].yyzz()*epsilon.internalField()[celli].yy())
/
C.internalField()[celli].zzzz();
}
gradU.correctBoundaryConditions();
}
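
For an isotropic material the same plane-stress condition collapses to a familiar closed form, a convenient sanity check on the C_xxzz/C_zzzz ratio used above:

\[
\varepsilon_{zz}
=-\frac{\lambda}{\lambda+2\mu}\,\big(\varepsilon_{xx}+\varepsilon_{yy}\big)
=-\frac{\nu}{1-\nu}\,\big(\varepsilon_{xx}+\varepsilon_{yy}\big).
\]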

View file

@ -6,33 +6,33 @@ if (runTime.outputTime())
(
IOobject
(
"epsilonEq",
runTime.timeName(),
mesh,
IOobject::NO_READ,
IOobject::AUTO_WRITE
),
"epsilonEq",
runTime.timeName(),
mesh,
IOobject::NO_READ,
IOobject::AUTO_WRITE
),
sqrt((2.0/3.0)*magSqr(dev(epsilon)))
);
Info<< "Max epsilonEq = " << max(epsilonEq).value()
<< endl;
<< endl;
volScalarField sigmaEq
(
IOobject
(
"sigmaEq",
runTime.timeName(),
mesh,
IOobject::NO_READ,
IOobject::AUTO_WRITE
),
"sigmaEq",
runTime.timeName(),
mesh,
IOobject::NO_READ,
IOobject::AUTO_WRITE
),
sqrt((3.0/2.0)*magSqr(dev(sigma)))
);
Info<< "Max sigmaEq = " << max(sigmaEq).value()
<< endl;
<< endl;
runTime.write();
}

View file

@ -2,7 +2,7 @@
if(historyPatchID != -1)
{
Info << "Writing disp and force of patch "<<historyPatchName<<" to file"
<< endl;
<< endl;
//- for small strain or moving mesh
vector force = gSum(mesh.boundary()[historyPatchID].Sf() & sigma.boundaryField()[historyPatchID]);
@ -12,8 +12,8 @@ if(historyPatchID != -1)
//- write to file
if(Pstream::master())
{
OFstream& forceDispFile = *filePtr;
forceDispFile << avDisp.x() << " " << avDisp.y() << " " << avDisp.z() << " "
<< force.x() << " " << force.y() << " " << force.z() << endl;
OFstream& forceDispFile = *filePtr;
forceDispFile << avDisp.x() << " " << avDisp.y() << " " << avDisp.z() << " "
<< force.x() << " " << force.y() << " " << force.z() << endl;
}
}

View file

@ -21,9 +21,9 @@ if(iCorr == 0)
scalar sumMagB = gSum(magSqr(b));
if(sumMagB < SMALL)
{
        //Warning << "Aitken under-relaxation: denominator less than SMALL"
// << endl;
sumMagB += SMALL;
        //Warning << "Aitken under-relaxation: denominator less than SMALL"
// << endl;
sumMagB += SMALL;
}
aitkenTheta = -aitkenTheta*

View file

@ -22,10 +22,10 @@ else if(divDSigmaExpMethod == "surface")
// divDSigmaExp = fvc::div
// (
// muf*(mesh.Sf() & fvc::interpolate(gradDU.T()))
// + lambdaf*(mesh.Sf() & I*fvc::interpolate(tr(gradDU)))
// - (muf + lambdaf)*(mesh.Sf() & fvc::interpolate(gradDU))
// );
// muf*(mesh.Sf() & fvc::interpolate(gradDU.T()))
// + lambdaf*(mesh.Sf() & I*fvc::interpolate(tr(gradDU)))
// - (muf + lambdaf)*(mesh.Sf() & fvc::interpolate(gradDU))
// );
}
else if(divDSigmaExpMethod == "decompose")
{
@ -33,23 +33,23 @@ else if(divDSigmaExpMethod == "decompose")
divDSigmaExp = fvc::div
(
mesh.magSf()*
mesh.magSf()*
(
- (muf + lambdaf)*(fvc::snGrad(DU) & (I - n*n))
+ lambdaf*tr(shearGradDU & (I - n*n))*n
+ muf*(shearGradDU & n)
- (muf + lambdaf)*(fvc::snGrad(DU) & (I - n*n))
+ lambdaf*tr(shearGradDU & (I - n*n))*n
+ muf*(shearGradDU & n)
)
);
// divDSigmaExp = fvc::div
// (
// mesh.magSf()
// *(
// - (muf + lambdaf)*(fvc::snGrad(DU)&(I - n*n))
// + lambdaf*tr(shearGradDU&(I - n*n))*n
// + muf*(shearGradDU&n)
// )
// );
// mesh.magSf()
// *(
// - (muf + lambdaf)*(fvc::snGrad(DU)&(I - n*n))
// + lambdaf*tr(shearGradDU&(I - n*n))*n
// + muf*(shearGradDU&n)
// )
// );
}
else if(divDSigmaExpMethod == "laplacian")
{

View file

@ -8,7 +8,7 @@ if(divDSigmaNonLinExpMethod == "standard")
(gradDU & gradU.T())
+ (gradU & gradDU.T())
+ (gradDU & gradDU.T())
)
)
)
+ (
0.5*lambda*
@ -68,23 +68,23 @@ else if(divDSigmaNonLinExpMethod == "surface")
// divDSigmaNonLinExp =
// fvc::div
// (
// ( muf * mesh.Sf()
// & fvc::interpolate(
// (gradDU & gradU.T())
// + (gradU & gradDU.T())
// + (gradDU & gradDU.T())
// ) )
// + ( lambdaf * 0.5* tr(
// fvc::interpolate(
// (gradDU & gradU.T())
// + (gradU & gradDU.T())
// + (gradDU & gradDU.T())
// )
// ) * mesh.Sf() )
// + (mesh.Sf() & fvc::interpolate( (DSigma & gradU )
// + ( (sigma + DSigma) & gradDU )
// ) )
// );
// ( muf * mesh.Sf()
// & fvc::interpolate(
// (gradDU & gradU.T())
// + (gradU & gradDU.T())
// + (gradDU & gradDU.T())
// ) )
// + ( lambdaf * 0.5* tr(
// fvc::interpolate(
// (gradDU & gradU.T())
// + (gradU & gradDU.T())
// + (gradDU & gradDU.T())
// )
// ) * mesh.Sf() )
// + (mesh.Sf() & fvc::interpolate( (DSigma & gradU )
// + ( (sigma + DSigma) & gradDU )
// ) )
// );
}
else
{

View file

@ -199,33 +199,33 @@ forAll(extrapGradDU.internalField(), facei)
tensor deltaOwnDotgradGradDUOwn =
tensor(
deltaOwn.x()*gradGradDUXXXOwn + deltaOwn.y()*gradGradDUYXXOwn + deltaOwn.z()*gradGradDUZXXOwn,
deltaOwn.x()*gradGradDUXXYOwn + deltaOwn.y()*gradGradDUYXYOwn + deltaOwn.z()*gradGradDUZXYOwn,
deltaOwn.x()*gradGradDUXXZOwn + deltaOwn.y()*gradGradDUYXZOwn + deltaOwn.z()*gradGradDUZXZOwn,
deltaOwn.x()*gradGradDUXXXOwn + deltaOwn.y()*gradGradDUYXXOwn + deltaOwn.z()*gradGradDUZXXOwn,
deltaOwn.x()*gradGradDUXXYOwn + deltaOwn.y()*gradGradDUYXYOwn + deltaOwn.z()*gradGradDUZXYOwn,
deltaOwn.x()*gradGradDUXXZOwn + deltaOwn.y()*gradGradDUYXZOwn + deltaOwn.z()*gradGradDUZXZOwn,
deltaOwn.x()*gradGradDUXYXOwn + deltaOwn.y()*gradGradDUYYXOwn + deltaOwn.z()*gradGradDUZYXOwn,
deltaOwn.x()*gradGradDUXYYOwn + deltaOwn.y()*gradGradDUYYYOwn + deltaOwn.z()*gradGradDUZYYOwn,
deltaOwn.x()*gradGradDUXYZOwn + deltaOwn.y()*gradGradDUYYZOwn + deltaOwn.z()*gradGradDUZYZOwn,
deltaOwn.x()*gradGradDUXYXOwn + deltaOwn.y()*gradGradDUYYXOwn + deltaOwn.z()*gradGradDUZYXOwn,
deltaOwn.x()*gradGradDUXYYOwn + deltaOwn.y()*gradGradDUYYYOwn + deltaOwn.z()*gradGradDUZYYOwn,
deltaOwn.x()*gradGradDUXYZOwn + deltaOwn.y()*gradGradDUYYZOwn + deltaOwn.z()*gradGradDUZYZOwn,
deltaOwn.x()*gradGradDUXZXOwn + deltaOwn.y()*gradGradDUYZXOwn + deltaOwn.z()*gradGradDUZZXOwn,
deltaOwn.x()*gradGradDUXZYOwn + deltaOwn.y()*gradGradDUYZYOwn + deltaOwn.z()*gradGradDUZZYOwn,
deltaOwn.x()*gradGradDUXZZOwn + deltaOwn.y()*gradGradDUYZZOwn + deltaOwn.z()*gradGradDUZZZOwn
);
deltaOwn.x()*gradGradDUXZXOwn + deltaOwn.y()*gradGradDUYZXOwn + deltaOwn.z()*gradGradDUZZXOwn,
deltaOwn.x()*gradGradDUXZYOwn + deltaOwn.y()*gradGradDUYZYOwn + deltaOwn.z()*gradGradDUZZYOwn,
deltaOwn.x()*gradGradDUXZZOwn + deltaOwn.y()*gradGradDUYZZOwn + deltaOwn.z()*gradGradDUZZZOwn
);
tensor deltaNeiDotgradGradDUNei =
tensor(
deltaNei.x()*gradGradDUXXXNei + deltaNei.y()*gradGradDUYXXNei + deltaNei.z()*gradGradDUZXXNei,
deltaNei.x()*gradGradDUXXYNei + deltaNei.y()*gradGradDUYXYNei + deltaNei.z()*gradGradDUZXYNei,
deltaNei.x()*gradGradDUXXZNei + deltaNei.y()*gradGradDUYXZNei + deltaNei.z()*gradGradDUZXZNei,
deltaNei.x()*gradGradDUXXXNei + deltaNei.y()*gradGradDUYXXNei + deltaNei.z()*gradGradDUZXXNei,
deltaNei.x()*gradGradDUXXYNei + deltaNei.y()*gradGradDUYXYNei + deltaNei.z()*gradGradDUZXYNei,
deltaNei.x()*gradGradDUXXZNei + deltaNei.y()*gradGradDUYXZNei + deltaNei.z()*gradGradDUZXZNei,
deltaNei.x()*gradGradDUXYXNei + deltaNei.y()*gradGradDUYYXNei + deltaNei.z()*gradGradDUZYXNei,
deltaNei.x()*gradGradDUXYYNei + deltaNei.y()*gradGradDUYYYNei + deltaNei.z()*gradGradDUZYYNei,
deltaNei.x()*gradGradDUXYZNei + deltaNei.y()*gradGradDUYYZNei + deltaNei.z()*gradGradDUZYZNei,
deltaNei.x()*gradGradDUXYXNei + deltaNei.y()*gradGradDUYYXNei + deltaNei.z()*gradGradDUZYXNei,
deltaNei.x()*gradGradDUXYYNei + deltaNei.y()*gradGradDUYYYNei + deltaNei.z()*gradGradDUZYYNei,
deltaNei.x()*gradGradDUXYZNei + deltaNei.y()*gradGradDUYYZNei + deltaNei.z()*gradGradDUZYZNei,
deltaNei.x()*gradGradDUXZXNei + deltaNei.y()*gradGradDUYZXNei + deltaNei.z()*gradGradDUZZXNei,
deltaNei.x()*gradGradDUXZYNei + deltaNei.y()*gradGradDUYZYNei + deltaNei.z()*gradGradDUZZYNei,
deltaNei.x()*gradGradDUXZZNei + deltaNei.y()*gradGradDUYZZNei + deltaNei.z()*gradGradDUZZZNei
);
deltaNei.x()*gradGradDUXZXNei + deltaNei.y()*gradGradDUYZXNei + deltaNei.z()*gradGradDUZZXNei,
deltaNei.x()*gradGradDUXZYNei + deltaNei.y()*gradGradDUYZYNei + deltaNei.z()*gradGradDUZZYNei,
deltaNei.x()*gradGradDUXZZNei + deltaNei.y()*gradGradDUYZZNei + deltaNei.z()*gradGradDUZZZNei
);
// get average of extrapolated values
@ -250,12 +250,12 @@ forAll(extrapGradDU.boundaryField(), patchi)
// calculate thirdOrderTerm
volVectorField divThirdOrderTerm (
"thirdOrderTerm",
fvc::div(
(2*muf+lambdaf)*mesh.Sf()
& (extrapGradDU - averageGradDU)
)
);
"thirdOrderTerm",
fvc::div(
(2*muf+lambdaf)*mesh.Sf()
& (extrapGradDU - averageGradDU)
)
);
// if(runTime.outputTime())
// {

View file

@ -24,20 +24,20 @@
IOobject::READ_IF_PRESENT,
IOobject::AUTO_WRITE
),
mesh,
dimensionedVector("zero", dimLength, vector::zero)
mesh,
dimensionedVector("zero", dimLength, vector::zero)
);
volTensorField gradU
(
IOobject
(
"grad(U)",
runTime.timeName(),
mesh,
IOobject::READ_IF_PRESENT,
IOobject::AUTO_WRITE
),
"grad(U)",
runTime.timeName(),
mesh,
IOobject::READ_IF_PRESENT,
IOobject::AUTO_WRITE
),
mesh,
dimensionedTensor("zero", dimless, tensor::zero)
);
@ -53,8 +53,8 @@
IOobject::READ_IF_PRESENT,
IOobject::AUTO_WRITE
),
mesh,
dimensionedSymmTensor("zero", dimless, symmTensor::zero)
mesh,
dimensionedSymmTensor("zero", dimless, symmTensor::zero)
);
volSymmTensorField epsilon
@ -67,8 +67,8 @@
IOobject::READ_IF_PRESENT,
IOobject::AUTO_WRITE
),
mesh,
dimensionedSymmTensor("zero", dimless, symmTensor::zero)
mesh,
dimensionedSymmTensor("zero", dimless, symmTensor::zero)
);
//- plastic strain
@ -113,20 +113,20 @@
IOobject::READ_IF_PRESENT,
IOobject::AUTO_WRITE
),
mesh,
dimensionedSymmTensor("zero", dimForce/dimArea, symmTensor::zero)
mesh,
dimensionedSymmTensor("zero", dimForce/dimArea, symmTensor::zero)
);
volVectorField divDSigmaExp
(
IOobject
(
"divDSigmaExp",
runTime.timeName(),
mesh,
IOobject::NO_READ,
IOobject::NO_WRITE
),
"divDSigmaExp",
runTime.timeName(),
mesh,
IOobject::NO_READ,
IOobject::NO_WRITE
),
mesh,
dimensionedVector("zero", dimensionSet(1,-2,-2,0,0,0,0), vector::zero)
);
@ -135,12 +135,12 @@
(
IOobject
(
"divDSigmaNonLinExp",
runTime.timeName(),
mesh,
IOobject::NO_READ,
IOobject::NO_WRITE
),
"divDSigmaNonLinExp",
runTime.timeName(),
mesh,
IOobject::NO_READ,
IOobject::NO_WRITE
),
mesh,
dimensionedVector("zero", dimensionSet(1,-2,-2,0,0,0,0), vector::zero)
);
@ -171,7 +171,7 @@
IOobject::NO_WRITE
),
mesh,
dimensionedVector("zero", dimLength, vector::zero)
dimensionedVector("zero", dimLength, vector::zero)
);
// aitken relaxation factor
scalar aitkenInitialRes = 1.0;

View file

@ -5,8 +5,8 @@ label historyPatchID = mesh.boundaryMesh().findPatchID(historyPatchName);
if(historyPatchID == -1)
{
Warning << "history patch " << historyPatchName
<< " not found. Force-displacement will not be written"
<< endl;
<< " not found. Force-displacement will not be written"
<< endl;
}
else if(Pstream::master())
{
@ -16,8 +16,8 @@ if(historyPatchID == -1)
{
fileName forceFileName(historyDir/"forceDisp_"+historyPatchName+".dat");
Info << "\nForce-displacement for patch " << historyPatchName
<< " will be written to " << forceFileName
<< endl;
<< " will be written to " << forceFileName
<< endl;
forceFilePtr = new OFstream(forceFileName);
OFstream& forceDispFile = *forceFilePtr;
forceDispFile << "#Disp(mm)\tForce(N)" << endl;
@ -26,9 +26,9 @@ if(historyPatchID == -1)
{
fileName stressFileName(historyDir/"stressStrain_"+historyPatchName+".dat");
        Info << "\nStress(2nd Piola-Kirchhoff)-strain(Green) for patch "
<< historyPatchName
<< " will be written to " << stressFileName
<< endl;
<< historyPatchName
<< " will be written to " << stressFileName
<< endl;
stressFilePtr = new OFstream(stressFileName);
OFstream& stressStrainFile = *stressFilePtr;
stressStrainFile << "#Strain(-)\tStress(Pa)" << endl;

View file

@ -4,6 +4,6 @@ Info << "divSigmaNonLinExp method " << divDSigmaNonLinExpMethod << endl;
if(divDSigmaNonLinExpMethod != "standard" && divDSigmaNonLinExpMethod != "surface")
{
FatalError << "divSigmaNonLinExp method " << divDSigmaNonLinExpMethod << " not found!" << nl
<< "valid methods are:\nstandard\nsurface"
<< exit(FatalError);
<< "valid methods are:\nstandard\nsurface"
<< exit(FatalError);
}

View file

@ -2,13 +2,13 @@
if(historyPatchID != -1)
{
Info << "Writing disp-force to file for patch " << historyPatchName
<< endl;
<< endl;
//- for small strain or moving mesh
//scalar force = gSum(
// direction &
// (mesh.boundary()[historyPatchID].Sf() & sigma.boundaryField()[historyPatchID])
// );
// direction &
// (mesh.boundary()[historyPatchID].Sf() & sigma.boundaryField()[historyPatchID])
// );
//- for large strain total lagrangian
tensorField F = I + gradU.boundaryField()[historyPatchID];
@ -17,7 +17,7 @@ if(historyPatchID != -1)
Info << "Writing strain-stress to file for patch " << historyPatchName
<< endl;
<< endl;
    // average stress and strain
symmTensor stress = gAverage(sigma.boundaryField()[historyPatchID]);
@ -27,19 +27,19 @@ if(historyPatchID != -1)
// write to file
if(Pstream::master())
{
OFstream& forceDispFile = *forceFilePtr;
label width = 20;
forceDispFile << disp.x() << " " << disp.y() << " " << disp.z();
forceDispFile.width(width);
forceDispFile << force.x() << " " << force.y() << " " << force.z()
<< endl;
OFstream& forceDispFile = *forceFilePtr;
label width = 20;
forceDispFile << disp.x() << " " << disp.y() << " " << disp.z();
forceDispFile.width(width);
forceDispFile << force.x() << " " << force.y() << " " << force.z()
<< endl;
OFstream& stressStrainFile = *stressFilePtr;
stressStrainFile << strain.xx() << " " << strain.xy() << " " << strain.xz() << " "
<< strain.yy() << " " << strain.yz() << " " << strain.zz();
stressStrainFile.width(width);
stressStrainFile << stress.xx() << " " << stress.xy() << " " << stress.xz() << " "
<< stress.yy() << " " << stress.yz() << " " << stress.zz()
<< endl;
OFstream& stressStrainFile = *stressFilePtr;
stressStrainFile << strain.xx() << " " << strain.xy() << " " << strain.xz() << " "
<< strain.yy() << " " << strain.yz() << " " << strain.zz();
stressStrainFile.width(width);
stressStrainFile << stress.xx() << " " << stress.xy() << " " << stress.xz() << " "
<< stress.yy() << " " << stress.yz() << " " << stress.zz()
<< endl;
}
}
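
For context, the force written to the forceDisp file is just the traction integrated over the history patch; the commented-out small-strain form and the total-Lagrangian form that starts from F = I + grad(U) differ only in which configuration the face-area vectors refer to. A sketch of the two ingredients, with Nanson's relation assumed for mapping the reference face areas (the actual assembly is truncated out of this hunk):
\[
  \mathbf{f} \;=\; \sum_{\text{faces}} \mathbf{S}_f \cdot \boldsymbol{\sigma}_f ,
  \qquad
  \mathbf{s}_f \;=\; J\,\mathbf{F}^{-T}\cdot\mathbf{S}_f ,
  \quad J = \det\mathbf{F},\ \ \mathbf{F} = \mathbf{I} + \nabla U .
\]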

View file

@ -21,9 +21,9 @@ if(iCorr == 0)
scalar sumMagB = gSum(magSqr(b));
if(sumMagB < SMALL)
{
//Warning << "Aitken under-relaxation: denominator less than SMALL"
// << endl;
sumMagB += SMALL;
//Warning << "Aitken under-relaxation: denominator less than SMALL"
// << endl;
sumMagB += SMALL;
}
aitkenTheta = -aitkenTheta*
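
The truncated assignment above is the usual Aitken dynamic under-relaxation update. A sketch of the full formula, assuming aitkenDelta holds the fixed-point residual of the previous outer iteration and b its change since then; SMALL is only added to protect the denominator:
\[
  \theta^{(k)} \;=\; -\,\theta^{(k-1)}\,
  \frac{\sum \Delta^{(k-1)} \cdot \big(\Delta^{(k)} - \Delta^{(k-1)}\big)}
       {\sum \big|\Delta^{(k)} - \Delta^{(k-1)}\big|^{2} + \mathrm{SMALL}} .
\]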

View file

@ -10,10 +10,10 @@ if(divDSigmaExpMethod == "standard")
{
divDSigmaExp = fvc::div
(
muf*(mesh.Sf() & fvc::interpolate(gradDU.T()))
+ lambdaf*(mesh.Sf() & I*fvc::interpolate(tr(gradDU)))
- (muf + lambdaf)*(mesh.Sf() & fvc::interpolate(gradDU))
);
muf*(mesh.Sf() & fvc::interpolate(gradDU.T()))
+ lambdaf*(mesh.Sf() & I*fvc::interpolate(tr(gradDU)))
- (muf + lambdaf)*(mesh.Sf() & fvc::interpolate(gradDU))
);
}
else if(divDSigmaExpMethod == "decompose")
{
@ -22,13 +22,13 @@ if(divDSigmaExpMethod == "standard")
divDSigmaExp = fvc::div
(
mesh.magSf()
*(
- (muf + lambdaf)*(fvc::snGrad(DU)&(I - n*n))
+ lambdaf*tr(shearGradDU&(I - n*n))*n
+ muf*(shearGradDU&n)
)
);
mesh.magSf()
*(
- (muf + lambdaf)*(fvc::snGrad(DU)&(I - n*n))
+ lambdaf*tr(shearGradDU&(I - n*n))*n
+ muf*(shearGradDU&n)
)
);
}
else if(divDSigmaExpMethod == "laplacian")
{
@ -36,10 +36,10 @@ if(divDSigmaExpMethod == "standard")
- fvc::laplacian(mu + lambda, DU, "laplacian(DDU,DU)")
+ fvc::div
(
mu*gradDU.T()
+ lambda*(I*tr(gradDU)),
"div(sigma)"
);
mu*gradDU.T()
+ lambda*(I*tr(gradDU)),
"div(sigma)"
);
}
else
{
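
The three branches above only differ in how the explicit part of the stress-increment divergence is evaluated. In the fully visible "standard" branch the quantity interpolated to the faces and summed by fvc::div is, written out,
\[
  \nabla\cdot\Delta\boldsymbol{\sigma}_{\text{exp}}
  \;=\; \nabla\cdot\!\Big[\, \mu\,(\nabla\Delta U)^{T}
  + \lambda\,\mathrm{tr}(\nabla\Delta U)\,\mathbf{I}
  - (\mu + \lambda)\,\nabla\Delta U \,\Big] ,
\]
which, together with an implicit Laplacian in DU (presumably fvm::laplacian(2*mu + lambda, DU) in the momentum equation, not shown in this hunk), recovers the full linear-elastic divergence of mu*(grad(DU) + grad(DU).T()) + lambda*tr(grad(DU))*I.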

View file

@ -12,13 +12,13 @@ if(divDSigmaNonLinExpMethod == "standard")
{
divDSigmaNonLinExp =
fvc::div(
mesh.magSf()
*(
( muf * (n & fvc::interpolate( gradDU & gradDU.T() )) )
+ ( 0.5*lambdaf * (n * tr(fvc::interpolate( gradDU & gradDU.T() ))) )
+ (n & fvc::interpolate( (sigma + DSigma) & gradDU ))
)
);
mesh.magSf()
*(
( muf * (n & fvc::interpolate( gradDU & gradDU.T() )) )
+ ( 0.5*lambdaf * (n * tr(fvc::interpolate( gradDU & gradDU.T() ))) )
+ (n & fvc::interpolate( (sigma + DSigma) & gradDU ))
)
);
}
else
{
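
In the "standard" branch above the face flux summed by fvc::div carries the geometrically nonlinear (large-strain) part of the stress increment. Based only on the interpolated terms visible in this hunk, the effective stress whose divergence is taken reads
\[
  \Delta\boldsymbol{\sigma}_{\text{nl}}
  \;=\; \mu\,\nabla\Delta U\,(\nabla\Delta U)^{T}
  + \tfrac{1}{2}\,\lambda\,\mathrm{tr}\!\big(\nabla\Delta U\,(\nabla\Delta U)^{T}\big)\,\mathbf{I}
  + (\boldsymbol{\sigma} + \Delta\boldsymbol{\sigma})\cdot\nabla\Delta U ,
\]
which appears to be what a Green-strain (second Piola-Kirchhoff) formulation adds on top of the small-strain increment.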

View file

@ -30,9 +30,9 @@ FieldField<Field, vector> extraVecs(ptc.size());
// extraVecs.hook(new vectorField(curFaces.size())); //- no hook function
extraVecs.set
(
pointI,
new vectorField(curFaces.size())
);
pointI,
new vectorField(curFaces.size())
);
vectorField& curExtraVectors = extraVecs[pointI];
@ -42,30 +42,30 @@ FieldField<Field, vector> extraVecs(ptc.size());
// Go through all the faces
forAll (curFaces, faceI)
{
if (!mesh.isInternalFace(curFaces[faceI]))
{
// This is a boundary face. If not in the empty patch
// or coupled calculate the extrapolation vector
label patchID =
mesh.boundaryMesh().whichPatch(curFaces[faceI]);
{
if (!mesh.isInternalFace(curFaces[faceI]))
{
// This is a boundary face. If not in the empty patch
// or coupled calculate the extrapolation vector
label patchID =
mesh.boundaryMesh().whichPatch(curFaces[faceI]);
if
(
!isA<emptyFvPatch>(bm[patchID])
&& !bm[patchID].coupled()
)
{
// Found a face for extrapolation
curExtraVectors[nFacesAroundPoint] =
pointLoc
- centres.boundaryField()[patchID]
[bm[patchID].patch().whichFace(curFaces[faceI])];
if
(
!isA<emptyFvPatch>(bm[patchID])
&& !bm[patchID].coupled()
)
{
// Found a face for extrapolation
curExtraVectors[nFacesAroundPoint] =
pointLoc
- centres.boundaryField()[patchID]
[bm[patchID].patch().whichFace(curFaces[faceI])];
nFacesAroundPoint++;
}
}
}
nFacesAroundPoint++;
}
}
}
curExtraVectors.setSize(nFacesAroundPoint);
}

View file

@ -35,9 +35,9 @@ FieldField<Field, scalar> w(ptc.size());
//w.hook(new scalarField(curFaces.size())); //philipc no hook function
w.set
(
pointI,
new scalarField(curFaces.size())
);
pointI,
new scalarField(curFaces.size())
);
scalarField& curWeights = w[pointI];
@ -47,38 +47,38 @@ FieldField<Field, scalar> w(ptc.size());
// Go through all the faces
forAll (curFaces, faceI)
{
if (!mesh.isInternalFace(curFaces[faceI]))
{
// This is a boundary face. If not in the empty patch
// or coupled calculate the extrapolation vector
label patchID =
mesh.boundaryMesh().whichPatch(curFaces[faceI]);
{
if (!mesh.isInternalFace(curFaces[faceI]))
{
// This is a boundary face. If not in the empty patch
// or coupled calculate the extrapolation vector
label patchID =
mesh.boundaryMesh().whichPatch(curFaces[faceI]);
if
(
!isA<emptyFvPatch>(bm[patchID])
&& !(
bm[patchID].coupled()
//&& Pstream::parRun()
//&& !mesh.parallelData().cyclicParallel()
)
)
{
curWeights[nFacesAroundPoint] =
1.0/mag
(
pointLoc
- centres.boundaryField()[patchID]
[
bm[patchID].patch().whichFace(curFaces[faceI])
]
);
if
(
!isA<emptyFvPatch>(bm[patchID])
&& !(
bm[patchID].coupled()
//&& Pstream::parRun()
//&& !mesh.parallelData().cyclicParallel()
)
)
{
curWeights[nFacesAroundPoint] =
1.0/mag
(
pointLoc
- centres.boundaryField()[patchID]
[
bm[patchID].patch().whichFace(curFaces[faceI])
]
);
nFacesAroundPoint++;
}
}
}
nFacesAroundPoint++;
}
}
}
// Reset the sizes of the local weights
curWeights.setSize(nFacesAroundPoint);
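
The weights gathered above are plain inverse-distance weights from the mesh point to every usable boundary-face centre: with x_p the point location and x_f,i the i-th face centre,
\[
  w_i \;=\; \frac{1}{\big|\mathbf{x}_p - \mathbf{x}_{f_i}\big|} ,
\]
a sketch that assumes the normalisation by the sum of the weights happens where they are finally applied.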

View file

@ -106,29 +106,29 @@
//- explicit terms in the momentum equation
volVectorField divDSigmaExp
(
IOobject
(
"divDSigmaExp",
runTime.timeName(),
mesh,
IOobject::NO_READ,
IOobject::NO_WRITE
),
mesh,
dimensionedVector("zero", dimensionSet(1, -2, -2, 0, 0, 0, 0), vector::zero)
);
(
IOobject
(
"divDSigmaExp",
runTime.timeName(),
mesh,
IOobject::NO_READ,
IOobject::NO_WRITE
),
mesh,
dimensionedVector("zero", dimensionSet(1, -2, -2, 0, 0, 0, 0), vector::zero)
);
volVectorField divDSigmaNonLinExp
(
IOobject
(
"divDSigmaNonLinExp",
runTime.timeName(),
mesh,
IOobject::NO_READ,
IOobject::NO_WRITE
),
"divDSigmaNonLinExp",
runTime.timeName(),
mesh,
IOobject::NO_READ,
IOobject::NO_WRITE
),
mesh,
dimensionedVector("zero", dimensionSet(1,-2,-2,0,0,0,0), vector::zero)
);
@ -152,15 +152,15 @@
(
IOobject
(
"aitkenDelta",
runTime.timeName(),
mesh,
IOobject::NO_READ,
IOobject::NO_WRITE
),
"aitkenDelta",
runTime.timeName(),
mesh,
IOobject::NO_READ,
IOobject::NO_WRITE
),
mesh,
dimensionedVector("zero", dimLength, vector::zero)
);
);
// aitken relaxation factor
scalar aitkenInitialRes = 1.0;
scalar aitkenTheta = 0.1;
@ -169,12 +169,12 @@ scalar aitkenTheta = 0.1;
// (
// IOobject
// (
// "resid",
// runTime.timeName(),
// mesh,
// IOobject::NO_READ,
// IOobject::AUTO_WRITE
// ),
// "resid",
// runTime.timeName(),
// mesh,
// IOobject::NO_READ,
// IOobject::AUTO_WRITE
// ),
// mesh,
// dimensionedVector("zero", dimless, vector::zero)
// );

View file

@ -5,8 +5,8 @@ label historyPatchID = mesh.boundaryMesh().findPatchID(historyPatchName);
if(historyPatchID == -1)
{
Warning << "history patch " << historyPatchName
<< " not found. Force-displacement will not be written"
<< endl;
<< " not found. Force-displacement will not be written"
<< endl;
}
else if(Pstream::master())
{
@ -16,8 +16,8 @@ if(historyPatchID == -1)
{
fileName forceFileName(historyDir/"forceDisp_"+historyPatchName+".dat");
Info << "\nForce-displacement for patch " << historyPatchName
<< " will be written to " << forceFileName
<< endl;
<< " will be written to " << forceFileName
<< endl;
forceFilePtr = new OFstream(forceFileName);
OFstream& forceDispFile = *forceFilePtr;
forceDispFile << "#Disp(mm)\tForce(N)" << endl;
@ -26,9 +26,9 @@ if(historyPatchID == -1)
{
fileName stressFileName(historyDir/"stressStrain_"+historyPatchName+".dat");
Info << "\nCauchy Stress vs. Almansi Strain for patch "
<< historyPatchName
<< " will be written to " << stressFileName
<< endl;
<< historyPatchName
<< " will be written to " << stressFileName
<< endl;
stressFilePtr = new OFstream(stressFileName);
OFstream& stressStrainFile = *stressFilePtr;
stressStrainFile << "#Strain(-)\tStress(Pa)" << endl;

View file

@ -10,20 +10,20 @@ solidInterface* solidInterfacePtr(NULL);
if(solidInterfaceCorr)
{
Info << "Creating solid interface correction" << endl;
solidInterfacePtr = new solidInterface(mesh, rheology);
solidInterfacePtr->modifyProperties(muf, lambdaf);
Info << "Creating solid interface correction" << endl;
solidInterfacePtr = new solidInterface(mesh, rheology);
solidInterfacePtr->modifyProperties(muf, lambdaf);
//- solidInterface needs muf and lambdaf to be used for divDSigmaExp
if(divDSigmaExpMethod != "surface" && divDSigmaExpMethod != "decompose")
{
FatalError << "divDSigmaExp must be decompose or surface when solidInterface is on"
<< exit(FatalError);
}
if(divDSigmaLargeStrainExpMethod == "surface")
{
FatalError << "divDSigmaLargeStrainExp must be surface when solidInterface is on"
<< exit(FatalError);
}
//- solidInterface needs muf and lambdaf to be used for divDSigmaExp
if(divDSigmaExpMethod != "surface" && divDSigmaExpMethod != "decompose")
{
FatalError << "divDSigmaExp must be decompose or surface when solidInterface is on"
<< exit(FatalError);
}
if(divDSigmaLargeStrainExpMethod == "surface")
{
FatalError << "divDSigmaLargeStrainExp must be surface when solidInterface is on"
<< exit(FatalError);
}
}
}

View file

@ -10,20 +10,20 @@ solidInterfaceNonLin* solidInterfacePtr(NULL);
if(solidInterfaceCorr)
{
Info << "Creating solid interface nonlinear correction" << endl;
solidInterfacePtr = new solidInterfaceNonLin(mesh, rheology);
solidInterfacePtr->modifyProperties(muf, lambdaf);
Info << "Creating solid interface nonlinear correction" << endl;
solidInterfacePtr = new solidInterfaceNonLin(mesh, rheology);
solidInterfacePtr->modifyProperties(muf, lambdaf);
//- solidInterface needs muf and lambdaf to be used for divDSigmaExp
if(divDSigmaExpMethod != "surface" && divDSigmaExpMethod != "decompose")
{
FatalError << "divDSigmaExp must be decompose or surface when solidInterface is on"
<< exit(FatalError);
}
if(divDSigmaLargeStrainExpMethod != "surface")
{
FatalError << "divDSigmaLargeStrainExp must be surface when solidInterface is on"
<< exit(FatalError);
}
//- solidInterface needs muf and lambdaf to be used for divDSigmaExp
if(divDSigmaExpMethod != "surface" && divDSigmaExpMethod != "decompose")
{
FatalError << "divDSigmaExp must be decompose or surface when solidInterface is on"
<< exit(FatalError);
}
if(divDSigmaLargeStrainExpMethod != "surface")
{
FatalError << "divDSigmaLargeStrainExp must be surface when solidInterface is on"
<< exit(FatalError);
}
}
}

View file

@ -14,10 +14,10 @@ forAll (bm, patchI)
(
!isA<emptyFvPatch>(bm[patchI])
&& !(
bm[patchI].coupled()
//&& Pstream::parRun()
//&& !mesh.parallelData().cyclicParallel()
)
bm[patchI].coupled()
//&& Pstream::parRun()
//&& !mesh.parallelData().cyclicParallel()
)
)
{
const labelList& bp = bm[patchI].patch().boundaryPoints();
@ -25,9 +25,9 @@ forAll (bm, patchI)
const labelList& meshPoints = bm[patchI].patch().meshPoints();
forAll (bp, pointI)
{
pointsCorrectionMap.insert(meshPoints[bp[pointI]]);
}
{
pointsCorrectionMap.insert(meshPoints[bp[pointI]]);
}
}
}

View file

@ -9,7 +9,7 @@ if(moveMeshMethod == "inverseDistance")
else
{
FatalError << "move mesh method " << moveMeshMethod << " not recognised" << nl
<< "available methods are:" << nl
<< "inverseDistance" << nl
<< "leastSquares" << exit(FatalError);
<< "available methods are:" << nl
<< "inverseDistance" << nl
<< "leastSquares" << exit(FatalError);
}

View file

@ -21,10 +21,10 @@
(
IOobject
(
"pointDU",
runTime.timeName(),
mesh
),
"pointDU",
runTime.timeName(),
mesh
),
pMesh,
dimensionedVector("zero", dimLength, vector::zero),
types
@ -48,7 +48,7 @@
forAll (pointDUI, pointI)
{
newPoints[pointI] += pointDUI[pointI];
newPoints[pointI] += pointDUI[pointI];
}
twoDPointCorrector twoDCorrector(mesh);
@ -60,6 +60,6 @@
// else
// {
// FatalErrorIn(args.executable())
// << "Negative Jacobian"
// << exit(FatalError);
// << "Negative Jacobian"
// << exit(FatalError);
// }

View file

@ -67,31 +67,31 @@ forAll (ptc, pointI)
forAll (curFaces, faceI)
{
if (!mesh.isInternalFace(curFaces[faceI]))
{
// This is a boundary face. If not in the empty patch
// or coupled calculate the extrapolation vector
label patchID =
mesh.boundaryMesh().whichPatch(curFaces[faceI]);
{
// This is a boundary face. If not in the empty patch
// or coupled calculate the extrapolation vector
label patchID =
mesh.boundaryMesh().whichPatch(curFaces[faceI]);
if
(
!isA<emptyFvPatch>(mesh.boundary()[patchID])
&& !mesh.boundary()[patchID].coupled()
)
{
label faceInPatchID =
bm[patchID].patch().whichFace(curFaces[faceI]);
if
(
!isA<emptyFvPatch>(mesh.boundary()[patchID])
&& !mesh.boundary()[patchID].coupled()
)
{
label faceInPatchID =
bm[patchID].patch().whichFace(curFaces[faceI]);
pfCorr[curPoint] +=
w[pointI][fI]*
(
extraVecs[pointI][fI]
& gradDU.boundaryField()[patchID][faceInPatchID]
);
pfCorr[curPoint] +=
w[pointI][fI]*
(
extraVecs[pointI][fI]
& gradDU.boundaryField()[patchID][faceInPatchID]
);
fI++;
}
}
fI++;
}
}
}
}
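
The accumulation above adds, for every usable boundary face around the point, an inverse-distance-weighted extrapolation of the face value along the precomputed extrapolation vector. As a sketch, with d_i = x_p - x_f,i the extrapolation vectors and w_i the weights built in the earlier hunks (assuming pfCorr is later combined with suitably normalised weights),
\[
  \Delta U_p^{\text{corr}} \;\approx\; \sum_i w_i\,\big(\mathbf{d}_i \cdot (\nabla\Delta U)_{f_i}\big) .
\]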

View file

@ -4,6 +4,6 @@ Info << "divSigmaNonLinExp method " << divDSigmaNonLinExpMethod << endl;
if(divDSigmaNonLinExpMethod != "standard" && divDSigmaNonLinExpMethod != "surface")
{
FatalError << "divSigmaNonLinExp method " << divDSigmaNonLinExpMethod << " not found!" << nl
<< "valid methods are:\nstandard\nsurface"
<< exit(FatalError);
<< "valid methods are:\nstandard\nsurface"
<< exit(FatalError);
}

View file

@ -4,67 +4,67 @@ if (runTime.outputTime())
(
IOobject
(
"epsilonEq",
runTime.timeName(),
mesh,
IOobject::NO_READ,
IOobject::AUTO_WRITE
),
"epsilonEq",
runTime.timeName(),
mesh,
IOobject::NO_READ,
IOobject::AUTO_WRITE
),
sqrt((2.0/3.0)*magSqr(dev(epsilon)))
);
Info<< "Max epsilonEq = " << max(epsilonEq).value()
<< endl;
<< endl;
volScalarField sigmaEq
(
IOobject
(
"sigmaEq",
runTime.timeName(),
mesh,
IOobject::NO_READ,
IOobject::AUTO_WRITE
),
"sigmaEq",
runTime.timeName(),
mesh,
IOobject::NO_READ,
IOobject::AUTO_WRITE
),
sqrt((3.0/2.0)*magSqr(dev(sigma)))
);
Info<< "Max sigmaEq = " << max(sigmaEq).value()
<< endl;
<< endl;
pointMesh pMesh(mesh);
pointScalarField contactPointGap
(
IOobject
(
"contactPointGap",
runTime.timeName(),
mesh,
IOobject::NO_READ,
IOobject::AUTO_WRITE
),
"contactPointGap",
runTime.timeName(),
mesh,
IOobject::NO_READ,
IOobject::AUTO_WRITE
),
pMesh,
dimensionedScalar("zero", dimless, 0.0)
);
forAll(mesh.boundary(), patchi)
{
if(DU.boundaryField()[patchi].type() == solidContactFvPatchVectorField::typeName)
{
const solidContactFvPatchVectorField& DUpatch =
refCast<const solidContactFvPatchVectorField>
(DU.boundaryField()[patchi]);
if(DU.boundaryField()[patchi].type() == solidContactFvPatchVectorField::typeName)
{
const solidContactFvPatchVectorField& DUpatch =
refCast<const solidContactFvPatchVectorField>
(DU.boundaryField()[patchi]);
if(!DUpatch.master())
{
const labelList& meshPoints = mesh.boundaryMesh()[patchi].meshPoints();
const scalarField gap = DUpatch.normalContactModelPtr()->slaveContactPointGap();
forAll(meshPoints, pointi)
{
contactPointGap[meshPoints[pointi]] = gap[pointi];
}
}
}
if(!DUpatch.master())
{
const labelList& meshPoints = mesh.boundaryMesh()[patchi].meshPoints();
const scalarField gap = DUpatch.normalContactModelPtr()->slaveContactPointGap();
forAll(meshPoints, pointi)
{
contactPointGap[meshPoints[pointi]] = gap[pointi];
}
}
}
}
@ -74,10 +74,10 @@ if (runTime.outputTime())
Info << "Patch " << mesh.boundary()[patchi].name() << endl;
vectorField totalForce = mesh.Sf().boundaryField()[patchi] & sigma.boundaryField()[patchi];
vector force = sum( totalForce );
vector force = sum( totalForce );
Info << "\ttotal force is " << force << " N" << endl;
tensorField F = I + gradDU.boundaryField()[patchi];
tensorField F = I + gradDU.boundaryField()[patchi];
tensorField Finv = inv(F);
//vectorField nCurrent = Finv & n.boundaryField()[patchi];
//nCurrent /= mag(nCurrent);
@ -90,25 +90,25 @@ if (runTime.outputTime())
// if(mesh.boundary()[patchi].type() != "empty")
// {
// vector Sf0 = Sf.boundaryField()[patchi][0];
// symmTensor sigma0 = sigma.boundaryField()[patchi][0];
// Info << "sigmab[0] is " << sigma0 << nl
// << "Sfb is " << Sf0 << nl
// << "force is " << (Sf.boundaryField()[patchi][0]&sigma.boundaryField()[patchi][0]) << nl
// << "Sfx*sigmaxx " << (Sf0[vector::X]*sigma0[symmTensor::XX]) << nl
// << "Sfy*sigmaxy " << (Sf0[vector::Y]*sigma0[symmTensor::XY]) << nl
// << "Sfx*sigmayx " << (Sf0[vector::X]*sigma0[symmTensor::XY]) << nl
// << "Sfy*sigmayy " << (Sf0[vector::Y]*sigma0[symmTensor::YY]) << nl
// << endl;
//vector SfTL(-0.000137451, 0.00383599, -4.76878e-20);
// vector SfTL = Finv[0] & vector(0,0.004,0);
// Info << "SfTLx*sigmaxx " << (SfTL[vector::X]*sigma0[symmTensor::XX]) << nl
// << "SfTLy*sigmaxy " << (SfTL[vector::Y]*sigma0[symmTensor::XY]) << nl
// << "SfTLx*sigmayx " << (SfTL[vector::X]*sigma0[symmTensor::XY]) << nl
// << "SfTLy*sigmayy " << (SfTL[vector::Y]*sigma0[symmTensor::YY]) << nl
// << endl;
// }
}*/
// vector Sf0 = Sf.boundaryField()[patchi][0];
// symmTensor sigma0 = sigma.boundaryField()[patchi][0];
// Info << "sigmab[0] is " << sigma0 << nl
// << "Sfb is " << Sf0 << nl
// << "force is " << (Sf.boundaryField()[patchi][0]&sigma.boundaryField()[patchi][0]) << nl
// << "Sfx*sigmaxx " << (Sf0[vector::X]*sigma0[symmTensor::XX]) << nl
// << "Sfy*sigmaxy " << (Sf0[vector::Y]*sigma0[symmTensor::XY]) << nl
// << "Sfx*sigmayx " << (Sf0[vector::X]*sigma0[symmTensor::XY]) << nl
// << "Sfy*sigmayy " << (Sf0[vector::Y]*sigma0[symmTensor::YY]) << nl
// << endl;
//vector SfTL(-0.000137451, 0.00383599, -4.76878e-20);
// vector SfTL = Finv[0] & vector(0,0.004,0);
// Info << "SfTLx*sigmaxx " << (SfTL[vector::X]*sigma0[symmTensor::XX]) << nl
// << "SfTLy*sigmaxy " << (SfTL[vector::Y]*sigma0[symmTensor::XY]) << nl
// << "SfTLx*sigmayx " << (SfTL[vector::X]*sigma0[symmTensor::XY]) << nl
// << "SfTLy*sigmayy " << (SfTL[vector::Y]*sigma0[symmTensor::YY]) << nl
// << endl;
// }
}*/
runTime.write();
}
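
The epsilonEq and sigmaEq fields written at the top of this hunk are the usual von Mises equivalent measures built from the deviatoric parts,
\[
  \varepsilon_{\text{eq}} \;=\; \sqrt{\tfrac{2}{3}\,\mathrm{dev}(\boldsymbol{\varepsilon}) : \mathrm{dev}(\boldsymbol{\varepsilon})} ,
  \qquad
  \sigma_{\text{eq}} \;=\; \sqrt{\tfrac{3}{2}\,\mathrm{dev}(\boldsymbol{\sigma}) : \mathrm{dev}(\boldsymbol{\sigma})} ,
\]
which is exactly what sqrt((2.0/3.0)*magSqr(dev(epsilon))) and sqrt((3.0/2.0)*magSqr(dev(sigma))) evaluate, since magSqr of a tensor is its double contraction with itself.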

View file

@ -2,13 +2,13 @@
if(historyPatchID != -1)
{
Info << "Writing disp-force to file for patch " << historyPatchName
<< endl;
<< endl;
//- for small strain or moving mesh
//scalar force = gSum(
// direction &
// (mesh.boundary()[historyPatchID].Sf() & sigma.boundaryField()[historyPatchID])
// );
// direction &
// (mesh.boundary()[historyPatchID].Sf() & sigma.boundaryField()[historyPatchID])
// );
//- for large strain total lagrangian
//tensorField F = I + gradU.boundaryField()[historyPatchID];
@ -21,7 +21,7 @@ if(historyPatchID != -1)
Info << "Writing strain-stress to file for patch " << historyPatchName
<< endl;
<< endl;
// average stress and strain
symmTensor stress = gAverage(sigma.boundaryField()[historyPatchID]);
@ -31,19 +31,19 @@ if(historyPatchID != -1)
// write to file
if(Pstream::master())
{
OFstream& forceDispFile = *forceFilePtr;
label width = 20;
forceDispFile << disp.x() << " " << disp.y() << " " << disp.z();
forceDispFile.width(width);
forceDispFile << force.x() << " " << force.y() << " " << force.z()
<< endl;
OFstream& forceDispFile = *forceFilePtr;
label width = 20;
forceDispFile << disp.x() << " " << disp.y() << " " << disp.z();
forceDispFile.width(width);
forceDispFile << force.x() << " " << force.y() << " " << force.z()
<< endl;
OFstream& stressStrainFile = *stressFilePtr;
stressStrainFile << strain.xx() << " " << strain.xy() << " " << strain.xz() << " "
<< strain.yy() << " " << strain.yz() << " " << strain.zz();
stressStrainFile.width(width);
stressStrainFile << stress.xx() << " " << stress.xy() << " " << stress.xz() << " "
<< stress.yy() << " " << stress.yz() << " " << stress.zz()
<< endl;
OFstream& stressStrainFile = *stressFilePtr;
stressStrainFile << strain.xx() << " " << strain.xy() << " " << strain.xz() << " "
<< strain.yy() << " " << strain.yz() << " " << strain.zz();
stressStrainFile.width(width);
stressStrainFile << stress.xx() << " " << stress.xy() << " " << stress.xz() << " "
<< stress.yy() << " " << stress.yz() << " " << stress.zz()
<< endl;
}
}

View file

@ -21,9 +21,9 @@ if(iCorr == 0)
scalar sumMagB = gSum(magSqr(b));
if(sumMagB < SMALL)
{
//Warning << "Aitken under-relaxation: denominator less than SMALL"
// << endl;
sumMagB += SMALL;
//Warning << "Aitken under-relaxation: denominator less than SMALL"
// << endl;
sumMagB += SMALL;
}
aitkenTheta = -aitkenTheta*

View file

@ -21,10 +21,10 @@ if(divDSigmaExpMethod == "standard")
);
// divDSigmaExp = fvc::div
// (
// muf*(mesh.Sf() & fvc::interpolate(gradDU.T()))
// + lambdaf*(mesh.Sf() & I*fvc::interpolate(tr(gradDU)))
// - (muf + lambdaf)*(mesh.Sf() & fvc::interpolate(gradDU))
// );
// muf*(mesh.Sf() & fvc::interpolate(gradDU.T()))
// + lambdaf*(mesh.Sf() & I*fvc::interpolate(tr(gradDU)))
// - (muf + lambdaf)*(mesh.Sf() & fvc::interpolate(gradDU))
// );
}
else if(divDSigmaExpMethod == "decompose")
{
@ -33,13 +33,13 @@ if(divDSigmaExpMethod == "standard")
divDSigmaExp = fvc::div
(
mesh.magSf()
*(
- (muf + lambdaf)*(fvc::snGrad(DU)&(I - n*n))
+ lambdaf*tr(shearGradDU&(I - n*n))*n
+ muf*(shearGradDU&n)
)
);
mesh.magSf()
*(
- (muf + lambdaf)*(fvc::snGrad(DU)&(I - n*n))
+ lambdaf*tr(shearGradDU&(I - n*n))*n
+ muf*(shearGradDU&n)
)
);
}
else if(divDSigmaExpMethod == "laplacian")
{
@ -47,10 +47,10 @@ if(divDSigmaExpMethod == "standard")
- fvc::laplacian(mu + lambda, DU, "laplacian(DDU,DU)")
+ fvc::div
(
mu*gradDU.T()
+ lambda*(I*tr(gradDU)),
"div(sigma)"
);
mu*gradDU.T()
+ lambda*(I*tr(gradDU)),
"div(sigma)"
);
}
else
{

View file

@ -33,12 +33,12 @@
(
IOobject
(
"DEpsilon",
runTime.timeName(),
mesh,
IOobject::READ_IF_PRESENT,
IOobject::AUTO_WRITE
),
"DEpsilon",
runTime.timeName(),
mesh,
IOobject::READ_IF_PRESENT,
IOobject::AUTO_WRITE
),
mesh,
dimensionedSymmTensor("zero", dimless, symmTensor::zero)
);
@ -104,12 +104,12 @@
(
IOobject
(
"divDSigmaExp",
runTime.timeName(),
mesh,
IOobject::NO_READ,
IOobject::NO_WRITE
),
"divDSigmaExp",
runTime.timeName(),
mesh,
IOobject::NO_READ,
IOobject::NO_WRITE
),
mesh,
dimensionedVector("zero", dimensionSet(1,-2,-2,0,0,0,0), vector::zero)
);
@ -139,7 +139,7 @@
IOobject::NO_WRITE
),
mesh,
dimensionedVector("zero", dimLength, vector::zero)
dimensionedVector("zero", dimLength, vector::zero)
);
scalar aitkenInitialRes = 1.0;
scalar aitkenTheta = 0.1;

View file

@ -5,8 +5,8 @@ label historyPatchID = mesh.boundaryMesh().findPatchID(historyPatchName);
if(historyPatchID == -1)
{
Warning << "history patch " << historyPatchName
<< " not found. Force-displacement will not be written"
<< endl;
<< " not found. Force-displacement will not be written"
<< endl;
}
else if(Pstream::master())
{
@ -16,8 +16,8 @@ if(historyPatchID == -1)
{
fileName forceFileName(historyDir/"forceDisp_"+historyPatchName+".dat");
Info << "\nForce-displacement for patch " << historyPatchName
<< " will be written to " << forceFileName
<< endl;
<< " will be written to " << forceFileName
<< endl;
forceFilePtr = new OFstream(forceFileName);
OFstream& forceDispFile = *forceFilePtr;
forceDispFile << "#Disp(mm)\tForce(N)" << endl;
@ -26,9 +26,9 @@ if(historyPatchID == -1)
{
fileName stressFileName(historyDir/"stressStrain_"+historyPatchName+".dat");
Info << "\nStress(Engineering Small Stress)-strain(Engineering Small Strain) for patch "
<< historyPatchName
<< " will be written to " << stressFileName
<< endl;
<< historyPatchName
<< " will be written to " << stressFileName
<< endl;
stressFilePtr = new OFstream(stressFileName);
OFstream& stressStrainFile = *stressFilePtr;
stressStrainFile << "#Strain(-)\tStress(Pa)" << endl;

View file

@ -10,15 +10,15 @@ solidInterface* solidInterfacePtr(NULL);
if(solidInterfaceCorr)
{
Info << "Creating solid interface correction" << endl;
solidInterfacePtr = new solidInterface(mesh, rheology);
solidInterfacePtr->modifyProperties(muf, lambdaf);
Info << "Creating solid interface correction" << endl;
solidInterfacePtr = new solidInterface(mesh, rheology);
solidInterfacePtr->modifyProperties(muf, lambdaf);
//- solidInterface needs muf and lambdaf to be used for divDSigmaExp
if(divDSigmaExpMethod != "surface" && divDSigmaExpMethod != "decompose")
{
FatalError << "divDSigmaExp must be decompose or surface when solidInterface is on"
<< exit(FatalError);
}
//- solidInterface needs muf and lambdaf to be used for divDSigmaExp
if(divDSigmaExpMethod != "surface" && divDSigmaExpMethod != "decompose")
{
FatalError << "divDSigmaExp must be decompose or surface when solidInterface is on"
<< exit(FatalError);
}
}
}

Some files were not shown because too many files have changed in this diff.