--HG--
branch : bgschaid/minorAdditionsBranch
Bernhard F.W. Gschaider 2013-07-19 00:36:27 +02:00
commit 72c243a96f
1416 changed files with 18017 additions and 18012 deletions


@@ -49,7 +49,7 @@ IF (NOT $ENV{CDASH_SUBMIT_LOCAL_HOST_ID} STREQUAL "")
# $CDASH_SUBMIT_LOCAL_HOST_ID
SET(
SITENAME $ENV{CDASH_SUBMIT_LOCAL_HOST_ID}
CACHE STRING "Name of the local site"
)
ELSE (NOT $ENV{CDASH_SUBMIT_LOCAL_HOST_ID} STREQUAL "")
# Grab the hostname FQN; will be used for the sitename
@@ -81,16 +81,16 @@ SET(
)
# Find out the version of the compiler being used.
# Add this information to the buildname
# This is for gcc or icc because they both support the -dumpversion option
EXEC_PROGRAM($ENV{WM_CC}
ARGS -dumpversion
OUTPUT_VARIABLE COMPILER_VERSION
)
SET(BUILDNAME "${BUILDNAME}-$ENV{WM_CC}${COMPILER_VERSION}")
#
# We will support more compilers eventually.
#
# Timeout for running every single test: 4 hours: 4 x 3600 seconds
#SET(
@@ -128,12 +128,12 @@ if(GIT_FOUND)
if (GIT_BRANCH_NAME STREQUAL "")
message("No git-branch. Mercurial?")
EXEC_PROGRAM(hg
ARGS branch
OUTPUT_VARIABLE GIT_BRANCH_NAME
)
message("Git branch (mercurial): ${GIT_BRANCH_NAME}")
endif()
SET(BUILDNAME "${BUILDNAME}-git-branch=${GIT_BRANCH_NAME}")
endif()
# Some last minute cleanup
@@ -158,7 +158,7 @@ set_property(
# Compile the OpenFOAM unit tests located under applications/test
# This part will not be compiled and run by default.
# This would be a good candidate for a sub-project
add_custom_target (OpenFOAM-$ENV{WM_PROJECT_VERSION}_unitTests
wmake all ${OF_ROOT}/applications/test
)
@@ -184,7 +184,7 @@ IF(BUILD_TESTING)
# Modify this variable if you want the full length test case simulations
# Beware, this might take a long time to execute.
# Otherwise, the default behaviour is to run each tutorial for 1 "timestep"
#SET(RUN_FROM_ONE_TIMESTEP 0)
SET(RUN_FROM_ONE_TIMESTEP 1)
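The hunks above show how the test harness tags each CDash build: it asks the compiler for its version with -dumpversion and falls back to "hg branch" when no git branch is reported. As a rough illustration only, the shell sketch below performs the same probing by hand; WM_CC is assumed to be set by the foam-extend environment, and the exact git query used by the script lies outside these hunks, so the git rev-parse call here is only an assumption.

    #!/bin/sh
    # Sketch: reproduce the BUILDNAME ingredients collected by the CTest script.
    # Assumes the foam-extend environment is loaded (WM_CC set); falls back to gcc.
    compiler_version=$("${WM_CC:-gcc}" -dumpversion)   # gcc and icc both support -dumpversion

    # Prefer a git branch; if none is reported, try Mercurial, as the script does.
    branch=$(git rev-parse --abbrev-ref HEAD 2>/dev/null)
    if [ -z "$branch" ] || [ "$branch" = "HEAD" ]; then
        branch=$(hg branch 2>/dev/null)
    fi

    echo "buildname suffix: ${WM_CC:-gcc}${compiler_version}-git-branch=${branch}"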

COPYING

@@ -1,12 +1,12 @@
GNU GENERAL PUBLIC LICENSE
Version 2, June 1991
Copyright (C) 1989, 1991 Free Software Foundation, Inc.
51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
Preamble
The licenses for most software are designed to take away your
freedom to share and change it. By contrast, the GNU General Public
@@ -56,7 +56,7 @@ patent must be licensed for everyone's free use or not licensed at all.
The precise terms and conditions for copying, distribution and
modification follow.
GNU GENERAL PUBLIC LICENSE
TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
0. This License applies to any program or other work which contains
@@ -255,7 +255,7 @@ make exceptions for this. Our decision will be guided by the two goals
of preserving the free status of all derivatives of our free software and
of promoting the sharing and reuse of software generally.
NO WARRANTY
11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY
FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN
@@ -277,9 +277,9 @@ YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER
PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE
POSSIBILITY OF SUCH DAMAGES.
END OF TERMS AND CONDITIONS
How to Apply These Terms to Your New Programs
If you develop a new program, and you want it to be of the greatest
possible use to the public, the best way to achieve this is to make it


@@ -14,7 +14,7 @@ Contributions:
- TU Braunschweig: real gas thermo
- Kornev, Rostock: inlet turbulence generator; locDynV2F hybrid turbulence model
- Christoph Goniva: and multi-mixer dynamic mesh class
-
- segregated pressure-based turbomachinery (Wikki)


@@ -1,118 +1,118 @@
The OpenFOAM-Extend Project
Web site: http://www.extend-project.de/
The goal of the OpenFOAM -Extend Project is to open the OpenFOAM CFD toolbox
to community contributed extensions in the spirit of the Open Source
development model.
Software under the name OpenFOAM has been developed by a large group of
volunteers starting in 1991 and has spread through scientific and engineering
community in the late 1990s and early 2000s. Its main strength was the spirit
of open collaboration in the community: the aim of the OpenFOAM-Extend Project
is to formalize this spirit.
* Trademark notice
Currently, the word "OpenFOAM" is held as a trademark by a commercial
company, not associated with the Open Source development effort of this
software. While the author acknowledges the existence of Trademark,
association of trademarked phrase with the the code developed by third
parties and including the author is misleading in terms of authorship,
ownership of intellectual property of the source code.
In the absence of a legal resolution, we shall refer to the project as the
-Extend project and software as "OpenFOAM", as per agreement between
Prof. Hrvoje Jasak and Mr. Henry G. Weller of 30 September 2004.
The excerpts below are from the correspondences of Dr.Jasak and are to
his knowledge all the relevant writings about this topic. Although
this topic has been discussed before in public forums there exists no
'official' answer by OpenCFD on this issue. In the interest of
fairness a reference (link) to such an answer will be added to this
document as soon as we come aware of it.
09/30/2004 01:45 PM
Dear Dr. Jasak,
I propose that we make FOAM open-source and shutdown Nabla, that way we
are free to pursue whatever future we wish independently.
Henry G. Weller
Managing Director
Nabla Ltd
The first release of the OpenFOAM source code of 10 December 2004
and subsequent clarification of 16 February 2005.
Hi Henry,
...
Regarding the new setup, I would like to thank you for your effort i
n making FOAM public domain, with crucial comments regarding some
important details.
- in the header files for the open version of foam there exists a line
which claims copyright for OpenCFD Ltd:
Copyright (C) 1991-2004 OpenCFD Ltd.
I am not sure whether it is possible to claim copyright over a GPL piece
of software, but this is at least misleading. In order for this
statement to be true, OpenCFD Ltd. would need to purchase the copyright
from Nabla Ltd. and from you and me personally (due to unresolved
ownership and the remaining payments Nabla Ltd. needs to make to us
regarding the original transfer of copyright). I can offer a number
of solutions acceptable to me as follows:
1) no Copyright statement apart from the required GPL statement
2) Copyright OpenFOAM.org
3) Copyright Henry Weller, Hrvoje Jasak et al
4) Copyright OpenCFD and Hrvoje Jasak
5) List of authors instead of the copyright statement
I am also prepared to consider any other reasonable ideas along the same
lines (including the sale of copyright, which would be a bit pointless
at this stage) - please advise.
02/17/2005 11:09 PM
Under the GPL there has to be an organization in place to enforce the GPL
if there are violation, these are part of the rules. OpenCFD undertakes
that responsibility hence the copyright.
Henry G. Weller
Managing Director
12 November 2010
From the above it is clear that OpenCFD does not own the rights or
authorship of the source code, which is currently concealed in public.
For further details, I have been attempting to get a formal response by
this company for 6 years now, without success. It is time to move on.
The current Copyright notice does not fairly describe the authorship of
the software. In the spirit of the open source development model, the
-Extend project wishes to formally acknowledge all contributors and
developers of OpenFOAM from its inception to today.
As this issue still remains unresolved, and the list of contributors
has been removed from the OpenFOAM source code by Mr. Weller in 2006,
we shall re-base OpenFOAM as an Open Source community-based project
and associate the authorship and Copyright in each case with the
original author.
Hrvoje Jasak
This offering is not affiliated with OpenCFD Limited, owner of the OpenFOAM
trade mark.
Please find the official website of OpenCFD Limited at http://www.openfoam.org
In particular, OpenCFD Limited hosts the following legal statements:
- licensing and intellectual property: http://www.openfoam.com/legal/index.php
- trademark policy: http://www.openfoam.com/legal/trademark-policy.php


@@ -10,7 +10,7 @@ git Repository: Henrik Rusche (h.rusche@wikki.co.uk)
Martin Beaudoin (beaudoin.martin@ireq.ca)
1. SourceForge Access
~~~~~~~~~~~~~~~~~~
To make contributions to the -extend project, you should first obtain an
account at SourceForge.net. (SourceForge will suggest a username
@@ -46,7 +46,7 @@ git Repository: Henrik Rusche (h.rusche@wikki.co.uk)
The article listed above should be considered mandatory reading material
for those planning to make contributions to the repository. Some links about
the general usage of GIT can be found in Section 8.
Please do not hesitate to ask one of the "git Repository" contacts at the top
of this document if you are not sure about specific operation relative to the git
@@ -81,14 +81,14 @@ git Repository: Henrik Rusche (h.rusche@wikki.co.uk)
+ git checkout -b my-feature-branch
Feature branches should be named after the fix or feature that they contain,
*not* named after the author. There may be more than one author, after all, and
this information is recorded in the commit anyway. As an example, a bug fix
to the mesquite package should be committed to a branch named "hotfix/mesquite".
Carefully organized commits and branches, clear commit messages, and well-chosen
branch names will make it easier for the release committee to review and merge
each contribution.
When you have a feature branch that is ready to be merged, push it to the server
using a command such as this:
@@ -125,7 +125,7 @@ git Repository: Henrik Rusche (h.rusche@wikki.co.uk)
The feature branches provided by users will be merged by the release committee
into an integration branch called "nextRelease", and then both the local
and remote copy of the feature branch will be deleted. The merge will be performed
using a "git merge --no-ff" command, which forces the creation of a merge commit
even in the case where the merge could be accomplished by fast-forward.
Note that the automated test loop will be run off of this integration branch.
@@ -136,7 +136,7 @@ git Repository: Henrik Rusche (h.rusche@wikki.co.uk)
and hotfixes.
Note that hotfixes should be branched off of the master branch and should be merged
twice - once into the integration branch and once into the master branch - in order to
guarantee that a merge of the integration branch into the master branch can be
accomplished by a fast-forward.
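Taken together, the workflow described in these hunks maps onto a short sequence of standard git commands. The sketch below is illustrative only: the remote name "origin" is an assumption, the branch names are the examples used in the text, and the actual push command recommended by the document sits outside the hunks shown above.

    # Contributor: create a fix branch (hotfixes branch off master, as noted above)
    git checkout -b hotfix/mesquite master
    # ... commit the fix ...
    git push origin hotfix/mesquite              # publish the branch for review

    # Release committee (sketch): merge with --no-ff so a merge commit is always created
    git checkout nextRelease
    git merge --no-ff hotfix/mesquite
    git checkout master
    git merge --no-ff hotfix/mesquite            # hotfixes are merged twice, as noted above
    git branch -d hotfix/mesquite                # delete the local copy
    git push origin --delete hotfix/mesquite     # delete the remote copy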

README

@@ -59,7 +59,7 @@
Then update the environment variables by sourcing the $HOME/.bashrc file by
typing in the terminal:
+ . $HOME/.bashrc
2) OR, if running tcsh or csh, source the etc/cshrc file by adding the
following line to the end of your $HOME/.cshrc file:
@@ -69,7 +69,7 @@
Then update the environment variables by sourcing the $HOME/.cshrc file by
typing in the terminal:
+ source $HOME/.cshrc
*** Installation in alternative locations
OpenFOAM may also be installed in alternative locations. However, the
@@ -79,13 +79,13 @@
The environment variable 'FOAM_INST_DIR' can be used to find and source the
appropriate resource file. Here is a bash/ksh/sh example:
+ export FOAM_INST_DIR=/data/app/OpenFOAM
+ foamDotFile=$FOAM_INST_DIR/OpenFOAM-<VERSION>/etc/bashrc
+ [ -f $foamDotFile ] && . $foamDotFile
and a csh/tcsh example:
+ setenv FOAM_INST_DIR /data/app/OpenFOAM
+ set foamDotFile=$FOAM_INST_DIR/OpenFOAM-<VERSION>/etc/cshrc
+ if ( -f $foamDotFile ) source $foamDotFile
@@ -93,7 +93,7 @@
of the OpenFOAM installation.
* Building the ThirdParty packages from Sources
A new way of compiling and installing the ThirdParty suite of packages is proposed with OpenFOAM-1.6-ext.
Please consult the file ThirdParty/README.ThirdParty a for more information.
* Building from Sources (Optional)
@@ -154,7 +154,7 @@
* Reporting Bugs in OpenFOAM
Please report all bugs and feature requests using our bug tracking system:
http://sourceforge.net/apps/mantisbt/openfoam-extend/main_page.php or
http://www.extend-project.de/project-final/openfoam-extend-bug-tracker
Please also report bugs at the CFD-Online User Forum.
@@ -188,7 +188,7 @@
David Hill
Niklas Wikstrom
Dubravko Matijasevic
Darrin Stephens
Christian Beck
Oliver Borm
James Criner


@@ -1,84 +1,84 @@
# -*- mode: org; -*-
#
#+TITLE: *OpenFOAM release notes for version 1.6-ext*
#+AUTHOR: Hrvoje Jasak.
#+DATE: 12 November 2010
#+LINK: http://www.extend-project.de
#+OPTIONS: author:nil
* Overview
OpenFOAM-1.6-ext is a formal release of the community developments in the
OpenFOAM software library, following the establishment of the -Extend Project
and web portal.
This release includes many improvements, including an automatic test harness
which provides basic quality control by running OpenFOAM over a growing
suite of tutorials and test cases.
* Compatibility
OpenFOAM-1.6-ext is compatible with the OpenFOAM-1.6.x and 1.7.1 versions
of the code and incorporate most developments and changes from above versions
and ensure top-level compatibility. In some cases, the differences are
caused by bug fixes and algorithmic improvements, considered more important
than inter-operability.
The list of developments is a result of the work of numerous contributors.
The Extend Project Admins would formally like to thank all contributors;
the list will be updated with further code development
* Fundamental developments
Improvements in accuracy and stability on tetrahedral and tet-dominant meshes
Implicit coupled multi-domain solver
Block-implicit multi-equation matrix support
Proper Orthogonal Decomposition (POD) data analysis tools
Rewrite of Ordinary Differential Equation (ODE) and ODE solver classes
Dynamic remeshing classes, based on tetrahedral edge swapping
Radial Basis Function interpolation and mesh motion classes
Turbomachinery features: GGI interface, cyclic GGI, partial overlap GGI
Parallelisation of topological change engine
Support for parallelisation and topological changes in the Finite Area Method
Library preparation for Python/Swig interface for OpenFOAM: VulaSHAKA project
(http://sourceforge.net/projects/vulashaka/)
Basic implementation of OpenMP wrapping for multi-core support
* Ongoing developments
This section lists the applications that existed in versions of OpenFOAM
but were abandoned by OpenCFD due to lack of expertise. In some cases, code
may still be present but it is buggy or unusable. The -Extend project line
maintains, validates and develops the features in contact with original
authors and contributes new features and bug fixes.
Working parallel point mesh and field tools, shared between interpolation
and FEM discretisation solvers
FEM-based automatic mesh motion solver, with working parallelisation
Dynamic mesh features and topological mesh changes
The Finite Area Method: parallelisation, volume-to-surface mapping
New generation of linear equation solvers, including accelerated AMG
* Developments to solvers (applications)
Basic 6-degree-of-freedom (6-DOF) solver
POD solver tools: prototype
Demonstration solver for fully implicit block-matrix coupling
* Developments to utilities
New parallel decomposition and reconstruction formulation, with support
for topologically changing meshes
Level-set support tools
* Model development
Transport models and top-level solver for visco-elasto-plastic flows
Updates to internal combustion engine simulation tools
Updated version of free surface tracking tools for free surface flows
Updated fluid-structure interaction solver with parallelisation support
Updated stress analysis tools, new large deformation solver formulation
* Quality Control
Integrated automatic and publicly available test harness with ~150 cases
http://openfoam-extend.sourceforge.net/CDash/index.php
Community-driven test-case base to enhance code quality and integrity.
Contributions under
http://www.extend-project.de/project-final/openfoam-extend-test-harness
* Other
The number of cumulative bug fixes compared to OpenFOAM-1.6.x is over
5000; we will stop counting.


@@ -62,12 +62,12 @@ int main(int argc, char *argv[])
forAll (structure.names(), bodyI)
{
Info<< nl << "Average velocity of " << structure.names()[bodyI]
<< " in time step = "
<< structure()[bodyI].Uaverage().value() << nl
<< "Current velocity in time instant = "
<< structure()[bodyI].U().value() << nl
<< "Average omega of " << structure.names()[bodyI]
<< " in time step = "
<< structure()[bodyI].omegaAverage().value() << nl
<< "Current omega in time instant = "


@@ -22,7 +22,7 @@
volScalarField GEta = GEtaCoef/tauEta;
volScalarField XiEqEta = 1.0 + XiCoef*sqrt(up/(Su + SuMin))*Reta;
volScalarField R =
GEta*XiEqEta/(XiEqEta - 0.999) + GIn*XiIn/(XiIn - 0.999);
volScalarField XiEqStar = R/(R - GEta - GIn);
@@ -42,7 +42,7 @@
volScalarField GEta = GEtaCoef/tauEta;
volScalarField XiEqEta = 1.0 + XiCoef*sqrt(up/(Su + SuMin))*Reta;
volScalarField R =
GEta*XiEqEta/(XiEqEta - 0.999) + GIn*XiIn/(XiIn - 0.999);
volScalarField XiEqStar = R/(R - GEta - GIn);


@@ -91,7 +91,7 @@ bool Foam::XiModels::algebraic::read(const dictionary& XiProperties)
XiModel::read(XiProperties);
XiModelCoeffs_.lookup("XiShapeCoef") >> XiShapeCoef;
return true;
}


@@ -117,7 +117,7 @@ inline Foam::scalar Foam::laminarFlameSpeedModels::SCOPE::polyPhi
{
scalar x = phi - 1.0;
return
a[0]
*(
scalar(1)


@@ -45,7 +45,7 @@ if (adjustTimeStep)
maxDeltaT
)
);
Info<< "deltaT = " << runTime.deltaT().value() << endl;
}


@@ -2,7 +2,7 @@
cd ${0%/*} || exit 1 # run from this directory
set -x
wmake libso BCs
wmake
# ----------------------------------------------------------------- end-of-file


@@ -84,7 +84,7 @@ int main(int argc, char *argv[])
(
fvm::ddt(rhoU)
+ fvm::div(phiv, rhoU)
==
- fvc::grad(p)
);


@@ -2,7 +2,7 @@
cd ${0%/*} || exit 1 # run from this directory
set -x
wmake libso BCs
wmake
# ----------------------------------------------------------------- end-of-file


@@ -131,7 +131,7 @@ void inviscidWallPFvPatchScalarField::updateCoeffs()
const fvPatchField<vector>& rhoUp =
lookupPatchField<volVectorField, vector>("rhoU");
const fvsPatchField<scalar>& phip =
lookupPatchField<surfaceScalarField, scalar>("phi");
const fvsPatchField<scalar>& rAp =
@@ -147,7 +147,7 @@ void inviscidWallPFvPatchScalarField::updateCoeffs()
void inviscidWallPFvPatchScalarField::write(Ostream& os) const
{
fixedGradientFvPatchScalarField::write(os);
os.writeKeyword("fluxFraction")
<< fluxFraction_ << token::END_STATEMENT << nl;
writeEntry("value", os);
}


@@ -145,7 +145,7 @@ void mixedRhoEFvPatchScalarField::updateCoeffs()
const volScalarField& T = db().lookupObject<volScalarField>("T");
const label patchi = patch().index();
fvPatchScalarField& Tp =
const_cast<fvPatchScalarField&>(T.boundaryField()[patchi]);
Tp.evaluate();
@@ -164,7 +164,7 @@ void mixedRhoEFvPatchScalarField::updateCoeffs()
refGrad() =
rhop*Cv.value()*Tp.snGrad()
+ (
refValue()
- (0.5*rhop.patchInternalField()*
magSqr(rhoUp.patchInternalField()/rhop.patchInternalField()))
)*patch().deltaCoeffs();


@@ -66,7 +66,7 @@ public:
const dictionary&
);
//- Construct by mapping given fixedRhoUFvPatchVectorField
// onto a new patch
fixedRhoUFvPatchVectorField
(


@@ -1,3 +1,3 @@
conjugateHeatFoam.C
EXE = $(FOAM_APPBIN)/conjugateHeatFoam


@@ -1,3 +1,3 @@
electrostaticFoam.C
EXE = $(FOAM_APPBIN)/electrostaticFoam


@@ -98,7 +98,7 @@ int main(int argc, char *argv[])
U = rUA*UEqn.H();
phi = (fvc::interpolate(U) & mesh.Sf())
+ fvc::ddtPhiCorr(rUA, U, phi);
for (int nonOrth=0; nonOrth<=nNonOrthCorr; nonOrth++)


@@ -83,6 +83,6 @@
momentumPredictor = false;
# include "UEqn.H"
momentumPredictor = momentumPredictorSave;
rUA = 1.0/UEqn.A();
}


@@ -35,7 +35,7 @@ scalar meanMeshCoNum = 0.0;
if (mesh.nInternalFaces())
{
surfaceScalarField SfUfbyDelta =
mesh.surfaceInterpolation::deltaCoeffs()*mag(mesh.phi());
meshCoNum = max(SfUfbyDelta/mesh.magSf())


@@ -28,7 +28,7 @@
pcorr.oldTime() == p.oldTime();
phi = fvc::interpolate(rho)
*((fvc::interpolate(U) & mesh.Sf()) - fvc::meshPhi(rho, U));
for(int nonOrth=0; nonOrth<=nNonOrthCorr; nonOrth++)
{
@@ -43,7 +43,7 @@
if (nonOrth == nNonOrthCorr)
{
phi += pcorrEqn.flux();
}
}
}


@@ -35,7 +35,7 @@ scalar meanMeshCoNum = 0.0;
if (mesh.nInternalFaces())
{
surfaceScalarField SfUfbyDelta =
mesh.surfaceInterpolation::deltaCoeffs()*mag(mesh.phi());
meshCoNum = max(SfUfbyDelta/mesh.magSf())


@@ -28,7 +28,7 @@ Application
Description
Sample application testing the equationReader extension, and demonstrating
its use.
Author
David L. F. Gaden
@@ -121,7 +121,7 @@ int main(int argc, char *argv[])
Info << "Reading equation b from testDict, linking an output variable"
<< endl;
eqns.readEquation(testDict, "b", activeOutB);
Info << "Output variable before update() = " << activeOutB << endl;
Info << "Begining .update() - this evaluates all equations with active "
<< "output..." << endl;
@@ -161,11 +161,11 @@ int main(int argc, char *argv[])
Info << "done. Evaluating equation f ... ";
passiveOutF = eqns.evaluate("f");
Info << "done." << token::NL << "The result is: " << passiveOutF << endl;
Info << token::NL << "Creating output..." << endl;
OFstream os(path/"outputDict");
os << eqns;
eqns.dataSourceStatus(os);
return(0);
}


@@ -28,7 +28,7 @@ Application
Description
Sample application testing the equationReader in a finite volume solver
environment.
Author
David L. F. Gaden
@@ -174,7 +174,7 @@ int main(int argc, char *argv[])
eqns.addDataSource(Sj, "Sj");
eqns.addDataSource(Sk, "Sk");
eqns.addDataSource(Sl, "Sl");
label listIndex(0);
eqns.addDataSource(p);
eqns.addDataSource(dummy);
@@ -201,7 +201,7 @@ int main(int argc, char *argv[])
scalar saD(readScalar(testDict1.lookup("saD")));
scalar saE(readScalar(testDict1.lookup("saE")));
scalar saF(readScalar(testDict1.lookup("saF")));
dimensionedScalar dsaA(testDict1.lookup("dsaA"));
dimensionedScalar dsaB(testDict1.lookup("dsaB"));
dimensionedScalar dsaC(testDict1.lookup("dsaC"));
@@ -244,7 +244,7 @@ int main(int argc, char *argv[])
{
Info<< "Time = " << runTime.timeName() << nl << endl;
DStime.value() = runTime.value();
Info << "Moving p index to ";
listIndex++;
if (listIndex == p.size())
@@ -313,7 +313,7 @@ int main(int argc, char *argv[])
volScalarField rUA = 1.0/UEqn.A();
U = rUA*UEqn.H();
phi = (fvc::interpolate(U) & mesh.Sf())
+ fvc::ddtPhiCorr(rUA, U, phi);
adjustPhi(phi, U, p);


@@ -71,7 +71,7 @@ class solidWallMixedTemperatureCoupledFvPatchScalarField
//- Name of field on the neighbour region
const word neighbourFieldName_;
//- Name of thermal conductivity field
const word KName_;


@@ -85,6 +85,6 @@
momentumPredictor = false;
# include "UEqn.H"
momentumPredictor = momentumPredictorSave;
rAU = 1.0/UEqn.A();
}


@@ -85,6 +85,6 @@
momentumPredictor = false;
# include "UEqn.H"
momentumPredictor = momentumPredictorSave;
rAU = 1.0/UEqn.A();
}


@@ -1,3 +1,3 @@
icoFoam.C
EXE = $(FOAM_APPBIN)/icoFoam


@@ -1,3 +1,3 @@
nonNewtonianIcoFoam.C
EXE = $(FOAM_APPBIN)/nonNewtonianIcoFoam


@@ -1,3 +1,3 @@
simpleFoam.C
EXE = $(FOAM_APPBIN)/simpleFoam


@@ -62,7 +62,7 @@ int main(int argc, char *argv[])
# include "solverScalarTransportFoam.H"
multiRun++;
// * * * * * * * * * * * * * * * * icoFoam2 * * * * * * * * * * * * * * * * //
Info << "*** Switching to icoFoam2 ***\n" << endl;


@@ -31,7 +31,7 @@
volScalarField rUA = 1.0/UEqn.A();
U = rUA*UEqn.H();
phi = (fvc::interpolate(U) & mesh.Sf())
+ fvc::ddtPhiCorr(rUA, U, phi);
adjustPhi(phi, U, p);


@@ -3,12 +3,12 @@
dimensionedScalar totalMass = fvc::domainIntegrate(rho);
scalar sumLocalContErr =
(
fvc::domainIntegrate(mag(rho - thermoRho))/totalMass
).value();
scalar globalContErr =
(
fvc::domainIntegrate(rho - thermoRho)/totalMass
).value();


@@ -47,7 +47,7 @@ if (adjustTimeStep)
maxDeltaT
)
);
Info<< "deltaT = " << runTime.deltaT().value() << endl;
}


@@ -63,7 +63,7 @@ Ostream& operator<<
)
{
os << tp.theta0_ << token::SPACE
<< tp.uTheta_ << token::SPACE
<< tp.thetaA_ << token::SPACE
<< tp.thetaR_;


@@ -166,13 +166,13 @@ Foam::tmp<Foam::surfaceScalarField> Foam::multiphaseMixture::muf() const
{
PtrDictionary<phase>::const_iterator iter = phases_.begin();
tmp<surfaceScalarField> tmuf =
fvc::interpolate(iter().limitedAlpha())*iter().rho()*
fvc::interpolate(iter().nu());
for(++iter; iter != phases_.end(); ++iter)
{
tmuf() +=
fvc::interpolate(iter().limitedAlpha())*iter().rho()*
fvc::interpolate(iter().nu());
}


@@ -28,7 +28,7 @@ License
// * * * * * * * * * * * * * * * * Constructors * * * * * * * * * * * * * * //
Foam::phase::phase
(
const word& name,
const dictionary& phaseDict,


@@ -3,9 +3,9 @@
// Creates the porosity field for MULES
volScalarField porosity
(
IOobject
(
"porosity",
runTime.timeName(),
mesh,
IOobject::NO_READ,
@@ -19,14 +19,14 @@
forAll( pZones, zoneI )
{
const label & zoneId( pZones[zoneI].zoneId() );
const labelList & cells(mesh.cellZones()[zoneId]);
const scalar & zonePorosity( pZones[zoneI].porosity() );
forAll( cells, cellI )
{
porosity[cells[cellI]] = zonePorosity;
}
}


@@ -1,5 +1,5 @@
{
mul = muc +
plasticViscosity
(
plasticViscosityCoeff,


@@ -26,7 +26,7 @@ if(turbulence)
+ fvm::div(phi, epsilon)
- fvm::laplacian
(
alphaEps*mut + mul, epsilon,
"laplacian(DepsilonEff,epsilon)"
)
==


@@ -6,7 +6,7 @@ EXE_INC = \
-IinterfacialModels/lnInclude \
-IphaseModel/lnInclude \
-Iaveraging
EXE_LIBS = \
-lEulerianInterfacialModels \
-lfiniteVolume \


@@ -56,7 +56,7 @@ fvVectorMatrix UbEqn(Ub, Ub.dimensions()*dimVol/dimTime);
-fvc::interpolate(nuEffb)*mesh.magSf()*fvc::snGrad(beta)
/fvc::interpolate(beta + scalar(0.001));
UbEqn =
(
(scalar(1) + Cvm*rhob*alpha/rhob)*
(


@@ -83,7 +83,7 @@ Foam::tmp<Foam::volScalarField> Foam::GidaspowErgunWenYu::K
Cds[celli] = 0.44;
}
}
// Wen and Yu (1966)
tmp<volScalarField> tKWenYu = 0.75*Cds*phaseb_.rho()*Ur*bp/phasea_.d();
volScalarField& KWenYu = tKWenYu();


@@ -41,7 +41,7 @@ Foam::autoPtr<Foam::dragModel> Foam::dragModel::New
interfaceDict.lookup("dragModel" + phasea.name())
);
Info << "Selecting dragModel for phase "
<< phasea.name()
<< ": "
<< dragModelType << endl;


@@ -75,7 +75,7 @@ Foam::tmp<Foam::volScalarField> Foam::GidaspowConductivity::kappa
(
2.0*sqr(alpha)*g0*(1.0 + e)/sqrtPi
+ (9.0/8.0)*sqrtPi*g0*0.5*(1.0 + e)*sqr(alpha)
+ (15.0/16.0)*sqrtPi*alpha
+ (25.0/64.0)*sqrtPi/((1.0 + e)*g0)
);
}


@@ -76,7 +76,7 @@ Foam::tmp<Foam::volScalarField> Foam::HrenyaSinclairConductivity::kappa
{
const scalar sqrtPi = sqrt(mathematicalConstant::pi);
volScalarField lamda =
scalar(1) + da/(6.0*sqrt(2.0)*(alpha + scalar(1.0e-5)))/L_;
return rhoa*da*sqrt(Theta)*


@@ -35,9 +35,9 @@ Foam::autoPtr<Foam::conductivityModel> Foam::conductivityModel::New
{
word conductivityModelType(dict.lookup("conductivityModel"));
Info<< "Selecting conductivityModel "
<< conductivityModelType << endl;
dictionaryConstructorTable::iterator cstrIter =
dictionaryConstructorTablePtr_->find(conductivityModelType);


@@ -73,7 +73,7 @@ frictionalPressure
) const
{
return
Fr*pow(max(alpha - alphaMinFriction, scalar(0)), eta)
/pow(max(alphaMax - alpha, scalar(5.0e-2)), p);
}
@@ -104,9 +104,9 @@ Foam::tmp<Foam::volScalarField> Foam::JohnsonJacksonFrictionalStress::muf
const dimensionedScalar& alphaMax,
const volScalarField& pf,
const volTensorField& D,
const dimensionedScalar& phi
) const
{
return dimensionedScalar("0.5", dimTime, 0.5)*pf*sin(phi);
}


@@ -35,9 +35,9 @@ Foam::autoPtr<Foam::frictionalStressModel> Foam::frictionalStressModel::New
{
word frictionalStressModelType(dict.lookup("frictionalStressModel"));
Info<< "Selecting frictionalStressModel "
<< frictionalStressModelType << endl;
dictionaryConstructorTable::iterator cstrIter =
dictionaryConstructorTablePtr_->find(frictionalStressModelType);


@@ -35,9 +35,9 @@ Foam::autoPtr<Foam::granularPressureModel> Foam::granularPressureModel::New
{
word granularPressureModelType(dict.lookup("granularPressureModel"));
Info<< "Selecting granularPressureModel "
<< granularPressureModelType << endl;
dictionaryConstructorTable::iterator cstrIter =
dictionaryConstructorTablePtr_->find(granularPressureModelType);


@@ -65,7 +65,7 @@ Foam::tmp<Foam::volScalarField> Foam::CarnahanStarlingRadial::g0
) const
{
return
1.0/(1.0 - alpha)
+ 3.0*alpha/(2.0*sqr(1.0 - alpha))
+ sqr(alpha)/(2.0*pow(1.0 - alpha, 3));
@@ -78,8 +78,8 @@ Foam::tmp<Foam::volScalarField> Foam::CarnahanStarlingRadial::g0prime
const dimensionedScalar& alphaMax
) const
{
return
- alpha/sqr(1.0 - alpha)
+ (3.0*(1.0 - alpha) + 6.0*sqr(alpha))/(2.0*(1.0 - alpha))
+ (2.0*alpha*(1.0 - alpha) + 3.0*pow(alpha, 3))
/(2.0*pow(1.0 - alpha, 4));


@@ -74,7 +74,7 @@ Foam::tmp<Foam::volScalarField> Foam::GidaspowRadial::g0prime
const dimensionedScalar& alphaMax
) const
{
return
(-1.0/5.0)*pow(alpha/alphaMax, -2.0/3.0)
/(alphaMax*sqr(1.0 - pow(alpha/alphaMax, 1.0/3.0)));
}


@@ -74,7 +74,7 @@ Foam::tmp<Foam::volScalarField> Foam::SinclairJacksonRadial::g0prime
const dimensionedScalar& alphaMax
) const
{
return
(-1.0/3.0)*pow(alpha/alphaMax, -2.0/3.0)
/(alphaMax*sqr(1.0 - pow(alpha/alphaMax, 1.0/3.0)));
}


@@ -35,9 +35,9 @@ Foam::autoPtr<Foam::radialModel> Foam::radialModel::New
{
word radialModelType(dict.lookup("radialModel"));
Info<< "Selecting radialModel "
<< radialModelType << endl;
dictionaryConstructorTable::iterator cstrIter =
dictionaryConstructorTablePtr_->find(radialModelType);


@@ -42,7 +42,7 @@
phib = (fvc::interpolate(Ub) & mesh.Sf()) + fvc::ddtPhiCorr(rUbA, Ub, phib)
+ phiDragb;
phi = alphaf*phia + betaf*phib;
surfaceScalarField Dp("(rho*(1|A(U)))", alphaf*rUaAf/rhoa + betaf*rUbAf/rhob);


@@ -14,12 +14,12 @@
// Initializing neighbouring cells contribution
scalar neighboursEx = 0.0;
forAll (neighbourCell, cellj)
{
labelList neighboursNeighbour = neighbour[neighbourCell[cellj]];
scalar neighboursNeighbourCellVolumes = 0.0;
forAll (neighboursNeighbour, cellk)
{
neighboursNeighbourCellVolumes +=


@@ -16,7 +16,7 @@ $(rheologyLaws)/PronyViscoelastic/PronyViscoelastic.C
thermalModel/thermalModel.C
thermalLaws = thermalModel/thermalLaws
$(thermalLaws)/thermalLaw/thermalLaw.C
$(thermalLaws)/thermalLaw/newThermalLaw.C
$(thermalLaws)/constantThermal/constantThermal.C
$(thermalLaws)/multiMaterialThermal/multiMaterialThermal.C


@@ -71,7 +71,7 @@ public:
DugdaleCohesiveLaw
(
const word& cohesiveLawName,
const dictionary& dict
);
//- Construct as copy


@@ -74,7 +74,7 @@ Foam::autoPtr<Foam::cohesiveLaw> Foam::cohesiveLaw::New
Foam::cohesiveLaw::cohesiveLaw
(
const word& cohesiveLawName,
const dictionary& dict
)
:
cohesiveLawCoeffs_(dict.subDict(cohesiveLawName + "Coeffs")),

View file

@ -97,7 +97,7 @@ public:
        static autoPtr<cohesiveLaw> New
        (
            const word& cohesiveLawName,
            const dictionary& dict
        );
@ -107,7 +107,7 @@ public:
        cohesiveLaw
        (
            const word& cohesiveLawName,
            const dictionary& dict
        );
        //- Construct as copy
@ -124,7 +124,7 @@ public:
    // Member Functions
        //- Return cohesive law coefficients
        const dictionary& cohesiveLawCoeffs() const
        {

View file

@ -71,7 +71,7 @@ public:
        linearCohesiveLaw
        (
            const word& cohesiveLawName,
            const dictionary& dict
        );
        //- Construct as copy

View file

@ -171,7 +171,7 @@ public:
        {
            checkPatchFace(mesh);
        }
        //- Construct from dictionary
        componentReference

View file

@ -245,7 +245,7 @@ void cohesiveLawFvPatchVectorField::write(Ostream& os) const
{
    fvPatchVectorField::write(os);
    traction_.writeEntry("traction", os);
    os.writeKeyword("cohesiveLaw") << law().type()
        << token::END_STATEMENT << nl;
    os.writeKeyword("relaxationFactor") << relaxationFactor_
        << token::END_STATEMENT << nl;

View file

@ -99,7 +99,7 @@ cohesiveZoneFvPatchVectorField::cohesiveZoneFvPatchVectorField
    {
        this->refValue() = vector::zero;
    }
    if (dict.found("refGradient"))
    {
        this->refGrad() = vectorField("refGradient", dict, p.size());
@ -111,7 +111,7 @@ cohesiveZoneFvPatchVectorField::cohesiveZoneFvPatchVectorField
    if (dict.found("valueFraction"))
    {
        this->valueFraction() =
            symmTensorField("valueFraction", dict, p.size());
    }
    else
@ -206,7 +206,7 @@ void cohesiveZoneFvPatchVectorField::updateCoeffs()
    const rheologyModel& rheology =
        this->db().objectRegistry::lookupObject<rheologyModel>(rheologyName_);
    const scalarField mu =
        rheology.mu()().boundaryField()[patch().index()];
    const scalarField lambda =
@ -276,17 +276,17 @@ void cohesiveZoneFvPatchVectorField::updateCoeffs()
            if(magSqr(valueFraction()[faceI]) < SMALL)
            {
                cohesiveTraction =
                    relaxationFactor_*cohesiveTraction
                  + (1.0 - relaxationFactor_)*sigmaN[faceI]*n[faceI];
                refGrad()[faceI] =
                (
                    cohesiveTraction
                  - (
                        n[faceI]
                      & (
                            mu[faceI]*gradU[faceI].T()
                          - (mu[faceI] + lambda[faceI])*gradU[faceI]
                        )
                    )
@ -306,7 +306,7 @@ void cohesiveZoneFvPatchVectorField::write(Ostream& os) const
    directionMixedFvPatchVectorField::write(os);
    os.writeKeyword("U") << UName_ << token::END_STATEMENT << nl;
    os.writeKeyword("rheology") << rheologyName_ << token::END_STATEMENT << nl;
    os.writeKeyword("cohesiveLaw") << law().type()
        << token::END_STATEMENT << nl;
    os.writeKeyword("relaxationFactor") << relaxationFactor_
        << token::END_STATEMENT << nl;

View file

@ -174,7 +174,7 @@ public:
        //- Update the coefficients associated with the patch field
        virtual void updateCoeffs();
        //- Write
        virtual void write(Ostream&) const;

View file

@ -107,7 +107,7 @@ Foam::tmp<Foam::volScalarField> Foam::BurgersViscoelastic::E(scalar t) const
            + eta2_.value()/k2_.value();
        scalar p2 = eta1_.value()*eta2_.value()/(k1_.value()*k2_.value());
        scalar q1 = eta1_.value();
        scalar q2 = eta1_.value()*eta2_.value()/k2_.value();
@ -120,7 +120,7 @@ Foam::tmp<Foam::volScalarField> Foam::BurgersViscoelastic::E(scalar t) const
        E = (q1 - q2*r1)*exp(-r1*t)/A - (q1 - q2*r2)*exp(-r2*t)/A;
    }
    tmp<volScalarField> tresult
    (
@ -178,7 +178,7 @@ Foam::tmp<Foam::volScalarField> Foam::BurgersViscoelastic::J(scalar t) const
    if(t >= 0)
    {
        J = 1.0/k1_.value()
          + (1 - exp(-k2_.value()*t/eta2_.value()))/k2_.value()
          + t/eta1_.value();
    }

View file

@ -102,14 +102,14 @@ Foam::tmp<Foam::volScalarField> Foam::KelvinSLSViscoelastic::E(scalar t) const
    if(t>=0)
    {
        scalar p1 = eta2_.value()/(k1_.value() + k2_.value());
        scalar q0 = k1_.value()*k2_.value()/(k1_.value() + k2_.value());
        scalar q1 = k1_.value()*eta2_.value()/(k1_.value() + k2_.value());
        E = q0 + (q1/p1 - q0)*exp(-t/p1);
    }
    tmp<volScalarField> tresult
    (
@ -168,7 +168,7 @@ Foam::tmp<Foam::volScalarField> Foam::KelvinSLSViscoelastic::J(scalar t) const
    if(t >= 0)
    {
        scalar p1 = eta2_.value()/(k1_.value() + k2_.value());
        scalar q0 = k1_.value()*k2_.value()/(k1_.value() + k2_.value());
        scalar q1 = k1_.value()*eta2_.value()/(k1_.value() + k2_.value());

View file

@ -183,8 +183,8 @@ Foam::MaxwellElasticViscoelastic::J(scalar t) const
            mesh(),
            dimensionedScalar
            (
                "J",
                dimless/k_.dimensions(),
                1.0/k_.value() + t/eta_.value()
            ),
            zeroGradientFvPatchScalarField::typeName

View file

@ -101,7 +101,7 @@ Foam::tmp<Foam::volScalarField> Foam::MaxwellSLSViscoelastic::E(scalar t) const
    {
        E = k2_.value() + k1_.value()*exp(-k1_.value()*t/eta1_.value());
    }
    tmp<volScalarField> tresult
    (

View file

@ -169,8 +169,8 @@ Foam::tmp<Foam::volScalarField> Foam::MaxwellViscoelastic::J(scalar t) const
            mesh(),
            dimensionedScalar
            (
                "J",
                dimless/k_.dimensions(),
                1.0/k_.value() + t/eta_.value()
            ),
            zeroGradientFvPatchScalarField::typeName

View file

@ -105,7 +105,7 @@ Foam::tmp<Foam::volScalarField> Foam::PronyViscoelastic::E(scalar t) const
    {
        E += k_[i]*exp(-t/tau_[i]);
    }
    if(t < 0)
    {
        E = 0;
@ -162,7 +162,7 @@ Foam::tmp<Foam::volScalarField> Foam::PronyViscoelastic::nu(scalar t) const
Foam::tmp<Foam::volScalarField> Foam::PronyViscoelastic::J(scalar t) const
{
    notImplemented(type() + "::J(scalar t)");
    return 1.0/E(t);
}

View file

@ -104,7 +104,7 @@ Foam::contactPatchPair::contactPatchPair
        cp.mesh().boundaryMesh()[slavePatch_.index()], // to patch
        intersection::algorithmNames_.read(dict.lookup("projectionAlgo")),
        intersection::directionNames_.read(dict.lookup("projectionDir"))
    ),
    slaveToMasterInterpolate_
    (
@ -112,7 +112,7 @@ Foam::contactPatchPair::contactPatchPair
        cp.mesh().boundaryMesh()[masterPatch_.index()], // to patch
        intersection::algorithmNames_.read(dict.lookup("projectionAlgo")),
        intersection::directionNames_.read(dict.lookup("projectionDir"))
    )
{}

View file

@ -27,7 +27,7 @@ Class
Description
    A pair of surfaces in contact.
SourceFiles
    contactPatchPair.C

View file

@ -241,7 +241,7 @@ void contactProblem::correct()
                (
                    lambdaPatches[patchI]*tr(gradUpatches[patchI])
                )
            )/(2.0*muPatches[patchI] + lambdaPatches[patchI]);
        // Set the value fractions

View file

@ -1,47 +1,47 @@
if(divDSigmaExpMethod == "standard")
{
    divDSigmaExp = fvc::div
    (
        mu*gradDU.T() + lambda*(I*tr(gradDU)) - (mu + lambda)*gradDU,
        "div(sigma)"
    );
}
else if(divDSigmaExpMethod == "surface")
{
    divDSigmaExp = fvc::div
    (
        muf*(mesh.Sf() & fvc::interpolate(gradDU.T()))
      + lambdaf*(mesh.Sf() & I*fvc::interpolate(tr(gradDU)))
      - (muf + lambdaf)*(mesh.Sf() & fvc::interpolate(gradDU))
    );
}
else if(divDSigmaExpMethod == "decompose")
{
    surfaceTensorField shearGradDU = ((I - n*n)&fvc::interpolate(gradDU));

    divDSigmaExp = fvc::div
    (
        mesh.magSf()
       *(
          - (muf + lambdaf)*(fvc::snGrad(DU)&(I - n*n))
          + lambdaf*tr(shearGradDU&(I - n*n))*n
          + muf*(shearGradDU&n)
        )
    );
}
else if(divDSigmaExpMethod == "laplacian")
{
    divDSigmaExp =
      - fvc::laplacian(mu + lambda, DU, "laplacian(DDU,DU)")
      + fvc::div
        (
            mu*gradDU.T()
          + lambda*(I*tr(gradDU)),
            "div(sigma)"
        );
}
else
{
    FatalError << "divDSigmaExp method " << divDSigmaExpMethod << " not found!" << endl;
}

View file

@ -23,138 +23,131 @@ philipc
//- this is only needed in a parallel runs
if(Pstream::parRun())
{
    //***** FIX INCORRECT POINT ON PATCHES WITH FACEZONE *****//
    contactPatchPairList& contacts = contact;

    forAll(contacts, contactI)
    {
        label masterID = contacts[contactI].masterPatch().index();
        label slaveID = contacts[contactI].slavePatch().index();

        primitivePatchInterpolation masterInterpolator
        (
            mesh.boundaryMesh()[masterID]
        );
        primitivePatchInterpolation slaveInterpolator
        (
            mesh.boundaryMesh()[slaveID]
        );

        //- U must be interpolated to the vertices, this ignores the faceZone
        //- points with no U (unlike volPointInterpolation)
        vectorField correctMasterPointU =
            masterInterpolator.faceToPointInterpolate<vector>
            (
                U.boundaryField()[masterID]
            );
        vectorField correctSlavePointU =
            slaveInterpolator.faceToPointInterpolate<vector>
            (
                U.boundaryField()[slaveID]
            );

        vectorField oldMasterPoints =
            mesh.boundaryMesh()[masterID].localPoints();
        vectorField oldSlavePoints =
            mesh.boundaryMesh()[slaveID].localPoints();

        labelList masterPointLabels =
            mesh.boundaryMesh()[masterID].meshPoints();
        labelList slavePointLabels =
            mesh.boundaryMesh()[slaveID].meshPoints();

        //- correct the patch newPoints
        forAll(masterPointLabels, pointI)
        {
            label pointGlobalLabel = masterPointLabels[pointI];
            newPoints[pointGlobalLabel] =
                oldMasterPoints[pointI] + correctMasterPointU[pointI];
        }
        forAll(slavePointLabels, pointI)
        {
            label pointGlobalLabel = slavePointLabels[pointI];
            newPoints[pointGlobalLabel] =
                oldSlavePoints[pointI] + correctSlavePointU[pointI];
        }
    }

    //***** NOW FIX AND SYNCHRONISE ALL THE FACEZONE POINTS *****//
    forAll(mesh.faceZones(), faceZoneI)
    {
        //- find the patch corresponding to this faceZone
        //- assuming that the FZ is called <patch_name>FaceZone
        string faceZoneName = mesh.faceZones().names()[faceZoneI];
        //- remove the string FaceZone from the end of the face zone name to get the patch name
        string patchName = faceZoneName.substr(0, (faceZoneName.size()-8));
        label patchID = mesh.boundaryMesh().findPatchID(patchName);
        if(patchID == -1)
        {
            FatalError << "Patch " << patchName << " not found corresponding for faceZone"
                << faceZoneName << exit(FatalError);
        }

        vectorField globalFZpoints =
            mesh.faceZones()[faceZoneI]().localPoints();

        //- new points for the face zone
        vectorField globalFZnewPoints(globalFZpoints.size(), vector::zero);

        //- inter-proc points are shared by multiple procs
        //- pointNumProc is the number of procs which a point lies on
        scalarField pointNumProcs(globalFZpoints.size(), 0.0);

        forAll(globalFZnewPoints, globalPointI)
        {
            label localPoint = procToGlobalFZmap[faceZoneI][globalPointI];

            //if(localPoint < mesh.boundaryMesh()[patchID].localPoints().size())
            if(pointOnLocalProcPatch[faceZoneI][localPoint])
            {
                label procPoint =
                    mesh.faceZones()[faceZoneI]().meshPoints()[localPoint];
                globalFZnewPoints[globalPointI] = newPoints[procPoint];
                pointNumProcs[globalPointI] = 1;
            }
        }

        reduce(globalFZnewPoints, sumOp<vectorField>());
        reduce(pointNumProcs, sumOp<scalarField>());

        //- now average the newPoints between all procs
        if(min(pointNumProcs) < 1)
        {
            FatalError << "pointNumProc has not been set for all points" << exit(FatalError);
        }
        globalFZnewPoints /= pointNumProcs;

        //- the globalFZnewPoints now contains the correct FZ new points in
        //- a global order, now convert them back into the local proc order
        vectorField procFZnewPoints(globalFZpoints.size(), vector::zero);

        forAll(globalFZnewPoints, globalPointI)
        {
            label localPoint = procToGlobalFZmap[faceZoneI][globalPointI];
            procFZnewPoints[localPoint] = globalFZnewPoints[globalPointI];
        }

        //- now fix the newPoints points on the globalFaceZones
        labelList procFZmeshPoints = mesh.faceZones()[faceZoneI]().meshPoints();

        forAll(procFZmeshPoints, pointI)
        {
            label procPoint = procFZmeshPoints[pointI];
            newPoints[procPoint] = procFZnewPoints[pointI];
        }
    }
}

View file

@ -25,7 +25,7 @@
        IOobject::AUTO_WRITE
    ),
    mesh,
    dimensionedVector("zero", dimLength, vector::zero)
);
volSymmTensorField DEpsilon
@ -84,22 +84,22 @@
    dimensionedSymmTensor("zero", dimForce/dimArea, symmTensor::zero)
);
volVectorField divDSigmaExp
(
    IOobject
    (
        "divDSigmaExp",
        runTime.timeName(),
        mesh,
        IOobject::NO_READ,
        IOobject::NO_WRITE
    ),
    mesh,
    dimensionedVector("zero", dimensionSet(1,-2,-2,0,0,0,0), vector::zero)
);
// read rheology properties
rheologyModel rheology(sigma);
volScalarField rho = rheology.rho();
@ -111,5 +111,5 @@
surfaceVectorField n = mesh.Sf()/mesh.magSf();
//- create contact problem
contactProblem contact(DU);

View file

@ -19,117 +19,120 @@ philipc
//- these are read if present to allow restarting of contact cases
IOList<labelList> procToGlobalFZmap
(
    IOobject
    (
        "procToGlobalFZmap",
        runTime.timeName(),
        mesh,
        IOobject::READ_IF_PRESENT,
        IOobject::AUTO_WRITE
    ),
    mesh.faceZones().size()
);

IOList<labelList> pointOnLocalProcPatch
(
    IOobject
    (
        "pointOnLocalProcPatch",
        runTime.timeName(),
        mesh,
        IOobject::READ_IF_PRESENT,
        IOobject::AUTO_WRITE
    ),
    mesh.faceZones().size()
);

//- if they have been read then don't recalculate it
bool globalFaceZoneMappingSet = false;
if(gMax(procToGlobalFZmap[0]) > 0 && gMax(pointOnLocalProcPatch[0]) > 0)
{
    Info << "Reading procToGlobalFZmap and pointOnLocalProcPatch allowing restart of contact cases"
        << endl;
    globalFaceZoneMappingSet = true;
}
else
{
    Info << "procToGlobalFZmap and pointOnLocalProcPatch will be calculated as it has not been found" << nl
        << "this message should only appear starting a new analysis" << endl;
}

//- this is only needed in a parallel runs
if(Pstream::parRun())
{
    if(!globalFaceZoneMappingSet)
    {
        forAll(mesh.faceZones(), faceZoneI)
        {
            vectorField globalFZpoints = mesh.faceZones()[faceZoneI]().localPoints();

            procToGlobalFZmap[faceZoneI].setSize(globalFZpoints.size(), 0);

            //- set all slave points to zero because only the master order is used
            if(!Pstream::master())
            {
                globalFZpoints *= 0.0;
            }

            //- pass points to all procs
            reduce(globalFZpoints, sumOp<vectorField>());

            //- now every proc has the master's list of FZ points
            //- every proc must now find the mapping from their local FZpoints to
            //- the globalFZpoints
            vectorField procFZpoints = mesh.faceZones()[faceZoneI]().localPoints();

            forAll(globalFZpoints, globalPointI)
            {
                forAll(procFZpoints, procPointI)
                {
                    if(procFZpoints[procPointI] == globalFZpoints[globalPointI])
                    {
                        procToGlobalFZmap[faceZoneI][globalPointI] = procPointI;
                        break;
                    }
                }
            }
            //- procToGlobalFZmap now contains the local FZpoint label for each
            //- global FZ point label - for each faceZone

            //- check what points are on the current proc patch
            pointOnLocalProcPatch[faceZoneI].setSize(globalFZpoints.size(), 0);

            //- find corresponding patch
            string faceZoneName = mesh.faceZones().names()[faceZoneI];
            //- remove the string FaceZone from the end of the face zone name to get the patch name
            string patchName = faceZoneName.substr(0, (faceZoneName.size()-8));
            label patchID = mesh.boundaryMesh().findPatchID(patchName);
            if(patchID == -1)
            {
                FatalError << "Patch " << patchName << " not found corresponding for faceZone"
                    << faceZoneName << exit(FatalError);
            }

            forAll(mesh.faceZones()[faceZoneI]().localPoints(), fzpi)
            {
                forAll(mesh.boundaryMesh()[patchID].localPoints(), pi)
                {
                    if(mesh.faceZones()[faceZoneI]().localPoints()[fzpi] == mesh.boundaryMesh()[patchID].localPoints()[pi])
                    {
                        pointOnLocalProcPatch[faceZoneI][fzpi] = 1;
                        break;
                    }
                }
            }
        }
    } //- end if(!globalFaceZoneMappingSet)
}

//- write to disk to allow restart of cases
//- because it is not possible to calculate the
//- mapping after the meshes have moved
if(!globalFaceZoneMappingSet && Pstream::parRun())
{
    procToGlobalFZmap.write();
    pointOnLocalProcPatch.write();
}

View file

@ -4,22 +4,22 @@ solidInterface* solidInterfacePtr(NULL);
{
    const dictionary& stressControl =
        mesh.solutionDict().subDict("stressedFoam");
    solidInterfaceCorr = Switch(stressControl.lookup("solidInterface"));
    if(solidInterfaceCorr)
    {
        Info << "Creating solid interface correction" << endl;
        solidInterfacePtr = new solidInterface(mesh, rheology);
        solidInterfacePtr->modifyProperties(muf, lambdaf);
        gradDU = solidInterfacePtr->grad(DU);
        //- solidInterface needs muf and lambdaf to be used for divDSigmaExp
        if(divDSigmaExpMethod != "surface" && divDSigmaExpMethod != "decompose")
        {
            FatalError << "divDSigmaExp must be decompose or surface when solidInterface is on"
                << exit(FatalError);
        }
    }
}

View file

@ -61,35 +61,35 @@ Author
int main(int argc, char *argv[])
{
#   include "setRootCase.H"
#   include "createTime.H"
#   include "createMesh.H"
#   include "createFields.H"
#   include "readDivDSigmaExpMethod.H"
#   include "createGlobalToLocalFaceZonePointMap.H"
#   include "createSolidInterface.H"

// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //

    Info<< "\nStarting time loop\n" << endl;

    for (runTime++; !runTime.end(); runTime++)
    {
        Info<< "Time: " << runTime.timeName() << endl;

#       include "readContactControls.H"
#       include "readStressedFoamControls.H"

        //-- for moving the mesh and then back again
        vectorField oldMeshPoints = mesh.allPoints();

        int iCorr = 0;
        lduMatrix::solverPerformance solverPerf;
        word solverName;
@ -101,116 +101,118 @@ int main(int argc, char *argv[])
        //- reset DU to zero at the start of the time-step if
        //- a predictor is not required
        if(!predictor)
        {
            DU = dimensionedVector("zero", dimLength, vector::zero);
        }

        do //- start of momentum loop
        {
            DU.storePrevIter();

            //- correct the contact boundaries
            if(iCorr % uEqnContactCorrFreq == 0)
            {
                Info << "\t\tCorrecting contact in the momentum loop "
                    << "iteration: " << iCorr
                    << ", residual: " << residual
                    << endl;
                //# include "moveMeshLeastSquares.H"
#               include "moveSolidMesh.H"
                contact.correct();
                mesh.movePoints(oldMeshPoints);
            }

#           include "calculateDivDSigmaExp.H"

            fvVectorMatrix DUEqn
            (
                fvm::d2dt2(rho, DU)
             ==
                fvm::laplacian(2*mu + lambda, DU, "laplacian(DDU,DU)")
              + divDSigmaExp
            );

            if(solidInterfaceCorr)
            {
                solidInterfacePtr->correct(DUEqn);
            }

            solverPerf = DUEqn.solve();

            DU.relax();

            solverName = solverPerf.solverName();

            if(solidInterfaceCorr)
            {
                gradDU = solidInterfacePtr->grad(DU);
            }
            else
            {
                gradDU = fvc::grad(DU);
            }

            U = U.oldTime() + DU;

            residual = solverPerf.initialResidual();

            //****************************************************//
            // The contact residual is the initial residual for the
            // first iteration of the momentum equation
            //****************************************************//
            if(iCorr == 0)
            {
                initialResidual = solverPerf.initialResidual();
            }

#           include "calculateRelativeResidual.H"

            Info << "\tTime " << runTime.value()
                << ", Corrector " << iCorr
                << ", Solving for " << DU.name()
                << " using " << solverPerf.solverName()
                << ", residual = " << solverPerf.initialResidual()
                << ", relative residual = " << relativeResidual << endl;
        } //- end of momentum loop
        while
        (
            relativeResidual > convergenceTolerance
            //residual > convergenceTolerance
         &&
            ++iCorr < nCorr
        );

        // Print out info per contact iteration
        Info << "\t\tSolving for " << DU.name()
            << " using " << solverName
            << ", Initial residual = " << initialResidual
            << ", Final residual = " << solverPerf.initialResidual()
            << ", No outer iterations " << iCorr << endl;

        lduMatrix::debug = 1;

#       include "calculateDEpsilonDSigma.H"

        epsilon += DEpsilon;
        sigma += DSigma;

#       include "writeFields.H"
        //# include "writeBoundaryNetForces.H"

        //# include "moveMeshLeastSquares.H"
        //# include "moveSolidMesh.H"
        //# include "printContactResults.H"
        //mesh.movePoints(oldMeshPoints);

        Info<< "ExecutionTime = " << runTime.elapsedCpuTime() << " s"
            << " ClockTime = " << runTime.elapsedClockTime() << " s"
            << endl << endl;
    }

    Info<< "End\n" << endl;

    return(0);
}

View file

@ -2,55 +2,54 @@
//- move mesh
//--------------------------------------------------//
if(min(J.internalField()) > 0)
{
    Info << "Moving mesh using least squares interpolation" << endl;

    leastSquaresVolPointInterpolation pointInterpolation(mesh);

    // Create point mesh
    pointMesh pMesh(mesh);

    wordList types
    (
        pMesh.boundary().size(),
        calculatedFvPatchVectorField::typeName
    );

    pointVectorField pointDU
    (
        IOobject
        (
            "pointDU",
            runTime.timeName(),
            mesh
        ),
        pMesh,
        dimensionedVector("zero", dimLength, vector::zero),
        types
    );

    pointInterpolation.interpolate(DU, pointDU);

    const vectorField& pointDUI = pointDU.internalField();

    //- Move mesh
    vectorField newPoints = mesh.allPoints();

    forAll (pointDUI, pointI)
    {
        newPoints[pointI] += pointDUI[pointI];
    }

    twoDPointCorrector twoDCorrector(mesh);
    twoDCorrector.correctPoints(newPoints);
    mesh.movePoints(newPoints);
    mesh.V00();
    mesh.moving(false);
}
else
{
    FatalErrorIn(args.executable())
        << "Negative Jacobian"
        << exit(FatalError);
}

View file

@ -1,4 +1,4 @@
{
    //- move mesh for the contact correction
    // Create point interpolation
@ -8,20 +8,20 @@
    pointVectorField pointU = pointInterpolation.interpolate(U);
    const vectorField& pointUI = pointU.internalField();
    // Move mesh
    vectorField newPoints = mesh.allPoints();
    forAll (pointUI, pointI)
    {
        newPoints[pointI] += pointUI[pointI];
    }
#   include "correctGlobalFaceZoneMesh.H"
    twoDPointCorrector twoDCorrector(mesh);
    twoDCorrector.correctPoints(newPoints);
    mesh.movePoints(newPoints);
    mesh.V00();
    mesh.moving(false);

View file

@ -1,55 +1,55 @@
if (runTime.outputTime())
{
    // FAILS IN PARALLEL - FIX
    // Info << "Print contact area" << endl;
    //volScalarField ca = contact.contactArea();
    //ca.write();

    //-------------------------------------------------------------//
    // I couldn't get tmp to return the pointScalarField correctly //
    // so I had to make the pointScalarField here and pass it to   //
    // contactGapPoints and pointContactForce to populate          //
    //-------------------------------------------------------------//
    //This is the point distance for each contact vertex
    pointScalarField cGapPoints
    (
        IOobject
        (
            "pointContactGap",
            runTime.timeName(),
            mesh,
            IOobject::NO_READ,
            IOobject::AUTO_WRITE
        ),
        pMesh,
        dimensionedScalar("scalar", dimLength, 0.0),
        "calculated"
    );
    contact.contactGapPoints(cGapPoints);
    cGapPoints.write();

    //- This is the point distance for each contact vertex
    pointVectorField cPointForce
    (
        IOobject
        (
            "pointContactForce",
            runTime.timeName(),
            mesh,
            IOobject::NO_READ,
            IOobject::AUTO_WRITE
        ),
        pMesh,
        dimensionedVector("vector", dimForce, vector::zero),
        "calculated"
    );
    contact.contactPointForce(cPointForce);
    cPointForce.write();

    //- this is the actual (sigma&n)&n) on the contact patches
    //- SHOULD THIS BE A REF TO A TMP...?
    volScalarField cPressure = contact.contactPressure();
    cPressure.write();
}

View file

@ -1,9 +1,15 @@
//- how explicit component of sigma is to be calculated
word divDSigmaExpMethod(mesh.solutionDict().subDict("stressedFoam").lookup("divDSigmaExp"));
Info << divDSigmaExpMethod << " method chosen for calculation of sigmaExp" << endl;
if
(
    divDSigmaExpMethod != "standard"
 && divDSigmaExpMethod != "surface"
 && divDSigmaExpMethod != "decompose"
 && divDSigmaExpMethod != "laplacian"
)
{
    FatalError << "divDSigmaExp method " << divDSigmaExpMethod << " not found!" << nl
        << "valid methods are:\nstandard\nsurface\ndecompose\nlaplacian"
        << exit(FatalError);
}

View file

@ -6,9 +6,9 @@ Info << nl;
forAll(netForces, patchI)
{
    netForces[patchI] = gSum(mesh.Sf().boundaryField()[patchI] & sigma.boundaryField()[patchI]);
    Info << "patch\t" << mesh.boundary()[patchI].name() << "\t\tnet force is\t"
        << netForces[patchI] << " N" << endl;
}
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //

View file

@ -1,77 +1,78 @@
if (runTime.outputTime())
{
    volScalarField epsilonEq
    (
        IOobject
        (
            "epsilonEq",
            runTime.timeName(),
            mesh,
            IOobject::NO_READ,
            IOobject::AUTO_WRITE
        ),
        sqrt((2.0/3.0)*magSqr(dev(epsilon)))
    );

    Info<< "Max epsilonEq = " << max(epsilonEq).value()
        << endl;

    volScalarField sigmaEq
    (
        IOobject
        (
            "sigmaEq",
            runTime.timeName(),
            mesh,
            IOobject::NO_READ,
            IOobject::AUTO_WRITE
        ),
        sqrt((3.0/2.0)*magSqr(dev(sigma)))
    );

    Info<< "Max sigmaEq = " << max(sigmaEq).value()
        << endl;

    volScalarField pressure
    (
        IOobject
        (
            "pressure",
            runTime.timeName(),
            mesh,
            IOobject::NO_READ,
            IOobject::AUTO_WRITE
        ),
        tr(sigma)/3.0
    );

    //- boundary surface pressure
    forAll(pressure.boundaryField(), patchi)
    {
        const vectorField& nb = n.boundaryField()[patchi];
        pressure.boundaryField()[patchi] =
            -(nb & ( nb & sigma.boundaryField()[patchi] ));
    }

    //- contact slave penetration
#   include "moveSolidMesh.H"
    pointMesh pMesh(mesh);
    pointScalarField cGapPoints
    (
        IOobject
        (
            "pointContactGap",
            runTime.timeName(),
            mesh,
            IOobject::NO_READ,
            IOobject::AUTO_WRITE
        ),
        pMesh,
        dimensionedScalar("scalar", dimLength, 0.0),
        "calculated"
    );
    contact.contactGapPoints(cGapPoints);
    cGapPoints.write();
    mesh.movePoints(oldMeshPoints);

    runTime.write();
}

View file

@ -1,47 +1,47 @@
if(divDSigmaExpMethod == "standard")
{
    divDSigmaExp = fvc::div
    (
        mu*gradDU.T() + lambda*(I*tr(gradDU)) - (mu + lambda)*gradDU,
        "div(sigma)"
    );
}
else if(divDSigmaExpMethod == "surface")
{
    divDSigmaExp = fvc::div
    (
        muf*(mesh.Sf() & fvc::interpolate(gradDU.T()))
      + lambdaf*(mesh.Sf() & I*fvc::interpolate(tr(gradDU)))
      - (muf + lambdaf)*(mesh.Sf() & fvc::interpolate(gradDU))
    );
}
else if(divDSigmaExpMethod == "decompose")
{
    surfaceTensorField shearGradDU =
        ((I - n*n)&fvc::interpolate(gradDU));
    divDSigmaExp = fvc::div
    (
        mesh.magSf()
       *(
          - (muf + lambdaf)*(fvc::snGrad(DU)&(I - n*n))
          + lambdaf*tr(shearGradDU&(I - n*n))*n
          + muf*(shearGradDU&n)
        )
    );
}
else if(divDSigmaExpMethod == "laplacian")
{
    divDSigmaExp =
      - fvc::laplacian(mu + lambda, DU, "laplacian(DDU,DU)")
      + fvc::div
        (
            mu*gradDU.T()
          + lambda*(I*tr(gradDU)),
            "div(sigma)"
        );
}
else
{
    FatalError << "divDSigmaExp method " << divDSigmaExpMethod << " not found!" << endl;
}

View file

@ -2,33 +2,33 @@
//- sigma explicit large strain explicit terms
//----------------------------------------------------//
if(divDSigmaLargeStrainExpMethod == "standard")
{
    divDSigmaLargeStrainExp =
        fvc::div
        (
            mu*(gradDU & gradDU.T())
          + 0.5*lambda*(gradDU && gradDU)*I //- equivalent to 0.5*lambda*(I*tr(gradDU & gradDU.T()))
          + ((sigma + DSigma) & DF.T()),
            "div(sigma)"
        );
}
else if(divDSigmaLargeStrainExpMethod == "surface")
{
    divDSigmaLargeStrainExp =
        fvc::div
        (
            muf * (mesh.Sf() & fvc::interpolate(gradDU & gradDU.T()))
          + 0.5*lambdaf * (mesh.Sf() & (fvc::interpolate(gradDU && gradDU)*I))
          + (mesh.Sf() & fvc::interpolate( sigma & DF.T() ))
          + (mesh.Sf() & fvc::interpolate(DSigma & DF.T() ))
        );
}
else
{
    FatalError
        << "divDSigmaLargeStrainExp not found!"
        << exit(FatalError);
}

//- relax large strain component
divDSigmaLargeStrainExp.relax();

View file

@ -15,59 +15,58 @@
FieldField<Field, vector> extraVecs(ptc.size());

{
    const labelListList& pfaces = mesh.pointFaces();
    const volVectorField& centres = mesh.C();
    const fvBoundaryMesh& bm = mesh.boundary();

    forAll (ptc, pointI)
    {
        const label curPoint = ptc[pointI];

        const labelList& curFaces = pfaces[curPoint];

        // extraVecs.hook(new vectorField(curFaces.size())); //- no hook function
        extraVecs.set
        (
            pointI,
            new vectorField(curFaces.size())
        );

        vectorField& curExtraVectors = extraVecs[pointI];

        label nFacesAroundPoint = 0;

        const vector& pointLoc = mesh.points()[curPoint];

        // Go through all the faces
        forAll (curFaces, faceI)
        {
            if (!mesh.isInternalFace(curFaces[faceI]))
            {
                // This is a boundary face. If not in the empty patch
                // or coupled calculate the extrapolation vector
                label patchID =
                    mesh.boundaryMesh().whichPatch(curFaces[faceI]);

                if
                (
                    !isA<emptyFvPatch>(bm[patchID])
                 && !bm[patchID].coupled()
                )
                {
                    // Found a face for extrapolation
                    curExtraVectors[nFacesAroundPoint] =
                        pointLoc
                      - centres.boundaryField()[patchID]
                        [bm[patchID].patch().whichFace(curFaces[faceI])];

                    nFacesAroundPoint++;
                }
            }
        }

        curExtraVectors.setSize(nFacesAroundPoint);
    }
}

View file

@ -8,114 +8,116 @@
FieldField<Field, scalar> w(ptc.size());

{
    const labelListList& pf = mesh.pointFaces();
    const volVectorField& centres = mesh.C();
    const fvBoundaryMesh& bm = mesh.boundary();

    pointScalarField volPointSumWeights
    (
        IOobject
        (
            "volPointSumWeights",
            mesh.polyMesh::instance(),
            mesh
        ),
        pMesh,
        dimensionedScalar("zero", dimless, 0)
    );

    forAll (ptc, pointI)
    {
        const label curPoint = ptc[pointI];

        const labelList& curFaces = pf[curPoint];

        //w.hook(new scalarField(curFaces.size())); //philipc no hook function
        w.set
        (
            pointI,
            new scalarField(curFaces.size())
        );

        scalarField& curWeights = w[pointI];

        label nFacesAroundPoint = 0;

        const vector& pointLoc = mesh.points()[curPoint];

        // Go through all the faces
        forAll (curFaces, faceI)
        {
            if (!mesh.isInternalFace(curFaces[faceI]))
            {
                // This is a boundary face. If not in the empty patch
                // or coupled calculate the extrapolation vector
                label patchID =
                    mesh.boundaryMesh().whichPatch(curFaces[faceI]);

                if
                (
                    !isA<emptyFvPatch>(bm[patchID])
                 && !(
                        bm[patchID].coupled()
                        //&& Pstream::parRun()
                        //&& !mesh.parallelData().cyclicParallel()
                    )
                )
                {
                    curWeights[nFacesAroundPoint] =
                        1.0/mag
                        (
                            pointLoc
                          - centres.boundaryField()[patchID]
                            [
                                bm[patchID].patch().whichFace(curFaces[faceI])
                            ]
                        );

                    nFacesAroundPoint++;
                }
            }
        }

        // Reset the sizes of the local weights
        curWeights.setSize(nFacesAroundPoint);

        // Collect the sum of weights for parallel correction
        volPointSumWeights[curPoint] += sum(curWeights);
    }

    // Do parallel correction of weights

    // Update coupled boundaries
    // Work-around for cyclic parallels.
    /*
    if (Pstream::parRun() && !mesh.parallelData().cyclicParallel())
    {
        forAll (volPointSumWeights.boundaryField(), patchI)
        {
            if (volPointSumWeights.boundaryField()[patchI].coupled())
            {
                volPointSumWeights.boundaryField()[patchI].initAddField();
            }
        }

        forAll (volPointSumWeights.boundaryField(), patchI)
        {
            if (volPointSumWeights.boundaryField()[patchI].coupled())
            {
                volPointSumWeights.boundaryField()[patchI].addField
                (
                    volPointSumWeights.internalField()
                );
            }
        }
    }
    */

    // Re-scale the weights for the current point
    forAll (ptc, pointI)
    {
        w[pointI] /= volPointSumWeights[ptc[pointI]];
    }
}

View file

@ -23,138 +23,131 @@ philipc
//- this is only needed in a parallel runs
if(Pstream::parRun())
{
    //***** FIX INCORRECT POINT ON PATCHES WITH FACEZONE *****//
    contactPatchPairList& contacts = contact;

    forAll(contacts, contactI)
    {
        label masterID = contacts[contactI].masterPatch().index();
        label slaveID = contacts[contactI].slavePatch().index();

        primitivePatchInterpolation masterInterpolator
        (
            mesh.boundaryMesh()[masterID]
        );
        primitivePatchInterpolation slaveInterpolator
        (
            mesh.boundaryMesh()[slaveID]
        );

        //- DU must be interpolated to the vertices, this ignores the faceZone
        //- points with no DU (unlike volPointInterpolation)
        vectorField correctMasterPointDU =
            masterInterpolator.faceToPointInterpolate<vector>
            (
                DU.boundaryField()[masterID]
            );
        vectorField correctSlavePointDU =
            slaveInterpolator.faceToPointInterpolate<vector>
            (
                DU.boundaryField()[slaveID]
            );

        vectorField oldMasterPoints =
            mesh.boundaryMesh()[masterID].localPoints();
        vectorField oldSlavePoints =
            mesh.boundaryMesh()[slaveID].localPoints();

        labelList masterPointLabels =
            mesh.boundaryMesh()[masterID].meshPoints();
        labelList slavePointLabels =
            mesh.boundaryMesh()[slaveID].meshPoints();

        //- correct the patch newPoints
        forAll(masterPointLabels, pointI)
        {
            label pointGlobalLabel = masterPointLabels[pointI];
            newPoints[pointGlobalLabel] =
                oldMasterPoints[pointI] + correctMasterPointDU[pointI];
        }
        forAll(slavePointLabels, pointI)
        {
            label pointGlobalLabel = slavePointLabels[pointI];
            newPoints[pointGlobalLabel] =
                oldSlavePoints[pointI] + correctSlavePointDU[pointI];
        }
    }

    //***** NOW FIX AND SYNCHRONISE ALL THE FACEZONE POINTS *****//
    forAll(mesh.faceZones(), faceZoneI)
    {
        //- find the patch corresponding to this faceZone
        //- assuming that the FZ is called <patch_name>FaceZone
        string faceZoneName = mesh.faceZones().names()[faceZoneI];

        //- remove the string FaceZone from the end of the face zone name to get the patch name
        string patchName = faceZoneName.substr(0, (faceZoneName.size()-8));

        label patchID = mesh.boundaryMesh().findPatchID(patchName);
        if(patchID == -1)
        {
            FatalError << "Patch " << patchName << " not found corresponding for faceZone"
                << faceZoneName << exit(FatalError);
        }

        vectorField globalFZpoints =
            mesh.faceZones()[faceZoneI]().localPoints();

        //- new points for the face zone
        vectorField globalFZnewPoints(globalFZpoints.size(), vector::zero);

        //- inter-proc points are shared by multiple procs
        //- pointNumProc is the number of procs which a point lies on
        scalarField pointNumProcs(globalFZpoints.size(), 0.0);

        forAll(globalFZnewPoints, globalPointI)
        {
            label localPoint = procToGlobalFZmap[faceZoneI][globalPointI];

            //if(localPoint < mesh.boundaryMesh()[patchID].localPoints().size())
            if(pointOnLocalProcPatch[faceZoneI][localPoint])
            {
                label procPoint =
                    mesh.faceZones()[faceZoneI]().meshPoints()[localPoint];
                globalFZnewPoints[globalPointI] = newPoints[procPoint];
                pointNumProcs[globalPointI] = 1;
            }
        }

        reduce(globalFZnewPoints, sumOp<vectorField>());
        reduce(pointNumProcs, sumOp<scalarField>());

        //- now average the newPoints between all procs
        if(min(pointNumProcs) < 1)
        {
            FatalError << "pointNumProc has not been set for all points" << exit(FatalError);
        }
        globalFZnewPoints /= pointNumProcs;

        //- the globalFZnewPoints now contains the correct FZ new points in
        //- a global order, now convert them back into the local proc order
        vectorField procFZnewPoints(globalFZpoints.size(), vector::zero);

        forAll(globalFZnewPoints, globalPointI)
        {
            label localPoint = procToGlobalFZmap[faceZoneI][globalPointI];
            procFZnewPoints[localPoint] = globalFZnewPoints[globalPointI];
        }

        //- now fix the newPoints points on the globalFaceZones
        labelList procFZmeshPoints = mesh.faceZones()[faceZoneI]().meshPoints();

        forAll(procFZmeshPoints, pointI)
        {
            label procPoint = procFZmeshPoints[pointI];
            newPoints[procPoint] = procFZnewPoints[pointI];
        }
    }
}
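A compact stand-in for the synchronisation pattern in this hunk, assuming nothing beyond standard C++ (no Pstream, no MPI, no OpenFOAM types): each simulated rank contributes the positions of the points it owns plus a count of one per owned point, the contributions are summed the way reduce(..., sumOp<...>()) would sum them, and each point is divided by its count, which is the averaging step that the pointNumProc check above guards. The rank data below are made-up numbers.

#include <cstddef>
#include <iostream>
#include <vector>

// Standalone sketch of the gather / sum / average synchronisation of points
// shared between processors.
struct Vec { double x, y, z; };

int main()
{
    const std::size_t nGlobalPoints = 4;

    std::vector<std::vector<Vec>> rankPoints =
    {
        {{1, 0, 0}, {0, 0, 0}, {2, 1, 0}, {0, 0, 0}},
        {{1, 0, 0}, {3, 2, 0}, {2, 1, 0}, {4, 4, 0}}
    };
    std::vector<std::vector<double>> rankCounts =
    {
        {1, 0, 1, 0},
        {1, 1, 1, 1}
    };

    // Sum over ranks, the role played by reduce(..., sumOp<...>()) above
    std::vector<Vec> sum(nGlobalPoints, Vec{0, 0, 0});
    std::vector<double> count(nGlobalPoints, 0.0);
    for (std::size_t r = 0; r < rankPoints.size(); ++r)
    {
        for (std::size_t i = 0; i < nGlobalPoints; ++i)
        {
            sum[i].x += rankPoints[r][i].x;
            sum[i].y += rankPoints[r][i].y;
            sum[i].z += rankPoints[r][i].z;
            count[i] += rankCounts[r][i];
        }
    }

    // Average between all contributing ranks (the pointNumProcs division)
    for (std::size_t i = 0; i < nGlobalPoints; ++i)
    {
        if (count[i] < 1)
        {
            std::cerr << "point " << i << " was not set by any rank\n";
            return 1;
        }
        sum[i].x /= count[i];
        sum[i].y /= count[i];
        sum[i].z /= count[i];
        std::cout << "point " << i << ": "
                  << sum[i].x << ' ' << sum[i].y << ' ' << sum[i].z << '\n';
    }
    return 0;
}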

View file

@ -25,7 +25,7 @@
        IOobject::AUTO_WRITE
    ),
    mesh,
    dimensionedVector("zero", dimLength, vector::zero)
);

volSymmTensorField DEpsilon
@ -84,35 +84,35 @@
    dimensionedSymmTensor("zero", dimForce/dimArea, symmTensor::zero)
);

volVectorField divDSigmaExp
(
    IOobject
    (
        "divDSigmaExp",
        runTime.timeName(),
        mesh,
        IOobject::NO_READ,
        IOobject::NO_WRITE
    ),
    mesh,
    dimensionedVector("zero", dimensionSet(1,-2,-2,0,0,0,0), vector::zero)
);

volVectorField divDSigmaLargeStrainExp
(
    IOobject
    (
        "divDSigmaLargeStrainExp",
        runTime.timeName(),
        mesh,
        IOobject::NO_READ,
        IOobject::NO_WRITE
    ),
    mesh,
    dimensionedVector("zero", dimensionSet(1,-2,-2,0,0,0,0), vector::zero)
);

// read rheology properties
rheologyModel rheology(sigma);

volScalarField rho = rheology.rho();
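A note on the dimensionSet arguments above: the seven exponents are ordered mass, length, time, temperature, moles, current, luminous intensity, so dimensionSet(1,-2,-2,0,0,0,0) reads as kg m^-2 s^-2, i.e. Pa/m, which is the unit a divergence of stress should carry. A tiny standalone check of that exponent bookkeeping in plain C++ (nothing here is OpenFOAM-specific):

#include <array>
#include <cassert>
#include <cstddef>

int main()
{
    // SI exponents in the order [kg, m, s, K, mol, A, cd]
    const std::array<int, 7> pascalExp {1, -1, -2, 0, 0, 0, 0};   // N/m^2
    const std::array<int, 7> perMetre  {0, -1,  0, 0, 0, 0, 0};
    std::array<int, 7> divSigma{};

    for (std::size_t i = 0; i < 7; ++i)
    {
        divSigma[i] = pascalExp[i] + perMetre[i];                 // N/m^3
    }
    assert((divSigma == std::array<int, 7>{1, -2, -2, 0, 0, 0, 0}));
    return 0;
}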

View file

@ -19,117 +19,117 @@ philipc
//- these are read if present to allow restarting of contact cases
IOList<labelList> procToGlobalFZmap
(
    IOobject
    (
        "procToGlobalFZmap",
        runTime.timeName(),
        mesh,
        IOobject::READ_IF_PRESENT,
        IOobject::AUTO_WRITE
    ),
    mesh.faceZones().size()
);

IOList<labelList> pointOnLocalProcPatch
(
    IOobject
    (
        "pointOnLocalProcPatch",
        runTime.timeName(),
        mesh,
        IOobject::READ_IF_PRESENT,
        IOobject::AUTO_WRITE
    ),
    mesh.faceZones().size()
);

//- if they have been read then don't recalculate it
bool globalFaceZoneMappingSet = false;
if(gMax(procToGlobalFZmap[0]) > 0 && gMax(pointOnLocalProcPatch[0]) > 0)
{
    Info << "Reading procToGlobalFZmap and pointOnLocalProcPatch allowing restart of contact cases"
        << endl;
    globalFaceZoneMappingSet = true;
}
else
{
    Info << "procToGlobalFZmap and pointOnLocalProcPatch will be calculated as it has not been found" << nl
        << "this message should only appear starting a new analysis" << endl;
}

//- this is only needed in a parallel runs
if(Pstream::parRun())
{
    if(!globalFaceZoneMappingSet)
    {
        forAll(mesh.faceZones(), faceZoneI)
        {
            vectorField globalFZpoints = mesh.faceZones()[faceZoneI]().localPoints();

            procToGlobalFZmap[faceZoneI].setSize(globalFZpoints.size(), 0);

            //- set all slave points to zero because only the master order is used
            if(!Pstream::master())
            {
                globalFZpoints *= 0.0;
            }

            //- pass points to all procs
            reduce(globalFZpoints, sumOp<vectorField>());

            //- now every proc has the master's list of FZ points
            //- every proc must now find the mapping from their local FZpoints to
            //- the globalFZpoints
            vectorField procFZpoints = mesh.faceZones()[faceZoneI]().localPoints();

            forAll(globalFZpoints, globalPointI)
            {
                forAll(procFZpoints, procPointI)
                {
                    if(procFZpoints[procPointI] == globalFZpoints[globalPointI])
                    {
                        procToGlobalFZmap[faceZoneI][globalPointI] = procPointI;
                        break;
                    }
                }
            }
            //- procToGlobalFZmap now contains the local FZpoint label for each
            //- global FZ point label - for each faceZone

            //- check what points are on the current proc patch
            pointOnLocalProcPatch[faceZoneI].setSize(globalFZpoints.size(), 0);

            //- find corresponding patch
            string faceZoneName = mesh.faceZones().names()[faceZoneI];

            //- remove the string FaceZone from the end of the face zone name to get the patch name
            string patchName = faceZoneName.substr(0, (faceZoneName.size()-8));

            label patchID = mesh.boundaryMesh().findPatchID(patchName);
            if(patchID == -1)
            {
                FatalError << "Patch " << patchName << " not found corresponding for faceZone"
                    << faceZoneName << exit(FatalError);
            }

            forAll(mesh.faceZones()[faceZoneI]().localPoints(), fzpi)
            {
                forAll(mesh.boundaryMesh()[patchID].localPoints(), pi)
                {
                    if(mesh.faceZones()[faceZoneI]().localPoints()[fzpi] == mesh.boundaryMesh()[patchID].localPoints()[pi])
                    {
                        pointOnLocalProcPatch[faceZoneI][fzpi] = 1;
                        break;
                    }
                }
            }
        }
    } //- end if(!globalFaceZoneMappingSet)
}

//- write to disk to allow restart of cases
//- because it is not possible to calculate the
//- mapping after the meshes have moved
if(!globalFaceZoneMappingSet)
{
    procToGlobalFZmap.write();
    pointOnLocalProcPatch.write();
}
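The mapping built in this hunk can be summarised by the following plain C++ sketch (the struct and function names are invented for the sketch): for every point in the master's global ordering, search the local point list for a matching coordinate and record its index. Like the original loop, it compares coordinates exactly rather than with a tolerance, which works because both lists come from the same mesh points.

#include <cstddef>
#include <iostream>
#include <vector>

// Standalone sketch of building a global-to-local point index map by
// coordinate matching.
struct Vec
{
    double x, y, z;
    bool operator==(const Vec& o) const
    {
        return x == o.x && y == o.y && z == o.z;
    }
};

std::vector<long> buildGlobalToLocalMap
(
    const std::vector<Vec>& globalPoints,
    const std::vector<Vec>& localPoints
)
{
    std::vector<long> map(globalPoints.size(), -1);   // -1: not found locally
    for (std::size_t g = 0; g < globalPoints.size(); ++g)
    {
        for (std::size_t l = 0; l < localPoints.size(); ++l)
        {
            if (localPoints[l] == globalPoints[g])
            {
                map[g] = static_cast<long>(l);
                break;   // first exact match wins, as in the code above
            }
        }
    }
    return map;
}

int main()
{
    const std::vector<Vec> globalPoints = {{0, 0, 0}, {1, 0, 0}, {1, 1, 0}};
    const std::vector<Vec> localPoints  = {{1, 1, 0}, {0, 0, 0}, {1, 0, 0}};

    for (long l : buildGlobalToLocalMap(globalPoints, localPoints))
    {
        std::cout << l << '\n';   // 1, 2, 0
    }
    return 0;
}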

View file

@ -66,137 +66,136 @@ Author
int main(int argc, char *argv[])
{
#   include "setRootCase.H"
#   include "createTime.H"
#   include "createMesh.H"
#   include "createFields.H"
#   include "readDivDSigmaExpMethod.H"
#   include "readDivDSigmaLargeStrainMethod.H"
#   include "readMoveMeshMethod.H"
#   include "createGlobalToLocalFaceZonePointMap.H"

    // * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //

    Info<< "\nStarting time loop\n" << endl;

    for (runTime++; !runTime.end(); runTime++)
    {
        Info<< "Time: " << runTime.timeName() << endl;

#       include "readContactControls.H"
#       include "readStressedFoamControls.H"

        //-- for moving the mesh and then back again
        vectorField oldMeshPoints = mesh.allPoints();

        int iCorr = 0;
        lduMatrix::solverPerformance solverPerf;
        word solverName;
        lduMatrix::debug = 0;
        scalar residual = GREAT;
        scalar initialResidual = 0;
        scalar relativeResidual = GREAT;

        do //- start of momentum loop
        {
            DU.storePrevIter();
            divDSigmaLargeStrainExp.storePrevIter();

            //- correct the contact boundaries
            if(iCorr % uEqnContactCorrFreq == 0)
            {
                Info << "\t\tCorrecting contact in the momentum loop "
                    << "iteration: " << iCorr
                    << ", residual: " << residual
                    << endl;

                //# include "moveMeshLeastSquares.H"
#               include "moveSolidMeshForContact.H"
                contact.correct();
                mesh.movePoints(oldMeshPoints);
            }

#           include "calculateDivDSigmaExp.H"
#           include "calculateDivDSigmaExpLargeStrain.H"

            fvVectorMatrix DUEqn
            (
                fvm::d2dt2(rho, DU)
                ==
                fvm::laplacian(2*mu + lambda, DU, "laplacian(DDU,DU)")
                + divDSigmaExp
                + divDSigmaLargeStrainExp
            );

            solverPerf = DUEqn.solve();

            DU.relax();

            solverName = solverPerf.solverName();

            gradDU = fvc::grad(DU);
            DF = gradDU.T();

#           include "calculateDEpsilonDSigma.H"

            residual = solverPerf.initialResidual();

            if(iCorr == 0)
            {
                initialResidual = solverPerf.initialResidual();
            }

#           include "calculateRelativeResidual.H"

            Info << "\tTime " << runTime.value()
                << ", Corrector " << iCorr
                << ", Solving for " << DU.name()
                << " using " << solverPerf.solverName()
                << ", residual = " << solverPerf.initialResidual()
                << ", relative residual = " << relativeResidual << endl;
        } //- end of momentum loop
        while
        (
            relativeResidual > convergenceTolerance
            //residual > convergenceTolerance
            &&
            ++iCorr < nCorr
        );

        // Print out info per contact iteration
        Info << "\t\tSolving for " << DU.name()
            << " using " << solverName
            << ", Initial residual = " << initialResidual
            << ", Final residual = " << solverPerf.initialResidual()
            << ", No outer iterations " << iCorr << endl;

        lduMatrix::debug = 1;

#       include "rotateFields.H"

#       include "moveMesh.H"

#       include "writeFields.H"

        Info<< "ExecutionTime = " << runTime.elapsedCpuTime() << " s"
            << " ClockTime = " << runTime.elapsedClockTime() << " s"
            << endl << endl;
    }

    Info<< "End\n" << endl;

    return(0);
}
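The outer momentum loop above follows a store / solve / relax / check pattern. The skeleton below reproduces only that control flow in plain C++, with a dummy scalar fixed-point "solve" standing in for the DU equation; the tolerance, relaxation factor and iteration cap are made-up values, not the solver's settings.

#include <cmath>
#include <iostream>

// Skeleton of an under-relaxed outer corrector loop with a residual-based
// stopping criterion and an iteration cap.
int main()
{
    const double convergenceTolerance = 1e-6;
    const int nCorr = 1000;

    double u = 0.0;           // stand-in for the solution field
    double residual = 1.0;
    int iCorr = 0;

    do
    {
        const double uPrev = u;           // storePrevIter()
        u = 0.5*(u + 1.0);                // dummy fixed-point "solve"
        u = uPrev + 0.9*(u - uPrev);      // under-relaxation, factor 0.9

        residual = std::fabs(u - uPrev);  // crude residual stand-in

        std::cout << "corrector " << iCorr
                  << ", residual = " << residual << '\n';
    }
    while (residual > convergenceTolerance && ++iCorr < nCorr);

    std::cout << "stopped after " << iCorr << " correctors\n";
    return 0;
}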

View file

@ -8,26 +8,26 @@ const fvBoundaryMesh& bm = mesh.boundary();
forAll (bm, patchI)
{
    // If the patch is empty, skip it
    // If the patch is coupled, and there are no cyclic parallels, skip it
    if
    (
        !isA<emptyFvPatch>(bm[patchI])
     && !(
            bm[patchI].coupled()
            //&& Pstream::parRun()
            //&& !mesh.parallelData().cyclicParallel()
        )
    )
    {
        const labelList& bp = bm[patchI].patch().boundaryPoints();

        const labelList& meshPoints = bm[patchI].patch().meshPoints();

        forAll (bp, pointI)
        {
            pointsCorrectionMap.insert(meshPoints[bp[pointI]]);
        }
    }
}
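The patch loop in this hunk reduces to: skip empty and coupled patches, then record the global label of every boundary point once. A small standalone version of that logic, using an invented Patch struct in place of fvPatch:

#include <cstddef>
#include <iostream>
#include <unordered_set>
#include <vector>

// Standalone sketch of collecting the set of boundary points that need
// correction, skipping empty and coupled patches.
struct Patch
{
    bool empty;
    bool coupled;
    std::vector<std::size_t> boundaryPoints;   // indices into meshPoints
    std::vector<std::size_t> meshPoints;       // patch-local -> global labels
};

std::unordered_set<std::size_t> collectCorrectionPoints
(
    const std::vector<Patch>& patches
)
{
    std::unordered_set<std::size_t> pointsCorrectionMap;
    for (const Patch& p : patches)
    {
        if (p.empty || p.coupled)
        {
            continue;
        }
        for (std::size_t bp : p.boundaryPoints)
        {
            pointsCorrectionMap.insert(p.meshPoints[bp]);
        }
    }
    return pointsCorrectionMap;
}

int main()
{
    const std::vector<Patch> patches =
    {
        {false, false, {0, 1}, {10, 11, 12}},
        {false, true,  {0},    {20, 21}},      // coupled: skipped
        {true,  false, {0},    {30}}           // empty: skipped
    };
    for (std::size_t p : collectCorrectionPoints(patches))
    {
        std::cout << p << '\n';   // 10 and 11, in unspecified order
    }
    return 0;
}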

View file

@ -1,15 +1,15 @@
if(moveMeshMethod == "inverseDistance")
{
#   include "moveMeshInverseDistance.H"
}
else if(moveMeshMethod == "leastSquares")
{
#   include "moveMeshLeastSquares.H"
}
else
{
    FatalError << "move mesh method " << moveMeshMethod << " not recognised" << nl
        << "available methods are:" << nl
        << "inverseDistance" << nl
        << "leastSquares" << exit(FatalError);
}
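The if/else chain above is a straightforward method dispatch. The same selection could be written as a lookup table; the sketch below does so in plain C++ with placeholder handlers instead of the included headers, and it is an alternative formulation for illustration, not how the solver itself is written.

#include <functional>
#include <iostream>
#include <map>
#include <stdexcept>
#include <string>

// Standalone sketch: dispatch on a method name via a table of handlers.
int main()
{
    const std::map<std::string, std::function<void()>> movers =
    {
        {"inverseDistance", [] { std::cout << "inverse distance move\n"; }},
        {"leastSquares",    [] { std::cout << "least squares move\n"; }}
    };

    const std::string moveMeshMethod = "inverseDistance";  // e.g. from a dict

    const auto it = movers.find(moveMeshMethod);
    if (it == movers.end())
    {
        throw std::runtime_error
        (
            "move mesh method " + moveMeshMethod + " not recognised; "
            "available methods are: inverseDistance, leastSquares"
        );
    }
    it->second();
    return 0;
}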

View file

@ -2,34 +2,34 @@
//- move mesh
//--------------------------------------------------//
if(min(J.internalField()) > 0)
{
    Info << "Move solid mesh using inverse distance interpolation" << endl;

    // Create point mesh
    pointMesh pMesh(mesh);

    // Create point interpolation
    volPointInterpolation pointInterpolation(mesh);

    wordList types
    (
        pMesh.boundary().size(),
        //fixedValueFvPatchVectorField::typeName
        calculatedFvPatchVectorField::typeName
    );

    pointVectorField pointDU
    (
        IOobject
        (
            "pointDU",
            runTime.timeName(),
            mesh
        ),
        pMesh,
        dimensionedVector("zero", dimLength, vector::zero),
        types
    );

    // Calculate mesh points displacement
    pointInterpolation.interpolate(DU, pointDU);
@ -41,26 +41,25 @@ if(min(J.internalField()) > 0)
    //pointDU.write();

    const vectorField& pointDUI = pointDU.internalField();

    // Move mesh
    vectorField newPoints = mesh.allPoints();

    forAll (pointDUI, pointI)
    {
        newPoints[pointI] += pointDUI[pointI];
    }

    twoDPointCorrector twoDCorrector(mesh);

    twoDCorrector.correctPoints(newPoints);

    mesh.movePoints(newPoints);

    mesh.V00();
    mesh.moving(false);
}
else
{
    FatalErrorIn(args.executable())
        << "Negative Jacobian"
        << exit(FatalError);
}
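The guarded mesh move in this file boils down to: check that the Jacobian is positive, then add the interpolated point displacement to every point. A minimal plain C++ sketch of that guard and update, with invented types standing in for the OpenFOAM fields:

#include <cstddef>
#include <iostream>
#include <vector>

// Standalone sketch of the guarded point move: refuse to move the mesh when
// the Jacobian check fails (represented by a precomputed minimum value),
// otherwise add the interpolated displacement to every mesh point.
struct Vec { double x, y, z; };

bool movePoints
(
    std::vector<Vec>& points,
    const std::vector<Vec>& pointDU,
    double minJ
)
{
    if (minJ <= 0)
    {
        std::cerr << "Negative Jacobian, mesh not moved\n";
        return false;
    }
    for (std::size_t i = 0; i < points.size(); ++i)
    {
        points[i].x += pointDU[i].x;
        points[i].y += pointDU[i].y;
        points[i].z += pointDU[i].z;
    }
    return true;
}

int main()
{
    std::vector<Vec> points  = {{0, 0, 0}, {1, 0, 0}};
    std::vector<Vec> pointDU = {{0.1, 0, 0}, {0.2, 0, 0}};

    if (movePoints(points, pointDU, 0.5))
    {
        for (const Vec& p : points)
        {
            std::cout << p.x << ' ' << p.y << ' ' << p.z << '\n';
        }
    }
    return 0;
}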

Some files were not shown because too many files have changed in this diff.