controlSwitches: updating after code merge. Minor correction for DimensionedConstants sigmaSB. Adapting foamDebugSwitches

Author: Martin Beaudoin, 2015-05-02 09:57:47 -04:00
parent e64704203c
commit 4706995f5f
505 changed files with 666459 additions and 32898 deletions

View file

@ -80,3 +80,5 @@ Contents:
Tian Tang
Zeljko Tukovic
Niklas Wikstrom
Vanja Skuric
Alexander Vakhrushev

View file

@ -62,22 +62,35 @@ echo
# Gcc and companion libraries
#
# Uncomment the following 3 lines for gcc-4.4.5. You need gmp and mpfr for gcc-4.4.5
#( rpm_make -p gmp-5.0.1 -s gmp-5.0.1.spec -u ftp://ftp.gnu.org/gnu/gmp/gmp-5.0.1.tar.gz )
#( rpm_make -p mpfr-3.0.1 -s mpfr-3.0.1.spec -u ftp://ftp.gnu.org/gnu/mpfr/mpfr-3.0.1.tar.gz )
#( rpm_make -p gcc-4.4.5 -s gcc-4.4.5.spec -u ftp://ftp.gnu.org/gnu/gcc/gcc-4.4.5/gcc-4.4.5.tar.gz )
# Uncomment the following 4 lines for gcc-4.9.2. You need gmp, mpfr and mpc for gcc-4.9.2
#( rpm_make -p gmp-5.1.2 -s gmp-5.1.2.spec -u ftp://ftp.gnu.org/gnu/gmp/gmp-5.1.2.tar.bz2 )
#( rpm_make -p mpfr-3.1.2 -s mpfr-3.1.2.spec -u ftp://ftp.gnu.org/gnu/mpfr/mpfr-3.1.2.tar.gz )
#( rpm_make -p mpc-1.0.1 -s mpc-1.0.1.spec -u http://www.multiprecision.org/mpc/download/mpc-1.0.1.tar.gz )
#( rpm_make -p gcc-4.9.2 -s gcc-4.9.2.spec -u ftp://ftp.gnu.org/gnu/gcc/gcc-4.9.2/gcc-4.9.2.tar.gz )
#
# Uncomment the following 4 lines for gcc-4.7.4. You need gmp, mpfr and mpc for gcc-4.7.4
#( rpm_make -p gmp-5.1.2 -s gmp-5.1.2.spec -u ftp://ftp.gnu.org/gnu/gmp/gmp-5.1.2.tar.bz2 )
#( rpm_make -p mpfr-3.1.2 -s mpfr-3.1.2.spec -u ftp://ftp.gnu.org/gnu/mpfr/mpfr-3.1.2.tar.gz )
#( rpm_make -p mpc-1.0.1 -s mpc-1.0.1.spec -u http://www.multiprecision.org/mpc/download/mpc-1.0.1.tar.gz )
#( rpm_make -p gcc-4.7.4 -s gcc-4.7.4.spec -u ftp://ftp.gnu.org/gnu/gcc/gcc-4.7.4/gcc-4.7.4.tar.gz )
#
# Uncomment the following 4 lines for gcc-4.6.4. You need gmp, mpfr and mpc for gcc-4.6.4
#( rpm_make -p gmp-5.1.2 -s gmp-5.1.2.spec -u ftp://ftp.gnu.org/gnu/gmp/gmp-5.1.2.tar.bz2 )
#( rpm_make -p mpfr-3.1.2 -s mpfr-3.1.2.spec -u ftp://ftp.gnu.org/gnu/mpfr/mpfr-3.1.2.tar.gz )
#( rpm_make -p mpc-1.0.1 -s mpc-1.0.1.spec -u http://www.multiprecision.org/mpc/download/mpc-1.0.1.tar.gz )
#( rpm_make -p gcc-4.6.4 -s gcc-4.6.4.spec -u ftp://ftp.gnu.org/gnu/gcc/gcc-4.6.4/gcc-4.6.4.tar.gz )
#
# Uncomment the following 4 lines for gcc-4.5.1. You need gmp, mpfr and mpc for gcc-4.5.1
#( rpm_make -p gmp-5.0.1 -s gmp-5.0.1.spec -u ftp://ftp.gnu.org/gnu/gmp/gmp-5.0.1.tar.gz )
#( rpm_make -p mpfr-3.0.1 -s mpfr-3.0.1.spec -u ftp://ftp.gnu.org/gnu/mpfr/mpfr-3.0.1.tar.gz )
#( rpm_make -p mpc-0.8.2 -s mpc-0.8.2.spec -u http://www.multiprecision.org/mpc/download/mpc-0.8.2.tar.gz )
#( rpm_make -p gcc-4.5.1 -s gcc-4.5.1.spec -u ftp://ftp.gnu.org/gnu/gcc/gcc-4.5.1/gcc-4.5.1.tar.gz )
# Uncomment the following 4 lines for gcc-4.6.3. You need gmp, mpfr and mpc for gcc-4.6.3
#( rpm_make -p gmp-5.0.5 -s gmp-5.0.5.spec -u ftp://ftp.gnu.org/gnu/gmp/gmp-5.0.5.tar.bz2 )
#( rpm_make -p mpfr-3.1.0 -s mpfr-3.1.0.spec -u ftp://ftp.gnu.org/gnu/mpfr/mpfr-3.1.0.tar.gz )
#( rpm_make -p mpc-0.9 -s mpc-0.9.spec -u http://www.multiprecision.org/mpc/download/mpc-0.9.tar.gz )
#( rpm_make -p gcc-4.6.3 -s gcc-4.6.3.spec -u ftp://ftp.gnu.org/gnu/gcc/gcc-4.6.3/gcc-4.6.3.tar.gz )
#
# Uncomment the following 3 lines for gcc-4.4.5. You need gmp and mpfr for gcc-4.4.5
#( rpm_make -p gmp-5.0.1 -s gmp-5.0.1.spec -u ftp://ftp.gnu.org/gnu/gmp/gmp-5.0.1.tar.gz )
#( rpm_make -p mpfr-3.0.1 -s mpfr-3.0.1.spec -u ftp://ftp.gnu.org/gnu/mpfr/mpfr-3.0.1.tar.gz )
#( rpm_make -p gcc-4.4.5 -s gcc-4.4.5.spec -u ftp://ftp.gnu.org/gnu/gcc/gcc-4.4.5/gcc-4.4.5.tar.gz )
#
# Python
#( rpm_make -p Python-2.7 -s Python-2.7.spec -u http://www.python.org/ftp/python/2.7/Python-2.7.tgz )
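#
# All of the entries above use the same rpm_make helper from the ThirdParty
# build tools. The argument meanings below are inferred from the calls in
# these AllMake stages (not from rpm_make itself), so treat this as a rough
# sketch only:
#
#   rpm_make -p <pkg>-<version>         # package to build
#            -s <pkg>-<version>.spec    # spec file, taken from rpmBuild/SPECS
#            -u <URL>                   # where to fetch the source tarball
#            [-n <rpm name> -a <arch>]      # optional: built rpm name / architecture
#            [-f --define='MACRO EXPR']     # optional: extra defines forwarded to rpmbuild
#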

View file

@ -133,7 +133,8 @@ then
# ( rpm_make -p PyFoam-0.5.7 -s PyFoam-0.5.7.spec -u http://openfoamwiki.net/images/d/dc/PyFoam-0.5.7.tar.gz -n PyFoam-0.5.7-1.noarch -a noarch)
# ( rpm_make -p PyFoam-0.6.1 -s PyFoam-0.6.1.spec -u http://openfoamwiki.net/images/3/33/PyFoam-0.6.1.tar.gz -n PyFoam-0.6.1-1.noarch -a noarch)
# ( rpm_make -p PyFoam-0.6.2 -s PyFoam-0.6.2.spec -u http://openfoamwiki.net/images/8/89/PyFoam-0.6.2.tar.gz -n PyFoam-0.6.2-1.noarch -a noarch)
( rpm_make -p PyFoam-0.6.3 -s PyFoam-0.6.3.spec -u http://downloads.sourceforge.net/project/openfoam-extend/foam-extend-3.1/ThirdParty/PyFoam-0.6.1.tar.gz )
# ( rpm_make -p PyFoam-0.6.3 -s PyFoam-0.6.3.spec -u http://downloads.sourceforge.net/project/openfoam-extend/foam-extend-3.1/ThirdParty/PyFoam-0.6.1.tar.gz )
( rpm_make -p PyFoam-0.6.4 -s PyFoam-0.6.4.spec -u http://openfoamwiki.net/images/3/3b/PyFoam-0.6.4.tar.gz -n PyFoam-0.6.4-1.noarch -a noarch )
else
echo "Using system installed PyFoam"
echo ""

View file

@ -56,10 +56,10 @@ echo Starting ThirdParty AllMake: Stage4
echo ========================================
echo
# qt-everywhere-opensource-src-4.8.5
# qt-everywhere-opensource-src-4.8.6
if [ ! -z "$QT_THIRD_PARTY" ]
then
( rpm_make -p qt-everywhere-opensource-src-4.8.5 -s qt-everywhere-opensource-src-4.8.5.spec -u http://download.qt-project.org/official_releases/qt/4.8/4.8.5/qt-everywhere-opensource-src-4.8.5.tar.gz )
( rpm_make -p qt-everywhere-opensource-src-4.8.6 -s qt-everywhere-opensource-src-4.8.6.spec -u http://download.qt-project.org/official_releases/qt/4.8/4.8.6/qt-everywhere-opensource-src-4.8.6.tar.gz )
else
echo "Using system installed QT"
echo ""
@ -75,10 +75,14 @@ then
then
( rpm_make -p ParaView-4.0.1 -s ParaView-4.0.1.spec -u http://downloads.sourceforge.net/project/openfoam-extend/foam-extend-3.1/ThirdParty/ParaView-v4.0.1-source.tgz \
-f --define='_qmakePath $QT_BIN_DIR/qmake'
# ( rpm_make -p ParaView-4.2.0 -s ParaView-4.2.0.spec -u http://www.paraview.org/files/v4.2/ParaView-v4.2.0-source.tar.gz \
# -f --define='_qmakePath $QT_BIN_DIR/qmake'
# ( rpm_make -p ParaView-4.2.0 -s ParaView-4.2.0.spec -u http://downloads.sourceforge.net/project/openfoam-extend/foam-extend-3.1/ThirdParty/ParaView-v4.2.0-source.tgz \
# -f --define='_qmakePath $QT_BIN_DIR/qmake'
)
else
echo "WARNING: "
echo "WARNING: Skipping the installation of ParaView-4.0.1."
echo "WARNING: Skipping the installation of ParaView-4.2.0."
echo "WARNING: Please make sure the QT_BIN_DIR environment variable properly"
echo "WARNING: initialized in the file prefs.sh or prefs.csh"
echo "WARNING: The command \$QT_BIN_DIR/qmake needs to be valid"

View file

@ -55,12 +55,12 @@ echo "Starting ThirdParty AllMake: Stage5 "
echo "========================================"
echo
# swak4Foam - Version 0.3.1
# swak4Foam - Version 0.3.2
# In fact, we are basically tracking the head branch from the Mercurial repository
# which is also replicated under the Breeder_2.0 section of the Subversion repository
#
SWAK_RELEASE_VERSION=0.3.1
SWAK_RELEASE_VERSION=0.3.2
if [ -z "$SWAK4FOAM_SYSTEM" ]
then

View file

@ -36,43 +36,46 @@
command -v python -V >/dev/null
if [ ! $? -eq 0 ];
then
echo "Error: You need to install python in order to run these PyFoam scripts"
echo "Error: You need to install python in order to run these PyFoam scripts"
exit -1
fi
if [ -z "$PYFOAM_DIR" ]
if [ -z "$PYFOAM_SITE_DIR" ]
then
echo "Error: Missing environment variable \$PYFOAM_DIR."
echo " Please execute the script ThirdParty/AllMake.stage5 to install PyFoam."
echo "Error: Missing environment variable \$PYFOAM_SITE_DIR."
echo " Please execute the script ThirdParty/AllMake.stage5 to install PyFoam."
echo " Then, make sure to freshen your foam-extend environment by sourcing your main"
echo " foam-extend configuration file."
exit -1
fi
pythonVersion=$(python -V 2>&1 | awk -F ' ' {'print $2'} | awk -F \. {'print $1 "." $2'})
set -x
if [ ! -d $PYFOAM_SITE_DIR/lib -o ! -d $PYFOAM_SITE_DIR/bin ]
then
echo "Directory $PYFOAM_SITE_DIR/lib or $PYFOAM_SITE_DIR/bin missing"
exit -1
fi
set -x
# pyFoamChangeMixingPlaneBoundary.py
cp pyFoamChangeMixingPlaneBoundary.py $PYFOAM_DIR/bin
cp ChangeMixingPlaneBoundary.py $PYFOAM_DIR/lib/python$pythonVersion/site-packages/PyFoam/Applications
cp pyFoamChangeMixingPlaneBoundary.py $PYFOAM_SITE_DIR/bin
cp ChangeMixingPlaneBoundary.py $PYFOAM_SITE_DIR/lib
# pyFoamChangeGGIBoundary.py
# Same as pyFoamModifyGGIBoundary.py. We just harmonize the name with rest of PyFoam
cp pyFoamChangeGGIBoundary.py $PYFOAM_DIR/bin
cp ChangeGGIBoundary.py $PYFOAM_DIR/lib/python$pythonVersion/site-packages/PyFoam/Applications
cp pyFoamChangeGGIBoundary.py $PYFOAM_SITE_DIR/bin
cp ChangeGGIBoundary.py $PYFOAM_SITE_DIR/lib
# pyFoamInitializeMixingPlane.py
cp pyFoamInitMixingPlaneInterface.py $PYFOAM_DIR/bin
cp InitMixingPlaneInterface.py $PYFOAM_DIR/lib/python$pythonVersion/site-packages/PyFoam/Applications
cp pyFoamInitMixingPlaneInterface.py $PYFOAM_SITE_DIR/bin
cp InitMixingPlaneInterface.py $PYFOAM_SITE_DIR/lib
# pyFoamInitializeGGI.py
cp pyFoamInitGgiInterface.py $PYFOAM_DIR/bin
cp InitGgiInterface.py $PYFOAM_DIR/lib/python$pythonVersion/site-packages/PyFoam/Applications
cp pyFoamInitGgiInterface.py $PYFOAM_SITE_DIR/bin
cp InitGgiInterface.py $PYFOAM_SITE_DIR/lib
# pyFoamConvertMixingPlaneBoundaryToNewSyntax.py
cp pyFoamConvertMixingPlaneBoundaryToNewSyntax.py $PYFOAM_DIR/bin
cp ConvertMixingPlaneBoundaryToNewSyntax.py $PYFOAM_DIR/lib/python$pythonVersion/site-packages/PyFoam/Applications
cp pyFoamConvertMixingPlaneBoundaryToNewSyntax.py $PYFOAM_SITE_DIR/bin
cp ConvertMixingPlaneBoundaryToNewSyntax.py $PYFOAM_SITE_DIR/lib
set +x
echo ========================================
@ -81,5 +84,3 @@ echo ========================================
echo
# ----------------------------------------------------------------- end-of-file
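#
# Once $PYFOAM_SITE_DIR/bin is on the PATH, the copied scripts can be called
# directly. An illustrative invocation, with the case directory and patch name
# as the two positional arguments (names made up for the example):
#
#   pyFoamChangeGGIBoundary.py myCase interfaceMaster --shadowPatch interfaceShadow --test
#
# With --test the modified boundary dictionary is only printed instead of being
# written back to constant/polyMesh/boundary.
#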

View file

@ -9,8 +9,9 @@ Author:
"""
from PyFoamApplication import PyFoamApplication
from PyFoam.Applications.PyFoamApplication import PyFoamApplication
from PyFoam.RunDictionary.ParsedParameterFile import ParsedParameterFile
from PyFoam.ThirdParty.six import print_
from os import path
import sys
import re
@ -74,7 +75,7 @@ Change GGI boundary condition parameters
dest="separationOffset",
default=None,
help='separation offset for cyclicGgi')
self.parser.add_option("--test",
action="store_true",
default=False,
@ -84,7 +85,7 @@ Change GGI boundary condition parameters
def run(self):
fName=self.parser.getArgs()[0]
bName=self.parser.getArgs()[1]
boundary=ParsedParameterFile(path.join(".",fName,"constant","polyMesh","boundary"),debug=False,boundaryDict=True)
bnd=boundary.content
@ -123,7 +124,7 @@ Change GGI boundary condition parameters
val["separationOffset"]=self.parser.getOptions().separationOffset
# Deprecated
# Deprecated
if self.parser.getOptions().shadowName!=None:
self.warning("\n PatchName:",bName,": Option --shadowName is deprecated. Use --shadowPatch instead\n")
shadowName=self.parser.getOptions().shadowName
@ -131,26 +132,25 @@ Change GGI boundary condition parameters
if shadowName not in bnd:
self.error("\n Option --shadowName for patch:",bName,": there is no patch called",shadowName,"\n")
# Deprecated
# Deprecated
if self.parser.getOptions().patchZoneName!=None:
self.warning("\n PatchName:",bName,": Option --patchZoneName is deprecated. Use --zone instead\n")
val["zone"]=self.parser.getOptions().patchZoneName
# Deprecated
# Deprecated
if self.parser.getOptions().bridgeOverlapFlag!=None:
self.warning("\n PatchName:",bName,": Option --bridgeOverlapFlag is deprecated. Use --bridgeOverlap instead\n")
val["bridgeOverlap"]=self.parser.getOptions().bridgeOverlapFlag
else:
print "Unsupported GGI type '",bcType,"' for patch",bName
print_("Unsupported GGI type '",bcType,"' for patch",bName)
break
if not found:
self.error("Boundary",bName,"not found in",bnd[::2])
if self.parser.getOptions().test:
print boundary
print_(boundary)
else:
boundary.writeFile()

View file

@ -9,8 +9,9 @@ Author:
"""
from PyFoamApplication import PyFoamApplication
from PyFoam.Applications.PyFoamApplication import PyFoamApplication
from PyFoam.RunDictionary.ParsedParameterFile import ParsedParameterFile
from PyFoam.ThirdParty.six import print_
from os import path
import sys
@ -37,7 +38,7 @@ Change MixingPlane boundary condition parameters
action="store",
dest="zone",
default=None,
help='Name of the zone for mixingPlanePatch')
help='Name of the zone for mixingPlanePatch')
self.parser.add_option("--coordinateSystemName",
action="store",
dest="coordinateSystemName",
@ -78,7 +79,7 @@ Change MixingPlane boundary condition parameters
dest="ribbonPatchDiscretisation",
default=None,
help='ribbonPatch discretisation (masterPatch|slavePatch|bothPatches|uniform|userDefined)')
self.parser.add_option("--test",
action="store_true",
default=False,
@ -88,13 +89,13 @@ Change MixingPlane boundary condition parameters
def run(self):
fName=self.parser.getArgs()[0]
bName=self.parser.getArgs()[1]
boundary=ParsedParameterFile(path.join(".",fName,"constant","polyMesh","boundary"),debug=False,boundaryDict=True)
bnd=boundary.content
if type(bnd)!=list:
print "Problem with boundary file (not a list)"
print_("Problem with boundary file (not a list)")
sys.exit(-1)
found=False
@ -143,11 +144,10 @@ Change MixingPlane boundary condition parameters
break
if not found:
print "Boundary",bName,"not found in",bnd[::2]
print_("Boundary",bName,"not found in",bnd[::2])
sys.exit(-1)
if self.parser.getOptions().test:
print boundary
print_(boundary)
else:
boundary.writeFile()

View file

@ -9,8 +9,9 @@ Author:
"""
from PyFoamApplication import PyFoamApplication
from PyFoam.Applications.PyFoamApplication import PyFoamApplication
from PyFoam.RunDictionary.ParsedParameterFile import ParsedParameterFile
from PyFoam.ThirdParty.six import print_
from os import path
import sys
@ -46,7 +47,7 @@ class switch(object):
"""Return the match method once, then stop"""
yield self.match
raise StopIteration
def match(self, *args):
"""Indicate whether or not to enter a case suite"""
if self.fall or not args:
@ -87,13 +88,13 @@ Change MixingPlane boundary condition parameters
def run(self):
fName=self.parser.getArgs()[0]
boundary=ParsedParameterFile(path.join(".",fName,"constant","polyMesh","boundary"),debug=False,boundaryDict=True)
bnd=boundary.content
if type(bnd)!=list:
print "Problem with boundary file (not a list)"
print_("Problem with boundary file (not a list)")
sys.exit(-1)
found=False
@ -107,23 +108,23 @@ Change MixingPlane boundary condition parameters
if bnd[indexDefPatch]["type"]=="mixingPlane":
if bnd[indexDefPatch].has_key("assembly"):
print " Replacing the parameter 'assembly' for patch", bnd[index]
print_(" Replacing the parameter 'assembly' for patch", bnd[index])
oldAssembly=bnd[indexDefPatch]["assembly"]
del bnd[indexDefPatch]["assembly"]
if bnd[indexDefPatch].has_key("orientation"):
print " Replacing the parameter 'orientation' for patch", bnd[index]
print_(" Replacing the parameter 'orientation' for patch", bnd[index])
oldOrientation=bnd[indexDefPatch]["orientation"]
del bnd[indexDefPatch]["orientation"]
if bnd[indexDefPatch].has_key("ribbonPatch")==False:
bnd[indexDefPatch]["ribbonPatch"]={}
if bnd[indexDefPatch].has_key("zone")==False:
bnd[indexDefPatch]["zone"]=bnd[index] + "Zone"
if oldAssembly != "":
# Converting "assembly" to ribbonPatch/discretisation
# Converting "assembly" to ribbonPatch/discretisation
for case in switch(oldAssembly):
if case('master'):
bnd[indexDefPatch]["ribbonPatch"]["discretisation"]="masterPatch"
@ -138,7 +139,7 @@ Change MixingPlane boundary condition parameters
bnd[indexDefPatch]["ribbonPatch"]["discretisation"]="userDefined"
break
if case(): # default
print "Unsupported assembly type: ", oldAssembly
print_("Unsupported assembly type: ", oldAssembly)
if oldOrientation != "":
# Converting "orientation" to ribbonPatch/ribbonPatchSweepAxis and
@ -194,10 +195,9 @@ Change MixingPlane boundary condition parameters
bnd[indexDefPatch]["ribbonPatch"]["sweepAxis"]="R"
break
if case(): # default
print "Unsupported orientation type: ", oldOrientation
print_("Unsupported orientation type: ", oldOrientation)
if self.parser.getOptions().test:
print boundary
print_(boundary)
else:
boundary.writeFile()

View file

@ -19,15 +19,16 @@ import sys, fnmatch, re
from os import path, listdir, chmod
from stat import *
from PyFoamApplication import PyFoamApplication
from PyFoam.Applications.PyFoamApplication import PyFoamApplication
from PyFoam.RunDictionary.ParsedParameterFile import ParsedParameterFile
from PyFoam.ThirdParty.six import print_
from PyFoam.RunDictionary.TimeDirectory import TimeDirectory
from PyFoam.Basics.BasicFile import BasicFile
class InitGgiInterface(PyFoamApplication):
def __init__(self,args=None):
description="""
Init GGI boundary condition parameters in boundary file.
Init GGI boundary condition parameters in boundary file.
Init GGI boundary fields in time directories.
Generate faceSet scripts for ggi zones.
Modify GGI zones information in decomposeParDict file.
@ -105,7 +106,7 @@ Modify GGI zones information in decomposeParDict file.
Create a default definition for a ggi patch, and replace
the current definition
"""
print "Replacing definition of patch: ", patchName, ":", patch
print_("Replacing definition of patch: ", patchName, ":", patch)
newPatch={
'type' : ggiType,
'nFaces' : patch["nFaces"],
@ -123,7 +124,7 @@ the current definition
description="""\
Modify the definition of a ggi patch
"""
print " Modifying ggi boundary definition in constant/polyMesh/boundary for patch", patchName
print_(" Modifying ggi boundary definition in constant/polyMesh/boundary for patch", patchName)
patch["type"]=ggiType
@ -167,12 +168,12 @@ Modify the definition of a ggi patch in the time directories
for timeDir in listdir(caseDir):
if reobj.match(timeDir):
print " Modifying ggi boundaryFields in timeDir", timeDir, "for patch", patchName
print_(" Modifying ggi boundaryFields in timeDir", timeDir, "for patch", patchName)
td=TimeDirectory(caseDir, timeDir, yieldParsedFiles=True)
for f in td:
print " Modifying field", f.name
print_(" Modifying field", f.name)
f["boundaryField"][patchName]["type"]=ggiType
f.writeFile()
@ -183,10 +184,10 @@ Generate a setSet batch file based on the zone info specified in the ggi interfa
Generate a bash file for invoking setSet and setsToZones
Update GGI zone information in decomposeParDict
"""
# Default file: genFaceSetForGgiZones.setSet
# Default file: genFaceSetForGgiZones.setSet
bfGenFaceSets = BasicFile(path.join(caseDir, self.parser.getOptions().genFaceSetForGgiZonesScriptName))
print " Updating file ", bfGenFaceSets.name, " for generating GGI zones faceSet using the setSet command"
print_(" Updating file ", bfGenFaceSets.name, " for generating GGI zones faceSet using the setSet command")
bnd=boundary.content
@ -209,8 +210,8 @@ Update GGI zone information in decomposeParDict
# Default file: initGgiZones.sh
bfInitGgiZones = BasicFile(path.join(caseDir, self.parser.getOptions().initGgiZonesScriptName))
print " Updating file ", bfInitGgiZones.name, " for inititalizing GGI zones"
print_(" Updating file ", bfInitGgiZones.name, " for inititalizing GGI zones")
bfInitGgiZones.writeLine([ "#!/bin/bash" ])
bfInitGgiZones.writeLine([ "setSet -batch " + self.parser.getOptions().genFaceSetForGgiZonesScriptName ])
bfInitGgiZones.writeLine([ "setsToZones -noFlipMap" ])
@ -222,7 +223,7 @@ Update GGI zone information in decomposeParDict
# DecomposeParDict
decomposeParDictPath=path.join(caseDir,"system","decomposeParDict")
if path.exists(decomposeParDictPath):
print " Updating file ", decomposeParDictPath, " for GGI zones"
print_(" Updating file ", decomposeParDictPath, " for GGI zones")
decomposeParDict=ParsedParameterFile(decomposeParDictPath,debug=False,backup=True)
dcp=decomposeParDict.content
dcp["globalFaceZones"]="(\n " + '\n '.join(list(listOfGgiZones)) + "\n)"
@ -232,7 +233,7 @@ Update GGI zone information in decomposeParDict
caseDir=self.parser.getArgs()[0]
masterbName=self.parser.getArgs()[1]
shadowbName=self.parser.getArgs()[2]
boundary=ParsedParameterFile(path.join(".",caseDir,"constant","polyMesh","boundary"),debug=False,boundaryDict=True,backup=True)
bnd=boundary.content
@ -249,7 +250,7 @@ Update GGI zone information in decomposeParDict
timeDirs=self.parser.getOptions().timeDirs
updateTimeDirs=True
ggiType=self.parser.getOptions().ggiType
ggiType=self.parser.getOptions().ggiType
rotationAngle=0.0
if self.parser.getOptions().rotationAngle!=None:
@ -287,10 +288,9 @@ Update GGI zone information in decomposeParDict
self.error("Boundary patch",shadowbName,"not found in",bnd[::2])
if self.parser.getOptions().test:
print boundary
print_(boundary)
else:
boundary.writeFile()
# Write companion files
self.generateCompanionFiles(caseDir, boundary)

View file

@ -11,9 +11,10 @@ Author:
"""
from PyFoamApplication import PyFoamApplication
from PyFoam.Applications.PyFoamApplication import PyFoamApplication
from PyFoam.RunDictionary.ParsedParameterFile import ParsedParameterFile
from PyFoam.RunDictionary.TimeDirectory import TimeDirectory
from PyFoam.ThirdParty.six import print_
from os import path, listdir
import sys, fnmatch, re
@ -95,7 +96,7 @@ Init MixingPlane boundary condition parameters
Create a default definition for a mixingPlane patch, and replace
the current definition
"""
print "Replacing definition of patch: ", patchName, ":", patch
print_("Replacing definition of patch: ", patchName, ":", patch)
newPatch={
'type' : "mixingPlane",
'nFaces' : patch["nFaces"],
@ -120,7 +121,7 @@ the current definition
description="""\
Modify the definition of a mixingPlane patch
"""
print " Modifying mixingPlane boundary definition in constant/polyMesh/boundary for patch", patchName
print_(" Modifying mixingPlane boundary definition in constant/polyMesh/boundary for patch", patchName)
patch["shadowPatch"]=shadowName
@ -166,12 +167,12 @@ Modify the definition of a mixingPlane patch in the time directories
for timeDir in listdir(caseDir):
if reobj.match(timeDir):
print " Modifying mixingPlane boundaryFields in timeDir", timeDir, "for patch", patchName
print_(" Modifying mixingPlane boundaryFields in timeDir", timeDir, "for patch", patchName)
td=TimeDirectory(caseDir, timeDir, yieldParsedFiles=True)
for f in td:
print " Modifying field", f.name
print_(" Modifying field", f.name)
f["boundaryField"][patchName]["type"]='mixingPlane'
f.writeFile()
@ -179,13 +180,13 @@ Modify the definition of a mixingPlane patch in the time directories
fName=self.parser.getArgs()[0]
masterbName=self.parser.getArgs()[1]
shadowbName=self.parser.getArgs()[2]
boundary=ParsedParameterFile(path.join(".",fName,"constant","polyMesh","boundary"),debug=False,boundaryDict=True,backup=True)
bnd=boundary.content
if type(bnd)!=list:
print "Problem with boundary file (not a list)"
print_("Problem with boundary file (not a list)")
sys.exit(-1)
masterFound=False
@ -197,7 +198,7 @@ Modify the definition of a mixingPlane patch in the time directories
timeDirs=self.parser.getOptions().timeDirs
updateTimeDirs=True
print "UpdateTimeDirs: ", updateTimeDirs
print_("UpdateTimeDirs: ", updateTimeDirs)
for index in range(len(bnd)):
@ -233,9 +234,6 @@ Modify the definition of a mixingPlane patch in the time directories
self.error("Boundary patch",shadowbName,"not found in",bnd[::2])
if self.parser.getOptions().test:
print boundary
print_(boundary)
else:
boundary.writeFile()

View file

@ -1,5 +1,5 @@
#!/usr/bin/env python
from PyFoam.Applications.ChangeGGIBoundary import ChangeGGIBoundary
from PyFoam.Site.ChangeGGIBoundary import ChangeGGIBoundary
ChangeGGIBoundary()

View file

@ -1,5 +1,5 @@
#!/usr/bin/env python
from PyFoam.Applications.ChangeMixingPlaneBoundary import ChangeMixingPlaneBoundary
from PyFoam.Site.ChangeMixingPlaneBoundary import ChangeMixingPlaneBoundary
ChangeMixingPlaneBoundary()

View file

@ -1,5 +1,5 @@
#!/usr/bin/env python
from PyFoam.Applications.ConvertMixingPlaneBoundaryToNewSyntax import ConvertMixingPlaneBoundaryToNewSyntax
from PyFoam.Site.ConvertMixingPlaneBoundaryToNewSyntax import ConvertMixingPlaneBoundaryToNewSyntax
ConvertMixingPlaneBoundaryToNewSyntax()

View file

@ -1,5 +1,5 @@
#!/usr/bin/env python
from PyFoam.Applications.InitGgiInterface import InitGgiInterface
from PyFoam.Site.InitGgiInterface import InitGgiInterface
InitGgiInterface()

View file

@ -1,5 +1,5 @@
#!/usr/bin/env python
from PyFoam.Applications.InitMixingPlaneInterface import InitMixingPlaneInterface
from PyFoam.Site.InitMixingPlaneInterface import InitMixingPlaneInterface
InitMixingPlaneInterface()

View file

@ -0,0 +1,12 @@
diff -ruN ParaView-4.2.0_orig/Applications/ParaView-4.2.0_extra_install_Darwin.cmake ParaView-4.2.0/Applications/ParaView-4.2.0_extra_install_Darwin.cmake
--- ParaView-4.2.0_orig/Applications/ParaView-4.2.0_extra_install_Darwin.cmake 1969-12-31 19:00:00.000000000 -0500
+++ ParaView-4.2.0/Applications/ParaView-4.2.0_extra_install_Darwin.cmake 2013-10-02 19:00:00.000000000 -0400
@@ -0,0 +1,8 @@
+#
+# Additional install rules for Mac OS X platforms
+#
+INSTALL (DIRECTORY ../../buildObj/bin/paraview.app
+ DESTINATION ${PV_INSTALL_BIN_DIR}
+ USE_SOURCE_PERMISSIONS
+ COMPONENT Runtime)
+

View file

@ -0,0 +1,274 @@
#------------------------------------------------------------------------------
# ========= |
# \\ / F ield | foam-extend: Open Source CFD
# \\ / O peration |
# \\ / A nd | For copyright notice see file Copyright
# \\/ M anipulation |
#------------------------------------------------------------------------------
# License
# This file is part of foam-extend.
#
# foam-extend is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation, either version 3 of the License, or (at your
# option) any later version.
#
# foam-extend is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with foam-extend. If not, see <http://www.gnu.org/licenses/>.
#
# Script
# RPM spec file for ParaView-4.2.0
#
# Description
# RPM spec file for creating a relocatable RPM
#
# Authors:
# Martin Beaudoin, Hydro-Quebec, (2010)
# Andreas Feymark, Chalmers University of Technology, (2013)
#
#------------------------------------------------------------------------------
# We grab the value of WM_THIRD_PARTY and WM_OPTIONS from the environment variable
%{expand:%%define _WM_THIRD_PARTY_DIR %(echo $WM_THIRD_PARTY_DIR)}
%{expand:%%define _WM_OPTIONS %(echo $WM_OPTIONS)}
# Disable the generation of debuginfo packages
%define debug_package %{nil}
# The topdir needs to point to the $WM_THIRD_PARTY/rpmbuild directory
%define _topdir %{_WM_THIRD_PARTY_DIR}/rpmBuild
%define _tmppath %{_topdir}/tmp
# Will install the package directly $WM_THIRD_PARTY_DIR
# Some comments about package relocation:
# By using this prefix for the Prefix: parameter in this file, you will make this
# package relocatable.
#
# This is fine, as long as your software is itself relocatable.
#
# Simply take note that libraries built with libtool are not relocatable because the
# prefix we specify will be hard-coded in the library .la files.
# Ref: http://sourceware.org/autobook/autobook/autobook_80.html
#
# In that case, if you ever change the value of the $WM_THIRD_PARTY_DIR, you will
# not be able to reutilize this RPM, even though it is relocatable. You will need to
# regenerate the RPM.
#
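# Purely as an illustration of what "relocatable" means here (this is not part
# of the build flow): thanks to the Prefix: tag defined below, the resulting
# rpm could later be installed under a different third-party tree, e.g.
#
#   rpm -ivh --prefix=/some/other/ThirdParty ParaView-4.2.0-<release>.<arch>.rpm
#
# which only works as long as nothing inside the payload hard-codes the
# original path (hence the libtool caveat above).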
%define _prefix %{_WM_THIRD_PARTY_DIR}
%define name ParaView
%define release %{_WM_OPTIONS}
%define version 4.2.0
%define buildroot %{_topdir}/BUILD/%{name}-%{version}-root
BuildRoot: %{buildroot}
Summary: ParaView
License: Unknown
Name: %{name}
Version: %{version}
Release: %{release}
URL: http://www.paraview.org/files/v4.2/
Source: %url/%{name}-v%{version}-source.tar.gz
Prefix: %{_prefix}
Group: Development/Tools
Patch0: paraview-4.2.0.patch_darwin
%define _installPrefix %{_prefix}/packages/%{name}-%{version}/platforms/%{_WM_OPTIONS}
#--------------------------------------------------------------------------
#
# Here, we define default compiling options for cmake
#
# One can override the options on the command line: --define='MACRO EXPR'
#
%{!?_withVerbose: %define _withVerbose false}
%{!?_withMesa: %define _withMesa false}
%{!?_withMPI: %define _withMPI false}
%{!?_withPython: %define _withPython false}
%{!?_withQt: %define _withQt true}
%{!?_qmakePath: %define _qmakePath Undefined}
%{!?_mesaIncludePath: %define _mesaIncludePath Undefined}
%{!?_mesaLibPath: %define _mesaLibPath Undefined}
%{!?_pythonLibPath: %define _pythonLibPath Undefined}
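#
# Each %{!?...} line above only supplies a default when the macro has not
# already been defined on the command line, so a value passed through
# rpm_make, in the same way _qmakePath is passed in AllMake.stage4
# (e.g. -f --define='_withMPI true'), takes precedence over these defaults.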
#--------------------------------------------------------------------------
%description
%{summary}
%prep
%setup -q -n %{name}-v%{version}-source
%ifos darwin
%patch0 -p1
%endif
%build
#
# set CMake cache variables
#
addCMakeVariable()
{
while [ -n "$1" ]
do
CMAKE_VARIABLES="$CMAKE_VARIABLES -D$1"
shift
done
}
# export WM settings in a form that GNU configure recognizes
[ -n "$WM_CC" ] && export CC="$WM_CC"
[ -n "$WM_CXX" ] && export CXX="$WM_CXX"
[ -n "$WM_CFLAGS" ] && export CFLAGS="$WM_CFLAGS"
[ -n "$WM_CXXFLAGS" ] && export CXXFLAGS="$WM_CXXFLAGS"
[ -n "$WM_LDFLAGS" ] && export LDFLAGS="$WM_LDFLAGS"
set +x
echo ""
echo "Compilation options:"
echo " _withVerbose : %{_withVerbose}"
echo " _withMesa : %{_withMesa}"
echo " _withMPI : %{_withMPI}"
echo " _withPython : %{_withPython}"
echo " _withQt : %{_withQt}"
echo " _qmakePath : %{_qmakePath}"
echo " _mesaIncludePath : %{_mesaIncludePath}"
echo " _mesaLibPath : %{_mesaLibPath}"
echo " _pythonLibPath : %{_pythonLibPath}"
echo ""
set -x
# start with these general settings
addCMakeVariable VTK_USE_TK:BOOL=OFF
addCMakeVariable BUILD_SHARED_LIBS:BOOL=ON VTK_USE_RPATH:BOOL=OFF
addCMakeVariable CMAKE_BUILD_TYPE:STRING=Release
# include development files in "make install"
addCMakeVariable PARAVIEW_INSTALL_DEVELOPMENT_FILES:BOOL=ON
# new alternative to "make HTMLDocumentation"
addCMakeVariable PARAVIEW_GENERATE_PROXY_DOCUMENTATION:BOOL=ON
%ifos darwin
# Additional installation rules for Mac OS X
addCMakeVariable PARAVIEW_EXTRA_INSTALL_RULES_FILE:FILEPATH=%{_topdir}/BUILD/%{name}-%{version}/Applications/ParaView-3.8.1_extra_install_Darwin.cmake
%endif
# Add the value of _qmakePath for QT_QMAKE_EXECUTABLE
addCMakeVariable QT_QMAKE_EXECUTABLE:FILEPATH=%{_qmakePath}
echo "CMAKE_VARIABLES: $CMAKE_VARIABLES"
mkdir -p ./buildObj
cd ./buildObj
cmake \
-DCMAKE_INSTALL_PREFIX:PATH=%{_installPrefix} \
$CMAKE_VARIABLES \
..
[ -z "$WM_NCOMPPROCS" ] && WM_NCOMPPROCS=1
make -j $WM_NCOMPPROCS
%install
# On OpenSUSE, rpmbuild will choke when detecting unreferenced symlinks
# created during installation.
# Qt version 4.6.3 will generate some unreferenced symlinks when
# ParaView is compiled and installed. By enabling the following
# environment variable, the command brp-symlink will still complain
# about missing link targets, but it won't stop rpmbuild from generating
# the final rpm.
# For all other Unix distros, this is a no-op.
export NO_BRP_STALE_LINK_ERROR=yes
cd buildObj
make install DESTDIR=$RPM_BUILD_ROOT
# Creation of foam-extend specific .csh and .sh files
echo ""
echo "Generating foam-extend specific .csh and .sh files for the package %{name}-%{version}"
echo ""
#
# Generate package specific .sh file for foam-extend
#
mkdir -p $RPM_BUILD_ROOT/%{_installPrefix}/etc
cat << DOT_SH_EOF > $RPM_BUILD_ROOT/%{_installPrefix}/etc/%{name}-%{version}.sh
# Load %{name}-%{version} libraries and binaries if available
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
export PARAVIEW_DIR=\$WM_THIRD_PARTY_DIR/packages/%{name}-%{version}/platforms/\$WM_OPTIONS
export PARAVIEW_BIN_DIR=\$PARAVIEW_DIR/bin
export PARAVIEW_LIB_DIR=\$PARAVIEW_DIR/lib
export PARAVIEW_INCLUDE_DIR=\$PARAVIEW_DIR/include/paraview-4.2
export PARAVIEW_VERSION=%{version}
# NB: It is important to set the PV_PLUGIN_PATH location to a directory containing only the ParaView plugins.
# Otherwise, paraview will try to automatically autoload each and every dynamic library it can find in the
# specified directory to see if a given library is a paraview plugin.
# In the case of \$FOAM_LIBBIN, with over 80 libraries, this is a total waste of time that will slow down the
# startup of paraview or even make paraview crash on startup.
export PV_PLUGIN_PATH=\$FOAM_LIBBIN/paraview_plugins
[ -d \$PARAVIEW_LIB_DIR/paraview-4.2 ] && _foamAddLib \$PARAVIEW_LIB_DIR/paraview-4.2
# Enable access to the package applications if present
[ -d \$PARAVIEW_BIN_DIR ] && _foamAddPath \$PARAVIEW_BIN_DIR
# Additional binary path if running on Mac OS X
[ -d \$PARAVIEW_BIN_DIR/paraview.app/Contents/MacOS ] && _foamAddPath \$PARAVIEW_BIN_DIR/paraview.app/Contents/MacOS
DOT_SH_EOF
#
# Generate package specific .csh file for foam-extend
#
cat << DOT_CSH_EOF > $RPM_BUILD_ROOT/%{_installPrefix}/etc/%{name}-%{version}.csh
# Load %{name}-%{version} libraries and binaries if available
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
setenv PARAVIEW_DIR \$WM_THIRD_PARTY_DIR/packages/%{name}-%{version}/platforms/\$WM_OPTIONS
setenv PARAVIEW_BIN_DIR \$PARAVIEW_DIR/bin
setenv PARAVIEW_LIB_DIR \$PARAVIEW_DIR/lib
setenv PARAVIEW_INCLUDE_DIR \$PARAVIEW_DIR/include/paraview-4.2
setenv PARAVIEW_VERSION %{version}
# NB: It is important to set the PV_PLUGIN_PATH location to a directory containing only the ParaView plugins.
# Otherwise, paraview will try to automatically autoload each and every dynamic library it can find in the
# specified directory to see if a given library is a paraview plugin.
# In the case of \$FOAM_LIBBIN, with over 80 libraries, this is a total waste of time that will slow down the
# startup of paraview or even make paraview crash on startup.
setenv PV_PLUGIN_PATH \$FOAM_LIBBIN/paraview_plugins
if ( -e \$PARAVIEW_BIN_DIR ) then
_foamAddPath \$PARAVIEW_BIN_DIR
endif
if ( -e \$PARAVIEW_LIB_DIR/paraview-4.2 ) then
_foamAddLib \$PARAVIEW_LIB_DIR/paraview-4.2
endif
# Additional binary path if running on Mac OS X
if ( -e \$PARAVIEW_BIN_DIR/paraview.app/Contents/MacOS ) then
_foamAddPath \$PARAVIEW_BIN_DIR/paraview.app/Contents/MacOS
endif
DOT_CSH_EOF
#finally, generate a .tgz file for systems where using rpm for installing packages
# as a non-root user might be a problem.
(mkdir -p %{_topdir}/TGZS/%{_target_cpu}; cd $RPM_BUILD_ROOT/%{_prefix}; tar -zcvf %{_topdir}/TGZS/%{_target_cpu}/%{name}-%{version}.tgz packages/%{name}-%{version})
%clean
%files
%defattr(-,root,root)
%{_installPrefix}

View file

@ -0,0 +1,274 @@
#------------------------------------------------------------------------------
# ========= |
# \\ / F ield | foam-extend: Open Source CFD
# \\ / O peration |
# \\ / A nd | For copyright notice see file Copyright
# \\/ M anipulation |
#------------------------------------------------------------------------------
# License
# This file is part of foam-extend.
#
# foam-extend is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation, either version 3 of the License, or (at your
# option) any later version.
#
# foam-extend is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with foam-extend. If not, see <http://www.gnu.org/licenses/>.
#
# Script
# RPM spec file for ParaView-4.3.1
#
# Description
# RPM spec file for creating a relocatable RPM
#
# Authors:
# Martin Beaudoin, Hydro-Quebec, (2010)
# Andreas Feymark, Chalmers University of Technology, (2013)
#
#------------------------------------------------------------------------------
# We grab the value of WM_THIRD_PARTY and WM_OPTIONS from the environment variable
%{expand:%%define _WM_THIRD_PARTY_DIR %(echo $WM_THIRD_PARTY_DIR)}
%{expand:%%define _WM_OPTIONS %(echo $WM_OPTIONS)}
# Disable the generation of debuginfo packages
%define debug_package %{nil}
# The topdir needs to point to the $WM_THIRD_PARTY/rpmbuild directory
%define _topdir %{_WM_THIRD_PARTY_DIR}/rpmBuild
%define _tmppath %{_topdir}/tmp
# Will install the package directly $WM_THIRD_PARTY_DIR
# Some comments about package relocation:
# By using this prefix for the Prefix: parameter in this file, you will make this
# package relocatable.
#
# This is fine, as long as your software is itself relocatable.
#
# Simply take note that libraries built with libtool are not relocatable because the
# prefix we specify will be hard-coded in the library .la files.
# Ref: http://sourceware.org/autobook/autobook/autobook_80.html
#
# In that case, if you ever change the value of the $WM_THIRD_PARTY_DIR, you will
# not be able to reutilize this RPM, even though it is relocatable. You will need to
# regenerate the RPM.
#
%define _prefix %{_WM_THIRD_PARTY_DIR}
%define name ParaView
%define release %{_WM_OPTIONS}
%define version 4.3.1
%define buildroot %{_topdir}/BUILD/%{name}-%{version}-root
BuildRoot: %{buildroot}
Summary: ParaView
License: Unknown
Name: %{name}
Version: %{version}
Release: %{release}
URL: http://www.paraview.org/files/v4.3/
Source: %url/%{name}-v%{version}-source.tar.gz
Prefix: %{_prefix}
Group: Development/Tools
Patch0: paraview-4.3.1.patch_darwin
%define _installPrefix %{_prefix}/packages/%{name}-%{version}/platforms/%{_WM_OPTIONS}
#--------------------------------------------------------------------------
#
# Here, we define default compiling options for cmake
#
# One can override the options on the command line: --define='MACRO EXPR'
#
%{!?_withVerbose: %define _withVerbose false}
%{!?_withMesa: %define _withMesa false}
%{!?_withMPI: %define _withMPI false}
%{!?_withPython: %define _withPython false}
%{!?_withQt: %define _withQt true}
%{!?_qmakePath: %define _qmakePath Undefined}
%{!?_mesaIncludePath: %define _mesaIncludePath Undefined}
%{!?_mesaLibPath: %define _mesaLibPath Undefined}
%{!?_pythonLibPath: %define _pythonLibPath Undefined}
#--------------------------------------------------------------------------
%description
%{summary}
%prep
%setup -q -n %{name}-v%{version}-source
%ifos darwin
%patch0 -p1
%endif
%build
#
# set CMake cache variables
#
addCMakeVariable()
{
while [ -n "$1" ]
do
CMAKE_VARIABLES="$CMAKE_VARIABLES -D$1"
shift
done
}
# export WM settings in a form that GNU configure recognizes
[ -n "$WM_CC" ] && export CC="$WM_CC"
[ -n "$WM_CXX" ] && export CXX="$WM_CXX"
[ -n "$WM_CFLAGS" ] && export CFLAGS="$WM_CFLAGS"
[ -n "$WM_CXXFLAGS" ] && export CXXFLAGS="$WM_CXXFLAGS"
[ -n "$WM_LDFLAGS" ] && export LDFLAGS="$WM_LDFLAGS"
set +x
echo ""
echo "Compilation options:"
echo " _withVerbose : %{_withVerbose}"
echo " _withMesa : %{_withMesa}"
echo " _withMPI : %{_withMPI}"
echo " _withPython : %{_withPython}"
echo " _withQt : %{_withQt}"
echo " _qmakePath : %{_qmakePath}"
echo " _mesaIncludePath : %{_mesaIncludePath}"
echo " _mesaLibPath : %{_mesaLibPath}"
echo " _pythonLibPath : %{_pythonLibPath}"
echo ""
set -x
# start with these general settings
addCMakeVariable VTK_USE_TK:BOOL=OFF
addCMakeVariable BUILD_SHARED_LIBS:BOOL=ON VTK_USE_RPATH:BOOL=OFF
addCMakeVariable CMAKE_BUILD_TYPE:STRING=Release
# include development files in "make install"
addCMakeVariable PARAVIEW_INSTALL_DEVELOPMENT_FILES:BOOL=ON
# new alternative to "make HTMLDocumentation"
addCMakeVariable PARAVIEW_GENERATE_PROXY_DOCUMENTATION:BOOL=ON
%ifos darwin
# Additional installation rules for Mac OS X
addCMakeVariable PARAVIEW_EXTRA_INSTALL_RULES_FILE:FILEPATH=%{_topdir}/BUILD/%{name}-%{version}/Applications/ParaView-3.8.1_extra_install_Darwin.cmake
%endif
# Add the value of _qmakePath for QT_QMAKE_EXECUTABLE
addCMakeVariable QT_QMAKE_EXECUTABLE:FILEPATH=%{_qmakePath}
echo "CMAKE_VARIABLES: $CMAKE_VARIABLES"
mkdir -p ./buildObj
cd ./buildObj
cmake \
-DCMAKE_INSTALL_PREFIX:PATH=%{_installPrefix} \
$CMAKE_VARIABLES \
..
[ -z "$WM_NCOMPPROCS" ] && WM_NCOMPPROCS=1
make -j $WM_NCOMPPROCS
%install
# On OpenSUSE, rpmbuild will choke when detecting unreferenced symlinks
# created during installation.
# Qt version 4.6.3 will generate some unreferenced symlinks when
# ParaView is compiled and installed. By enabling the following
# environment variable, the command brp-symlink will still complain
# about missing link targets, but it won't stop rpmbuild from generating
# the final rpm.
# For all other Unix distros, this is a no-op.
export NO_BRP_STALE_LINK_ERROR=yes
cd buildObj
make install DESTDIR=$RPM_BUILD_ROOT
# Creation of foam-extend specific .csh and .sh files
echo ""
echo "Generating foam-extend specific .csh and .sh files for the package %{name}-%{version}"
echo ""
#
# Generate package specific .sh file for foam-extend
#
mkdir -p $RPM_BUILD_ROOT/%{_installPrefix}/etc
cat << DOT_SH_EOF > $RPM_BUILD_ROOT/%{_installPrefix}/etc/%{name}-%{version}.sh
# Load %{name}-%{version} libraries and binaries if available
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
export PARAVIEW_DIR=\$WM_THIRD_PARTY_DIR/packages/%{name}-%{version}/platforms/\$WM_OPTIONS
export PARAVIEW_BIN_DIR=\$PARAVIEW_DIR/bin
export PARAVIEW_LIB_DIR=\$PARAVIEW_DIR/lib
export PARAVIEW_INCLUDE_DIR=\$PARAVIEW_DIR/include/paraview-4.2
export PARAVIEW_VERSION=%{version}
# NB: It is important to set the PV_PLUGIN_PATH location to a directory containing only the ParaView plugins.
# Otherwise, paraview will try to automatically autoload each and every dynamic library it can find in the
# specified directory to see if a given library is a paraview plugin.
# In the case of \$FOAM_LIBBIN, with over 80 libraries, this is a total waste of time that will slow down the
# startup of paraview or even make paraview crash on startup.
export PV_PLUGIN_PATH=\$FOAM_LIBBIN/paraview_plugins
[ -d \$PARAVIEW_LIB_DIR/paraview-4.2 ] && _foamAddLib \$PARAVIEW_LIB_DIR/paraview-4.2
# Enable access to the package applications if present
[ -d \$PARAVIEW_BIN_DIR ] && _foamAddPath \$PARAVIEW_BIN_DIR
# Additional binary path if running on Mac OS X
[ -d \$PARAVIEW_BIN_DIR/paraview.app/Contents/MacOS ] && _foamAddPath \$PARAVIEW_BIN_DIR/paraview.app/Contents/MacOS
DOT_SH_EOF
#
# Generate package specific .csh file for foam-extend
#
cat << DOT_CSH_EOF > $RPM_BUILD_ROOT/%{_installPrefix}/etc/%{name}-%{version}.csh
# Load %{name}-%{version} libraries and binaries if available
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
setenv PARAVIEW_DIR \$WM_THIRD_PARTY_DIR/packages/%{name}-%{version}/platforms/\$WM_OPTIONS
setenv PARAVIEW_BIN_DIR \$PARAVIEW_DIR/bin
setenv PARAVIEW_LIB_DIR \$PARAVIEW_DIR/lib
setenv PARAVIEW_INCLUDE_DIR \$PARAVIEW_DIR/include/paraview-4.2
setenv PARAVIEW_VERSION %{version}
# NB: It is important to set the PV_PLUGIN_PATH location to a directory containing only the ParaView plugins.
# Otherwise, paraview will try to automatically autoload each and every dynamic library it can find in the
# specified directory to see if a given library is a paraview plugin.
# In the case of \$FOAM_LIBBIN, with over 80 libraries, this is a total waste of time that will slow down the
# startup of paraview or even make paraview crash on startup.
setenv PV_PLUGIN_PATH \$FOAM_LIBBIN/paraview_plugins
if ( -e \$PARAVIEW_BIN_DIR ) then
_foamAddPath \$PARAVIEW_BIN_DIR
endif
if ( -e \$PARAVIEW_LIB_DIR/paraview-4.2 ) then
_foamAddLib \$PARAVIEW_LIB_DIR/paraview-4.2
endif
# Additional binary path if running on Mac OS X
if ( -e \$PARAVIEW_BIN_DIR/paraview.app/Contents/MacOS ) then
_foamAddPath \$PARAVIEW_BIN_DIR/paraview.app/Contents/MacOS
endif
DOT_CSH_EOF
#finally, generate a .tgz file for systems where using rpm for installing packages
# as a non-root user might be a problem.
(mkdir -p %{_topdir}/TGZS/%{_target_cpu}; cd $RPM_BUILD_ROOT/%{_prefix}; tar -zcvf %{_topdir}/TGZS/%{_target_cpu}/%{name}-%{version}.tgz packages/%{name}-%{version})
%clean
%files
%defattr(-,root,root)
%{_installPrefix}

View file

@ -0,0 +1,201 @@
#------------------------------------------------------------------------------
# ========= |
# \\ / F ield | foam-extend: Open Source CFD
# \\ / O peration |
# \\ / A nd | For copyright notice see file Copyright
# \\/ M anipulation |
#------------------------------------------------------------------------------
# License
# This file is part of foam-extend.
#
# foam-extend is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation, either version 3 of the License, or (at your
# option) any later version.
#
# foam-extend is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with foam-extend. If not, see <http://www.gnu.org/licenses/>.
#
# Script
# RPM spec file for PyFoam-0.6.4
#
# Description
# RPM spec file for PyFoam version 0.6.4
#
# Author:
# Martin Beaudoin, Hydro-Quebec, (2013)
#
#------------------------------------------------------------------------------
# We grab the value of WM_THIRD_PARTY and WM_OPTIONS from the environment variable
%{expand:%%define _WM_THIRD_PARTY_DIR %(echo $WM_THIRD_PARTY_DIR)}
%{expand:%%define _WM_OPTIONS %(echo $WM_OPTIONS)}
# Disable the generation of debuginfo packages
%define debug_package %{nil}
# The topdir needs to point to the $WM_THIRD_PARTY/rpmbuild directory
%define _topdir %{_WM_THIRD_PARTY_DIR}/rpmBuild
%define _tmppath %{_topdir}/tmp
# Will install the package directly $WM_THIRD_PARTY_DIR
# Some comments about package relocation:
# By using this prefix for the Prefix: parameter in this file, you will make this
# package relocatable.
#
# This is fine, as long as your software is itself relocatable.
#
# Simply take note that libraries built with libtool are not relocatable because the
# prefix we specify will be hard-coded in the library .la files.
# Ref: http://sourceware.org/autobook/autobook/autobook_80.html
#
# In that case, if you ever change the value of the $WM_THIRD_PARTY_DIR, you will
# not be able to reutilize this RPM, even though it is relocatable. You will need to
# regenerate the RPM.
#
%define _prefix %{_WM_THIRD_PARTY_DIR}
%define name PyFoam
%define release 1
%define version 0.6.4
%define buildroot %{_topdir}/BUILD/%{name}-%{version}-root
BuildRoot: %{buildroot}
Summary: PyFoam
License: Unknown
Name: %{name}
Version: %{version}
Release: %{release}
BuildArch: noarch
URL: http://openfoamwiki.net/images/3/3b/PyFoam-0.6.4.tar.gz
Source: %url/%{name}-%{version}.tar.gz
Prefix: %{_prefix}
Group: Development/Tools
%define _installPrefix %{_prefix}/packages/%{name}-%{version}/platforms/noarch
%description
%{summary}
%prep
%setup -q -n PyFoam-%{version}
%build
# Make the shebang line portable
python setup.py build --executable "/usr/bin/env python"
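# (distutils then rewrites the shebang of the installed PyFoam scripts to
# "#!/usr/bin/env python" instead of a hard-coded interpreter path, which
# keeps the package relocatable)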
%install
python setup.py install --prefix=$RPM_BUILD_ROOT/%{_installPrefix}
%define pythonVersion $(python -V 2>&1 | awk -F ' ' {'print $2'} | awk -F \. {'print $1 "." $2'})
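# pythonVersion keeps only the major.minor part of the interpreter version,
# e.g. "Python 2.7.9" -> "2.7", matching the lib/pythonX.Y/site-packages
# layout used in the environment files below.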
# Creation of foam-extend specific .csh and .sh files
echo ""
echo "Generating foam-extend specific .csh and .sh files for the package %{name}-%{version}"
echo ""
#
# Generate package specific .sh file for foam-extend
#
mkdir -p $RPM_BUILD_ROOT/%{_installPrefix}/etc
cat << DOT_SH_EOF > $RPM_BUILD_ROOT/%{_installPrefix}/etc/%{name}-%{version}.sh
# Load %{name}-%{version} libraries and binaries if available
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
export PYFOAM_DIR=\$WM_THIRD_PARTY_DIR/packages/%{name}-%{version}/platforms/noarch
export PYTHONPATH=\$PYFOAM_DIR/lib/python%{pythonVersion}/site-packages:\$PYTHONPATH
# Enable access to the package applications if present
[ -d \$PYFOAM_DIR/bin ] && _foamAddPath \$PYFOAM_DIR/bin
DOT_SH_EOF
#
# Generate package specific .csh file for foam-extend
#
cat << DOT_CSH_EOF > $RPM_BUILD_ROOT/%{_installPrefix}/etc/%{name}-%{version}.csh
# Load %{name}-%{version} libraries and binaries if available
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
setenv PYFOAM_DIR \$WM_THIRD_PARTY_DIR/packages/%{name}-%{version}/platforms/noarch
if ! \$?PYTHONPATH then
setenv PYTHONPATH \$PYFOAM_DIR/lib/python%{pythonVersion}/site-packages
else
setenv PYTHONPATH \$PYFOAM_DIR/lib/python%{pythonVersion}/site-packages:\$PYTHONPATH
endif
if ( -e \$PYFOAM_DIR/bin ) then
_foamAddPath \$PYFOAM_DIR/bin
endif
if \$?PYFOAM_SITE_DIR then
if ( -e \$PYFOAM_SITE_DIR/bin ) then
_foamAddPath \$PYFOAM_SITE_DIR/bin
endif
endif
DOT_CSH_EOF
cat << DOT_HARDCODED_SH_EOF > $RPM_BUILD_ROOT/%{_installPrefix}/etc/%{name}-%{version}_hardcoded.sh
# In this version of the configuration script, the paths are hardcoded,
# which makes it easier to load PyFoam without the foam-extend environment
# variables
#
# Load %{name}-%{version} libraries and binaries if available
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
export PYFOAM_DIR=$WM_THIRD_PARTY_DIR/packages/%{name}-%{version}/platforms/noarch
export PYTHONPATH=\$PYFOAM_DIR/lib/python%{pythonVersion}/site-packages:\$PYTHONPATH
# Enable access to the package applications if present
[ -d \$PYFOAM_DIR/bin ] && export PATH=\$PYFOAM_DIR/bin:\$PATH
DOT_HARDCODED_SH_EOF
#
# Generate package specific .csh file for foam-extend
#
cat << DOT_HARDCODED_CSH_EOF > $RPM_BUILD_ROOT/%{_installPrefix}/etc/%{name}-%{version}_hardcoded.csh
# In this version of the configuration script, the paths are hardcoded,
# which makes it easier to load PyFoam without the foam-extend environment
# variables
#
# Load %{name}-%{version} libraries and binaries if available
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
setenv PYFOAM_DIR $WM_THIRD_PARTY_DIR/packages/%{name}-%{version}/platforms/noarch
if ! \$?PYTHONPATH then
setenv PYTHONPATH \$PYFOAM_DIR/lib/python%{pythonVersion}/site-packages
else
setenv PYTHONPATH \$PYFOAM_DIR/lib/python%{pythonVersion}/site-packages:\$PYTHONPATH
endif
if ( -e \$PYFOAM_DIR/bin ) then
_foamAddPath \$PYFOAM_DIR/bin
endif
if \$?PYFOAM_SITE_DIR then
if ( -e \$PYFOAM_SITE_DIR/bin ) then
_foamAddPath \$PYFOAM_SITE_DIR/bin
endif
endif
DOT_HARDCODED_CSH_EOF
#finally, generate a .tgz file for systems where using rpm for installing packages
# as a non-root user might be a problem.
(mkdir -p %{_topdir}/TGZS/%{_target_cpu}; cd $RPM_BUILD_ROOT/%{_prefix}; tar -zcvf %{_topdir}/TGZS/%{_target_cpu}/%{name}-%{version}.tgz packages/%{name}-%{version})
%clean
rm -rf %{buildroot}
%files
%defattr(-,root,root)
%{_installPrefix}

ThirdParty/rpmBuild/SPECS/gcc-4.7.4.spec
View file

@ -0,0 +1,174 @@
#------------------------------------------------------------------------------
# ========= |
# \\ / F ield | foam-extend: Open Source CFD
# \\ / O peration |
# \\ / A nd | For copyright notice see file Copyright
# \\/ M anipulation |
#------------------------------------------------------------------------------
# License
# This file is part of foam-extend.
#
# foam-extend is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation, either version 3 of the License, or (at your
# option) any later version.
#
# foam-extend is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with foam-extend. If not, see <http://www.gnu.org/licenses/>.
#
# Script
# RPM spec file for gcc-4.7.4
#
# Description
# RPM spec file for creating a relocatable RPM
#
# Author:
# Hrvoje Jasak, Wikki Ltd. (2015)
#
#------------------------------------------------------------------------------
# We grab the value of WM_THIRD_PARTY and WM_OPTIONS from the environment variable
%{expand:%%define _WM_THIRD_PARTY_DIR %(echo $WM_THIRD_PARTY_DIR)}
%{expand:%%define _WM_OPTIONS %(echo $WM_OPTIONS)}
# Disable the generation of debuginfo packages
%define debug_package %{nil}
# The topdir needs to point to the $WM_THIRD_PARTY/rpmbuild directory
%define _topdir %{_WM_THIRD_PARTY_DIR}/rpmBuild
%define _tmppath %{_topdir}/tmp
# Will install the package directly $WM_THIRD_PARTY_DIR
# Some comments about package relocation:
# By using this prefix for the Prefix: parameter in this file, you will make this
# package relocatable.
#
# This is fine, as long as your software is itself relocatable.
#
# Simply take note that libraries built with libtool are not relocatable because the
# prefix we specify will be hard-coded in the library .la files.
# Ref: http://sourceware.org/autobook/autobook/autobook_80.html
#
# In that case, if you ever change the value of the $WM_THIRD_PARTY_DIR, you will
# not be able to reutilize this RPM, even though it is relocatable. You will need to
# regenerate the RPM.
#
%define _prefix %{_WM_THIRD_PARTY_DIR}
%define name gcc
%define release %{_WM_OPTIONS}
%define version 4.7.4
%define buildroot %{_topdir}/BUILD/%{name}-%{version}-root
BuildRoot: %{buildroot}
Summary: gcc
License: Unknown
Name: %{name}
Version: %{version}
Release: %{release}
URL: ftp://ftp.gnu.org/gnu/gcc/gcc-4.7.4
Source: %url/%{name}-%{version}.tar.gz
Prefix: %{_prefix}
Group: Development/Tools
%define _installPrefix %{_prefix}/packages/%{name}-%{version}/platforms/%{_WM_OPTIONS}
%description
%{summary}
%prep
%setup -q
%build
# export WM settings in a form that GNU configure recognizes
[ -n "$WM_CC" ] && export CC="$WM_CC"
[ -n "$WM_CXX" ] && export CXX="$WM_CXX"
[ -n "$WM_CFLAGS" ] && export CFLAGS="$WM_CFLAGS"
[ -n "$WM_CXXFLAGS" ] && export CXXFLAGS="$WM_CXXFLAGS"
[ -n "$WM_LDFLAGS" ] && export LDFLAGS="$WM_LDFLAGS"
GMP_VERSION=gmp-5.1.2
MPFR_VERSION=mpfr-3.1.2
MPC_VERSION=mpc-1.0.1
mkdir ./objBuildDir
cd ./objBuildDir
../configure \
--prefix=%{_installPrefix} \
--enable-languages=c,c++ \
--enable-shared \
--disable-multilib \
--with-mpc=$WM_THIRD_PARTY_DIR/packages/$MPC_VERSION/platforms/$WM_OPTIONS \
--with-gmp=$WM_THIRD_PARTY_DIR/packages/$GMP_VERSION/platforms/$WM_OPTIONS \
--with-mpfr=$WM_THIRD_PARTY_DIR/packages/$MPFR_VERSION/platforms/$WM_OPTIONS
[ -z "$WM_NCOMPPROCS" ] && WM_NCOMPPROCS=1
make -j $WM_NCOMPPROCS
%install
cd ./objBuildDir
make install DESTDIR=$RPM_BUILD_ROOT
# Creation of foam-extend specific .csh and .sh files
echo ""
echo "Generating foam-extend specific .csh and .sh files for the package %{name}-%{version}"
echo ""
#
# Generate package specific .sh file for foam-extend
#
mkdir -p $RPM_BUILD_ROOT/%{_installPrefix}/etc
cat << DOT_SH_EOF > $RPM_BUILD_ROOT/%{_installPrefix}/etc/%{name}-%{version}.sh
# Load %{name}-%{version} libraries and binaries if available
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
export GCC_DIR=\$WM_THIRD_PARTY_DIR/packages/%{name}-%{version}/platforms/\$WM_OPTIONS
[ -d \$GCC_DIR/lib ] && _foamAddLib \$GCC_DIR/lib
[ -d \$GCC_DIR/lib64 ] && _foamAddLib \$GCC_DIR/lib64
# Enable access to the package applications if present
[ -d \$GCC_DIR/bin ] && _foamAddPath \$GCC_DIR/bin
DOT_SH_EOF
#
# Generate package specific .csh file for foam-extend
#
cat << DOT_CSH_EOF > $RPM_BUILD_ROOT/%{_installPrefix}/etc/%{name}-%{version}.csh
# Load %{name}-%{version} libraries and binaries if available
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
setenv GCC_DIR \$WM_THIRD_PARTY_DIR/packages/%{name}-%{version}/platforms/\$WM_OPTIONS
if ( -e \$GCC_DIR/lib ) then
_foamAddLib \$GCC_DIR/lib
endif
if ( -e \$GCC_DIR/lib64 ) then
_foamAddLib \$GCC_DIR/lib64
endif
if ( -e \$GCC_DIR/bin ) then
_foamAddPath \$GCC_DIR/bin
endif
DOT_CSH_EOF
#finally, generate a .tgz file for systems where using rpm for installing packages
# as a non-root user might be a problem.
(mkdir -p %{_topdir}/TGZS/%{_target_cpu}; cd $RPM_BUILD_ROOT/%{_prefix}; tar -zcvf %{_topdir}/TGZS/%{_target_cpu}/%{name}-%{version}.tgz packages/%{name}-%{version})
%clean
rm -rf %{buildroot}
%files
%defattr(-,root,root)
%{_installPrefix}

ThirdParty/rpmBuild/SPECS/gcc-4.9.2.spec
View file

@ -0,0 +1,174 @@
#------------------------------------------------------------------------------
# ========= |
# \\ / F ield | foam-extend: Open Source CFD
# \\ / O peration |
# \\ / A nd | For copyright notice see file Copyright
# \\/ M anipulation |
#------------------------------------------------------------------------------
# License
# This file is part of foam-extend.
#
# foam-extend is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation, either version 3 of the License, or (at your
# option) any later version.
#
# foam-extend is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with foam-extend. If not, see <http://www.gnu.org/licenses/>.
#
# Script
# RPM spec file for gcc-4.9.2
#
# Description
# RPM spec file for creating a relocatable RPM
#
# Author:
# Hrvoje Jasak, Wikki Ltd. (2015)
#
#------------------------------------------------------------------------------
# We grab the value of WM_THIRD_PARTY_DIR and WM_OPTIONS from the environment variables
%{expand:%%define _WM_THIRD_PARTY_DIR %(echo $WM_THIRD_PARTY_DIR)}
%{expand:%%define _WM_OPTIONS %(echo $WM_OPTIONS)}
# Disable the generation of debuginfo packages
%define debug_package %{nil}
# The topdir needs to point to the $WM_THIRD_PARTY_DIR/rpmBuild directory
%define _topdir %{_WM_THIRD_PARTY_DIR}/rpmBuild
%define _tmppath %{_topdir}/tmp
# Will install the package directly in $WM_THIRD_PARTY_DIR
# Some comments about package relocation:
# By using this prefix for the Prefix: parameter in this file, you will make this
# package relocatable.
#
# This is fine, as long as your software is itself relocatable.
#
# Simply take note that libraries built with libtool are not relocatable because the
# prefix we specify will be hard-coded in the library .la files.
# Ref: http://sourceware.org/autobook/autobook/autobook_80.html
#
# In that case, if you ever change the value of the $WM_THIRD_PARTY_DIR, you will
# not be able to reutilize this RPM, even though it is relocatable. You will need to
# regenerate the RPM.
#
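# As an illustration only (not part of the build), one way to spot such
# hard-coded prefixes after installation is to grep the generated libtool
# .la files, e.g.:
#   grep -l "$WM_THIRD_PARTY_DIR" \
#       $WM_THIRD_PARTY_DIR/packages/*/platforms/*/lib*/*.la
#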
%define _prefix %{_WM_THIRD_PARTY_DIR}
%define name gcc
%define release %{_WM_OPTIONS}
%define version 4.9.2
%define buildroot %{_topdir}/BUILD/%{name}-%{version}-root
BuildRoot: %{buildroot}
Summary: gcc
License: Unknown
Name: %{name}
Version: %{version}
Release: %{release}
URL: ftp://ftp.gnu.org/gnu/gcc/gcc-4.9.2
Source: %url/%{name}-%{version}.tar.gz
Prefix: %{_prefix}
Group: Development/Tools
%define _installPrefix %{_prefix}/packages/%{name}-%{version}/platforms/%{_WM_OPTIONS}
%description
%{summary}
%prep
%setup -q
%build
# export WM settings in a form that GNU configure recognizes
[ -n "$WM_CC" ] && export CC="$WM_CC"
[ -n "$WM_CXX" ] && export CXX="$WM_CXX"
[ -n "$WM_CFLAGS" ] && export CFLAGS="$WM_CFLAGS"
[ -n "$WM_CXXFLAGS" ] && export CXXFLAGS="$WM_CXXFLAGS"
[ -n "$WM_LDFLAGS" ] && export LDFLAGS="$WM_LDFLAGS"
GMP_VERSION=gmp-5.1.2
MPFR_VERSION=mpfr-3.1.2
MPC_VERSION=mpc-1.0.1
mkdir ./objBuildDir
cd ./objBuildDir
../configure \
--prefix=%{_installPrefix} \
--enable-languages=c,c++ \
--enable-shared \
--disable-multilib \
--with-mpc=$WM_THIRD_PARTY_DIR/packages/$MPC_VERSION/platforms/$WM_OPTIONS \
--with-gmp=$WM_THIRD_PARTY_DIR/packages/$GMP_VERSION/platforms/$WM_OPTIONS \
--with-mpfr=$WM_THIRD_PARTY_DIR/packages/$MPFR_VERSION/platforms/$WM_OPTIONS
[ -z "$WM_NCOMPPROCS" ] && WM_NCOMPPROCS=1
make -j $WM_NCOMPPROCS
%install
cd ./objBuildDir
make install DESTDIR=$RPM_BUILD_ROOT
# Creation of foam-extend specific .csh and .sh files
echo ""
echo "Generating foam-extend specific .csh and .sh files for the package %{name}-%{version}"
echo ""
#
# Generate package specific .sh file for foam-extend
#
mkdir -p $RPM_BUILD_ROOT/%{_installPrefix}/etc
cat << DOT_SH_EOF > $RPM_BUILD_ROOT/%{_installPrefix}/etc/%{name}-%{version}.sh
# Load %{name}-%{version} libraries and binaries if available
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
export GCC_DIR=\$WM_THIRD_PARTY_DIR/packages/%{name}-%{version}/platforms/\$WM_OPTIONS
[ -d \$GCC_DIR/lib ] && _foamAddLib \$GCC_DIR/lib
[ -d \$GCC_DIR/lib64 ] && _foamAddLib \$GCC_DIR/lib64
# Enable access to the package applications if present
[ -d \$GCC_DIR/bin ] && _foamAddPath \$GCC_DIR/bin
DOT_SH_EOF
#
# Generate package specific .csh file for foam-extend
#
cat << DOT_CSH_EOF > $RPM_BUILD_ROOT/%{_installPrefix}/etc/%{name}-%{version}.csh
# Load %{name}-%{version} libraries and binaries if available
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
setenv GCC_DIR \$WM_THIRD_PARTY_DIR/packages/%{name}-%{version}/platforms/\$WM_OPTIONS
if ( -e \$GCC_DIR/lib ) then
_foamAddLib \$GCC_DIR/lib
endif
if ( -e \$GCC_DIR/lib64 ) then
_foamAddLib \$GCC_DIR/lib64
endif
if ( -e \$GCC_DIR/bin ) then
_foamAddPath \$GCC_DIR/bin
endif
DOT_CSH_EOF
# Finally, generate a .tgz file for systems where using rpm to install packages
# as a non-root user might be a problem.
(mkdir -p %{_topdir}/TGZS/%{_target_cpu}; cd $RPM_BUILD_ROOT/%{_prefix}; tar -zcvf %{_topdir}/TGZS/%{_target_cpu}/%{name}-%{version}.tgz packages/%{name}-%{version})
%clean
rm -rf %{buildroot}
%files
%defattr(-,root,root)
%{_installPrefix}

ThirdParty/rpmBuild/SPECS/gmp-4.3.2.spec
View file

@ -0,0 +1,155 @@
#------------------------------------------------------------------------------
# ========= |
# \\ / F ield | foam-extend: Open Source CFD
# \\ / O peration |
# \\ / A nd | For copyright notice see file Copyright
# \\/ M anipulation |
#------------------------------------------------------------------------------
# License
# This file is part of foam-extend.
#
# foam-extend is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation, either version 3 of the License, or (at your
# option) any later version.
#
# foam-extend is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with foam-extend. If not, see <http://www.gnu.org/licenses/>.
#
# Script
# RPM spec file for gmp-4.3.2
#
# Description
# RPM spec file for creating a relocatable RPM
#
# Author:
# Martin Beaudoin, Hydro-Quebec, (2010)
#
#------------------------------------------------------------------------------
# We grab the value of WM_THIRD_PARTY_DIR and WM_OPTIONS from the environment variables
%{expand:%%define _WM_THIRD_PARTY_DIR %(echo $WM_THIRD_PARTY_DIR)}
%{expand:%%define _WM_OPTIONS %(echo $WM_OPTIONS)}
# Disable the generation of debuginfo packages
%define debug_package %{nil}
# The topdir needs to point to the $WM_THIRD_PARTY_DIR/rpmBuild directory
%define _topdir %{_WM_THIRD_PARTY_DIR}/rpmBuild
%define _tmppath %{_topdir}/tmp
# Will install the package directly in $WM_THIRD_PARTY_DIR
# Some comments about package relocation:
# By using this prefix for the Prefix: parameter in this file, you will make this
# package relocatable.
#
# This is fine, as long as your software is itself relocatable.
#
# Simply take note that libraries built with libtool are not relocatable because the
# prefix we specify will be hard-coded in the library .la files.
# Ref: http://sourceware.org/autobook/autobook/autobook_80.html
#
# In that case, if you ever change the value of the $WM_THIRD_PARTY_DIR, you will
# not be able to reutilize this RPM, even though it is relocatable. You will need to
# regenerate the RPM.
#
%define _prefix %{_WM_THIRD_PARTY_DIR}
%define name gmp
%define release %{_WM_OPTIONS}
%define version 4.3.2
%define buildroot %{_topdir}/BUILD/%{name}-%{version}-root
BuildRoot: %{buildroot}
Summary: gmp
License: Unknown
Name: %{name}
Version: %{version}
Release: %{release}
URL: ftp://ftp.gnu.org/gnu/gmp
Source: %url/%{name}-%{version}.tar.bz2
Prefix: %{_prefix}
Group: Development/Tools
%define _installPrefix %{_prefix}/packages/%{name}-%{version}/platforms/%{_WM_OPTIONS}
%description
%{summary}
%prep
%setup -q
%build
# export WM settings in a form that GNU configure recognizes
[ -n "$WM_CC" ] && export CC="$WM_CC"
[ -n "$WM_CXX" ] && export CXX="$WM_CXX"
[ -n "$WM_CFLAGS" ] && export CFLAGS="$WM_CFLAGS"
[ -n "$WM_CXXFLAGS" ] && export CXXFLAGS="$WM_CXXFLAGS"
[ -n "$WM_LDFLAGS" ] && export LDFLAGS="$WM_LDFLAGS"
./configure --prefix=%{_installPrefix}
[ -z "$WM_NCOMPPROCS" ] && WM_NCOMPPROCS=1
make -j $WM_NCOMPPROCS
%install
make install DESTDIR=$RPM_BUILD_ROOT
# Creation of foam-extend specific .csh and .sh files
echo ""
echo "Generating foam-extend specific .csh and .sh files for the package %{name}-%{version}"
echo ""
#
# Generate package specific .sh file for foam-extend
#
mkdir -p $RPM_BUILD_ROOT/%{_installPrefix}/etc
cat << DOT_SH_EOF > $RPM_BUILD_ROOT/%{_installPrefix}/etc/%{name}-%{version}.sh
# Load %{name}-%{version} libraries and binaries if available
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
export GMP_DIR=\$WM_THIRD_PARTY_DIR/packages/%{name}-%{version}/platforms/\$WM_OPTIONS
[ -d \$GMP_DIR/lib ] && _foamAddLib \$GMP_DIR/lib
# Enable access to the package applications if present
[ -d \$GMP_DIR/bin ] && _foamAddPath \$GMP_DIR/bin
DOT_SH_EOF
#
# Generate package specific .csh file for foam-extend
#
cat << DOT_CSH_EOF > $RPM_BUILD_ROOT/%{_installPrefix}/etc/%{name}-%{version}.csh
# Load %{name}-%{version} libraries and binaries if available
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
setenv GMP_DIR \$WM_THIRD_PARTY_DIR/packages/%{name}-%{version}/platforms/\$WM_OPTIONS
if ( -e \$GMP_DIR/lib ) then
_foamAddLib \$GMP_DIR/lib
endif
if ( -e \$GMP_DIR/bin ) then
_foamAddPath \$GMP_DIR/bin
endif
DOT_CSH_EOF
# Finally, generate a .tgz file for systems where using rpm to install packages
# as a non-root user might be a problem.
(mkdir -p %{_topdir}/TGZS/%{_target_cpu}; cd $RPM_BUILD_ROOT/%{_prefix}; tar -zcvf %{_topdir}/TGZS/%{_target_cpu}/%{name}-%{version}.tgz packages/%{name}-%{version})
%clean
rm -rf %{buildroot}
%files
%defattr(-,root,root)
%{_installPrefix}/etc
%{_installPrefix}/include
%{_installPrefix}/lib
%{_installPrefix}/share

View file

@ -0,0 +1,157 @@
#------------------------------------------------------------------------------
# ========= |
# \\ / F ield | foam-extend: Open Source CFD
# \\ / O peration |
# \\ / A nd | For copyright notice see file Copyright
# \\/ M anipulation |
#------------------------------------------------------------------------------
# License
# This file is part of foam-extend.
#
# foam-extend is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation, either version 3 of the License, or (at your
# option) any later version.
#
# foam-extend is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with foam-extend. If not, see <http://www.gnu.org/licenses/>.
#
# Script
# RPM spec file for mpfr-2.4.2
#
# Description
# RPM spec file for creating a relocatable RPM
#
# Author:
# Martin Beaudoin, Hydro-Quebec, (2010)
#
#------------------------------------------------------------------------------
# We grab the value of WM_THIRD_PARTY_DIR and WM_OPTIONS from the environment variables
%{expand:%%define _WM_THIRD_PARTY_DIR %(echo $WM_THIRD_PARTY_DIR)}
%{expand:%%define _WM_OPTIONS %(echo $WM_OPTIONS)}
# Disable the generation of debuginfo packages
%define debug_package %{nil}
# The topdir needs to point to the $WM_THIRD_PARTY_DIR/rpmBuild directory
%define _topdir %{_WM_THIRD_PARTY_DIR}/rpmBuild
%define _tmppath %{_topdir}/tmp
# Will install the package directly in $WM_THIRD_PARTY_DIR
# Some comments about package relocation:
# By using this prefix for the Prefix: parameter in this file, you will make this
# package relocatable.
#
# This is fine, as long as your software is itself relocatable.
#
# Simply take note that libraries built with libtool are not relocatable because the
# prefix we specify will be hard-coded in the library .la files.
# Ref: http://sourceware.org/autobook/autobook/autobook_80.html
#
# In that case, if you ever change the value of the $WM_THIRD_PARTY_DIR, you will
# not be able to reutilize this RPM, even though it is relocatable. You will need to
# regenerate the RPM.
#
%define _prefix %{_WM_THIRD_PARTY_DIR}
%define name mpfr
%define release %{_WM_OPTIONS}
%define version 2.4.2
%define buildroot %{_topdir}/BUILD/%{name}-%{version}-root
BuildRoot: %{buildroot}
Summary: mpfr
License: Unknown
Name: %{name}
Version: %{version}
Release: %{release}
URL: http://www.mpfr.org/mpfr-current
Source: %url/%{name}-%{version}.tar.gz
Prefix: %{_prefix}
Group: Development/Tools
%define _installPrefix %{_prefix}/packages/%{name}-%{version}/platforms/%{_WM_OPTIONS}
%description
%{summary}
%prep
%setup -q
%build
# export WM settings in a form that GNU configure recognizes
[ -n "$WM_CC" ] && export CC="$WM_CC"
[ -n "$WM_CXX" ] && export CXX="$WM_CXX"
[ -n "$WM_CFLAGS" ] && export CFLAGS="$WM_CFLAGS"
[ -n "$WM_CXXFLAGS" ] && export CXXFLAGS="$WM_CXXFLAGS"
[ -n "$WM_LDFLAGS" ] && export LDFLAGS="$WM_LDFLAGS"
GMP_VERSION=gmp-5.1.2
./configure \
--prefix=%{_installPrefix} \
--with-gmp=$WM_THIRD_PARTY_DIR/packages/$GMP_VERSION/platforms/$WM_OPTIONS
[ -z "$WM_NCOMPPROCS" ] && WM_NCOMPPROCS=1
make -j $WM_NCOMPPROCS
%install
make install DESTDIR=$RPM_BUILD_ROOT
# Creation of foam-extend specific .csh and .sh files
echo ""
echo "Generating foam-extend specific .csh and .sh files for the package %{name}-%{version}"
echo ""
#
# Generate package specific .sh file for foam-extend
#
mkdir -p $RPM_BUILD_ROOT/%{_installPrefix}/etc
cat << DOT_SH_EOF > $RPM_BUILD_ROOT/%{_installPrefix}/etc/%{name}-%{version}.sh
# Load %{name}-%{version} libraries and binaries if available
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
export MPFR_DIR=\$WM_THIRD_PARTY_DIR/packages/%{name}-%{version}/platforms/\$WM_OPTIONS
[ -d \$MPFR_DIR/lib ] && _foamAddLib \$MPFR_DIR/lib
# Enable access to the package applications if present
[ -d \$MPFR_DIR/bin ] && _foamAddPath \$MPFR_DIR/bin
DOT_SH_EOF
#
# Generate package specific .csh file for foam-extend
#
cat << DOT_CSH_EOF > $RPM_BUILD_ROOT/%{_installPrefix}/etc/%{name}-%{version}.csh
# Load %{name}-%{version} libraries and binaries if available
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
setenv MPFR_DIR \$WM_THIRD_PARTY_DIR/packages/%{name}-%{version}/platforms/\$WM_OPTIONS
if ( -e \$MPFR_DIR/lib ) then
_foamAddLib \$MPFR_DIR/lib
endif
if ( -e \$MPFR_DIR/bin ) then
_foamAddPath \$MPFR_DIR/bin
endif
DOT_CSH_EOF
# Finally, generate a .tgz file for systems where using rpm to install packages
# as a non-root user might be a problem.
(mkdir -p %{_topdir}/TGZS/%{_target_cpu}; cd $RPM_BUILD_ROOT/%{_prefix}; tar -zcvf %{_topdir}/TGZS/%{_target_cpu}/%{name}-%{version}.tgz packages/%{name}-%{version})
%clean
rm -rf %{buildroot}
%files
%defattr(-,root,root)
%{_installPrefix}

View file

@ -0,0 +1,183 @@
#------------------------------------------------------------------------------
# ========= |
# \\ / F ield | foam-extend: Open Source CFD
# \\ / O peration |
# \\ / A nd | For copyright notice see file Copyright
# \\/ M anipulation |
#------------------------------------------------------------------------------
# License
# This file is part of foam-extend.
#
# foam-extend is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation, either version 3 of the License, or (at your
# option) any later version.
#
# foam-extend is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with foam-extend. If not, see <http://www.gnu.org/licenses/>.
#
# Script
# RPM spec file for qt-everywhere-opensource-src-4.8.6
#
# Description
# RPM spec file for creating a relocatable RPM
#
# Author:
# Martin Beaudoin, Hydro-Quebec, (2014)
#
#------------------------------------------------------------------------------
# We grab the value of WM_THIRD_PARTY_DIR and WM_OPTIONS from the environment variables
%{expand:%%define _WM_THIRD_PARTY_DIR %(echo $WM_THIRD_PARTY_DIR)}
%{expand:%%define _WM_OPTIONS %(echo $WM_OPTIONS)}
# Disable the generation of debuginfo packages
%define debug_package %{nil}
# The topdir needs to point to the $WM_THIRD_PARTY_DIR/rpmBuild directory
%define _topdir %{_WM_THIRD_PARTY_DIR}/rpmBuild
%define _tmppath %{_topdir}/tmp
# Will install the package directly in $WM_THIRD_PARTY_DIR
# Some comments about package relocation:
# By using this prefix for the Prefix: parameter in this file, you will make this
# package relocatable.
#
# This is fine, as long as your software is itself relocatable.
#
# Simply take note that libraries built with libtool are not relocatable because the
# prefix we specify will be hard-coded in the library .la files.
# Ref: http://sourceware.org/autobook/autobook/autobook_80.html
#
# In that case, if you ever change the value of the $WM_THIRD_PARTY_DIR, you will
# not be able to reutilize this RPM, even though it is relocatable. You will need to
# regenerate the RPM.
#
%define _prefix %{_WM_THIRD_PARTY_DIR}
%define name qt-everywhere-opensource-src
%define release %{_WM_OPTIONS}
%define version 4.8.6
%define buildroot %{_topdir}/BUILD/%{name}-%{version}-root
%define _unpackaged_files_terminate_build 0
%define _missing_doc_files_terminate_build 0
BuildRoot: %{buildroot}
Summary: qt-everywhere-opensource-src
License: Unknown
Name: %{name}
Version: %{version}
Release: %{release}
URL: http://download.qt-project.org/official_releases/qt/4.8/4.8.6
Source: %url/%{name}-%{version}.tar.gz
Prefix: %{_prefix}
Group: Development/Tools
%define _installPrefix %{_prefix}/packages/%{name}-%{version}/platforms/%{_WM_OPTIONS}
%description
%{summary}
%prep
%setup -q
%build
# export WM settings in a form that GNU configure recognizes
[ -n "$WM_CC" ] && export CC="$WM_CC"
[ -n "$WM_CXX" ] && export CXX="$WM_CXX"
[ -n "$WM_CFLAGS" ] && export CFLAGS="$WM_CFLAGS"
[ -n "$WM_CXXFLAGS" ] && export CXXFLAGS="$WM_CXXFLAGS"
[ -n "$WM_LDFLAGS" ] && export LDFLAGS="$WM_LDFLAGS"
%ifos darwin
# Under Mac OS X, Qt 4.8 will only compile using the stock native g++ compiler supplied with Xcode.
# If your native Mac OS X g++ compiler is not located under /usr/bin, simply adjust the redefinition of the following variables accordingly.
# This is ugly, I know...
export CC=/usr/bin/gcc
export CXX=/usr/bin/g++
export PATH=/usr/bin:$PATH
%endif
./configure \
-opensource --confirm-license=yes \
-release -shared \
-nomake examples -nomake demos \
--prefix=%{_installPrefix}
# Explicitly specify LD_LIBRARY_PATH so it can find Qt's own libraries
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:%{_builddir}/%{name}-%{version}/lib
[ -z "$WM_NCOMPPROCS" ] && WM_NCOMPPROCS=1
make -j $WM_NCOMPPROCS
%install
# Makefiles generated by qmake do not support the DESTDIR= option
# We need to use the INSTALL_ROOT= option instead
# Somehow, we need to clean up for the RPM to install properly
rm -rf %{_installPrefix}
make install INSTALL_ROOT=$RPM_BUILD_ROOT
# Creation of foam-extend specific .csh and .sh files
echo ""
echo "Generating foam-extend specific .csh and .sh files for the package %{name}-%{version}"
echo ""
#
# Generate package specific .sh file for foam-extend
#
mkdir -p $RPM_BUILD_ROOT/%{_installPrefix}/etc
cat << DOT_SH_EOF > $RPM_BUILD_ROOT/%{_installPrefix}/etc/%{name}-%{version}.sh
# Load %{name}-%{version} libraries and binaries if available
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
export QT_DIR=\$WM_THIRD_PARTY_DIR/packages/%{name}-%{version}/platforms/\$WM_OPTIONS
export QT_BIN_DIR=\$QT_DIR/bin
export QT_LIB_DIR=\$QT_DIR/lib
# Enable access to the runtime package applications
[ -d \$QT_BIN_DIR ] && _foamAddPath \$QT_BIN_DIR
[ -d \$QT_LIB_DIR ] && _foamAddLib \$QT_LIB_DIR
DOT_SH_EOF
#
# Generate package specific .csh file for foam-extend
#
cat << DOT_CSH_EOF > $RPM_BUILD_ROOT/%{_installPrefix}/etc/%{name}-%{version}.csh
# Load %{name}-%{version} libraries and binaries if available
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
setenv QT_DIR \$WM_THIRD_PARTY_DIR/packages/%{name}-%{version}/platforms/\$WM_OPTIONS
setenv QT_BIN_DIR \$QT_DIR/bin
setenv QT_LIB_DIR \$QT_DIR/lib
if ( -e \$QT_BIN_DIR ) then
_foamAddPath \$QT_BIN_DIR
endif
if ( -e \$QT_LIB_DIR ) then
_foamAddLib \$QT_LIB_DIR
endif
DOT_CSH_EOF
# Finally, generate a .tgz file for systems where using rpm to install packages
# as a non-root user might be a problem.
(mkdir -p %{_topdir}/TGZS/%{_target_cpu}; cd $RPM_BUILD_ROOT/%{_prefix}; tar -zcvf %{_topdir}/TGZS/%{_target_cpu}/%{name}-%{version}.tgz packages/%{name}-%{version})
%clean
rm -rf %{buildroot}
%files
%defattr(-,root,root)
%{_installPrefix}

View file

@ -57,8 +57,6 @@ int main(int argc, char *argv[])
# include "readPISOControls.H"
# include "checkTotalVolume.H"
runTime++;
Info<< "Time = " << runTime.timeName() << nl << endl;
bool meshChanged = mesh.update();
@ -72,6 +70,7 @@ int main(int argc, char *argv[])
}
# include "volContinuity.H"
# include "meshCourantNo.H"
// Solve potential flow equations
adjustPhi(phi, U, p);
@ -129,6 +128,65 @@ int main(int argc, char *argv[])
<< " min: " << gMin(magU.internalField()) << endl;
}
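// Sketch of the -writep logic that follows: pick the fixed-value velocity
// patch with the largest mean |U| as reference, average the Bernoulli
// constant p + 0.5*|U|^2 over it, and reconstruct the internal pressure
// field as p = e - 0.5*|U|^2.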
if (args.optionFound("writep"))
{
// Find reference patch
label refPatch = -1;
scalar maxMagU = 0;
// Go through all velocity patches and find the one that fixes
// velocity to the largest value
forAll (U.boundaryField(), patchI)
{
const fvPatchVectorField& Upatch = U.boundaryField()[patchI];
if (Upatch.fixesValue())
{
// Calculate mean velocity
scalar u = sum(mag(Upatch));
label patchSize = Upatch.size();
reduce(u, sumOp<scalar>());
reduce(patchSize, sumOp<label>());
if (patchSize > 0)
{
scalar curMag = u/patchSize;
if (curMag > maxMagU)
{
refPatch = patchI;
maxMagU = curMag;
}
}
}
}
if (refPatch > -1)
{
// Calculate reference pressure
const fvPatchVectorField& Upatch = U.boundaryField()[refPatch];
const fvPatchScalarField& pPatch = p.boundaryField()[refPatch];
scalar patchE = sum(mag(pPatch + 0.5*magSqr(Upatch)));
label patchSize = Upatch.size();
reduce(patchE, sumOp<scalar>());
reduce(patchSize, sumOp<label>());
scalar e = patchE/patchSize;
Info<< "Using reference patch " << refPatch
<< " with mag(U) = " << maxMagU
<< " p + 0.5*U^2 = " << e << endl;
p.internalField() = e - 0.5*magSqr(U.internalField());
p.correctBoundaryConditions();
}
}
runTime.write();
Info<< "ExecutionTime = " << runTime.elapsedCpuTime() << " s"

View file

@ -0,0 +1,3 @@
sonicDyMFoam.C
EXE = $(FOAM_APPBIN)/sonicDyMFoam

View file

@ -0,0 +1,21 @@
EXE_INC = \
-I$(LIB_SRC)/finiteVolume/lnInclude \
-I$(LIB_SRC)/thermophysicalModels/basic/lnInclude \
-I$(LIB_SRC)/thermophysicalModels/specie/lnInclude \
-I$(LIB_SRC)/turbulenceModels/compressible/turbulenceModel \
-I$(LIB_SRC)/dynamicMesh/dynamicFvMesh/lnInclude \
-I$(LIB_SRC)/dynamicMesh/dynamicMesh/lnInclude \
-I$(LIB_SRC)/meshTools/lnInclude
EXE_LIBS = \
-lbasicThermophysicalModels \
-lspecie \
-lcompressibleTurbulenceModel \
-lcompressibleRASModels \
-lcompressibleLESModels \
-lfiniteVolume \
-ldynamicFvMesh \
-ltopoChangerFvMesh \
-ldynamicMesh \
-lmeshTools \
-llduSolvers

View file

@ -0,0 +1,24 @@
fvVectorMatrix UEqn
(
fvm::ddt(rho, U)
+ fvm::div(phi, U)
+ turbulence->divDevRhoReff(U)
);
if (oCorr == nOuterCorr - 1)
{
if (mesh.solutionDict().relax("UFinal"))
{
UEqn.relax(mesh.solutionDict().relaxationFactor("UFinal"));
}
else
{
UEqn.relax(1);
}
}
else
{
UEqn.relax();
}
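// The "U" and "UFinal" factors used above are looked up from the
// relaxationFactors sub-dictionary of system/fvSolution, e.g. (illustrative
// values only):
//     relaxationFactors
//     {
//         U        0.7;
//         UFinal   1;
//     }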
solve(UEqn == -fvc::grad(p));

View file

@ -0,0 +1,65 @@
Info<< "Reading thermophysical properties\n" << endl;
autoPtr<basicPsiThermo> pThermo
(
basicPsiThermo::New(mesh)
);
basicPsiThermo& thermo = pThermo();
volScalarField& T = const_cast<volScalarField&>(thermo.T());
volScalarField& p = thermo.p();
volScalarField& e = thermo.e();
const volScalarField& psi = thermo.psi();
volScalarField rho
(
IOobject
(
"rho",
runTime.timeName(),
mesh
),
thermo.rho()
);
Info<< "Reading field U\n" << endl;
volVectorField U
(
IOobject
(
"U",
runTime.timeName(),
mesh,
IOobject::MUST_READ,
IOobject::AUTO_WRITE
),
mesh
);
# include "compressibleCreatePhi.H"
// Store energy source term for under-relaxation
volScalarField pDivU
(
IOobject
(
"pDivU",
runTime.timeName(),
mesh,
IOobject::NO_READ,
IOobject::NO_WRITE
),
p*fvc::div(phi/fvc::interpolate(rho))
);
Info<< "Creating turbulence model\n" << endl;
autoPtr<compressible::turbulenceModel> turbulence
(
compressible::turbulenceModel::New
(
rho,
U,
phi,
thermo
)
);

View file

@ -0,0 +1,43 @@
{
fvScalarMatrix eEqn
(
fvm::ddt(rho, e)
+ fvm::div(phi, e)
- fvm::laplacian(turbulence->alphaEff(), e)
==
- fvm::SuSp(pDivU/e, e)
// viscous heating?
);
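    // The SuSp(pDivU/e, e) entry above re-introduces the stored (and
    // under-relaxed) work term pDivU as a source proportional to e, so it
    // can be treated (semi-)implicitly rather than as a purely explicit
    // source.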
if (oCorr == nOuterCorr - 1)
{
if (mesh.solutionDict().relax("eFinal"))
{
eEqn.relax(mesh.solutionDict().relaxationFactor("eFinal"));
}
else
{
eEqn.relax(1);
}
}
else
{
eEqn.relax();
}
eEqn.solve();
// Bound the energy using TMin and TMax
{
dimensionedScalar Tstd("Tstd", dimTemperature, specie::Tstd());
volScalarField Cv = thermo.Cv();
volScalarField R = thermo.Cp() - Cv;
e = Foam::min(e, TMax*Cv + R*Tstd);
e = Foam::max(e, TMin*Cv + R*Tstd);
e.correctBoundaryConditions();
}
thermo.correct();
}

View file

@ -0,0 +1,17 @@
{
// Bound the velocity
volScalarField magU = mag(U);
if (max(magU) > UMax)
{
volScalarField Ulimiter =
pos(magU - UMax)*UMax/(magU + smallU)
+ neg(magU - UMax);
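        // pos(magU - UMax) selects cells above the bound and rescales them
        // to UMax (smallU guards against division by zero); neg(magU - UMax)
        // leaves the remaining cells with a factor of one.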
Ulimiter.max(scalar(0));
Ulimiter.min(scalar(1));
U *= Ulimiter;
U.correctBoundaryConditions();
}
}

View file

@ -0,0 +1,69 @@
{
U = UEqn.H()/UEqn.A();
# include "limitU.H"
surfaceScalarField phid
(
"phid",
fvc::interpolate(psi)*
(
(fvc::interpolate(U) & mesh.Sf())
- fvc::meshPhi(rho, U)
)
);
for (int nonOrth=0; nonOrth<=nNonOrthCorr; nonOrth++)
{
// Store pressure for under-relaxation
p.storePrevIter();
fvScalarMatrix pEqn
(
fvm::ddt(psi, p)
+ fvm::div(phid, p)
- fvm::laplacian(rho/UEqn.A(), p)
);
if
(
// oCorr == nOuterCorr - 1
corr == nCorr - 1
&& nonOrth == nNonOrthCorr
)
{
pEqn.solve
(
mesh.solutionDict().solver(p.name() + "Final")
);
}
else
{
pEqn.solve(mesh.solutionDict().solver(p.name()));
}
if (nonOrth == nNonOrthCorr)
{
phi = pEqn.flux();
}
// Bound the pressure
if (min(p) < pMin || max(p) > pMax)
{
p.max(pMin);
p.min(pMax);
p.correctBoundaryConditions();
}
// Relax the pressure
p.relax();
}
# include "compressibleContinuityErrs.H"
U -= fvc::grad(p)/UEqn.A();
U.correctBoundaryConditions();
# include "limitU.H"
}

View file

@ -0,0 +1,14 @@
# include "readTimeControls.H"
# include "readPIMPLEControls.H"
bool correctPhi = false;
if (pimple.found("correctPhi"))
{
correctPhi = Switch(pimple.lookup("correctPhi"));
}
bool checkMeshCourantNo = false;
if (pimple.found("checkMeshCourantNo"))
{
checkMeshCourantNo = Switch(pimple.lookup("checkMeshCourantNo"));
}
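// These optional switches are read from the PIMPLE sub-dictionary of
// system/fvSolution, e.g. (illustrative values):
//     correctPhi          yes;
//     checkMeshCourantNo  yes;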

View file

@ -0,0 +1,20 @@
// Read field bounds
dictionary fieldBounds = mesh.solutionDict().subDict("fieldBounds");
// Pressure bounds
dimensionedScalar pMin("pMin", dimPressure, 0);
dimensionedScalar pMax("pMax", dimPressure, GREAT);
fieldBounds.lookup("p") >> pMin.value() >> pMax.value();
// Temperature bounds
dimensionedScalar TMin("TMin", dimTemperature, 0);
dimensionedScalar TMax("TMax", dimTemperature, GREAT);
fieldBounds.lookup("T") >> TMin.value() >> TMax.value();
// Velocity bound
dimensionedScalar UMax("UMax", dimVelocity, GREAT);
fieldBounds.lookup(U.name()) >> UMax.value();
dimensionedScalar smallU("smallU", dimVelocity, 1e-10);
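// A matching fieldBounds sub-dictionary in system/fvSolution could read,
// with illustrative values only:
//     fieldBounds
//     {
//         p   100    1e6;
//         T   100    3000;
//         U   500;
//     }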

View file

@ -22,27 +22,36 @@ License
along with foam-extend. If not, see <http://www.gnu.org/licenses/>.
Application
sonicFoamAutoMotion
sonicDyMFoam
Description
Transient solver for trans-sonic/supersonic, laminar flow of a
compressible gas with mesh motion..
Transient solver for trans-sonic/supersonic, laminar or turbulent flow of a
compressible gas, with support for mesh motion and topological changes.
Uses the flexible PIMPLE (PISO-SIMPLE) solution for time-resolved and
pseudo-transient simulations.
Updated from sonicFoamAutoMotion by Hrvoje Jasak
Author
Hrvoje Jasak, Wikki Ltd. All rights reserved.
\*---------------------------------------------------------------------------*/
#include "fvCFD.H"
#include "motionSolver.H"
#include "dynamicFvMesh.H"
#include "specie.H"
#include "basicPsiThermo.H"
#include "turbulenceModel.H"
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
int main(int argc, char *argv[])
{
# include "setRootCase.H"
# include "createTime.H"
# include "createMesh.H"
# include "readThermodynamicProperties.H"
# include "readTransportProperties.H"
# include "createDynamicFvMesh.H"
# include "createFields.H"
# include "initContinuityErrs.H"
@ -50,77 +59,77 @@ int main(int argc, char *argv[])
Info<< "\nStarting time loop\n" << endl;
autoPtr<Foam::motionSolver> motionPtr = motionSolver::New(mesh);
for (runTime++; !runTime.end(); runTime++)
while (runTime.run())
{
# include "readControls.H"
# include "readFieldBounds.H"
# include "compressibleCourantNo.H"
# include "setDeltaT.H"
runTime++;
Info<< "Time = " << runTime.timeName() << nl << endl;
# include "readPISOControls.H"
# include "compressibleCourantNo.H"
bool meshChanged = mesh.update();
mesh.movePoints(motionPtr->newPoints());
# include "volContinuity.H"
# include "rhoEqn.H"
fvVectorMatrix UEqn
(
fvm::ddt(rho, U)
+ fvm::div(phi, U)
- fvm::laplacian(mu, U)
);
solve(UEqn == -fvc::grad(p));
solve
(
fvm::ddt(rho, e)
+ fvm::div(phi, e)
- fvm::laplacian(mu, e)
==
- p*fvc::div(phi/fvc::interpolate(rho) + fvc::meshPhi(rho, U))
+ mu*magSqr(symm(fvc::grad(U)))
);
T = e/Cv;
psi = 1.0/(R*T);
// --- PISO loop
for (int corr = 0; corr < nCorr; corr++)
if (checkMeshCourantNo)
{
U = UEqn.H()/UEqn.A();
surfaceScalarField phid
(
"phid",
fvc::interpolate(psi)*
(
(fvc::interpolate(U) & mesh.Sf()) - fvc::meshPhi(rho, U)
)
);
for (int nonOrth=0; nonOrth<=nNonOrthCorr; nonOrth++)
{
fvScalarMatrix pEqn
(
fvm::ddt(psi, p)
+ fvm::div(phid, p)
- fvm::laplacian(rho/UEqn.A(), p)
);
pEqn.solve();
phi = pEqn.flux();
}
# include "compressibleContinuityErrs.H"
U -= fvc::grad(p)/UEqn.A();
U.correctBoundaryConditions();
# include "meshCourantNo.H"
}
rho = psi*p;
// Mesh motion update
if (meshChanged)
{
T.correctBoundaryConditions();
p.correctBoundaryConditions();
e.correctBoundaryConditions();
thermo.correct();
rho = thermo.rho();
if (correctPhi)
{
// # include "correctPhi.H"
}
}
if (meshChanged)
{
# include "compressibleCourantNo.H"
}
// --- PIMPLE loop
label oCorr = 0;
do
{
// Under-relax pDivU term
pDivU.storePrevIter();
pDivU =
p*fvc::div
(
phi/fvc::interpolate(rho)
+ fvc::meshPhi(rho, U)
);
pDivU.relax();
# include "rhoEqn.H"
# include "eEqn.H"
# include "UEqn.H"
// --- PISO loop
for (int corr = 0; corr < nCorr; corr++)
{
# include "pEqn.H"
}
// Recalculate density
rho = thermo.rho();
turbulence->correct();
} while (++oCorr < nOuterCorr);
runTime.write();

View file

@ -1,8 +1,8 @@
fvVectorMatrix UEqn
(
fvm::ddt(rho, U)
+ fvm::div(phi, U)
+ turbulence->divDevRhoReff(U)
+ fvm::div(phi, U)
+ turbulence->divDevRhoReff(U)
);
solve(UEqn == -fvc::grad(p));

View file

@ -1,3 +0,0 @@
sonicFoamAutoMotion.C
EXE = $(FOAM_APPBIN)/sonicFoamAutoMotion

View file

@ -1,10 +0,0 @@
EXE_INC = \
-I$(LIB_SRC)/finiteVolume/lnInclude \
-I$(LIB_SRC)/dynamicMesh/dynamicMesh/lnInclude \
-I$(LIB_SRC)/meshTools/lnInclude
EXE_LIBS = \
-lfiniteVolume \
-ldynamicMesh \
-lmeshTools \
-llduSolvers

View file

@ -1,103 +0,0 @@
Info<< "Reading field p\n" << endl;
volScalarField p
(
IOobject
(
"p",
runTime.timeName(),
mesh,
IOobject::MUST_READ,
IOobject::AUTO_WRITE
),
mesh
);
Info<< "Reading field T\n" << endl;
volScalarField T
(
IOobject
(
"T",
runTime.timeName(),
mesh,
IOobject::MUST_READ,
IOobject::AUTO_WRITE
),
mesh
);
Info<< "Calculating field e from T\n" << endl;
volScalarField e
(
IOobject
(
"e",
runTime.timeName(),
mesh,
IOobject::NO_READ,
IOobject::NO_WRITE
),
Cv*T,
T.boundaryField().types()
);
Info<< "Reading field U\n" << endl;
volVectorField U
(
IOobject
(
"U",
runTime.timeName(),
mesh,
IOobject::MUST_READ,
IOobject::AUTO_WRITE
),
mesh
);
volScalarField psi
(
IOobject
(
"psi",
runTime.timeName(),
mesh,
IOobject::NO_READ,
IOobject::NO_WRITE
),
1.0/(R*T)
);
psi.oldTime();
volScalarField rho
(
IOobject
(
"rho",
runTime.timeName(),
mesh
),
psi*p
);
# include "compressibleCreatePhi.H"
Info<< "Creating field phid\n" << endl;
surfaceScalarField phid
(
IOobject
(
"phid",
runTime.timeName(),
mesh,
IOobject::NO_READ,
IOobject::NO_WRITE
),
phi/fvc::interpolate(p),
phi.boundaryField().types()
);

View file

@ -1,23 +0,0 @@
Info<< "Reading thermodynamicProperties\n" << endl;
IOdictionary thermodynamicProperties
(
IOobject
(
"thermodynamicProperties",
runTime.constant(),
mesh,
IOobject::MUST_READ,
IOobject::NO_WRITE
)
);
dimensionedScalar R
(
thermodynamicProperties.lookup("R")
);
dimensionedScalar Cv
(
thermodynamicProperties.lookup("Cv")
);

View file

@ -2,19 +2,19 @@
dictionary fieldBounds = mesh.solutionDict().subDict("fieldBounds");
// Pressure bounds
dimensionedScalar pMin("pMin", p.dimensions(), 0);
dimensionedScalar pMax("pMax", p.dimensions(), 0);
dimensionedScalar pMin("pMin", dimPressure, 0);
dimensionedScalar pMax("pMax", dimPressure, GREAT);
fieldBounds.lookup(p.name()) >> pMin.value() >> pMax.value();
fieldBounds.lookup("p") >> pMin.value() >> pMax.value();
// Temperature bounds
dimensionedScalar TMin("TMin", T.dimensions(), 0);
dimensionedScalar TMax("TMax", T.dimensions(), 0);
dimensionedScalar TMin("TMin", dimTemperature, 0);
dimensionedScalar TMax("TMax", dimTemperature, GREAT);
fieldBounds.lookup(T.name()) >> TMin.value() >> TMax.value();
fieldBounds.lookup("T") >> TMin.value() >> TMax.value();
// Velocity bound
dimensionedScalar UMax("UMax", U.dimensions(), 0);
dimensionedScalar UMax("UMax", dimVelocity, GREAT);
fieldBounds.lookup(U.name()) >> UMax.value();
dimensionedScalar smallU("smallU", dimVelocity, 1e-10);

View file

@ -2,19 +2,19 @@
dictionary fieldBounds = mesh.solutionDict().subDict("fieldBounds");
// Pressure bounds
dimensionedScalar pMin("pMin", p.dimensions(), 0);
dimensionedScalar pMax("pMax", p.dimensions(), 0);
dimensionedScalar pMin("pMin", dimPressure, 0);
dimensionedScalar pMax("pMax", dimPressure, GREAT);
fieldBounds.lookup(p.name()) >> pMin.value() >> pMax.value();
fieldBounds.lookup("p") >> pMin.value() >> pMax.value();
// Temperature bounds
dimensionedScalar TMin("TMin", T.dimensions(), 0);
dimensionedScalar TMax("TMax", T.dimensions(), 0);
dimensionedScalar TMin("TMin", dimTemperature, 0);
dimensionedScalar TMax("TMax", dimTemperature, GREAT);
fieldBounds.lookup(T.name()) >> TMin.value() >> TMax.value();
fieldBounds.lookup("T") >> TMin.value() >> TMax.value();
// Velocity bound
dimensionedScalar UMax("UMax", U.dimensions(), 0);
dimensionedScalar UMax("UMax", dimVelocity, GREAT);
fieldBounds.lookup(U.name()) >> UMax.value();
dimensionedScalar smallU("smallU", dimVelocity, 1e-10);

View file

@ -1,19 +1,20 @@
// Read field bounds
dictionary fieldBounds = mesh.solutionDict().subDict("fieldBounds");
// Pressure bounds
dimensionedScalar pMin("pMin", p.dimensions(), 0);
dimensionedScalar pMax("pMax", p.dimensions(), 0);
dimensionedScalar pMin("pMin", dimPressure, 0);
dimensionedScalar pMax("pMax", dimPressure, GREAT);
fieldBounds.lookup(p.name()) >> pMin.value() >> pMax.value();
fieldBounds.lookup("p") >> pMin.value() >> pMax.value();
// Temperature bounds
dimensionedScalar TMin("TMin", T.dimensions(), 0);
dimensionedScalar TMax("TMax", T.dimensions(), 0);
dimensionedScalar TMin("TMin", dimTemperature, 0);
dimensionedScalar TMax("TMax", dimTemperature, GREAT);
fieldBounds.lookup(T.name()) >> TMin.value() >> TMax.value();
fieldBounds.lookup("T") >> TMin.value() >> TMax.value();
// Velocity bound
dimensionedScalar UrelMax("UrelMax", Urel.dimensions(), 0);
dimensionedScalar UrelMax("UrelMax", dimVelocity, GREAT);
fieldBounds.lookup(Urel.name()) >> UrelMax.value();
dimensionedScalar smallUrel("smallUrel", dimVelocity, 1e-10);

View file

@ -1,7 +1,8 @@
// Momentum equation
fvVectorMatrix UEqn
(
fvm::div(phi, U)
fvm::ddt(U)
+ fvm::div(phi, U)
+ turbulence->divDevReff(U)
);

View file

@ -0,0 +1,32 @@
{
// Bound the pressure
dimensionedScalar p1 = min(p);
dimensionedScalar p2 = max(p);
if (p1 < pMin || p2 > pMax)
{
Info<< "p: " << p1.value() << " " << p2.value()
<< ". Bounding." << endl;
p.max(pMin);
p.min(pMax);
p.correctBoundaryConditions();
}
// Bound the velocity
volScalarField magU = mag(U);
dimensionedScalar U1 = max(magU);
if (U1 > UMax)
{
Info<< "U: " << U1.value() << ". Bounding." << endl;
volScalarField Ulimiter = pos(magU - UMax)*UMax/(magU + smallU)
+ neg(magU - UMax);
Ulimiter.max(scalar(0));
Ulimiter.min(scalar(1));
U *= Ulimiter;
U.correctBoundaryConditions();
}
}

View file

@ -0,0 +1,8 @@
// Check convergence
if (maxResidual < convergenceCriterion)
{
Info<< "reached convergence criterion: " << convergenceCriterion << endl;
runTime.writeAndEnd();
Info<< "latestTime = " << runTime.timeName() << endl;
}

View file

@ -45,7 +45,7 @@ volVector4Field Up
runTime.timeName(),
mesh,
IOobject::NO_READ,
IOobject::AUTO_WRITE
IOobject::NO_WRITE
),
mesh,
dimensionedVector4("zero", dimless, vector4::zero)

View file

@ -0,0 +1,4 @@
// initialize values for convergence checks
scalar maxResidual = 0;
scalar convergenceCriterion = 0;

View file

@ -52,11 +52,14 @@ int main(int argc, char *argv[])
# include "createMesh.H"
# include "createFields.H"
# include "initContinuityErrs.H"
# include "readBlockSolverControls.H"
# include "initConvergenceCheck.H"
Info<< "\nStarting time loop\n" << endl;
while (runTime.loop())
{
# include "readBlockSolverControls.H"
# include "readFieldBounds.H"
Info<< "Time = " << runTime.timeName() << nl << endl;
p.storePrevIter();
@ -74,7 +77,7 @@ int main(int argc, char *argv[])
# include "couplingTerms.H"
// Solve the block matrix
UpEqn.solve();
maxResidual = cmptMax(UpEqn.solve().initialResidual());
// Retrieve solution
UpEqn.retrieveSolution(0, U.internalField());
@ -87,6 +90,8 @@ int main(int argc, char *argv[])
# include "continuityErrs.H"
# include "boundPU.H"
p.relax();
turbulence->correct();
@ -95,6 +100,8 @@ int main(int argc, char *argv[])
Info<< "ExecutionTime = " << runTime.elapsedCpuTime() << " s"
<< " ClockTime = " << runTime.elapsedClockTime() << " s"
<< nl << endl;
# include "convergenceCheck.H"
}
Info<< "End\n" << endl;

View file

@ -1,3 +1,15 @@
label pRefCell = 0;
scalar pRefValue = 0;
setRefCell(p, mesh.solutionDict().subDict("blockSolver"), pRefCell, pRefValue);
label pRefCell = 0;
scalar pRefValue = 0;
setRefCell
(
p,
mesh.solutionDict().subDict("blockSolver"),
pRefCell,
pRefValue
);
mesh.solutionDict().subDict("blockSolver").readIfPresent
(
"convergence",
convergenceCriterion
);
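// The blockSolver sub-dictionary in system/fvSolution might then contain,
// with illustrative values:
//     blockSolver
//     {
//         pRefCell      0;
//         pRefValue     0;
//         convergence   1e-6;
//     }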

View file

@ -0,0 +1,14 @@
// Read field bounds
dictionary fieldBounds = mesh.solutionDict().subDict("fieldBounds");
// Pressure bounds
dimensionedScalar pMin("pMin", p.dimensions(), 0);
dimensionedScalar pMax("pMax", p.dimensions(), 0);
fieldBounds.lookup(p.name()) >> pMin.value() >> pMax.value();
// Velocity bound
dimensionedScalar UMax("UMax", U.dimensions(), 0);
fieldBounds.lookup(U.name()) >> UMax.value();
dimensionedScalar smallU("smallU", dimVelocity, 1e-10);

View file

@ -43,7 +43,7 @@ fvVectorMatrix UbEqn(Ub, Ub.dimensions()*dimVol/dimTime);
UbEqn =
(
(scalar(1) + Cvm*rhob*alpha/rhob)*
(scalar(1) + Cvm*rhoa*alpha/rhob)*
(
fvm::ddt(Ub)
+ fvm::div(phib, Ub, "div(phib,Ub)")
@ -61,7 +61,7 @@ fvVectorMatrix UbEqn(Ub, Ub.dimensions()*dimVol/dimTime);
// g // Buoyancy term transferred to p-equation
- fvm::Sp(alpha/rhob*dragCoef, Ub)
//+ alpha/rhob*dragCoef*Ua // Explicit drag transferred to p-equation
+ alpha/rhob*(liftCoeff + Cvm*rhob*DDtUa)
+ alpha/rhob*(liftCoeff + Cvm*rhoa*DDtUa)
);
UbEqn.relax();

View file

@ -35,7 +35,7 @@ Description
#include "wallFvPatch.H"
#include "EulerDdtScheme.H"
#include "CrankNicholsonDdtScheme.H"
#include "CrankNicolsonDdtScheme.H"
#include "backwardDdtScheme.H"
#include "tetFemMatrices.H"
@ -480,7 +480,7 @@ bool freeSurface::movePoints(const scalarField& interfacePhi)
if
(
ddtScheme
== fv::CrankNicholsonDdtScheme<vector>::typeName
== fv::CrankNicolsonDdtScheme<vector>::typeName
)
{
sweptVolCorr *= (1.0/2.0)*DB().deltaT().value();
@ -890,7 +890,7 @@ bool freeSurface::moveMeshPoints()
if
(
ddtScheme
== fv::CrankNicholsonDdtScheme<vector>::typeName
== fv::CrankNicolsonDdtScheme<vector>::typeName
)
{
sweptVolCorr *= (1.0/2.0)*DB().deltaT().value();

View file

@ -0,0 +1,166 @@
/*---------------------------------------------------------------------------*\
========= |
\\ / F ield | cfMesh: A library for mesh generation
\\ / O peration |
\\ / A nd | Author: Franjo Juretic (franjo.juretic@c-fields.com)
\\/ M anipulation | Copyright (C) Creative Fields, Ltd.
-------------------------------------------------------------------------------
License
This file is part of cfMesh.
cfMesh is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3 of the License, or (at your
option) any later version.
cfMesh is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
for more details.
You should have received a copy of the GNU General Public License
along with cfMesh. If not, see <http://www.gnu.org/licenses/>.
Description
Creates surface patches from surface subsets
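    A typical invocation (file names are illustrative) could be
        FMSToSurface input.fms output.stl -exportSubsets -exportFeatureEdges
    which also writes each facet subset as output_facetSubset_<name>.stl and
    the feature edges as output_featureEdges.vtk.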
\*---------------------------------------------------------------------------*/
#include "argList.H"
#include "Time.H"
#include "triSurf.H"
#include "triSurfaceCopyParts.H"
#include "demandDrivenData.H"
#include "OFstream.H"
using namespace Foam;
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
void exportFeatureEdges
(
const triSurf& origSurf,
const fileName& edgeFileName
)
{
OFstream file(edgeFileName);
const pointField& points = origSurf.points();
labelList newPointLabel(points.size(), -1);
label nPoints(0);
const edgeLongList& featureEdges = origSurf.featureEdges();
forAll(featureEdges, feI)
{
const edge& e = featureEdges[feI];
if( newPointLabel[e[0]] == -1 )
newPointLabel[e[0]] = nPoints++;
if( newPointLabel[e[1]] == -1 )
newPointLabel[e[1]] = nPoints++;
}
pointField pCopy(nPoints);
forAll(newPointLabel, pI)
{
if( newPointLabel[pI] < 0 )
continue;
pCopy[newPointLabel[pI]] = points[pI];
}
//- write the header
file << "# vtk DataFile Version 3.0\n";
file << "vtk output\n";
file << "ASCII\n";
file << "DATASET POLYDATA\n";
//- write points
file << "POINTS " << pCopy.size() << " float\n";
forAll(pCopy, pI)
{
const point& p = pCopy[pI];
file << p.x() << ' ' << p.y() << ' ' << p.z() << '\n';
}
file << "\nLINES " << featureEdges.size()
<< ' ' << 3*featureEdges.size() << nl;
forAll(featureEdges, edgeI)
{
const edge& e = featureEdges[edgeI];
file << "2 " << newPointLabel[e[0]]
<< token::SPACE << newPointLabel[e[1]] << nl;
}
file << nl;
if( !file )
FatalErrorIn
(
"void exportFeatureEdges(const triSurf&, const fileName&)"
) << "Writting of feature edges failed!" << exit(FatalError);
}
int main(int argc, char *argv[])
{
argList::noParallel();
argList::validArgs.clear();
argList::validArgs.append("input surface file");
argList::validArgs.append("output surface file");
argList::validOptions.insert("exportSubsets", "");
argList::validOptions.insert("exportFeatureEdges", "");
argList args(argc, argv);
fileName inFileName(args.args()[1]);
fileName outFileName(args.args()[2]);
fileName outFileNoExt = outFileName.lessExt();
fileName outExtension = outFileName.ext();
Info << "Out file no ext " << outFileNoExt << endl;
Info << "Extension " << outExtension << endl;
//- read the input surface
triSurf origSurf(inFileName);
//- write the surface in the requested format
origSurf.writeSurface(outFileName);
//- export surface subsets as separate surface meshes
if( args.options().found("exportSubsets") )
{
DynList<label> subsetIDs;
origSurf.facetSubsetIndices(subsetIDs);
triSurfaceCopyParts copyParts(origSurf);
forAll(subsetIDs, subsetI)
{
//- get the name of the subset
triSurf copySurf;
wordList subsetName(1);
subsetName[0] = origSurf.facetSubsetName(subsetIDs[subsetI]);
//- create a surface mesh corresponding to the subset
copyParts.copySurface(subsetName, copySurf);
//- write the mesh on disk
fileName fName = outFileNoExt+"_facetSubset_"+subsetName[0];
fName += '.'+outExtension;
copySurf.writeSurface(fName);
}
}
if( args.options().found("exportFeatureEdges") )
{
fileName fName = outFileNoExt+"_featureEdges";
fName += ".vtk";
exportFeatureEdges(origSurf, fName);
}
Info << "End\n" << endl;
return 0;
}
// ************************************************************************* //

View file

@ -0,0 +1,3 @@
FMSToSurface.C
EXE = $(FOAM_APPBIN)/FMSToSurface

View file

@ -0,0 +1,9 @@
EXE_INC = \
-I$(FOAM_SRC)/mesh/cfMesh/meshLibrary/lnInclude \
-I$(FOAM_SRC)/meshTools/lnInclude \
-I$(FOAM_SRC)/triSurface/lnInclude
EXE_LIBS = \
-ltriSurface \
-lmeshTools \
-lmeshLibrary

View file

@ -0,0 +1,545 @@
/*---------------------------------------------------------------------------*\
========= |
\\ / F ield | cfMesh: A library for mesh generation
\\ / O peration |
\\ / A nd | Author: Franjo Juretic (franjo.juretic@c-fields.com)
\\/ M anipulation | Copyright (C) Creative Fields, Ltd.
-------------------------------------------------------------------------------
License
This file is part of cfMesh.
cfMesh is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3 of the License, or (at your
option) any later version.
cfMesh is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
for more details.
You should have received a copy of the GNU General Public License
along with cfMesh. If not, see <http://www.gnu.org/licenses/>.
Description
cfMesh utility to convert a surface file to VTK multiblock dataset
format, including the patches, feature edges and surface features.
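    A typical invocation (file names are illustrative) could be
        FMSToVTK input.fms outputPrefix
    which writes outputPrefix.vtm plus per-patch and per-subset .vtp files
    under the outputPrefix/ directory tree.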
Author
Ivor Clifford <ivor.clifford@psi.ch>
\*---------------------------------------------------------------------------*/
#include "argList.H"
#include "autoPtr.H"
#include "OFstream.H"
#include "triSurf.H"
#include "triSurfModifier.H"
#include "xmlTag.H"
using namespace Foam;
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
//- Write the supplied pointList in serial vtkPolyData format
void writePointsToVTK
(
const fileName& fn,
const string& title,
const UList<point>& points
)
{
xmlTag xmlRoot("VTKFile");
xmlRoot.addAttribute("type", "PolyData");
xmlTag& xmlPolyData = xmlRoot.addChild("PolyData");
xmlTag& xmlPiece = xmlPolyData.addChild("Piece");
xmlPiece.addAttribute("NumberOfPoints", points.size());
xmlTag& xmlPoints = xmlPiece.addChild("Points");
xmlTag& xmlPointData = xmlPoints.addChild("DataArray");
xmlPointData.addAttribute("type", "Float32");
xmlPointData.addAttribute("NumberOfComponents", 3);
xmlPointData.addAttribute("format", "ascii");
xmlPointData << points;
OFstream os(fn);
os << xmlRoot << endl;
Info << "Created " << fn << endl;
}
//- Write the supplied addressed pointList in serial vtkPolyData format
void writePointsToVTK
(
const fileName& fn,
const string& title,
const UList<point>& points,
unallocLabelList& addr
)
{
// Create subaddressed points
pointField newPoints(addr.size());
forAll(addr, i)
{
newPoints[i] = points[addr[i]];
}
writePointsToVTK
(
fn,
title,
newPoints
);
}
//- Write the supplied edgeList in serial vtkPolyData format
void writeEdgesToVTK
(
const fileName& fn,
const string& title,
const UList<point>& points,
const LongList<edge>& edges
)
{
labelList connectivity(edges.size());
forAll(edges, edgeI)
{
connectivity[edgeI] = 2*(edgeI+1);
}
xmlTag xmlRoot("VTKFile");
xmlRoot.addAttribute("type", "PolyData");
xmlTag& xmlPolyData = xmlRoot.addChild("PolyData");
xmlTag& xmlPiece = xmlPolyData.addChild("Piece");
xmlPiece.addAttribute("NumberOfPoints", points.size());
xmlPiece.addAttribute("NumberOfLines", edges.size());
xmlTag& xmlPoints = xmlPiece.addChild("Points");
xmlTag& xmlPointData = xmlPoints.addChild("DataArray");
xmlPointData.addAttribute("type", "Float32");
xmlPointData.addAttribute("NumberOfComponents", 3);
xmlPointData.addAttribute("format", "ascii");
xmlPointData << points;
xmlTag& xmlEdges = xmlPiece.addChild("Lines");
xmlTag& xmlEdgeData = xmlEdges.addChild("DataArray");
xmlEdgeData.addAttribute("type", "Int32");
xmlEdgeData.addAttribute("Name", "connectivity");
xmlEdgeData.addAttribute("format", "ascii");
xmlEdgeData << edges;
xmlTag& xmlConnectData = xmlEdges.addChild("DataArray");
xmlConnectData.addAttribute("type", "Int32");
xmlConnectData.addAttribute("Name", "offsets");
xmlConnectData.addAttribute("format", "ascii");
xmlConnectData << connectivity;
OFstream os(fn);
os << xmlRoot << endl;
Info << "Created " << fn << endl;
}
//- Write the supplied edgeList subset in serial vtkPolyData format
void writeEdgesToVTK
(
const fileName& fn,
const string& title,
const UList<point>& points,
const LongList<edge>& edges,
const unallocLabelList& addr
)
{
// Remove unused points and create subaddressed edges
DynamicList<point> newPoints;
labelList newPointAddr(points.size(), -1);
LongList<edge> newEdges(addr.size());
forAll(addr, addrI)
{
label edgeI = addr[addrI];
const edge& curEdge = edges[edgeI];
edge& newEdge = newEdges[addrI];
forAll(curEdge, i)
{
label pointId = curEdge[i];
if (newPointAddr[pointId] == -1)
{
newPoints.append(points[pointId]);
newPointAddr[pointId] = newPoints.size()-1;
}
newEdge[i] = newPointAddr[pointId];
}
}
writeEdgesToVTK
(
fn,
title,
newPoints,
newEdges
);
}
//- Write the supplied facet list in serial vtkPolyData format
void writeFacetsToVTK
(
const fileName& fn,
const string& title,
const UList<point>& points,
const LongList<labelledTri>& facets
)
{
labelList connectivity(facets.size());
forAll(facets, faceI)
{
connectivity[faceI] = 3*(faceI+1);
}
labelList regionData(facets.size());
forAll(facets, faceI)
{
regionData[faceI] = facets[faceI].region();
}
xmlTag xmlRoot("VTKFile");
xmlRoot.addAttribute("type", "PolyData");
xmlTag& xmlPolyData = xmlRoot.addChild("PolyData");
xmlTag& xmlPiece = xmlPolyData.addChild("Piece");
xmlPiece.addAttribute("NumberOfPoints", points.size());
xmlPiece.addAttribute("NumberOfPolys", facets.size());
xmlTag& xmlPoints = xmlPiece.addChild("Points");
xmlTag& xmlPointData = xmlPoints.addChild("DataArray");
xmlPointData.addAttribute("type", "Float32");
xmlPointData.addAttribute("NumberOfComponents", 3);
xmlPointData.addAttribute("format", "ascii");
xmlPointData << points;
xmlTag& xmlPolys = xmlPiece.addChild("Polys");
xmlTag& xmlPolyDataArray = xmlPolys.addChild("DataArray");
xmlPolyDataArray.addAttribute("type", "Int32");
xmlPolyDataArray.addAttribute("Name", "connectivity");
xmlPolyDataArray.addAttribute("format", "ascii");
xmlPolyDataArray << facets;
xmlTag& xmlConnectData = xmlPolys.addChild("DataArray");
xmlConnectData.addAttribute("type", "Int32");
xmlConnectData.addAttribute("Name", "offsets");
xmlConnectData.addAttribute("format", "ascii");
xmlConnectData << connectivity;
xmlTag& xmlCellData = xmlPiece.addChild("CellData");
xmlTag& xmlCellDataArray = xmlCellData.addChild("DataArray");
xmlCellDataArray.addAttribute("type", "Int32");
xmlCellDataArray.addAttribute("Name", "region");
xmlCellDataArray.addAttribute("format", "ascii");
xmlCellDataArray << regionData;
OFstream os(fn);
os << xmlRoot << endl;
Info << "Created " << fn << endl;
}
//- Write an addressed subset of the supplied facet list
//- in serial vtkPolyData format
void writeFacetsToVTK
(
const fileName& fn,
const string& title,
const pointField& points,
const LongList<labelledTri>& facets,
const unallocLabelList& addr
)
{
// Remove unused points and create subaddressed facets
DynamicList<point> newPoints;
labelList newPointAddr(points.size(), -1);
LongList<labelledTri> newFacets(addr.size());
forAll(addr, addrI)
{
label faceI = addr[addrI];
const labelledTri& facet = facets[faceI];
const FixedList<label, 3>& pointIds = facet;
FixedList<label, 3> newPointIds;
forAll(pointIds, i)
{
label pointId = pointIds[i];
if (newPointAddr[pointId] == -1)
{
newPoints.append(points[pointId]);
newPointAddr[pointId] = newPoints.size()-1;
}
newPointIds[i] = newPointAddr[pointId];
}
newFacets[addrI] = labelledTri
(
newPointIds[0],
newPointIds[1],
newPointIds[2],
facet.region()
);
}
writeFacetsToVTK
(
fn,
title,
newPoints,
newFacets
);
}
int main(int argc, char *argv[])
{
argList::noParallel();
argList::validArgs.clear();
argList::validArgs.append("input surface file");
argList::validArgs.append("output prefix");
argList args(argc, argv);
// Process commandline arguments
fileName inFileName(args.args()[1]);
fileName outPrefix(args.args()[2]);
// Read original surface
triSurf origSurf(inFileName);
const pointField& points = origSurf.points();
const LongList<labelledTri>& facets = origSurf.facets();
const LongList<edge>& edges = origSurf.featureEdges();
const geometricSurfacePatchList& patches = origSurf.patches();
label index = 0;
// Create file structure for multiblock dataset
mkDir(outPrefix);
mkDir(outPrefix + "/patches");
mkDir(outPrefix + "/pointSubsets");
mkDir(outPrefix + "/edgeSubsets");
mkDir(outPrefix + "/faceSubsets");
// Create VTK multiblock dataset file
xmlTag xmlRoot("VTKFile");
xmlRoot.addAttribute("type", "vtkMultiBlockDataSet");
xmlRoot.addAttribute("version", "1.0");
xmlRoot.addAttribute("byte_order", "LittleEndian");
xmlTag& xmlDataSet = xmlRoot.addChild("vtkMultiBlockDataSet");
// Write faces and feature edges
{
fileName fn = outPrefix / "facets.vtp";
writeFacetsToVTK
(
outPrefix / "facets.vtp",
outPrefix,
points,
facets
);
xmlTag& tag = xmlDataSet.addChild("DataSet");
tag.addAttribute("index", Foam::name(index++));
tag.addAttribute("name", "facets");
tag.addAttribute("file", fn);
}
{
fileName fn = outPrefix / "featureEdges.vtp";
writeEdgesToVTK
(
outPrefix / "featureEdges.vtp",
"featureEdges",
points,
edges
);
xmlTag& tag = xmlDataSet.addChild("DataSet");
tag.addAttribute("index", Foam::name(index++));
tag.addAttribute("name", "featureEdges");
tag.addAttribute("file", fn);
}
// Write patches
// Create patch addressing
List<DynamicList<label> > patchAddr(patches.size());
forAll(facets, faceI)
{
patchAddr[facets[faceI].region()].append(faceI);
}
{
xmlTag& xmlBlock = xmlDataSet.addChild("Block");
xmlBlock.addAttribute("index", Foam::name(index++));
xmlBlock.addAttribute("name", "patches");
forAll(patches, patchI)
{
word patchName = patches[patchI].name();
fileName fn = outPrefix / "patches" / patchName + ".vtp";
writeFacetsToVTK
(
fn,
patchName,
points,
facets,
patchAddr[patchI]
);
xmlTag& tag = xmlBlock.addChild("DataSet");
tag.addAttribute("index", Foam::name(patchI));
tag.addAttribute("name", patchName);
tag.addAttribute("file", fn);
}
}
// Write point subsets
{
xmlTag& xmlBlock = xmlDataSet.addChild("Block");
xmlBlock.addAttribute("index", Foam::name(index++));
xmlBlock.addAttribute("name", "pointSubsets");
DynList<label> subsetIndices;
labelList subsetAddr;
origSurf.pointSubsetIndices(subsetIndices);
forAll(subsetIndices, id)
{
word subsetName = origSurf.pointSubsetName(id);
origSurf.pointsInSubset(id, subsetAddr);
fileName fn = outPrefix / "pointSubsets" / subsetName + ".vtp";
writePointsToVTK
(
fn,
subsetName,
points,
subsetAddr
);
xmlTag& tag = xmlBlock.addChild("DataSet");
tag.addAttribute("index", Foam::name(id));
tag.addAttribute("name", subsetName);
tag.addAttribute("file", fn);
}
}
// Write edge subsets
{
xmlTag& xmlBlock = xmlDataSet.addChild("Block");
xmlBlock.addAttribute("index", Foam::name(index++));
xmlBlock.addAttribute("name", "edgeSubsets");
DynList<label> subsetIndices;
labelList subsetAddr;
origSurf.edgeSubsetIndices(subsetIndices);
forAll(subsetIndices, id)
{
word subsetName = origSurf.edgeSubsetName(id);
origSurf.edgesInSubset(id, subsetAddr);
fileName fn = outPrefix / "edgeSubsets" / subsetName + ".vtp";
writeEdgesToVTK
(
fn,
subsetName,
points,
edges,
subsetAddr
);
xmlTag& tag = xmlBlock.addChild("DataSet");
tag.addAttribute("index", Foam::name(id));
tag.addAttribute("name", subsetName);
tag.addAttribute("file", fn);
}
}
// Write facet subsets
{
xmlTag& xmlBlock = xmlDataSet.addChild("Block");
xmlBlock.addAttribute("index", Foam::name(index++));
xmlBlock.addAttribute("name", "faceSubsets");
DynList<label> subsetIndices;
labelList subsetAddr;
origSurf.facetSubsetIndices(subsetIndices);
forAll(subsetIndices, id)
{
word subsetName = origSurf.facetSubsetName(id);
origSurf.facetsInSubset(id, subsetAddr);
fileName fn = outPrefix / "faceSubsets" / subsetName + ".vtp";
writeFacetsToVTK
(
fn,
subsetName,
points,
facets,
subsetAddr
);
xmlTag& tag = xmlBlock.addChild("DataSet");
tag.addAttribute("index", Foam::name(id));
tag.addAttribute("name", subsetName);
tag.addAttribute("file", fn);
}
}
OFstream os(outPrefix + ".vtm");
os << xmlRoot << endl;
Info << "Created " << outPrefix + ".vtm" << endl;
Info << "End\n" << endl;
return 0;
}
// ************************************************************************* //
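For orientation, the files produced by the code above (paths taken directly from the fileName constructions and mkDir calls; <outPrefix> stands for the user-supplied output prefix) are laid out as follows:

<outPrefix>.vtm                               multiblock index referencing the files below
<outPrefix>/facets.vtp                        all facets
<outPrefix>/featureEdges.vtp                  all feature edges
<outPrefix>/patches/<patchName>.vtp           one file per patch
<outPrefix>/pointSubsets/<subsetName>.vtp     one file per point subset
<outPrefix>/edgeSubsets/<subsetName>.vtp      one file per edge subset
<outPrefix>/faceSubsets/<subsetName>.vtp      one file per facet subset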

View file

@ -0,0 +1,3 @@
FMSToVTK.C
EXE = $(FOAM_APPBIN)/FMSToVTK

View file

@ -0,0 +1,9 @@
EXE_INC = \
-I$(FOAM_SRC)/mesh/cfMesh/meshLibrary/lnInclude \
-I$(FOAM_SRC)/meshTools/lnInclude \
-I$(FOAM_SRC)/triSurface/lnInclude
EXE_LIBS = \
-ltriSurface \
-lmeshLibrary \
-lmeshTools

View file

@ -0,0 +1,297 @@
/*---------------------------------------------------------------------------*\
========= |
\\ / F ield | foam-extend: Open Source CFD
\\ / O peration |
\\ / A nd | For copyright notice see file Copyright
\\/ M anipulation |
-------------------------------------------------------------------------------
License
This file is part of foam-extend.
foam-extend is free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation, either version 3 of the License, or (at your
option) any later version.
foam-extend is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with foam-extend. If not, see <http://www.gnu.org/licenses/>.
Class
Foam::xmlTag
Description
Simple XML tag class allowing child tags and attributes. Specialized
output stream operators are provided to display or write the XML
structure.
Author
Ivor Clifford <ivor.clifford@psi.ch>
\*---------------------------------------------------------------------------*/
#ifndef XMLtag_H
#define XMLtag_H
#include "OStringStream.H"
#include "HashTable.H"
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
namespace Foam
{
/*---------------------------------------------------------------------------*\
Class xmlTag Declaration
\*---------------------------------------------------------------------------*/
class xmlTag
:
public OStringStream
{
// Private data
//- Tag name
word name_;
//- Attributes
HashTable<string> attributes_;
//- Child tags
DynamicList<xmlTag> children_;
public:
// Constructors
//- Null constructor
xmlTag()
:
OStringStream(),
name_("unknown"),
attributes_(),
children_()
{}
//- Construct given tag name
xmlTag(const word& name)
:
OStringStream(),
name_(name),
attributes_(),
children_()
{}
//- Construct as copy
xmlTag(const xmlTag& tag)
:
OStringStream(tag),
name_(tag.name_),
attributes_(tag.attributes_),
children_(tag.children_)
{}
//- Destructor
~xmlTag()
{}
// Member Functions
//- Add an attribute
template<class T>
void addAttribute(const keyType& key, const T& value)
{
OStringStream os;
os << value;
attributes_.insert(key, os.str());
};
//- Add a fileName attribute
void addAttribute(const keyType& key, const fileName& value)
{
attributes_.insert(key, value);
};
//- Add a string attribute
void addAttribute(const keyType& key, const string& value)
{
attributes_.insert(key, value);
};
//- Add a word attribute
void addAttribute(const keyType& key, const word& value)
{
attributes_.insert(key, value);
};
//- Add a child
xmlTag& addChild(const xmlTag& tag)
{
children_.append(tag);
return children_[children_.size()-1];
};
//- Create and add a new child
xmlTag& addChild(const word& name)
{
return addChild(xmlTag(name));
}
// Member Operators
void operator=(const xmlTag& tag)
{
name_ = tag.name_;
attributes_ = tag.attributes_;
children_ = tag.children_;
OStringStream::rewind();
Foam::operator<<(*this, tag.str().c_str());
};
// Friend IOstream Operators
friend Ostream& operator<<(Ostream&, const xmlTag&);
template<class Form, class Cmpt, int nCmpt>
friend xmlTag& operator<<(xmlTag&, const VectorSpace<Form, Cmpt, nCmpt>&);
friend xmlTag& operator<<(xmlTag&, const labelledTri&);
template<class T, unsigned Size>
friend xmlTag& operator<<(xmlTag&, const FixedList<T, Size>&);
template<class T>
friend xmlTag& operator<<(xmlTag&, const LongList<T>&);
template<class T>
friend xmlTag& operator<<(xmlTag&, const UList<T>&);
};
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
//- Write the tag in XML format to the supplied output stream
Ostream& operator<<(Ostream& os, const xmlTag& tag)
{
// Tag name
os << indent << '<' << tag.name_;
// Attributes and text
for
(
HashTable<string>::const_iterator iter = tag.attributes_.cbegin();
iter != tag.attributes_.cend();
++iter
)
{
os << token::SPACE << iter.key() << '=' << iter();
}
if (tag.str().size() || tag.children_.size())
{
os << '>' << nl;
// Children
os.incrIndent();
forAll(tag.children_, i)
{
os << tag.children_[i];
}
os.decrIndent();
// Tag text
os << tag.str().c_str();
// Close tag
os << indent << "</" << tag.name_ << '>' << endl;
}
else
{
// Empty element tag
os << "/>" << endl;
}
return os;
}
//- Append the supplied data to the tag text
template<class T>
xmlTag& operator<<(xmlTag& tag, const UList<T>& data)
{
forAll(data, i)
{
tag << data[i] << token::SPACE;
}
tag << nl;
return tag;
}
//- Append the supplied data to the tag text
template<class T>
xmlTag& operator<<(xmlTag& tag, const LongList<T>& data)
{
forAll(data, i)
{
tag << data[i] << token::SPACE;
}
tag << nl;
return tag;
}
//- Append the supplied data to the tag text
template<class Form, class Cmpt, int nCmpt>
xmlTag& operator<<(xmlTag& tag, const VectorSpace<Form, Cmpt, nCmpt>& data)
{
forAll(data, i)
{
tag << data[i] << token::SPACE;
}
tag << nl;
return tag;
}
//- Append the supplied data to the tag text
template<class T, unsigned Size>
xmlTag& operator<<(xmlTag& tag, const FixedList<T, Size>& data)
{
forAll(data, i)
{
tag << data[i] << token::SPACE;
}
tag << nl;
return tag;
}
//- Append the supplied data to the tag text
xmlTag& operator<<(xmlTag& tag, const labelledTri& data)
{
const triFace& tFace = data;
return tag << tFace;
}
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
} // End namespace Foam
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
#endif
// ************************************************************************* //
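For reference, a minimal sketch of how the xmlTag class above can be driven (the header name, file name, attribute values and the small point field are assumptions made for illustration), mirroring the way FMSToVTK.C builds its multiblock file:

#include "xmlTag.H"     // assumed header name for the class above
#include "OFstream.H"
#include "pointField.H"

using namespace Foam;

int main()
{
    // Build a small tag tree
    xmlTag root("VTKFile");
    root.addAttribute("type", "PolyData");
    root.addAttribute("version", "1.0");

    // addChild returns a reference to the stored child,
    // so it can be populated in place
    xmlTag& piece = root.addChild("Piece");
    piece.addAttribute("NumberOfPoints", 3);

    // Streaming a list into a tag appends the values to the tag text
    pointField pts(3, vector::zero);
    xmlTag& data = piece.addChild("DataArray");
    data.addAttribute("Name", "Points");
    data << pts;

    // Writing the root tag serialises the whole tree as XML
    OFstream os(fileName("example.vtp"));
    os << root << endl;

    return 0;
}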

View file

@ -0,0 +1,3 @@
importSurfaceAsSubset.C
EXE = $(FOAM_APPBIN)/importSurfaceAsSubset

View file

@ -0,0 +1,9 @@
EXE_INC = \
-I$(FOAM_SRC)/mesh/cfMesh/meshLibrary/lnInclude \
-I$(LIB_SRC)/triSurface/lnInclude \
-I$(LIB_SRC)/meshTools/lnInclude
EXE_LIBS = \
-ltriSurface \
-lmeshLibrary \
-lmeshTools

View file

@ -0,0 +1,84 @@
/*---------------------------------------------------------------------------*\
========= |
\\ / F ield | cfMesh: A library for mesh generation
\\ / O peration |
\\ / A nd | Author: Franjo Juretic (franjo.juretic@c-fields.com)
\\/ M anipulation | Copyright (C) Creative Fields, Ltd.
-------------------------------------------------------------------------------
License
This file is part of cfMesh.
cfMesh is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3 of the License, or (at your
option) any later version.
cfMesh is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
for more details.
You should have received a copy of the GNU General Public License
along with cfMesh. If not, see <http://www.gnu.org/licenses/>.
Description
Imports the supplied surface into the master surface as a facet subset.
\*---------------------------------------------------------------------------*/
#include "argList.H"
#include "IFstream.H"
#include "fileName.H"
#include "triSurf.H"
#include "OFstream.H"
#include "OSspecific.H"
#include "demandDrivenData.H"
#include "triSurfaceImportSurfaceAsSubset.H"
#include <cstdlib>
#include <sstream>
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
// Main program:
using namespace Foam;
int main(int argc, char *argv[])
{
argList::noParallel();
argList::validArgs.clear();
argList::validArgs.append("master surface file");
argList::validArgs.append("import surface file");
argList args(argc, argv);
fileName inFileName(args.args()[1]);
fileName importFileName(args.args()[2]);
triSurf originalSurface(inFileName);
triSurf importedSurface(importFileName);
triSurfaceImportSurfaceAsSubset importSurf(originalSurface);
importSurf.addSurfaceAsSubset(importedSurface, importFileName.lessExt());
if( inFileName.ext() == "fms" )
{
originalSurface.writeSurface(inFileName);
}
else
{
fileName newName = inFileName.lessExt();
newName.append(".fms");
Warning << "Writting surface as " << newName
<< " to preserve the subset!!" << endl;
originalSurface.writeSurface(newName);
}
Info << "End\n" << endl;
return 0;
}
// ************************************************************************* //

View file

@ -0,0 +1,3 @@
improveSymmetryPlanes.C
EXE = $(FOAM_APPBIN)/improveSymmetryPlanes

View file

@ -0,0 +1,7 @@
EXE_INC = \
-I$(FOAM_SRC)/mesh/cfMesh/meshLibrary/lnInclude \
-I$(LIB_SRC)/meshTools/lnInclude
EXE_LIBS = \
-lmeshTools \
-lmeshLibrary

View file

@ -0,0 +1,57 @@
/*---------------------------------------------------------------------------*\
========= |
\\ / F ield | cfMesh: A library for mesh generation
\\ / O peration |
\\ / A nd | Author: Franjo Juretic (franjo.juretic@c-fields.com)
\\/ M anipulation | Copyright (C) Creative Fields, Ltd.
-------------------------------------------------------------------------------
License
This file is part of cfMesh.
cfMesh is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3 of the License, or (at your
option) any later version.
cfMesh is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
for more details.
You should have received a copy of the GNU General Public License
along with cfMesh. If not, see <http://www.gnu.org/licenses/>.
Description
Ensures that all mesh points belonging to a symmetryPlane are
in a plane.
\*---------------------------------------------------------------------------*/
#include "argList.H"
#include "Time.H"
#include "polyMeshGenModifier.H"
#include "symmetryPlaneOptimisation.H"
using namespace Foam;
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
int main(int argc, char *argv[])
{
# include "setRootCase.H"
# include "createTime.H"
polyMeshGen pmg(runTime);
pmg.read();
Info << "Starting optimisation of symmetry planes" << endl;
symmetryPlaneOptimisation(pmg).optimizeSymmetryPlanes();
Info << "Writing mesh" << endl;
pmg.write();
Info << "End\n" << endl;
return 0;
}
// ************************************************************************* //

View file

@ -0,0 +1,3 @@
mergeSurfacePatches.C
EXE = $(FOAM_APPBIN)/mergeSurfacePatches

View file

@ -0,0 +1,9 @@
EXE_INC = \
-I$(FOAM_SRC)/mesh/cfMesh/meshLibrary/lnInclude \
-I$(FOAM_SRC)/meshTools/lnInclude \
-I$(FOAM_SRC)/triSurface/lnInclude
EXE_LIBS = \
-ltriSurface \
-lmeshLibrary \
-lmeshTools

View file

@ -0,0 +1,403 @@
/*---------------------------------------------------------------------------*\
========= |
\\ / F ield | cfMesh: A library for mesh generation
\\ / O peration |
\\ / A nd | Author: Franjo Juretic (franjo.juretic@c-fields.com)
\\/ M anipulation | Copyright (C) Creative Fields, Ltd.
-------------------------------------------------------------------------------
License
This file is part of cfMesh.
cfMesh is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3 of the License, or (at your
option) any later version.
cfMesh is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
for more details.
You should have received a copy of the GNU General Public License
along with cfMesh. If not, see <http://www.gnu.org/licenses/>.
Description
cfMesh utility to merge the supplied list of patches onto a single
patch.
Author
Ivor Clifford <ivor.clifford@psi.ch>
\*---------------------------------------------------------------------------*/
#include "argList.H"
#include "autoPtr.H"
#include "Time.H"
#include "triSurf.H"
#include "triSurfModifier.H"
#include "demandDrivenData.H"
#include "Pair.H"
using namespace Foam;
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
// Find the supplied list of patch names and return a list of patch Ids
void getPatchIds
(
const triSurf& origSurf,
const wordList& patchNames,
DynamicList<label>& patchIds
)
{
const geometricSurfacePatchList& origPatches = origSurf.patches();
// Create patch name map
HashSet<word> patchNameHash(patchNames);
// Find selected patches
label nFound = 0;
forAll(origPatches, patchI)
{
if (patchNameHash.found(origPatches[patchI].name()))
{
patchIds.append(patchI);
nFound++;
}
}
if (nFound != patchNames.size())
{
WarningIn("getPatchIds")
<< "Not all supplied patch names were found on the surface mesh" << endl;
}
}
// Copy all face subsets from one triSurf to another
void copyFaceSubsets
(
const triSurf& origSurf,
triSurf& newSurf
)
{
DynList<label> subsetIds;
origSurf.facetSubsetIndices(subsetIds);
forAll(subsetIds, subsetI)
{
label newSubsetId = newSurf.addFacetSubset
(
origSurf.facetSubsetName(subsetI)
);
labelList origFaces;
origSurf.facetsInSubset(subsetI, origFaces);
forAll(origFaces, faceI)
{
newSurf.addFacetToSubset
(
newSubsetId,
origFaces[faceI]
);
}
}
}
// Copy all edge subsets from one triSurf to another
void copyEdgeSubsets
(
const triSurf& origSurf,
triSurf& newSurf
)
{
DynList<label> subsetIds;
origSurf.edgeSubsetIndices(subsetIds);
forAll(subsetIds, subsetI)
{
label newSubsetId = newSurf.addEdgeSubset
(
origSurf.edgeSubsetName(subsetI)
);
labelList origEdges;
origSurf.edgesInSubset(subsetI, origEdges);
forAll(origEdges, edgeI)
{
newSurf.addEdgeToSubset
(
newSubsetId,
origEdges[edgeI]
);
}
}
}
// Copy all point subsets from one triSurf to another
void copyPointSubsets
(
const triSurf& origSurf,
triSurf& newSurf
)
{
DynList<label> subsetIds;
origSurf.pointSubsetIndices(subsetIds);
forAll(subsetIds, subsetI)
{
label newSubsetId = newSurf.addPointSubset
(
origSurf.pointSubsetName(subsetI)
);
labelList origPoints;
origSurf.pointsInSubset(subsetI, origPoints);
forAll(origPoints, pointI)
{
newSurf.addPointToSubset
(
newSubsetId,
origPoints[pointI]
);
}
}
}
// Merge the supplied list of patchIds onto a new patch
autoPtr<triSurf> mergeSurfacePatches
(
const triSurf& origSurf, // Surface
const UList<label>& patchIds, // Ids of patches to merge
const word& newPatchName, // Name of new (merged) patch
bool keepPatches // Keep the original patches - they will be emptied
)
{
const geometricSurfacePatchList& origPatches = origSurf.patches();
const LongList<labelledTri>& origFacets = origSurf.facets();
label newPatchId = origPatches.size();
// Determine new patch type
word newPatchType = origPatches[patchIds[0]].geometricType();
// Create patch addressing
List<DynamicList<label> > patchAddr(origPatches.size()+1);
forAll(origFacets, faceI)
{
patchAddr[origFacets[faceI].region()].append(faceI);
}
// Move selected patches to new patch
forAll(patchIds, patchI)
{
patchAddr[newPatchId].append(patchAddr[patchIds[patchI]]);
patchAddr[patchIds[patchI]].clear();
}
// Create new facets list
LongList<labelledTri> newFacets(origFacets.size());
labelList newFaceAddr(origFacets.size(), -1);
label patchCount = 0;
label faceI = 0;
forAll(patchAddr, patchI)
{
const unallocLabelList& addr = patchAddr[patchI];
if(addr.size())
{
forAll(addr, i)
{
newFacets[faceI] = origFacets[addr[i]];
newFacets[faceI].region() = patchCount;
newFaceAddr[addr[i]] = faceI;
faceI++;
}
}
if(addr.size() || keepPatches)
{
patchCount++;
}
}
// Create new patch list
geometricSurfacePatchList newPatches(patchCount);
patchCount = 0;
forAll(origPatches, patchI)
{
// Only add patches if they contain faces
if(patchAddr[patchI].size())
{
newPatches[patchCount] = origPatches[patchI];
newPatches[patchCount].index() = patchCount;
}
if(patchAddr[patchI].size() || keepPatches)
{
patchCount++;
}
}
// Add new patch if it contains faces
if(patchAddr[patchAddr.size()-1].size())
{
newPatches[patchCount] = geometricSurfacePatch
(
newPatchType,
newPatchName,
patchCount
);
}
if(patchAddr[patchAddr.size()-1].size() || keepPatches)
{
patchCount++;
}
// Create new surface
autoPtr<triSurf> newSurf
(
new triSurf
(
newFacets,
newPatches,
origSurf.featureEdges(),
origSurf.points()
)
);
// Transfer face subsets
copyFaceSubsets(origSurf, newSurf());
newSurf->updateFacetsSubsets(newFaceAddr);
// Transfer feature edge subsets
copyEdgeSubsets(origSurf, newSurf());
// Transfer point subsets
copyPointSubsets(origSurf, newSurf());
// Done
return newSurf;
}
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
int main(int argc, char *argv[])
{
argList::noParallel();
argList::validArgs.clear();
argList::validArgs.append("input surface file");
argList::validArgs.append("new patch");
argList::validOptions.insert("patchNames", "list of names");
argList::validOptions.insert("patchIds", "list of patchIds");
argList::validOptions.insert("patchIdRange", "( start end )");
argList::validOptions.insert("output", "file name (default overwrite)");
argList::validOptions.insert("keep", "");
argList args(argc, argv);
// Process commandline arguments
fileName inFileName(args.args()[1]);
word newPatchName(args.args()[2]);
fileName outFileName(inFileName);
if( args.options().found("output") )
{
outFileName = args.options()["output"];
}
bool keepPatches = false;
if( args.options().found("keep") )
{
keepPatches = true;
}
// Read original surface
triSurf origSurf(inFileName);
// Get patch ids
DynamicList<label> patchIds;
if (args.options().found("patchNames"))
{
if (args.options().found("patchIds"))
{
FatalError() << "Cannot specify both patch names and ids"
<< Foam::abort(FatalError);
}
IStringStream is(args.options()["patchNames"]);
wordList patchNames(is);
getPatchIds
(
origSurf,
patchNames,
patchIds
);
}
if (args.options().found("patchIds"))
{
IStringStream is(args.options()["patchIds"]);
patchIds = labelList(is);
}
if (args.options().found("patchIds"))
{
IStringStream is(args.options()["patchIds"]);
patchIds.append(labelList(is));
}
if (args.options().found("patchIdRange"))
{
IStringStream is(args.options()["patchIdRange"]);
Pair<label> idRange(is);
for(label id = idRange.first(); id <= idRange.second(); id++)
{
patchIds.append(id);
}
}
if (!patchIds.size())
{
FatalError() << "No patches specified"
<< Foam::abort(FatalError);
}
// Merge patches
autoPtr<triSurf> newSurf = mergeSurfacePatches
(
origSurf,
patchIds,
newPatchName,
keepPatches
);
// Write new surface mesh
newSurf->writeSurface(outFileName);
Info << "Original surface patches: " << origSurf.patches().size() << endl;
Info << "Final surface patches: " << newSurf->patches().size() << endl;
Info << "Surface written to " << outFileName << endl;
Info << "End\n" << endl;
return 0;
}
// ************************************************************************* //
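Since getPatchIds() and mergeSurfacePatches() above are plain free functions, they can also be reused outside this utility; a minimal sketch condensing what main() does (the surface file and patch names below are illustrative only):

    // Illustrative reuse of the helpers defined above
    fileName surfFile("body.fms");
    triSurf surf(surfFile);

    // Resolve the patch names to be merged into patch ids
    wordList names(2);
    names[0] = "wallA";
    names[1] = "wallB";
    DynamicList<label> ids;
    getPatchIds(surf, names, ids);

    // Merge them onto a new patch called "walls", dropping the emptied patches
    autoPtr<triSurf> merged = mergeSurfacePatches(surf, ids, word("walls"), false);

    // Write the merged surface
    merged->writeSurface(fileName("bodyMerged.fms"));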

View file

@ -0,0 +1,131 @@
#!python
# =============================================================================
# Salome GEOM script to extract the feature edges from a body and add them
# to the group named 'featureEdges'.
# Tested on Salome 7.4.0 and python 2.7 on 64-bit Windows
#
# Author: Ivor Clifford <ivor.clifford@psi.ch>
#
# =============================================================================
def extractFeatureEdges(body, minFeatureAngle = 5):
'''
Find all feature edges on the supplied body and return them as a list
of edge ids.
body - A Salome solid, compound, shell or face object to find all
feature edges on.
minFeatureAngle - the angle (in degrees) between adjacent surfaces
above which the edge will be considered a feature edge.
'''
import salome
salome.salome_init()
import GEOM
from salome.geom import geomBuilder
geompy = geomBuilder.New(salome.myStudy)
# Check the body type
if not (body.GetShapeType() in [GEOM.SHELL, GEOM.SOLID, GEOM.FACE, GEOM.COMPOUND]):
raise RuntimeError('Supplied object is not a solid, compound, shell or face.')
print 'Extracting edges of %s with feature angle > %g.' % (body.GetName(), minFeatureAngle)
# Extract basic info
faces = geompy.SubShapeAll(body, geompy.ShapeType["FACE"])
curves = geompy.SubShapeAll(body, geompy.ShapeType["EDGE"])
points = geompy.SubShapeAll(body, geompy.ShapeType["VERTEX"])
faceIds = geompy.GetSubShapesIDs(body, faces)
curveIds = geompy.GetSubShapesIDs(body, curves)
nodeIds = geompy.GetSubShapesIDs(body, points)
maxFaceId = max(faceIds)
maxCurveId = max(curveIds)
maxNodeId = max(nodeIds)
# Reverse mapping from curve id to local curve arrays
faceMap = [-1 for i in xrange(maxFaceId+1)]
for localId, id in enumerate(faceIds):
faceMap[id] = localId
curveMap = [-1 for i in xrange(maxCurveId+1)]
for localId, id in enumerate(curveIds):
curveMap[id] = localId
nodeMap = [-1 for i in xrange(maxNodeId+1)]
for localId, id in enumerate(nodeIds):
nodeMap[id] = localId
# Get curves on each face
faceCurveIds = [[curveMap[id] for id in geompy.GetSubShapesIDs(
body,
geompy.SubShapeAll(face, geompy.ShapeType["EDGE"])
)] for face in faces]
# Get faces attached to each curve
curveFaceIds = [[] for id in curveIds]
for faceI, ids in enumerate(faceCurveIds):
for id in ids:
curveFaceIds[id].append(faceI)
# Now that we have the connectivity for curves and faces find the
# feature edges
featureEdgeIds = []
for curveId, curve, adjFaceIds in zip(curveIds, curves, curveFaceIds):
if len(adjFaceIds) == 2:
# Curve with 2 adjacent faces - Test feature angle
# Determine break angle at each curve
# If greater than the feature edge angle, add the curve to group featureEdges
face1 = faces[adjFaceIds[0]]
face2 = faces[adjFaceIds[1]]
point = geompy.GetFirstVertex(curve) # Test at the first vertex
n1 = geompy.GetNormal(face1, point)
n2 = geompy.GetNormal(face2, point)
angle = geompy.GetAngle(n1, n2)
if angle > minFeatureAngle:
featureEdgeIds.append(curveId)
elif len(adjFaceIds) == 1:
# Curve on standalone face - Add by default
featureEdgeIds.append(curveId)
elif len(adjFaceIds) == 0:
# Standalone curve - Ignore
pass
else:
raise RuntimeError('Curve found sharing %d faces. This is unexpected for fully enclosed bodies.' % len(adjFaceIds))
# Done
print "%d feature edges found" % len(featureEdgeIds)
return featureEdgeIds
# If run as a standalone script, use the current Salome GUI selection
# and add the feature edges to group named 'featureEdges'
if __name__ == '__main__':
import salome
salome.salome_init()
import GEOM
from salome.geom import geomBuilder
geompy = geomBuilder.New(salome.myStudy)
# Get current GUI selection
selected = salome.sg.getAllSelected()
if len(selected) != 1:
raise RuntimeError('A single solid, shell or face object must be selected.')
body = salome.myStudy.FindObjectID(selected[0]).GetObject()
# Get feature edges and add to the group 'featureEdges'
featureEdges = geompy.CreateGroup(body, geompy.ShapeType["EDGE"])
geompy.UnionIDs(featureEdges, extractFeatureEdges(body))
geompy.addToStudyInFather(body, featureEdges, 'featureEdges')
if salome.sg.hasDesktop():
salome.sg.updateObjBrowser(1)
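Restating the test used in extractFeatureEdges() above: an edge shared by exactly two faces is flagged as a feature edge when the angle between the two face normals (evaluated at the edge's first vertex) exceeds the threshold. Assuming geompy.GetAngle returns the angle between the two normal directions in degrees, the criterion is:

\theta_e \;=\; \angle(n_1, n_2) \;=\; \arccos\!\left(\frac{n_1 \cdot n_2}{\lVert n_1\rVert\,\lVert n_2\rVert}\right),
\qquad e \text{ is a feature edge if } \theta_e > \theta_{\min} \;(= 5^\circ \text{ by default}).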

View file

@ -0,0 +1,363 @@
#!python
# =============================================================================
# Python module for writing OpenFOAM feature edge and triSurface files from
# within Salome platform.
# Tested on Salome 7.4.0 and python 2.7 on 64-bit Windows
#
# Author: Ivor Clifford <ivor.clifford@psi.ch>
#
# =============================================================================
def foamHeader(className, objectName):
'''
Return the OpenFOAM file header block as a string.
'''
return '''/*--------------------------------*- C++ -*----------------------------------*\\
| ========= | |
| \\\\ / F ield | OpenFOAM: The Open Source CFD Toolbox |
| \\\\ / O peration | Version: 2.2.1 |
| \\\\ / A nd | Web: www.OpenFOAM.org |
| \\\\/ M anipulation | |
\\*---------------------------------------------------------------------------*/
FoamFile
{
version 2.0;
format ascii;
class %s;
object %s;
}
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
''' % (className, objectName)
class triSurf:
def __init__(self, object = None, allEdges = False):
'''
Construct from the supplied Salome mesh.
object - the mesh object (must be a triangular surface mesh). If no
object is supplied, the current Salome selection is used.
allEdges - If true, all edges on the mesh are included as feature
edges, otherwise only edges contained in groups are included
NOTE: All face groups are assumed to represent patches. No face subsets
are written. All edge groups are added as feature edge subsets. All point
groups are added as point subsets.
'''
# =============================================================================
# Initialize salome
import salome
import SMESH, SALOMEDS
from salome.smesh import smeshBuilder
from operator import itemgetter
from collections import OrderedDict
# =============================================================================
# Get the Salome mesh object
if object is None:
selected = salome.sg.getAllSelected()
if len(selected) != 1:
raise RuntimeError('A single Salome mesh object must be selected.')
object = salome.myStudy.FindObjectID(selected[0]).GetObject()
try:
object.GetMesh()
except:
raise RuntimeError('Supplied object is not a Salome SMESH mesh.')
smesh = smeshBuilder.New(salome.myStudy)
mesh = smesh.Mesh(object)
print "Converting SMESH Mesh '%s'" % mesh.GetName()
# =============================================================================
# Get basic mesh info
nNodes = mesh.NbNodes()
nFaces = mesh.NbFaces()
nTris = mesh.NbTriangles()
nEdges = mesh.NbEdges()
nodeIds = mesh.GetNodesId()
faceIds = mesh.GetElementsByType(SMESH.FACE)
edgeIds = mesh.GetElementsByType(SMESH.EDGE)
# Check that mesh is strictly triangular
if nFaces != nTris:
raise RuntimeError('Mesh is not strictly triangular')
# Get patch and subset names & ids
# All SMESH.FACE groups are assumed to be patches
# All SMESH.EDGE groups are assumed to be feature subsets
# All SMESH.NODE groups are assumed to be point subsets
patches = OrderedDict()
pointSubsets = OrderedDict()
featureEdgeSubsets = OrderedDict()
for group in mesh.GetGroups():
if group.GetType() == SMESH.FACE:
patches[group.GetName()] = group.GetIDs()
elif group.GetType() == SMESH.EDGE:
featureEdgeSubsets[group.GetName()] = group.GetIDs()
elif group.GetType() == SMESH.NODE:
pointSubsets[group.GetName()] = group.GetIDs()
# =============================================================================
# Process faces and patches
# Get patchId for each face
lastPatchId = len(patches)
patchIds = [lastPatchId] * max(faceIds)
patchId = 0
for name, ids in patches.iteritems():
for faceId in ids:
if patchIds[faceId-1] == lastPatchId:
patchIds[faceId-1] = patchId
else:
print "Face %d is assigned to both groups %s and %s" % (faceId, name, patches.keys()[patchIds[faceId-1]])
raise RuntimeError('Groups of faces are not unique, i.e. they overlap.')
patchId += 1
# Compact and reorder patchIds to match faceIds
patchIds = [patchIds[faceId-1] for faceId in faceIds]
# Reorder faces by increasing group id
faceAndpatchIds = sorted(zip(faceIds, patchIds), key=itemgetter(1))
faceIds, patchIds = zip(*faceAndpatchIds)
# Add unused faces to the default patch
defaultFaces = [faceId for faceId, patchId in faceAndpatchIds if patchId == lastPatchId]
if len(defaultFaces) > 0:
patches['defaultFaces'] = defaultFaces
defaultFaces = None
# =============================================================================
# Process feature edges
if not allEdges:
edgeIds = []
for name, ids in featureEdgeSubsets.iteritems():
edgeIds += ids
edgeIds = list(set(edgeIds))
nEdges = len(edgeIds)
# Reverse mapping of edge ids since they aren't necessarily numbered 1..nEdges
if len(edgeIds):
edgeMap = [-1] * max(edgeIds)
else:
edgeMap = []
i=0
for edgeId in edgeIds:
edgeMap[edgeId-1] = i
i += 1
# =============================================================================
# Process nodes
# Reverse mapping of node ids since nodes aren't necessarily numbered 1..nNodes
nodeMap = [-1] * max(nodeIds)
i=0
for nodeId in nodeIds:
nodeMap[nodeId-1] = i
i += 1
# =============================================================================
self._mesh = mesh
self._nodeIds = nodeIds
self._edgeIds = edgeIds
self._faceIds = faceIds
self._nodeMap = nodeMap
self._edgeMap = edgeMap
self._faceMap = []
self._patches = patches
self._pointSubsets = pointSubsets
self._featureEdgeSubsets = featureEdgeSubsets
self._faceSubsets = {}
print 'Done'
def nNodes(self):
'''
Return the number of nodes
'''
return len(self._nodeIds)
def nEdges(self):
'''
Return the number of edges
'''
return len(self._edgeIds)
def nFacets(self):
'''
Return the number of triangular facets
'''
return len(self._faceIds)
def nPatches(self):
'''
Return the number of patches
'''
return len(self._patches)
def _writePatchDefs(self, f, typeName = 'wall'):
'''
Write the patch definitions to file as an OpenFOAM geometricSurfacePatchList.
NOTE: All patches are assumed to be walls.
'''
patches = self._patches
f.write('%d\n(\n' % len(patches))
for name in patches.iterkeys():
f.write('%s\t%s\n' % (name, typeName))
f.write(')\n')
def _writeNodes(self, f):
'''
Write the nodes to file as an OpenFOAM pointField.
'''
mesh = self._mesh
nodeIds = self._nodeIds
f.write('%d\n(\n' % len(nodeIds))
for x, y, z in [mesh.GetNodeXYZ(nodeId) for nodeId in nodeIds]:
f.write( '( %g %g %g )\n' % (x, y, z))
f.write(')\n')
def _writeFeatureEdges(self, f):
'''
Write the feature edges to file as an OpenFOAM edgeList.
'''
mesh = self._mesh
nodeMap = self._nodeMap
edgeIds = self._edgeIds
f.write('%d\n(\n' % len(edgeIds))
for edgeId in edgeIds:
nodes = mesh.GetElemNodes(edgeId)
f.write( '(' + ' '.join([str(nodeMap[nodeId-1]) for nodeId in nodes]) + ')\n')
f.write(')\n')
def _writeFacets(self, f):
'''
Write the facets to file as an OpenFOAM List of labelledTri.
'''
from itertools import chain
mesh = self._mesh
nodeMap = self._nodeMap
patches = self._patches
f.write('%d\n(\n' % sum([len(patch) for patch in patches.itervalues()]))
patchId = 0
for patchId, (patchName, faceIds) in enumerate(patches.iteritems()):
for faceId in faceIds:
nodes = mesh.GetElemNodes(faceId)
f.write( '((' + ' '.join([str(nodeMap[nodeId-1]) for nodeId in nodes]) + ') %d)\n' % patchId)
f.write(')\n')
def _writeSubsets(self, f, subsets, map, typeId):
'''
General function to write a subset to file as an OpenFOAM Map<meshSubset>.
'''
f.write('%d\n(\n' % len(subsets))
for name, ids in subsets.iteritems():
f.write('%s %s %d ( %s )\n' % (name, typeId, len(ids), ' '.join([str(map[id-1]) for id in ids])))
f.write(')\n')
def _writePointSubsets(self, f):
'''
Write the point subsets to file as an OpenFOAM Map<meshSubset>.
'''
self._writeSubsets(f, self._pointSubsets, self._nodeMap, '2')
def _writeFaceSubsets(self, f):
'''
Write the face subsets to file as an OpenFOAM Map<meshSubset>.
'''
self._writeSubsets(f, self._faceSubsets, self._faceMap, '4')
def _writeFeatureEdgeSubsets(self, f):
'''
Write the feature edge subsets to file as an OpenFOAM Map<meshSubset>.
'''
self._writeSubsets(f, self._featureEdgeSubsets, self._edgeMap, '8')
def writeEdgeMesh(self, fileName):
'''
Write to file as an OpenFOAM edgeMesh
fileName - The file name to write
'''
# Create file
f = open(fileName, 'wb') # NOTE: file opened as binary to ensure unix-style line breaks
# Write header
f.write(foamHeader('edgeMesh', self._mesh.GetName()))
self._writeNodes(f)
self._writeFeatureEdges(f)
f.close()
print 'edgeMesh written to %s' % fileName
def writeFtr(self, fileName):
'''
Write to file as an OpenFOAM cfMesh FTR file
fileName - the file name to write
'''
# Create file
f = open(fileName, 'wb') # NOTE: file opened as binary to ensure unix-style line breaks
self._writePatchDefs(f)
self._writeNodes(f)
self._writeFacets(f)
f.close()
print 'triSurf written to %s' % fileName
def writeFms(self, fileName):
'''
Write to file as an OpenFOAM cfMesh FMS file
fileName - the file name to write
'''
# Create file
f = open(fileName, 'wb') # NOTE: file opened as binary to ensure unix-style line breaks
self._writePatchDefs(f)
self._writeNodes(f)
self._writeFacets(f)
self._writeFeatureEdges(f)
self._writePointSubsets(f)
self._writeFaceSubsets(f)
self._writeFeatureEdgeSubsets(f)
f.close()
print 'triSurf written to %s' % fileName
if __name__ == '__main__':
import salome
salome.salome_init()
import SMESH, SALOMEDS

View file

@ -0,0 +1,3 @@
surfaceToFMS.C
EXE = $(FOAM_APPBIN)/surfaceToFMS

View file

@ -0,0 +1,9 @@
EXE_INC = \
-I$(FOAM_SRC)/mesh/cfMesh/meshLibrary/lnInclude \
-I$(LIB_SRC)/triSurface/lnInclude \
-I$(LIB_SRC)/meshTools/lnInclude
EXE_LIBS = \
-ltriSurface \
-lmeshLibrary \
-lmeshTools

View file

@ -0,0 +1,63 @@
/*---------------------------------------------------------------------------*\
========= |
\\ / F ield | cfMesh: A library for mesh generation
\\ / O peration |
\\ / A nd | Author: Franjo Juretic (franjo.juretic@c-fields.com)
\\/ M anipulation | Copyright (C) Creative Fields, Ltd.
-------------------------------------------------------------------------------
License
This file is part of cfMesh.
cfMesh is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3 of the License, or (at your
option) any later version.
cfMesh is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
for more details.
You should have received a copy of the GNU General Public License
along with cfMesh. If not, see <http://www.gnu.org/licenses/>.
Description
Reads the specified surface and writes it in the fms format.
\*---------------------------------------------------------------------------*/
#include "argList.H"
#include "IFstream.H"
#include "fileName.H"
#include "triSurf.H"
#include "demandDrivenData.H"
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
// Main program:
using namespace Foam;
int main(int argc, char *argv[])
{
argList::noParallel();
argList::validArgs.clear();
argList::validArgs.append("input surface file");
argList args(argc, argv);
const fileName inFileName(args.args()[1]);
if( inFileName.ext() == "fms" )
FatalError << "trying to convert a fms file to itself"
<< exit(FatalError);
fileName outFileName(inFileName.lessExt()+".fms");
const triSurf surface(inFileName);
surface.writeSurface(outFileName);
Info << "End\n" << endl;
return 0;
}
// ************************************************************************* //

View file

@ -1616,7 +1616,7 @@ int main(int argc, char *argv[])
// interior boundaries are handled via faceSets
// cell zones will only be written if there is more than one
if (writeZones && cellGroupZoneID.size()>1)
if (writeZones && cellGroupZoneID.size() > 1)
{
Info<< "Adding Zones" << endl;
List<pointZone*> pz(0);

View file

@ -38,7 +38,7 @@ FoamFile
// Tolerance used in matching faces. Absolute tolerance is span of
// face times this factor. To load incorrectly matched meshes set this
// to a higher value.
matchTolerance 1E-3;
matchTolerance 1e-3;
// Do a synchronisation of coupled points after creation of any patches.
// Note: this does not work with points that are on multiple coupled patches

View file

@ -69,6 +69,7 @@ int main(int argc, char *argv[])
mesh.update();
# include "checkVolContinuity.H"
# include "meshCourantNo.H"
if
(

View file

@ -62,10 +62,27 @@ int main(int argc, char *argv[])
mesh.update();
# include "checkVolContinuity.H"
# include "meshCourantNo.H"
if (runTime.timeIndex() % checkFrequency == 0)
{
mesh.checkMesh(true);
volScalarField magMeshCo
(
"magMeshCo",
fvc::surfaceSum
(
mag
(
mesh.phi()*
mesh.surfaceInterpolation::deltaCoeffs()/
mesh.magSf()
)
)
);
magMeshCo.write();
}
runTime.write();
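For reference, the magMeshCo field written above collects, for every cell P, the sum over its faces f of the mesh flux magnitude times deltaCoeffs (the inverse distance between the cell centres straddling the face) divided by the face area. Assuming the standard finite-volume meanings of mesh.phi(), magSf() and deltaCoeffs(), the per-cell quantity is (units of 1/s, so multiplying by the time step would give a mesh Courant number):

\mathrm{magMeshCo}_P \;=\; \sum_{f \in P} \left| \frac{\phi_f \, \Delta_f}{|S_f|} \right|,
\qquad \Delta_f = \mathtt{deltaCoeffs}_f = \frac{1}{d_f}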

View file

@ -78,7 +78,7 @@ void printEdgeStats(const primitiveMesh& mesh)
const edgeList& edges = mesh.edges();
forAll(edges, edgeI)
forAll (edges, edgeI)
{
const edge& e = edges[edgeI];
@ -88,19 +88,19 @@ void printEdgeStats(const primitiveMesh& mesh)
eVec /= eMag;
if (mag(eVec & x) > 1-edgeTol)
if (mag(eVec & x) > 1 - edgeTol)
{
minX = min(minX, eMag);
maxX = max(maxX, eMag);
nX++;
}
else if (mag(eVec & y) > 1-edgeTol)
else if (mag(eVec & y) > 1 - edgeTol)
{
minY = min(minY, eMag);
maxY = max(maxY, eMag);
nY++;
}
else if (mag(eVec & z) > 1-edgeTol)
else if (mag(eVec & z) > 1 - edgeTol)
{
minZ = min(minZ, eMag);
maxZ = max(maxZ, eMag);
@ -131,15 +131,15 @@ label axis(const vector& normal)
{
label axisIndex = -1;
if (mag(normal & point(1, 0, 0)) > (1-edgeTol))
if (mag(normal & point(1, 0, 0)) > (1 - edgeTol))
{
axisIndex = 0;
}
else if (mag(normal & point(0, 1, 0)) > (1-edgeTol))
else if (mag(normal & point(0, 1, 0)) > (1 - edgeTol))
{
axisIndex = 1;
}
else if (mag(normal & point(0, 0, 1)) > (1-edgeTol))
else if (mag(normal & point(0, 0, 1)) > (1 - edgeTol))
{
axisIndex = 2;
}
@ -194,13 +194,13 @@ label twoDNess(const polyMesh& mesh)
plane cellPlane(ctrs[0], ctrs[1], ctrs[otherCellI]);
forAll(ctrs, cellI)
forAll (ctrs, cellI)
{
const labelList& cEdges = mesh.cellEdges()[cellI];
scalar minLen = GREAT;
forAll(cEdges, i)
forAll (cEdges, i)
{
minLen = min(minLen, mesh.edges()[cEdges[i]].mag(mesh.points()));
}
@ -230,15 +230,15 @@ label twoDNess(const polyMesh& mesh)
// Mark boundary points
boolList boundaryPoint(mesh.allPoints().size(), false);
forAll(patches, patchI)
forAll (patches, patchI)
{
const polyPatch& patch = patches[patchI];
forAll(patch, patchFaceI)
forAll (patch, patchFaceI)
{
const face& f = patch[patchFaceI];
forAll(f, fp)
forAll (f, fp)
{
boundaryPoint[f[fp]] = true;
}
@ -248,7 +248,7 @@ label twoDNess(const polyMesh& mesh)
const edgeList& edges = mesh.edges();
forAll(edges, edgeI)
forAll (edges, edgeI)
{
const edge& e = edges[edgeI];
@ -263,7 +263,7 @@ label twoDNess(const polyMesh& mesh)
// 3. For all non-wedge patches: all faces either perp or aligned with
// cell-plane normal. (wedge patches already checked upon construction)
forAll(patches, patchI)
forAll (patches, patchI)
{
const polyPatch& patch = patches[patchI];
@ -349,7 +349,9 @@ int main(int argc, char *argv[])
// Select all cells
refCells.setSize(mesh.nCells());
forAll(mesh.cells(), cellI)
const cellList& c = mesh.cells();
forAll (c, cellI)
{
refCells[cellI] = cellI;
}
@ -440,11 +442,11 @@ int main(int argc, char *argv[])
// Create cellSet with added cells for easy inspection
cellSet newCells(mesh, "refinedCells", refCells.size());
forAll(oldToNew, oldCellI)
forAll (oldToNew, oldCellI)
{
const labelList& added = oldToNew[oldCellI];
forAll(added, i)
forAll (added, i)
{
newCells.insert(added[i]);
}
@ -482,14 +484,13 @@ int main(int argc, char *argv[])
+ " to cells in mesh at "
+ oldTimeName;
forAll(oldToNew, oldCellI)
forAll (oldToNew, oldCellI)
{
const labelList& added = oldToNew[oldCellI];
if (added.size())
{
forAll(added, i)
forAll (added, i)
{
newToOld[added[i]] = oldCellI;
}
@ -506,7 +507,6 @@ int main(int argc, char *argv[])
newToOld.write();
// Some statistics.
printEdgeStats(mesh);

View file

@ -48,6 +48,8 @@ int main(int argc, char *argv[])
wordList currDebug(debug::debugSwitches().toc());
wordList currInfo(debug::infoSwitches().toc());
wordList currOpt(debug::optimisationSwitches().toc());
wordList currTol(debug::tolerances().toc());
wordList currConst(debug::constants().toc());
if (args.optionFound("old") || args.optionFound("new"))
{
@ -68,6 +70,16 @@ int main(int argc, char *argv[])
controlDict.subDict("OptimisationSwitches").toc()
);
wordHashSet oldTol
(
controlDict.subDict("Tolerances").toc()
);
wordHashSet oldConst
(
controlDict.subDict("DimensionedConstants").toc()
);
wordHashSet hashset;
wordList listing;
@ -100,6 +112,18 @@ int main(int argc, char *argv[])
listing = hashset.toc();
sort(listing);
Info<< "old OptimisationSwitches: " << listing << endl;
hashset = wordHashSet(oldTol);
hashset -= wordHashSet(currTol);
listing = hashset.toc();
sort(listing);
Info<< "old Tolerances: " << listing << endl;
hashset = wordHashSet(oldConst);
hashset -= wordHashSet(currConst);
listing = hashset.toc();
sort(listing);
Info<< "old DimensionedConstants: " << listing << endl;
}
// list new switches
@ -125,25 +149,91 @@ int main(int argc, char *argv[])
listing = hashset.toc();
sort(listing);
Info<< "new OptimisationSwitches: " << listing << endl;
hashset = wordHashSet(currTol);
hashset -= wordHashSet(oldTol);
listing = hashset.toc();
sort(listing);
Info<< "new Tolerances: " << listing << endl;
hashset = wordHashSet(currConst);
hashset -= wordHashSet(oldConst);
listing = hashset.toc();
sort(listing);
Info<< "new DimensionedConstants: " << listing << endl;
}
}
else
{
IOobject::writeDivider(Info);
//IOobject::writeDivider(Info);
sort(currDebug);
Info<< "DebugSwitches: " << currDebug << endl;
Info << endl << "DebugSwitches: " << endl;
forAll(currDebug, dI)
{
Info << " "
<< currDebug[dI]
<< " : "
<< debug::debugSwitchFromDict(currDebug[dI].c_str(), 0)
<< endl;
}
sort(currInfo);
Info<< "InfoSwitches: " << currInfo << endl;
Info << endl << "InfoSwitches: " << endl;
forAll(currInfo, iI)
{
Info << " "
<< currInfo[iI]
<< " : "
<< debug::infoSwitchFromDict(currInfo[iI].c_str(), 0)
<< endl;
}
sort(currOpt);
Info<< "OptimisationSwitches: " << currOpt << endl;
Info << endl << "OptimisationSwitches: " << endl;
forAll(currOpt, oI)
{
if (currOpt[oI] == "commsType")
{
token commsTypeValue;
debug::optimisationSwitches().lookup("commsType", false, false).read(commsTypeValue);
Info << " " << "commsType : " << commsTypeValue << endl;
}
else
{
Info << " "
<< currOpt[oI]
<< " : "
<< debug::optimisationSwitchFromDict(currOpt[oI].c_str(), 0)
<< endl;
}
}
sort(currTol);
Info << endl << "Tolerances: " << endl;
forAll(currTol, tI)
{
Info << " "
<< currTol[tI]
<< " : "
<< debug::tolerancesFromDict(currTol[tI].c_str(), 0)
<< endl;
}
sort(currConst);
Info << endl << "Dimensioned Constants: " << endl;
forAll(currConst, tI)
{
Info << " "
<< currConst[tI]
<< " : "
<< debug::constantsFromDict(currConst[tI].c_str(), 0)
<< endl;
}
}
Info<< "done" << endl;
Info << endl << "Done." << endl;
return 0;
}

View file

@ -36,7 +36,6 @@ Description
int main(int argc, char *argv[])
{
# include "addTimeOptions.H"
# include "setRootCase.H"
@ -53,7 +52,7 @@ int main(int argc, char *argv[])
# include "createMesh.H"
# include "readMechanicalProperties.H"
for (label i=startTime; i<endTime; i++)
for (label i = startTime; i < endTime; i++)
{
runTime.setTime(Times[i], i);
@ -79,8 +78,8 @@ int main(int argc, char *argv[])
volTensorField gradU = fvc::grad(U);
volSymmTensorField sigma =
rho*(2.0*mu*symm(gradU) + lambda*I*tr(gradU));
volSymmTensorField sigma =
rho*(2.0*mu*symm(gradU) + lambda*I*tr(gradU));
volScalarField sigmaEq
(

View file

@ -36,6 +36,7 @@ Description
#include "basicPsiThermo.H"
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
void Foam::calc(const argList& args, const Time& runTime, const fvMesh& mesh)
{
bool writeResults = !args.optionFound("noWrite");
@ -134,8 +135,6 @@ void Foam::calc(const argList& args, const Time& runTime, const fvMesh& mesh)
{
Info<< " Missing U or T" << endl;
}
Info<< "\nEnd\n" << endl;
}

View file

@ -1,101 +0,0 @@
#! /usr/bin/python
# debugmode=True
debugmode=False
from os import listdir,path,system
from popen2 import popen4
import sys
def svnCommand(cmd):
if debugmode:
print "SVN:",cmd
else:
system("svn "+cmd)
def rmEmpty(d):
if not path.isdir(d):
return False
else:
isEmpty=True
for f in listdir(d):
if f==".svn":
isEmpty=False
elif not rmEmpty(path.join(d,f)):
isEmpty=False
if isEmpty:
print "Removing ",d,"because it is empty"
if not debugmode:
system("rmdir "+d)
return isEmpty
start=sys.argv[1]
rmEmpty(start)
rein,raus=popen4("svn status "+start)
lines=rein.readlines()
rein.close()
raus.close()
modified=0
added=0
removed=0
conflicting=0
replaced=0
for l in lines:
status=l[0]
pstatus=l[1]
name=l[7:-1]
if status=="?":
print "Adding",name
svnCommand("add "+name)
elif status=="!":
print "Removing",name
svnCommand("delete "+name)
elif status=="M":
modified+=1
elif status=="A":
added+=1
elif status=="D":
removed+=1
elif status=="C":
conflicting+=1
elif status=="R":
replaced+=1
elif status=="~":
print "Problem with",name
print
print "Modified files:",modified
print "Added files:",added
print "Removed files:",removed
print "Conflicting files:",conflicting
print "Replaced files:",replaced
print
def checkEmptyDirs(current):
nrOfContents=0
for f in listdir(current):
if f==".svn":
continue
pfad=path.join(current,f)
if path.isdir(pfad):
if checkEmptyDirs(pfad):
nrOfContents+=1
else:
nrOfContents+=1
if nrOfContents==0:
print "Removing",current
svnCommand("remove "+current)
return False
else:
return True
checkEmptyDirs(start)

View file

@ -1,46 +0,0 @@
#! /usr/bin/python
import sys,re
from os import path
from subprocess import Popen,PIPE,call
import tarfile
if len(sys.argv)!=2:
print "Error: SVN-Url is needed"
sys.exit(-1)
url=sys.argv[1]
name=path.basename(url[:-1])
p=Popen(["svn","info",url],stdin=PIPE, stdout=PIPE, close_fds=True)
(child_stdout, child_stdin) = (p.stdout, p.stdin)
revision=-1
for l in child_stdout.readlines():
m=re.compile("Last Changed Rev: (.+)").match(l)
if m!=None:
revision=int(m.group(1))
if revision<0:
print "Invalid URL or stuff"
sys.exit(-1)
fullname="%s.r%d" % (name,revision)
l
print "Generating",fullname
retcode=call(["svn","export",url,fullname])
if retcode!=0:
print "Problem. Returncode",retcode
sys.exit(-1)
print "Tarring ...."
tar=tarfile.open(fullname+".tgz","w:gz")
tar.add(fullname,arcname=name)
tar.close()
print "Removing directory"
retcode=call(["rm","-rf",fullname])
print "Finished"

View file

@ -1,94 +0,0 @@
#!/usr/bin/python
# this script adds a set of SVN-properties to files and directories under
# a directory that is specified on the command line
from popen2 import popen2
import sys
import string
import glob
from os import path,listdir
svnCommand="svn"
isSVK=False
def runSvn(cmd):
raus,rein=popen2(svnCommand+" "+cmd)
result=raus.readlines()
rein.close()
raus.close()
return result
def getProperty(fName,property):
raw=runSvn("propget %s %s" % (property,fName))
return string.join(raw)
def setProperty(fName,property,value):
runSvn("propset %s \"%s\" %s" % (property,value,fName))
def addToListProperty(fName,property,value):
tmp=getProperty(fName,property)
lst=map(string.strip,string.split(tmp))
if not value in lst:
lst.append(value)
else:
return False
val=string.join(lst,"\n")
setProperty(fName,property,val)
return True
def addKeyword(fName,keyword):
return addToListProperty(fName,"svn:keywords",keyword)
def addIgnore(fName,keyword):
return addToListProperty(fName,"svn:ignore",keyword)
def recursivlyDoToFiles(directory,fileFilter,function,isDir=False,testSvn=True):
if testSvn and not isSVK:
if not path.exists(path.join(directory,".svn")):
return
for f in glob.glob(path.join(directory,fileFilter)):
if not path.isfile(f) and not path.isdir(f):
continue
if (isDir and path.isfile(f)) or (not isDir and path.isdir(f)):
continue
if isDir and testSvn and not isSVK:
if not path.exists(path.join(f,".svn")):
continue
if function(f):
print "....",f
for f in listdir(directory):
if f not in [".svn","lnInclude"]:
tmp=path.join(directory,f)
if path.isdir(tmp):
recursivlyDoToFiles(tmp,fileFilter,function,isDir=isDir,testSvn=testSvn)
if not path.exists(path.join(sys.argv[1],".svn")):
svnCommand="svk"
isSVK=True
print "\nAdding Id-keyword to Python-files"
recursivlyDoToFiles(sys.argv[1],"*.py",lambda x:addKeyword(x,"Id"))
print "\nAdding Id-keyword to C++-files"
recursivlyDoToFiles(sys.argv[1],"*.C",lambda x:addKeyword(x,"Id"))
print "\nAdding Id-keyword to C++-headers"
recursivlyDoToFiles(sys.argv[1],"*.H",lambda x:addKeyword(x,"Id"))
print "\nAdding *Opt to ignore-list for Make-directories"
recursivlyDoToFiles(sys.argv[1],"Make",lambda x:addIgnore(x,"*Opt"),isDir=True)
print "\nAdding *Debug to ignore-list for Make-directories"
recursivlyDoToFiles(sys.argv[1],"Make",lambda x:addIgnore(x,"*Debug"),isDir=True)
print "\nAdding lnInclude to ignore-list for all directories"
recursivlyDoToFiles(sys.argv[1],"*",lambda x:addIgnore(x,"lnInclude"),isDir=True)
print "\nAdding *.dep to ignore-list for all directories"
recursivlyDoToFiles(sys.argv[1],"*",lambda x:addIgnore(x,"*.dep"),isDir=True)

Binary file not shown.

View file

@ -25,8 +25,8 @@
# aliases.csh
#
# Description
# Aliases for working with OpenFOAM
# Sourced from OpenFOAM-??/etc/cshrc and/or ~/.cshrc
# Aliases for working with FOAM
# Sourced from FOAM-??/etc/cshrc and/or ~/.cshrc
#
#------------------------------------------------------------------------------

View file

@ -25,8 +25,8 @@
# aliases.sh
#
# Description
# Aliases for working with OpenFOAM
# Sourced from OpenFOAM-??/etc/bashrc and/or ~/.bashrc
# Aliases for working with FOAM
# Sourced from FOAM-??/etc/bashrc and/or ~/.bashrc
#
#------------------------------------------------------------------------------

View file

@ -26,7 +26,7 @@
#
# Description
# Setup file for Ensight 8.?
# Sourced from OpenFOAM-?.?/etc/bashrc
# Sourced from FOAM-?.?/etc/bashrc
#
#------------------------------------------------------------------------------

View file

@ -4,7 +4,7 @@
# \\ / O peration |
# \\ / A nd | For copyright notice see file Copyright
# \\/ M anipulation |
#-------------------------------------------------------------------------------
#------------------------------------------------------------------------------
# License
# This file is part of foam-extend.
#
@ -26,7 +26,7 @@
#
# Description
# Startup File for Ensight 8.?.
# Sourced from OpenFOAM-?.?/etc/cshrc
# Sourced from FOAM-?.?/etc/cshrc
#
#------------------------------------------------------------------------------

View file

@ -26,7 +26,7 @@
#
# Description
# Setup file for paraview-3.x
# Sourced from OpenFOAM-*/etc/bashrc
# Sourced from FOAM-*/etc/bashrc
#
# Note
# The env. variable 'ParaView_DIR' is required for building plugins

View file

@ -26,7 +26,7 @@
#
# Description
# Setup file for paraview-3.x
# Sourced from OpenFOAM-*/etc/cshrc
# Sourced from FOAM-*/etc/cshrc
#
# Note
# The env. variable 'ParaView_DIR' is required for building plugins

View file

@ -26,7 +26,7 @@
# etc/bashrc
#
# Description
# Startup file for OpenFOAM
# Startup file for FOAM
# Sourced from ~/.profile or ~/.bashrc
# Should be usable by any POSIX-compliant shell (eg, ksh)
#
@ -43,7 +43,7 @@ export FOAM_DEV=1
# USER EDITABLE PART
#
# either set $FOAM_INST_DIR before sourcing this file or set
# $foamInstall below to where OpenFOAM is installed
# $foamInstall below to where FOAM is installed
#
# Location of FOAM installation
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@ -379,7 +379,7 @@ SunOS)
cat <<USAGE
Your "$WM_ARCH" operating system is not supported by this release
of OpenFOAM. For further assistance, please contact www.extend-project.de
of foam-extend. For further assistance, please contact www.foam-extend.org
USAGE
;;

Some files were not shown because too many files have changed in this diff.