Commit a5b56e47 authored by Dave Moxey's avatar Dave Moxey
Browse files

Merge remote-tracking branch 'origin/master' into fix/petsc-deb9-ub1604

parents 8cdb0ecd a8fbc4da
......@@ -31,6 +31,10 @@ v4.4.0
file (!678)
- Extend ExtractDataToCoeffs to support interpolation between basis types for
quads and hexahedra (!682)
- Enabled MUMPS support in PETSc if a Fortran compiler was found and added 3D
support to the Helmholtz smoother used e.g. in FieldConvert's C0Projection
module (!714)
- Fix bug in CMake Homebrew and MacPorts detection for OS X (!729)
**ADRSolver:**
- Add a projection equation system for C^0 projections (!675)
......@@ -44,11 +48,15 @@ v4.4.0
**IncNavierStokesSolver:**
- Add ability to simulate additional scalar fields (!624)
- Improve performance when using homogeneous dealiasing (!622)
- Fix linearised advection for full 3D cases (!708)
- Added a weak pressure formulation following Guermond & Shen (!713)
- Added a convective like outflow boundary condition from Dong (!713)
**FieldConvert:**
- Allow equi-spaced output for 1D and 2DH1D fields (!613)
- Update quality metric to include scaled Jacobian output (!695)
- Allow multiple XML files to be specified in InterpField module (!705)
- Fix issues with isocontour module (!719)
**NekMesh:**
- Modify curve module to allow for spline input (!628)
......@@ -69,6 +77,12 @@ v4.4.0
- Add flag to `insertsurface` process for non-conforming geometries (!700)
- Bug fix to get two meshgen regression tests working (!700)
- Remove libANN in deference to boost::geometry (!703)
- Refactor library to use NekMesh modules for CAD generation (!704)
- Add `varopti` process module to optimise meshes (!711)
- Add a mesh extract option to the linearise module to visualise the result
(!712)
- 2D to 3D mesh extrusion module (!715)
- Add new two-dimensional mesher from NACA code or step file (!720)
**FieldConvert:**
- Move all modules to a new library, FieldUtils, to support post-processing
......
CMAKE_MINIMUM_REQUIRED(VERSION 2.8.7)
CMAKE_MINIMUM_REQUIRED(VERSION 2.8.8)
SET(CMAKE_BUILD_TYPE Release CACHE STRING "Choose the type of build,
options are: None(CMAKE_CXX_FLAGS or CMAKE_C_FLAGS used) Debug Release
RelWithDebInfo MinSizeRel.")
PROJECT(Nektar++)
PROJECT(Nektar++ C CXX)
INCLUDE(CheckLanguage)
CHECK_LANGUAGE(Fortran)
IF(CMAKE_Fortran_COMPILER)
ENABLE_LANGUAGE(Fortran)
ELSE()
MESSAGE(STATUS "No Fortran support")
ENDIF()
# Helps organize projects in IDEs.
SET_PROPERTY(GLOBAL PROPERTY USE_FOLDERS ON)
......@@ -60,13 +70,20 @@ IF (APPLE)
SET(CMAKE_LIBRARY_PATH ${MACPORTS_PREFIX}/lib ${CMAKE_LIBRARY_PATH})
SET(CMAKE_INCLUDE_PATH ${MACPORTS_PREFIX}/include ${CMAKE_INCLUDE_PATH})
MESSAGE(STATUS "Detected MacPorts installation: ${MACPORTS_PREFIX}")
ELSE()
UNSET(MACPORTS_PREFIX)
ENDIF()
IF (${DETECT_HOMEBREW} EQUAL 0)
SET(CMAKE_LIBRARY_PATH ${HOMEBREW_PREFIX}/lib ${CMAKE_LIBRARY_PATH})
SET(CMAKE_INCLUDE_PATH ${HOMEBREW_PREFIX}/include ${CMAKE_INCLUDE_PATH})
MESSAGE(STATUS "Detected Homebrew installation: ${HOMEBREW_PREFIX}")
ELSE()
UNSET(HOMEBREW_PREFIX)
ENDIF()
UNSET(DETECT_HOMEBREW)
UNSET(DETECT_MACPORTS)
ENDIF()
# Attempt to retrieve git branch and SHA1 hash of current changeset.
......@@ -286,10 +303,6 @@ ELSE( NEKTAR_USE_MEMORY_POOLS )
REMOVE_DEFINITIONS(-DNEKTAR_MEMORY_POOL_ENABLED)
ENDIF( NEKTAR_USE_MEMORY_POOLS )
IF (NEKTAR_USE_MESHGEN)
ADD_DEFINITIONS(-DNEKTAR_USE_MESHGEN)
ENDIF()
SET(Boost_USE_STATIC_LIBS OFF)
IF( WIN32 )
# The auto-linking feature has problems with USE_STATIC_LIBS off, so we use
......
......@@ -24,16 +24,16 @@ if(NOT DEFINED OCE_DIR)
# Check for OSX needs to come first because UNIX evaluates to true on OSX
if(${CMAKE_SYSTEM_NAME} MATCHES "Darwin")
if(DEFINED MACPORTS_PREFIX)
find_package(OCE 0.17 QUIET HINTS ${MACPORTS_PREFIX}/Library/Frameworks)
find_package(OCE 0.15 QUIET HINTS ${MACPORTS_PREFIX}/Library/Frameworks)
elseif(DEFINED HOMEBREW_PREFIX)
find_package(OCE 0.17 QUIET HINTS ${HOMEBREW_PREFIX}/Cellar/oce/*)
find_package(OCE 0.15 QUIET HINTS ${HOMEBREW_PREFIX}/Cellar/oce/*)
endif()
elseif(UNIX)
set(OCE_DIR "/usr/local/share/cmake/")
endif()
endif()
find_package(OCE 0.17 QUIET)
find_package(OCE 0.15 QUIET)
if(OCE_FOUND)
message(STATUS "-- OpenCASCADE Community Edition has been found.")
set(OCC_INCLUDE_DIR ${OCE_INCLUDE_DIRS})
......@@ -113,8 +113,8 @@ if(OCC_FOUND)
TKHLR
TKFeat
)
if(OCC_VERSION_STRING VERSION_LESS 6.8)
if(OCC_VERSION_STRING VERSION_LESS 6.7)
MESSAGE(SEND_ERROR "OCC version too low")
endif(OCC_VERSION_STRING VERSION_LESS 6.8)
endif(OCC_VERSION_STRING VERSION_LESS 6.7)
message(STATUS "-- Found OCE/OpenCASCADE with OCC version: ${OCC_VERSION_STRING}")
endif(OCC_FOUND)
......@@ -30,16 +30,30 @@ IF (NEKTAR_USE_PETSC)
SET(PETSC_C_COMPILER "${CMAKE_C_COMPILER}")
SET(PETSC_CXX_COMPILER "${CMAKE_CXX_COMPILER}")
SET(PETSC_Fortran_COMPILER "${CMAKE_Fortran_COMPILER}")
IF (NEKTAR_USE_MPI)
IF (NOT MPI_BUILTIN)
SET(PETSC_C_COMPILER "${MPI_C_COMPILER}")
SET(PETSC_CXX_COMPILER "${MPI_CXX_COMPILER}")
SET(PETSC_Fortran_COMPILER "${MPI_Fortran_COMPILER}")
ENDIF (NOT MPI_BUILTIN)
ELSE (NEKTAR_USE_MPI)
SET(PETSC_NO_MPI "--with-mpi=0")
ENDIF (NEKTAR_USE_MPI)
IF(CMAKE_Fortran_COMPILER)
    # A Fortran compiler is available, so PETSc can be built with MUMPS
    # support (MUMPS requires Fortran, and an MPI Fortran wrapper when
    # MPI is enabled).
    IF(NEKTAR_USE_MPI AND NOT MPI_Fortran_COMPILER)
        # Bug fix: MESSAGE(ERROR ...) is not a valid message mode -- CMake
        # would print the literal word "ERROR" and carry on configuring.
        # FATAL_ERROR stops configuration, which is the intent here.
        MESSAGE(FATAL_ERROR "MPI_Fortran_COMPILER not set")
    ENDIF()
    # We use a MUMPS built-in ordering here; in the future it might make
    # sense to hook it up with metis/scotch since this MIGHT be faster.
    SET(PETSC_MUMPS --download-scalapack --download-mumps)
ELSE()
    # No Fortran toolchain detected: disable MUMPS and tell PETSc's
    # configure script that no Fortran compiler is present ("0").
    MESSAGE(WARNING "No Fortran support. Building PETSc without MUMPS support")
    SET(PETSC_Fortran_COMPILER "0")
ENDIF()
EXTERNALPROJECT_ADD(
petsc-3.7.2
PREFIX ${TPSRC}
......@@ -52,17 +66,21 @@ IF (NEKTAR_USE_PETSC)
URL http://www.nektar.info/thirdparty/petsc-lite-3.7.2.tar.gz
URL_MD5 "26c2ff8eaaa9e49aea063f839f5daa7e"
CONFIGURE_COMMAND
OMPI_FC=${CMAKE_Fortran_COMPILER}
OMPI_CC=${CMAKE_C_COMPILER}
OMPI_CXX=${CMAKE_CXX_COMPILER}
${PYTHON_EXECUTABLE} ./configure
./configure
--with-fc=${PETSC_Fortran_COMPILER}
--with-cc=${PETSC_C_COMPILER}
--with-cxx=${PETSC_CXX_COMPILER}
--with-shared-libraries=1
--with-pic=1
--with-x=0
--with-ssl=0
--prefix=${TPDIST}
--with-petsc-arch=c-opt
--with-fc=0
${PETSC_MUMPS}
${PETSC_NO_MPI}
BUILD_COMMAND MAKEFLAGS= make)
......
......@@ -83,7 +83,7 @@
pages = {293-301},
year = {1996},
}
@article{CoRaNa98,
author = {M. Courtemanche\, R. J. Ramirez and S. Nattel},
title = {Ionic mechanisms underlying human atrial action potential properties: insights from a mathematical model},
......@@ -103,7 +103,7 @@
pages = {1501-1526},
year = {1991},
}
@article{TuPa06,
author = {K. H. W. J. ten Tusscher and A. V. Panfilov},
title = {Alternans and spiral breakup in a human ventricular tissue model},
......@@ -123,7 +123,7 @@
pages = {4331-51},
year = {2011},
}
@article{ShKa96,
title={Tetrahedral {\it hp} Finite Elements: Algorithms and Flow Simulations},
author={Sherwin, SJ and Karniadakis, G Em},
......@@ -345,6 +345,16 @@ year={2011}
year={2014}
}
@article{Dong15,
title={A convective-like energy-stable open boundary condition for simulation of incompressible flows},
author={S. Dong},
journal={Journal of Computational Physics},
volume={302},
pages={300-328},
year={2015}
}
@article{Ko07,
title = {Vectorized Matlab codes for linear two-dimensional elasticity},
author = {Koko, Jonas},
......@@ -368,9 +378,9 @@ year={2011}
}
@article{GuSh03,
Author="J.L. Guermond and J. Shen",
title="Velocity-correction projection methods for incompressible flows",
journal="SIAM J. Numer.\ Anal.",
Author="J.L. Guermond and J. Shen",
title="Velocity-correction projection methods for incompressible flows",
journal="SIAM J. Numer.\ Anal.",
volume=41,
pages = "112--134",
year=2003
......@@ -441,3 +451,27 @@ year={2011}
publisher = {Springer London}
}
@Article{BaPlGrSh16,
author = {Bao, Y. and Palacios, R. and Graham, M. and Sherwin, S.J. },
title = {Generalized “thick” strip modelling for vortex-induced vibration of long flexible cylinders},
journal = {J. Comp. Phys},
year = {2016},
volume = {321},
pages = {1079-1097},
}
@inproceedings{TuPeMo16,
abstract = {The generation of sufficiently high quality unstructured high-order meshes remains a significant obstacle in the adoption of high-order methods. However, there is little consensus on which approach is the most robust, fastest and produces the 'best' meshes. We aim to provide a route to investigate this question, by examining popular high-order mesh generation methods in the context of an efficient variational framework for the generation of curvilinear meshes. By considering previous works in a variational form, we are able to compare their characteristics and study their robustness. Alongside a description of the theory and practical implementation details, including an efficient multi-threading parallelisation strategy, we demonstrate the effectiveness of the framework, showing how it can be used for both mesh quality optimisation and untangling of invalid meshes.},
author = {Turner, M and Peir{\'{o}}, J and Moxey, D},
booktitle = {25th International Meshing Roundtable},
doi = {10.1016/j.proeng.2016.11.069},
file = {:Users/mike/Downloads/1-s2.0-S1877705816333781-main.pdf:pdf},
issn = {18777058},
keywords = {energy functional,high-order mesh generation,numerical optimization,variational mesh generation},
pages = {340--352},
title = {{A Variational Framework for High-Order Mesh Generation}},
url = {www.elsevier.com/locate/procedia%5Cnhttp://linkinghub.elsevier.com/retrieve/pii/S1877705816333781},
volume = {163},
year = {2016}
}
......@@ -69,7 +69,7 @@ format by issuing the command
\begin{lstlisting}[style=BashInputStyle]
FieldConvert in.fld out.fld:fld:format=Hdf5
\end{lstlisting}
%
%
\section{Range option \textit{-r}}
The Fieldconvert range option \inltt{-r} allows the user to specify
a sub-range of the mesh (computational domain) by using an
......@@ -121,7 +121,7 @@ possibly also Reynolds stresses) into single file;
\item \inltt{extract}: Extract a boundary field;
\item \inltt{homplane}: Extract a plane from 3DH1D expansions;
\item \inltt{homstretch}: Stretch a 3DH1D expansion by an integer factor;
\item \inltt{innerproduct}: take the inner product between one or a series of fields with another field (or series of fields).
\item \inltt{innerproduct}: take the inner product between one or a series of fields with another field (or series of fields).
\item \inltt{interpfield}: Interpolates one field to another, requires fromxml, fromfld to be defined;
\item \inltt{interppointdatatofld}: Interpolates given discrete data using a finite difference approximation to a fld file given an xml file;
\item \inltt{interppoints}: Interpolates a set of points to another, requires fromfld and fromxml to be defined, a line or plane of points can be defined;
......@@ -130,7 +130,7 @@ possibly also Reynolds stresses) into single file;
\item \inltt{qualitymetric}: Evaluate a quality metric of the underlying mesh to show mesh quality;
\item \inltt{meanmode}: Extract mean mode (plane zero) of 3DH1D expansions;
\item \inltt{pointdatatofld}: Given discrete data at quadrature points
project them onto an expansion basis and output fld file;
project them onto an expansion basis and output fld file;
\item \inltt{printfldnorms}: Print L2 and LInf norms to stdout;
\item \inltt{scalargrad}: Computes scalar gradient field;
\item \inltt{scaleinputfld}: Rescale input field by a constant factor;
......@@ -217,7 +217,7 @@ new field. To use this we simply run
In this case, we have produced a Tecplot file which contains the mesh and a
variable that contains the composite ID. To assist in boundary identification,
the input file \inlsh{mesh.xml} should be a surface XML file that can be
obtained through the \mc \inltt{extract} module (see section
obtained through the \nm \inltt{extract} module (see section
\ref{s:utilities:nekmesh:extract}).
\subsection{Sum two .fld files: \textit{addFld} module}
......@@ -234,8 +234,8 @@ which multiply the values of a given .fld file by a constant \inltt{value}.
is the associated session file, \inltt{file2.fld} is the .fld file which
is summed to \inltt{file1.fld} and finally \inltt{file3.fld} is the output
which contain the sum of the two .fld files.
\inltt{file3.fld} can be processed in a similar way as described
in section \ref{s:utilities:fieldconvert:sub:convert} to visualise
\inltt{file3.fld} can be processed in a similar way as described
in section \ref{s:utilities:fieldconvert:sub:convert} to visualise
the result either in Tecplot, Paraview or VisIt.
%
%
......@@ -249,8 +249,8 @@ use the \inltt{combineAvg} module of FieldConvert
file3.fld
\end{lstlisting}
%
\inltt{file3.fld} can be processed in a similar way as described
in section \ref{s:utilities:fieldconvert:sub:convert} to visualise
\inltt{file3.fld} can be processed in a similar way as described
in section \ref{s:utilities:fieldconvert:sub:convert} to visualise
the result either in Tecplot, Paraview or VisIt.
%
%
......@@ -325,8 +325,8 @@ of interest. Finally to process the surface file one can use
FieldConvert test-b0.xml test-b0.fld test-b0.dat
\end{lstlisting}
%
This will obviously generate a Tecplot output if a .dat file
is specified as last argument. A .vtu extension will produce
This will obviously generate a Tecplot output if a .dat file
is specified as last argument. A .vtu extension will produce
a Paraview or VisIt output.
%
%
......@@ -348,25 +348,25 @@ to visualise the result either in Tecplot, Paraview or VisIt.
To obtain a 2D expansion containing one of the planes of a
3DH1D field file, use the command:
\begin{lstlisting}[style=BashInputStyle]
\begin{lstlisting}[style=BashInputStyle]
FieldConvert -m homplane:planeid=value file.xml file.fld file-plane.fld
\end{lstlisting}
If the option \inltt{wavespace} is used, the Fourier coefficients
corresponding to \inltt{planeid} are obtained. The command in this case is:
\begin{lstlisting}[style=BashInputStyle]
\begin{lstlisting}[style=BashInputStyle]
FieldConvert -m homplane:wavespace:planeid=value file.xml \
file.fld file-plane.fld
\end{lstlisting}
The output file \inltt{file-plane.fld} can be processed in a similar
The output file \inltt{file-plane.fld} can be processed in a similar
way as described in section \ref{s:utilities:fieldconvert:sub:convert}
to visualise it either in Tecplot or in Paraview.
\subsection{Stretch a 3DH1D expansion: \textit{homstretch} module}
To stretch a 3DH1D expansion in the z-direction, use the command:
\begin{lstlisting}[style=BashInputStyle]
\begin{lstlisting}[style=BashInputStyle]
FieldConvert -m homstretch:factor=value file.xml file.fld file-stretch.fld
\end{lstlisting}
The number of modes in the resulting field can be chosen using the command-line
......@@ -374,7 +374,7 @@ parameter \inltt{output-points-hom-z}. Note that the output for
this module should always be a \inltt{.fld} file and this should not
be used in combination with other modules using a single command.
The output file \inltt{file-stretch.fld} can be processed in a similar
The output file \inltt{file-stretch.fld} can be processed in a similar
way as described in section \ref{s:utilities:fieldconvert:sub:convert}
to visualise it either in Tecplot or in Paraview.
......@@ -392,7 +392,7 @@ determine the inner product of these fields. The input option
\inltt{fromfld} must therefore be specified in this module.
Optional arguments for this module are \inltt{fields} which allow you to specify
the fields that you wish to use for the inner product, i.e.
the fields that you wish to use for the inner product, i.e.
\begin{lstlisting}[style=BashInputStyle]
FieldConvert -m innerproduct:fromfld=file1.fld:fields=''0,1,2'' file2.xml \
file2.fld out.stdout
......@@ -412,7 +412,7 @@ will take the inner product between a file names
field1\_0.fld, field1\_1.fld, field1\_2.fld and field1\_3.fld with
respect to field2.fld.
Analogously including the options \inltt{allfromflds}, i.e.
Analogously including the options \inltt{allfromflds}, i.e.
\begin{lstlisting}[style=BashInputStyle]
FieldConvert -m innerproduct:fromfld=file1.fld:multifldids=''0-3'':\
allfromflds file2.xml file2.fld out.stdout
......@@ -424,7 +424,7 @@ the unique inner products are evaluated so if four from fields are
given only the related triangular number $4\times5/2=10$ of inner
products are evaluated.
This option can be run in parallel.
This option can be run in parallel.
%
%
......@@ -544,7 +544,7 @@ $(x0,y0)$ to $(x1,y1)$ which can also be used in 3D by specifying $(x0,y0,z0)$
to $(x1,y1,z1)$.
An extraction of a plane of points can also be specified by
\begin{lstlisting}[style=BashInputStyle]
\begin{lstlisting}[style=BashInputStyle]
FieldConvert -m interppoints:fromxml=file1.xml:fromfld=file1.fld:\
plane=npts1,npts2,x0,y0,z0,x1,y1,z1,x2,y2,z2,x3,y3,z3
\end{lstlisting}
......@@ -553,13 +553,13 @@ direction and $(x0,y0,z0)$, $(x1,y1,z1)$, $(x2,y2,z2)$ and $(x3,y3,z3)$
define the plane of points specified in a clockwise or anticlockwise direction.
In addition an extraction of a box of points can also be specified by
\begin{lstlisting}[style=BashInputStyle]
\begin{lstlisting}[style=BashInputStyle]
FieldConvert -m interppoints:fromxml=file1.xml:fromfld=file1.fld:\
box=npts1,npts2,npts3,xmin,xmax,ymin,ymax,zmin,zmax
\end{lstlisting}
where \inltt{npts1,npts2,npts3} is the number of equispaced points in each
direction and $(xmin,ymin,zmin)$ and $(xmax,ymax,zmax3)$
define the limits of the box of points.
where \inltt{npts1,npts2,npts3} is the number of equispaced points in each
direction and $(xmin,ymin,zmin)$ and $(xmax,ymax,zmax3)$
define the limits of the box of points.
For the plane and box interpolation there is an additional optional
argument \inltt{cp=p0,q} which adds to the interpolated fields the value of
......@@ -568,7 +568,7 @@ pressure and $q$ is the free stream dynamics pressure. If the input
does not contain a field ``p'' or a velocity field ``u,v,w'' then $cp$
and $cp0$ are not evaluated accordingly
%
\begin{notebox}
\begin{notebox}
This module runs in parallel for the plane and box extraction of points. In this case a series of .dat files are generated that can be concatenated together. Other options do not run in parallel.
\end{notebox}
%
......@@ -611,7 +611,7 @@ have these as separate options.
In addition to the \inltt{smooth} or \inltt{globalcondense} options
you can specify \inltt{removesmallcontour}=100 which will remove
separate isocontours of less than 100 triangles.
separate isocontours of less than 100 triangles.
\begin{notebox}
Currently this option is only set up for triangles, quadrilaterals,
......@@ -633,7 +633,7 @@ keep.
The output file \inltt{jacenergy.fld} can be processed in a similar
way as described in section \ref{s:utilities:fieldconvert:sub:convert}
to visualise the result either in Tecplot, Paraview or VisIt.
to visualise the result either in Tecplot, Paraview or VisIt.
\subsection{Calculate mesh quality: \textit{qualitymetric} module}
......@@ -675,11 +675,11 @@ Two quality metrics are implemented that produce scalar fields $Q$:
To obtain a 2D expansion containing the mean mode (plane zero in Fourier space) of a
3DH1D field file, use the command:
\begin{lstlisting}[style=BashInputStyle]
\begin{lstlisting}[style=BashInputStyle]
FieldConvert -m meanmode file.xml file.fld file-mean.fld
\end{lstlisting}
The output file \inltt{file-mean.fld} can be processed in a similar
The output file \inltt{file-mean.fld} can be processed in a similar
way as described in section \ref{s:utilities:fieldconvert:sub:convert}
to visualise the result either in Tecplot or in Paraview or VisIt.
%
......@@ -697,7 +697,7 @@ FieldConvert --noequispaced -m pointdatatofld file.pts file.xml file.fld
This command will read in the points provided in the \inltt{file.pts}
and assume these are given at the same quadrature distribution as the
mesh and expansions defined in \inltt{file.xml} and output the field
to \inltt{file.fld}. If the points do not match an error will be dumped.
to \inltt{file.fld}. If the points do not match an error will be dumped.
The file \inltt{file.pts} which is assumed to be given by an interpolation from another source is of the form:
......@@ -720,7 +720,7 @@ point, the first, second, and third columns contains the
$x,y,z$-coordinate and subsequent columns contain the field values, in
this case the $p$-value. So in the general case of $n$-dimensional
data, the $n$ coordinates are specified in the first $n$ columns
accordingly followed by the field data.
accordingly followed by the field data.
The default argument is to use the equispaced (but potentially
collapsed) coordinates which can be obtained from the command.
......@@ -755,7 +755,7 @@ this option.
\subsection{Print L2 and LInf norms: \textit{printfldnorms} module}
\begin{lstlisting}[style=BashInputStyle]
\begin{lstlisting}[style=BashInputStyle]
FieldConvert -m printfldnorms test.xml test.fld out.stdout
\end{lstlisting}
......@@ -920,7 +920,7 @@ replacing \inltt{<nprocs>} with the number of processors. For the
\inltt{.dat} and \inltt{.plt} outputs the current version will produce
a single output file. However it is also sometimes useful to produce
multiple output files, one for each partition, and this
can be done by using the \inltt{writemultiplefiles} option, i.e.
can be done by using the \inltt{writemultiplefiles} option, i.e.
\begin{lstlisting}[style=BashInputStyle]
mpirun -np <nprocs> FieldConvert test.xml test.fld \
test.dat:dat:writemultiplefiles
......@@ -962,7 +962,7 @@ FieldConvert --nprocs 10 --procid 2 \
This call will only therefore consider the interpolation process across one
partition (namely, partition 2). To create the full interpolated field requires
a loop over each of the partitions, which, in a bash shell can be run as
\begin{lstlisting}[style=BashInputStyle]
\begin{lstlisting}[style=BashInputStyle]
for n in `seq 0 9`; do
FieldConvert --nprocs 10 --procid $n \
-m interpfield:fromxml=file1.xml:fromfld=file1.fld \
......@@ -975,7 +975,7 @@ of the different parallel partitions in files with names \inltt{P0000000.fld},
parallel field file. However, the \inltt{Info.xml} file, which contains the
information about which elements lie in each partition, is missing. This can be
generated by using the command
\begin{lstlisting}[style=BashInputStyle]
\begin{lstlisting}[style=BashInputStyle]
FieldConvert --nprocs 10 file2.xml file2.fld/Info.xml:info
\end{lstlisting}
Note the final \inltt{:info} extension on the last argument is necessary to tell
......@@ -988,7 +988,7 @@ input/output XML files.
Another approach to serially processing a large file is to initially process the
file into multiple partitions. This can be done with the \inltt{--part-only}
option. So the command
\begin{lstlisting}[style=BashInputStyle]
\begin{lstlisting}[style=BashInputStyle]
FieldConvert --part-only 10 file.xml file.fld
\end{lstlisting}
will partition the mesh into 10 partitions and write each partition into a
......@@ -998,7 +998,7 @@ partitioned XML files \inltt{P0000000.xml}, \inltt{P0000001.xml}, \dots,
There is also a \inltt{--part-only-overlapping} option, which can be run in the
same fashion.
\begin{lstlisting}[style=BashInputStyle]
\begin{lstlisting}[style=BashInputStyle]
FieldConvert --part-only-overlapping 10 file.xml file.fld
\end{lstlisting}
In this mode, the mesh is partitioned into 10 partitions in a similar manner,
......
This diff is collapsed.
#SET(GraphSources
#SET(GraphSources
# GraphExample.cpp)
SET(MemoryManagerSources
SET(MemoryManagerSources
MemoryManager.cpp)
SET(PartitionAnalyseSources
PartitionAnalyse.cpp)
SET(FoundationSources
FoundationDemo.cpp)
FoundationDemo.cpp)
SET(NodalDemoSources NodalDemo.cpp)
SET(TimeIntegrationDemoSources
......
......@@ -151,10 +151,14 @@ int main(int argc, char *argv[])
{
util = new NodalUtilPrism(order, r, s, t);
}
else if(shape == eQuadrilateral)
{
util = new NodalUtilQuad(order, r, s);
}
ASSERTL1(util, "Unknown shape type!");
const int nPoints = r.num_elements();
const int dim = shape == eTriangle ? 2 : 3;
const int dim = (shape == eTriangle || shape == eQuadrilateral) ? 2 : 3;
if (vm.count("integral"))
{
......@@ -175,6 +179,9 @@ int main(int argc, char *argv[])
exact = -0.5 * (sin(1.0) + cos(1.0) + M_E * M_E *
(sin(1.0) - cos(1.0))) / M_E;
break;
case eQuadrilateral:
exact = 2.0 * (M_E - 1.0 / M_E) * sin(1.0);
break;
case eTetrahedron:
exact = 1.0 / M_E - 1.0 / M_E / M_E / M_E;
break;
......
......@@ -107,8 +107,9 @@ void InputDat::Process(po::variables_map &vm)
while (!datFile.eof())
{
getline(datFile, line);
if (line.find("VARIABLES") != string::npos)
string linetest = line;
boost::to_upper(linetest);
if (linetest.find("VARIABLES") != string::npos)
{
std::size_t pos = line.find('=');
pos++;
......@@ -138,10 +139,9 @@ void InputDat::Process(po::variables_map &vm)
while (!datFile.eof())
{
getline(datFile, line);
if ((line.find("ZONE") != string::npos) ||
(line.find("Zone") != string::npos) ||
(line.find("zone") != string::npos))
string linetest = line;
boost::to_upper(linetest);
if ((linetest.find("ZONE") != string::npos))
{
ReadTecplotFEBlockZone(datFile, line, pts, ptsConn);
}
......
......@@ -795,16 +795,20 @@ void OutputTecplot::WriteTecplotConnectivity(std::ofstream &outfile)
}
else
{
int cnt = 1;
for (int i = 0; i < m_conn.size(); ++i)
{
const int nConn = m_conn[i].num_elements();
for (int j = 0; j < nConn; ++j)
for (int j = 0; j < nConn; ++j,++cnt)
{
outfile << m_conn[i][j] + 1 << " ";
if (!(cnt % 1000))
{
outfile << std::endl;
}
}
outfile << endl;