Commit c5d533fc authored by Chris Cantwell's avatar Chris Cantwell
Browse files

Merge branch 'master' into feature/cell-model-translate

parents 7f6555df 2db0ae69
......@@ -34,6 +34,12 @@ v4.4.0
- Enabled MUMPS support in PETSc if a Fortran compiler was found and added 3D
support to the Helmholtz smoother used e.g. in FieldConvert's C0Projection
module (!714)
- Fix bug in `Vmath::FillWhiteNoise` which caused `ForcingNoise` to have
a repeated pattern (!718)
- Fix bug in the calculation of the RHS magnitude in CG solver (!721)
- Fix bug in CMake Homebrew and MacPorts detection for OS X (!729)
- Fix bug in FieldUtils when using half mode expansions (!734)
- Fix bug in CMake PETSc detection for Ubuntu 16.04/Debian 9 (!735)
**ADRSolver:**
- Add a projection equation system for C^0 projections (!675)
......@@ -50,6 +56,7 @@ v4.4.0
- Fix linearised advection for full 3D cases (!708)
- Added a weak pressure formulation following Guermond & Shen (!713)
- Added a convective like outflow boundary condition from Dong (!713)
- Added the ability to specify Womersley boundary conditions for pulsatile flow (!472)
**CardiacEPSolver:**
- Added a Python translator utility to generate cell models from CellML (!723)
......@@ -58,6 +65,7 @@ v4.4.0
- Allow equi-spaced output for 1D and 2DH1D fields (!613)
- Update quality metric to include scaled Jacobian output (!695)
- Allow multiple XML files to be specified in InterpField module (!705)
- Fix issues with isocontour module (!719)
**NekMesh:**
- Modify curve module to allow for spline input (!628)
......@@ -79,10 +87,22 @@ v4.4.0
- Bug fix to get two meshgen regression tests working (!700)
- Remove libANN in deference to boost::geometry (!703)
- Refactor library to use NekMesh modules for CAD generation (!704)
- Add `varopti` process module to optimise meshes (!711)
- Add a mesh extract option to the linearise module to visualise the result
(!712)
- 2D to 3D mesh extrusion module (!715)
- Add new two-dimensional mesher from NACA code or step file (!720)
- Fix inverted boundary layer in 2D (!736)
- More sensible element sizing with boundary layers in 2D (!736)
- Change variable names in mcf file to make more sense (!736)
- Fix issues in varopti module so that it can be compiled without meshgen on
(!736)
- Replace LAPACK Eigenvalue calculation with handwritten function in
varopti (!738)
- Improved node-colouring algorithm for better load-balancing
in varopti (!738)
- Simplified calculation of the energy functional in varopti for improved
performance (!738)
**FieldConvert:**
- Move all modules to a new library, FieldUtils, to support post-processing
......@@ -91,6 +111,9 @@ v4.4.0
- Add module to add composite ID of elements as a field (!674)
- Add reader for Nek5000 field files (!680)
**Tester:**
- Fix output not displayed on segfault or system error (!745)
v4.3.5
------
**Library:**
......@@ -119,6 +142,12 @@ v4.3.4
**IncNavierStokesSolver:**
- Fix 2nd order time-integration for VCSMapping (!687)
v4.3.4
------
**Library:**
- Fix performance issue with `v_ExtractDataToCoeffs` for post-processing of large
simulations (!672)
v4.3.3
------
**Library**:
......
......@@ -70,13 +70,20 @@ IF (APPLE)
SET(CMAKE_LIBRARY_PATH ${MACPORTS_PREFIX}/lib ${CMAKE_LIBRARY_PATH})
SET(CMAKE_INCLUDE_PATH ${MACPORTS_PREFIX}/include ${CMAKE_INCLUDE_PATH})
MESSAGE(STATUS "Detected MacPorts installation: ${MACPORTS_PREFIX}")
ELSE()
UNSET(MACPORTS_PREFIX)
ENDIF()
IF (${DETECT_HOMEBREW} EQUAL 0)
SET(CMAKE_LIBRARY_PATH ${HOMEBREW_PREFIX}/lib ${CMAKE_LIBRARY_PATH})
SET(CMAKE_INCLUDE_PATH ${HOMEBREW_PREFIX}/include ${CMAKE_INCLUDE_PATH})
MESSAGE(STATUS "Detected Homebrew installation: ${HOMEBREW_PREFIX}")
ELSE()
UNSET(HOMEBREW_PREFIX)
ENDIF()
UNSET(DETECT_HOMEBREW)
UNSET(DETECT_MACPORTS)
ENDIF()
# Attempt to retrieve git branch and SHA1 hash of current changeset.
......@@ -132,6 +139,8 @@ OPTION(NEKTAR_BUILD_PACKAGES "Build Nektar++ binary packages" OFF)
MARK_AS_ADVANCED(NEKTAR_BUILD_PACKAGES)
OPTION(NEKTAR_TEST_ALL "Include full set of regression tests to this build." OFF)
OPTION(NEKTAR_TEST_USE_HOSTFILE "Use a hostfile to explicitly specify number of
slots." OFF)
# Meshing utilities and library
IF (NOT WIN32)
......
......@@ -24,16 +24,16 @@ if(NOT DEFINED OCE_DIR)
# Check for OSX needs to come first because UNIX evaluates to true on OSX
if(${CMAKE_SYSTEM_NAME} MATCHES "Darwin")
if(DEFINED MACPORTS_PREFIX)
find_package(OCE 0.17 QUIET HINTS ${MACPORTS_PREFIX}/Library/Frameworks)
find_package(OCE 0.15 QUIET HINTS ${MACPORTS_PREFIX}/Library/Frameworks)
elseif(DEFINED HOMEBREW_PREFIX)
find_package(OCE 0.17 QUIET HINTS ${HOMEBREW_PREFIX}/Cellar/oce/*)
find_package(OCE 0.15 QUIET HINTS ${HOMEBREW_PREFIX}/Cellar/oce/*)
endif()
elseif(UNIX)
set(OCE_DIR "/usr/local/share/cmake/")
endif()
endif()
find_package(OCE 0.17 QUIET)
find_package(OCE 0.15 QUIET)
if(OCE_FOUND)
message(STATUS "-- OpenCASCADE Community Edition has been found.")
set(OCC_INCLUDE_DIR ${OCE_INCLUDE_DIRS})
......@@ -113,8 +113,8 @@ if(OCC_FOUND)
TKHLR
TKFeat
)
if(OCC_VERSION_STRING VERSION_LESS 6.8)
if(OCC_VERSION_STRING VERSION_LESS 6.7)
MESSAGE(SEND_ERROR "OCC version too low")
endif(OCC_VERSION_STRING VERSION_LESS 6.8)
endif(OCC_VERSION_STRING VERSION_LESS 6.7)
message(STATUS "-- Found OCE/OpenCASCADE with OCC version: ${OCC_VERSION_STRING}")
endif(OCC_FOUND)
......@@ -225,6 +225,13 @@ show :
else ()
set (PETSC_LIBRARY_VEC "NOTFOUND" CACHE INTERNAL "Cleared" FORCE) # There is no libpetscvec
petsc_find_library (SINGLE petsc)
# Debian 9/Ubuntu 16.04 uses _real and _complex extensions when using libraries in /usr/lib/petsc.
if (NOT PETSC_LIBRARY_SINGLE)
petsc_find_library (SINGLE petsc_real)
endif()
if (NOT PETSC_LIBRARY_SINGLE)
petsc_find_library (SINGLE petsc_complex)
endif()
foreach (pkg SYS VEC MAT DM KSP SNES TS ALL)
set (PETSC_LIBRARIES_${pkg} "${PETSC_LIBRARY_SINGLE}")
endforeach ()
......
......@@ -83,7 +83,7 @@
pages = {293-301},
year = {1996},
}
@article{CoRaNa98,
author = {M. Courtemanche and R. J. Ramirez and S. Nattel},
title = {Ionic mechanisms underlying human atrial action potential properties: insights from a mathematical model},
......@@ -103,7 +103,7 @@
pages = {1501-1526},
year = {1991},
}
@article{TuPa06,
author = {K. H. W. J. ten Tusscher and A. V. Panfilov},
title = {Alternans and spiral breakup in a human ventricular tissue model},
......@@ -123,7 +123,7 @@
pages = {4331-51},
year = {2011},
}
@article{ShKa96,
title={Tetrahedral {\it hp} Finite Elements: Algorithms and Flow Simulations},
author={Sherwin, SJ and Karniadakis, G Em},
......@@ -378,9 +378,9 @@ year={2011}
}
@article{GuSh03,
Author="J.L. Guermond and J. Shen",
title="Velocity-correction projection methods for incompressible flows",
journal="SIAM J. Numer.\ Anal.",
Author="J.L. Guermond and J. Shen",
title="Velocity-correction projection methods for incompressible flows",
journal="SIAM J. Numer.\ Anal.",
volume=41,
pages = "112--134",
year=2003
......@@ -460,3 +460,18 @@ year={2011}
pages = {1079-1097},
}
@inproceedings{TuPeMo16,
abstract = {The generation of sufficiently high quality unstructured high-order meshes remains a significant obstacle in the adoption of high-order methods. However, there is little consensus on which approach is the most robust, fastest and produces the 'best' meshes. We aim to provide a route to investigate this question, by examining popular high-order mesh generation methods in the context of an efficient variational framework for the generation of curvilinear meshes. By considering previous works in a variational form, we are able to compare their characteristics and study their robustness. Alongside a description of the theory and practical implementation details, including an efficient multi-threading parallelisation strategy, we demonstrate the effectiveness of the framework, showing how it can be used for both mesh quality optimisation and untangling of invalid meshes.},
author = {Turner, M and Peir{\'{o}}, J and Moxey, D},
booktitle = {25th International Meshing Roundtable},
doi = {10.1016/j.proeng.2016.11.069},
file = {:Users/mike/Downloads/1-s2.0-S1877705816333781-main.pdf:pdf},
issn = {18777058},
keywords = {energy functional,high-order mesh generation,numerical optimization,variational mesh generation},
pages = {340--352},
title = {{A Variational Framework for High-Order Mesh Generation}},
url = {www.elsevier.com/locate/procedia%5Cnhttp://linkinghub.elsevier.com/retrieve/pii/S1877705816333781},
volume = {163},
year = {2016}
}
......@@ -872,6 +872,83 @@ the session file:
\item \inltt{SVVDiffCoeff}: sets the SVV diffusion coefficient (default value = 0.1)
\end{itemize}
\subsection{Womersley Boundary Condition}
It is possible to define the time-dependent Womersley velocity profile
for pulsatile flow in a pipe. The modulation of the velocity profile
is based on the desired peak or centerline velocity which can be
represented by a Fourier expansion $U_{max}=A(\omega_n)e^{i\omega_n
t}$ where $A$ are the Fourier modes and $\omega $ the frequency. The
Womersley solution is then defined as:
$$ w(r,t) = A_0(1-(r/R)^2) + \sum_{n=1}^N
\tilde{A_n}[1-\frac{J_0(i^{3/2}\alpha_n r/R)}{J_0(i^{3/2}
\alpha)}]e^{i\omega_n t} $$
where the Womersley number $\alpha$ is defined:
$$ \alpha_n = R\sqrt{\frac{2\pi n}{T\nu}}$$
and $\tilde{A_n}$ ($n=1:N$) are the Fourier coefficients scaled in the
following way:
$$ \tilde{A_n} = 2A_n/[1 - \frac{1}{J_0(i^{3/2}\alpha)}] $$
The Womersley velocity profile is implemented in the following way:
\begin{lstlisting}[style=XMLStyle]
<REGION REF="0">
<D VAR="u" USERDEFINEDTYPE="Womersley:WomParams.xml" VALUE="0" />
<D VAR="v" USERDEFINEDTYPE="Womersley:WomParams.xml" VALUE="0" />
<D VAR="w" USERDEFINEDTYPE="Womersley:WomParams.xml" VALUE="0" />
<N VAR="p" USERDEFINEDTYPE="H" VALUE="0" />
</REGION>
\end{lstlisting}
A file containing the Fourier coefficients, $\tilde{A}$, must be in
the directory where the solver is called from. The name of the file is
defined by the string given in the attribute \inltt{USERDEFINEDTYPE}
after the ``:'' and contains the real and imaginary coefficients. This
file has the format
\begin{lstlisting}[style=XMLStyle]
<NEKTAR>
<WOMERSLEYBC>
<WOMPARAMS>
<W PROPERTY="Radius" VALUE="0.5" />
<W PROPERTY="Period" VALUE="1.0" />
<W PROPERTY="axisnormal" VALUE="0.0,0.0,1.0" />
<W PROPERTY="axispoint" VALUE="0.0,0.0,0.0" />
</WOMPARAMS>
<FOURIERCOEFFS>
<F ID="0"> 0.600393641193, 0.0 </F>
<F ID="1"> -0.277707172935, 0.0767582715413 </F>
<F ID="2"> -0.0229953131146, 0.0760936232478 </F>
<F ID="3"> 0.00858135174058, 0.017089888642 </F>
<F ID="4"> 0.0140332527651, 0.0171575122496 </F>
<F ID="5"> 0.0156970122129, -0.00547357750345 </F>
<F ID="6"> 0.00473626554238, -0.00498786519876 </F>
<F ID="7"> 0.00204434981523, -0.00614566561937 </F>
<F ID="8"> -0.000274697215201, 0.000153571881197 </F>
<F ID="9"> -0.000148037910774, 2.68919619581e-05 </F>
</FOURIERCOEFFS>
</WOMERSLEYBC>
</NEKTAR>
\end{lstlisting}
Each value of $\tilde{A}$ is provided in the \inltt{FOURIERCOEFFS}
section and provided as separate entries containing the real and
imaginary components, i.e. the mean component provided above is
$0.600393641193,0.0$.
Similarly in the \inltt{WOMPARAMS} section the key parameters of the boundary condition are also provided as:
\begin{itemize}
\item \inltt{RADIUS} is the radius of the boundary.
\item \inltt{PERIOD} is the cycle time period,
\item \inltt{AXISNORMAL} defines the normal direction to the boundary,
\item \inltt{AXISPOINT} defines a coordinate in the boundary centre,
\end{itemize}
\subsection{Forcing}
\subsubsection{MovingBody}
......
......@@ -69,7 +69,7 @@ format by issuing the command
\begin{lstlisting}[style=BashInputStyle]
FieldConvert in.fld out.fld:fld:format=Hdf5
\end{lstlisting}
%
%
\section{Range option \textit{-r}}
The Fieldconvert range option \inltt{-r} allows the user to specify
a sub-range of the mesh (computational domain) by using an
......@@ -121,7 +121,7 @@ possibly also Reynolds stresses) into single file;
\item \inltt{extract}: Extract a boundary field;
\item \inltt{homplane}: Extract a plane from 3DH1D expansions;
\item \inltt{homstretch}: Stretch a 3DH1D expansion by an integer factor;
\item \inltt{innerproduct}: take the inner product between one or a series of fields with another field (or series of fields).
\item \inltt{innerproduct}: take the inner product between one or a series of fields with another field (or series of fields).
\item \inltt{interpfield}: Interpolates one field to another, requires fromxml, fromfld to be defined;
\item \inltt{interppointdatatofld}: Interpolates given discrete data using a finite difference approximation to a fld file given an xml file;
\item \inltt{interppoints}: Interpolates a set of points to another, requires fromfld and fromxml to be defined, a line or plane of points can be defined;
......@@ -130,7 +130,7 @@ possibly also Reynolds stresses) into single file;
\item \inltt{qualitymetric}: Evaluate a quality metric of the underlying mesh to show mesh quality;
\item \inltt{meanmode}: Extract mean mode (plane zero) of 3DH1D expansions;
\item \inltt{pointdatatofld}: Given discrete data at quadrature points
project them onto an expansion basis and output fld file;
project them onto an expansion basis and output fld file;
\item \inltt{printfldnorms}: Print L2 and LInf norms to stdout;
\item \inltt{scalargrad}: Computes scalar gradient field;
\item \inltt{scaleinputfld}: Rescale input field by a constant factor;
......@@ -217,7 +217,7 @@ new field. To use this we simply run
In this case, we have produced a Tecplot file which contains the mesh and a
variable that contains the composite ID. To assist in boundary identification,
the input file \inlsh{mesh.xml} should be a surface XML file that can be
obtained through the \mc \inltt{extract} module (see section
obtained through the \nm \inltt{extract} module (see section
\ref{s:utilities:nekmesh:extract}).
\subsection{Sum two .fld files: \textit{addFld} module}
......@@ -234,8 +234,8 @@ which multiply the values of a given .fld file by a constant \inltt{value}.
is the associated session file, \inltt{file2.fld} is the .fld file which
is summed to \inltt{file1.fld} and finally \inltt{file3.fld} is the output
which contain the sum of the two .fld files.
\inltt{file3.fld} can be processed in a similar way as described
in section \ref{s:utilities:fieldconvert:sub:convert} to visualise
\inltt{file3.fld} can be processed in a similar way as described
in section \ref{s:utilities:fieldconvert:sub:convert} to visualise
the result either in Tecplot, Paraview or VisIt.
%
%
......@@ -249,8 +249,8 @@ use the \inltt{combineAvg} module of FieldConvert
file3.fld
\end{lstlisting}
%
\inltt{file3.fld} can be processed in a similar way as described
in section \ref{s:utilities:fieldconvert:sub:convert} to visualise
\inltt{file3.fld} can be processed in a similar way as described
in section \ref{s:utilities:fieldconvert:sub:convert} to visualise
the result either in Tecplot, Paraview or VisIt.
%
%
......@@ -325,8 +325,8 @@ of interest. Finally to process the surface file one can use
FieldConvert test-b0.xml test-b0.fld test-b0.dat
\end{lstlisting}
%
This will obviously generate a Tecplot output if a .dat file
is specified as last argument. A .vtu extension will produce
This will obviously generate a Tecplot output if a .dat file
is specified as last argument. A .vtu extension will produce
a Paraview or VisIt output.
%
%
......@@ -348,25 +348,25 @@ to visualise the result either in Tecplot, Paraview or VisIt.
To obtain a 2D expansion containing one of the planes of a
3DH1D field file, use the command:
\begin{lstlisting}[style=BashInputStyle]
\begin{lstlisting}[style=BashInputStyle]
FieldConvert -m homplane:planeid=value file.xml file.fld file-plane.fld
\end{lstlisting}
If the option \inltt{wavespace} is used, the Fourier coefficients
corresponding to \inltt{planeid} are obtained. The command in this case is:
\begin{lstlisting}[style=BashInputStyle]
\begin{lstlisting}[style=BashInputStyle]
FieldConvert -m homplane:wavespace:planeid=value file.xml \
file.fld file-plane.fld
\end{lstlisting}
The output file \inltt{file-plane.fld} can be processed in a similar
The output file \inltt{file-plane.fld} can be processed in a similar
way as described in section \ref{s:utilities:fieldconvert:sub:convert}
to visualise it either in Tecplot or in Paraview.
\subsection{Stretch a 3DH1D expansion: \textit{homstretch} module}
To stretch a 3DH1D expansion in the z-direction, use the command:
\begin{lstlisting}[style=BashInputStyle]
\begin{lstlisting}[style=BashInputStyle]
FieldConvert -m homstretch:factor=value file.xml file.fld file-stretch.fld
\end{lstlisting}
The number of modes in the resulting field can be chosen using the command-line
......@@ -374,7 +374,7 @@ parameter \inltt{output-points-hom-z}. Note that the output for
this module should always be a \inltt{.fld} file and this should not
be used in combination with other modules using a single command.
The output file \inltt{file-stretch.fld} can be processed in a similar
The output file \inltt{file-stretch.fld} can be processed in a similar
way as described in section \ref{s:utilities:fieldconvert:sub:convert}
to visualise it either in Tecplot or in Paraview.
......@@ -392,7 +392,7 @@ determine the inner product of these fields. The input option
\inltt{fromfld} must therefore be specified in this module.
Optional arguments for this module are \inltt{fields} which allow you to specify
the fields that you wish to use for the inner product, i.e.
the fields that you wish to use for the inner product, i.e.
\begin{lstlisting}[style=BashInputStyle]
FieldConvert -m innerproduct:fromfld=file1.fld:fields=''0,1,2'' file2.xml \
file2.fld out.stdout
......@@ -412,7 +412,7 @@ will take the inner product between a file names
field1\_0.fld, field1\_1.fld, field1\_2.fld and field1\_3.fld with
respect to field2.fld.
Analogously including the options \inltt{allfromflds}, i.e.
Analogously including the options \inltt{allfromflds}, i.e.
\begin{lstlisting}[style=BashInputStyle]
FieldConvert -m innerproduct:fromfld=file1.fld:multifldids=''0-3'':\
allfromflds file2.xml file2.fld out.stdout
......@@ -424,7 +424,7 @@ the unique inner products are evaluated so if four from fields are
given only the related triangular number $4\times5/2=10$ of inner
products are evaluated.
This option can be run in parallel.
This option can be run in parallel.
%
%
......@@ -544,7 +544,7 @@ $(x0,y0)$ to $(x1,y1)$ which can also be used in 3D by specifying $(x0,y0,z0)$
to $(x1,y1,z1)$.
An extraction of a plane of points can also be specified by
\begin{lstlisting}[style=BashInputStyle]
\begin{lstlisting}[style=BashInputStyle]
FieldConvert -m interppoints:fromxml=file1.xml:fromfld=file1.fld:\
plane=npts1,npts2,x0,y0,z0,x1,y1,z1,x2,y2,z2,x3,y3,z3
\end{lstlisting}
......@@ -553,13 +553,13 @@ direction and $(x0,y0,z0)$, $(x1,y1,z1)$, $(x2,y2,z2)$ and $(x3,y3,z3)$
define the plane of points specified in a clockwise or anticlockwise direction.
In addition an extraction of a box of points can also be specified by
\begin{lstlisting}[style=BashInputStyle]
\begin{lstlisting}[style=BashInputStyle]
FieldConvert -m interppoints:fromxml=file1.xml:fromfld=file1.fld:\
box=npts1,npts2,npts3,xmin,xmax,ymin,ymax,zmin,zmax
\end{lstlisting}
where \inltt{npts1,npts2,npts3} is the number of equispaced points in each
direction and $(xmin,ymin,zmin)$ and $(xmax,ymax,zmax)$
define the limits of the box of points.
where \inltt{npts1,npts2,npts3} is the number of equispaced points in each
direction and $(xmin,ymin,zmin)$ and $(xmax,ymax,zmax)$
define the limits of the box of points.
For the plane and box interpolation there is an additional optional
argument \inltt{cp=p0,q} which adds to the interpolated fields the value of
......@@ -568,7 +568,7 @@ pressure and $q$ is the free stream dynamics pressure. If the input
does not contain a field ``p'' or a velocity field ``u,v,w'' then $cp$
and $cp0$ are not evaluated accordingly
%
\begin{notebox}
\begin{notebox}
This module runs in parallel for the plane and box extraction of points. In this case a series of .dat files are generated that can be concatenated together. Other options do not run in parallel.
\end{notebox}
%
......@@ -611,7 +611,7 @@ have these as separate options.
In addition to the \inltt{smooth} or \inltt{globalcondense} options
you can specify \inltt{removesmallcontour}=100 which will remove
separate isocontours of less than 100 triangles.
separate isocontours of less than 100 triangles.
\begin{notebox}
Currently this option is only set up for triangles, quadrilaterals,
......@@ -633,7 +633,7 @@ keep.
The output file \inltt{jacenergy.fld} can be processed in a similar
way as described in section \ref{s:utilities:fieldconvert:sub:convert}
to visualise the result either in Tecplot, Paraview or VisIt.
to visualise the result either in Tecplot, Paraview or VisIt.
\subsection{Calculate mesh quality: \textit{qualitymetric} module}
......@@ -675,11 +675,11 @@ Two quality metrics are implemented that produce scalar fields $Q$:
To obtain a 2D expansion containing the mean mode (plane zero in Fourier space) of a
3DH1D field file, use the command:
\begin{lstlisting}[style=BashInputStyle]
\begin{lstlisting}[style=BashInputStyle]
FieldConvert -m meanmode file.xml file.fld file-mean.fld
\end{lstlisting}
The output file \inltt{file-mean.fld} can be processed in a similar
The output file \inltt{file-mean.fld} can be processed in a similar
way as described in section \ref{s:utilities:fieldconvert:sub:convert}
to visualise the result either in Tecplot or in Paraview or VisIt.
%
......@@ -697,7 +697,7 @@ FieldConvert --noequispaced -m pointdatatofld file.pts file.xml file.fld
This command will read in the points provided in the \inltt{file.pts}
and assume these are given at the same quadrature distribution as the
mesh and expansions defined in \inltt{file.xml} and output the field
to \inltt{file.fld}. If the points do not match an error will be dumped.
to \inltt{file.fld}. If the points do not match an error will be dumped.
The file \inltt{file.pts} which is assumed to be given by an interpolation from another source is of the form:
......@@ -720,7 +720,7 @@ point, the first, second, and third columns contains the
$x,y,z$-coordinate and subsequent columns contain the field values, in
this case the $p$-value. So in the general case of $n$-dimensional
data, the $n$ coordinates are specified in the first $n$ columns
accordingly followed by the field data.
accordingly followed by the field data.
The default argument is to use the equispaced (but potentially
collapsed) coordinates which can be obtained from the command.
......@@ -755,7 +755,7 @@ this option.
\subsection{Print L2 and LInf norms: \textit{printfldnorms} module}
\begin{lstlisting}[style=BashInputStyle]
\begin{lstlisting}[style=BashInputStyle]
FieldConvert -m printfldnorms test.xml test.fld out.stdout
\end{lstlisting}
......@@ -920,7 +920,7 @@ replacing \inltt{<nprocs>} with the number of processors. For the
\inltt{.dat} and \inltt{.plt} outputs the current version will produce
a single output file. However it is also sometimes useful to produce
multiple output files, one for each partition, and this
can be done by using the \inltt{writemultiplefiles} option, i.e.
can be done by using the \inltt{writemultiplefiles} option, i.e.
\begin{lstlisting}[style=BashInputStyle]
mpirun -np <nprocs> FieldConvert test.xml test.fld \
test.dat:dat:writemultiplefiles
......@@ -962,7 +962,7 @@ FieldConvert --nprocs 10 --procid 2 \
This call will only therefore consider the interpolation process across one
partition (namely, partition 2). To create the full interpolated field requires
a loop over each of the partitions, which, in a bash shell can be run as
\begin{lstlisting}[style=BashInputStyle]
\begin{lstlisting}[style=BashInputStyle]
for n in `seq 0 9`; do
FieldConvert --nprocs 10 --procid $n \
-m interpfield:fromxml=file1.xml:fromfld=file1.fld \
......@@ -975,7 +975,7 @@ of the different parallel partitions in files with names \inltt{P0000000.fld},
parallel field file. However, the \inltt{Info.xml} file, which contains the
information about which elements lie in each partition, is missing. This can be
generated by using the command
\begin{lstlisting}[style=BashInputStyle]
\begin{lstlisting}[style=BashInputStyle]
FieldConvert --nprocs 10 file2.xml file2.fld/Info.xml:info
\end{lstlisting}
Note the final \inltt{:info} extension on the last argument is necessary to tell
......@@ -988,7 +988,7 @@ input/output XML files.
Another approach to serially processing a large file is to initially process the
file into multiple partitions. This can be done with the \inltt{--part-only}
option. So the command
\begin{lstlisting}[style=BashInputStyle]
\begin{lstlisting}[style=BashInputStyle]
FieldConvert --part-only 10 file.xml file.fld
\end{lstlisting}
will partition the mesh into 10 partitions and write each partition into a
......@@ -998,7 +998,7 @@ partitioned XML files \inltt{P0000000.xml}, \inltt{P0000001.xml}, \dots,
There is also a \inltt{--part-only-overlapping} option, which can be run in the
same fashion.
\begin{lstlisting}[style=BashInputStyle]
\begin{lstlisting}[style=BashInputStyle]
FieldConvert --part-only-overlapping 10 file.xml file.fld
\end{lstlisting}
In this mode, the mesh is partitioned into 10 partitions in a similar manner,
......
This diff is collapsed.
......@@ -495,7 +495,7 @@ where the step time is used as variable. For example, the function
</FUNCTION>
\end{lstlisting}
For .pts files, the time consuming computation of interpolation weights in only
For .pts files, the time consuming computation of interpolation weights is only
performed for the first timestep. The weights are stored and reused in all subsequent steps,
which is why all consecutive .pts files must use the same ordering, number and location of
data points.
......@@ -674,4 +674,4 @@ will be the y-axis and the z-axis.
%%% Local Variables:
%%% mode: latex
%%% TeX-master: "../user-guide"
%%% End:
\ No newline at end of file
%%% End:
......@@ -190,7 +190,10 @@ state of the solution fields at a given timestep. This can subsequently be used
for restarting the simulation or examining time-dependent behaviour. This
produces a sequence of files, by default named \inltt{session\_*.chk}, where
\inltt{*} is replaced by a counter. The initial condition is written to
\inltt{session\_0.chk}.
\inltt{session\_0.chk}. Existing files are not overwritten, but renamed to e.g.
\inltt{session\_0.bak0.chk}. In case this file already exists, too, the \inltt{chk}-file
is renamed to \inltt{session\_0.bak*.chk} and so on.
\begin{notebox}
This functionality is equivalent to setting the \inltt{IO\_CheckSteps}
......
#SET(GraphSources
#SET(GraphSources
# GraphExample.cpp)
SET(MemoryManagerSources
SET(MemoryManagerSources
MemoryManager.cpp)
SET(PartitionAnalyseSources
PartitionAnalyse.cpp)
SET(FoundationSources
FoundationDemo.cpp)
FoundationDemo.cpp)
SET(NodalDemoSources NodalDemo.cpp)
SET(TimeIntegrationDemoSources
......
......@@ -151,10 +151,14 @@ int main(int argc, char *argv[])
{
util = new NodalUtilPrism(order, r, s, t);
}
else if(shape == eQuadrilateral)
{
util = new NodalUtilQuad(order, r, s);
}
ASSERTL1(util, "Unknown shape type!");
const int nPoints = r.num_elements();
const int dim = shape == eTriangle ? 2 : 3;
const int dim = (shape == eTriangle || shape == eQuadrilateral) ? 2 : 3;
if (vm.count("integral"))
{
......@@ -175,6 +179,9 @@ int main(int argc, char *argv[])
exact = -0.5 * (sin(1.0) + cos(1.0) + M_E * M_E *