Commit 905e496a authored by David Moxey's avatar David Moxey

Merge remote-tracking branch 'upstream/master' into feature/boost-std-cleanup

parents 7d9e5e48 0ae8ff65
......@@ -10,6 +10,9 @@ v5.0.0
- Fix ThirdpartyCCM options (!802)
- Fix Windows CRLF tokens in GEO reader and improve comment handling (!805)
- Use chrono in Timer (!807)
- Fix caching of FUNCTION tags that read from file and provide the same
functionality in FUNCTIONs defined for forcings (!759)
- Added native support for csv files in addition to pts (!760)
**NekMesh**:
- Add feature to read basic 2D geo files as CAD (!731)
......@@ -21,22 +24,16 @@ v5.0.0
- Fix issue with reading CCM files due to definition of default arrays
rather than a vector (!797)
- Fix inverted triangles and small memory issue in surface meshing (!798)
- Additional curve types in GEO reader: BSpline, Circle, Ellipse (!800)
**FieldConvert**:
- Add input module for Semtex field files (!777)
- Fixed interppoints module (!760)
- Move StreamFunction utility to a FieldConvert module (!809)
**Documentation**:
- Added the developer-guide repository as a submodule (!751)
v4.4.2
------
**NekMesh**:
- Fix uninitialised memory bug in Nek5000 input module (!801)
**Library**
- Fix ability to set default implementation in Collections and added an option
to set eNoCollections in FieldConvert as default (!789)
v4.4.1
------
**Library**
......@@ -54,6 +51,12 @@ v4.4.1
- Fix deadlock with HDF5 input (!786)
- Fix missing entries in LibUtilities::kPointsTypeStr (!792)
- Fix compiler warnings with CommDataType (!793)
- Fix ability to set default implementation in Collections and added an option
to set eNoCollections in FieldConvert as default (!789)
- Fix performance issue in ProcessIsoContour in relation to memory consumption
(!821)
- Fix performance issue with ExtractPhysToBndElmt (!796)
- Fix available classes being listed multiple times (!817)
**FieldConvert:**
- Fix issue with field ordering in the interppointdatatofld module (!754)
......@@ -69,10 +72,14 @@ v4.4.1
- Fix issue with older rea input files (!765)
- Fix memory leak in variational optimiser, add small optimisations (!785)
- Check the dimensionality of the CAD system before running the 2D generator (!780)
- Fix uninitialised memory bug in Nek5000 input module (!801)
**IncNavierStokesSolver**
- Fix an initialisation issue when using an additional advective field (!779)
**Utilities**
- Fix vtkToFld missing dependency which prevented compiling with VTK 7.1 (!808)
**Packaging**
- Added missing package for FieldUtils library (!755)
......
......@@ -60,8 +60,9 @@ IF (NEKTAR_USE_FFTW)
# Test if FFTW path is a system path. Only add to include path if not an
# implicitly defined CXX include path (due to GCC 6.x now providing its own
# version of some C header files and -isystem reorders include paths).
GET_FILENAME_COMPONENT(X ${CMAKE_CXX_IMPLICIT_INCLUDE_DIRECTORIES} ABSOLUTE)
GET_FILENAME_COMPONENT(X "${CMAKE_CXX_IMPLICIT_INCLUDE_DIRECTORIES}" ABSOLUTE)
GET_FILENAME_COMPONENT(Y ${FFTW_INCLUDE_DIR} ABSOLUTE)
IF (NOT Y MATCHES ".*${X}.*")
INCLUDE_DIRECTORIES(SYSTEM ${FFTW_INCLUDE_DIR})
ENDIF()
......
......@@ -475,3 +475,34 @@ volume = {163},
year = {2016}
}
@article{rospjo16,
title={Eigensolution analysis of spectral/hp continuous Galerkin approximations to advection--diffusion problems: Insights into spectral vanishing viscosity},
author={Moura, RC and Sherwin, SJ and Peir{\'o}, Joaquim},
journal={Journal of Computational Physics},
volume={307},
pages={401--422},
year={2016},
publisher={Elsevier}
}
@article{yvsiouei93,
title={Legendre pseudospectral viscosity method for nonlinear conservation laws},
author={Maday, Yvon and Kaber, Sidi M Ould and Tadmor, Eitan},
journal={SIAM Journal on Numerical Analysis},
volume={30},
number={2},
pages={321--342},
year={1993},
publisher={SIAM}
}
@article{rosh06,
title={Stabilisation of spectral/hp element methods through spectral vanishing viscosity: Application to fluid mechanics modelling},
author={Kirby, Robert M and Sherwin, Spencer J},
journal={Computer methods in applied mechanics and engineering},
volume={195},
number={23},
pages={3128--3144},
year={2006},
publisher={Elsevier}
}
\ No newline at end of file
......@@ -221,7 +221,7 @@ to screen;
\item \inltt{TInf} farfield temperature (i.e. $T_{\infty}$). Default value = 288.15 $K$;
\item \inltt{Twall} temperature at the wall when isothermal boundary
conditions are employed (i.e. $T_{w}$). Default value = 300.15$K$;
\item \inltt{uint} farfield $X$-component of the velocity (i.e. $u_{\infty}$). Default value = 0.1 $m/s$;
\item \inltt{uInf} farfield $X$-component of the velocity (i.e. $u_{\infty}$). Default value = 0.1 $m/s$;
\item \inltt{vInf} farfield $Y$-component of the velocity (i.e. $v_{\infty}$). Default value = 0.0 $m/s$;
\item \inltt{wInf} farfield $Z$-component of the velocity (i.e. $w_{\infty}$). Default value = 0.0 $m/s$;
\item \inltt{mu} dynamic viscosity (i.e. $\mu_{\infty}$). Default value = 1.78e-05 $Pa s$;
......
......@@ -830,7 +830,6 @@ the advection term using the pressure inverse mass matrix. It can be used just i
<I PROPERTY="SmoothAdvection" VALUE="True"/>
\end{lstlisting}
\item \inltt{SpectralVanishingViscosity}: activates a stabilization technique
which increases the viscosity on the modes with the highest frequencies.
\begin{lstlisting}[style=XMLStyle]
......@@ -839,7 +838,34 @@ which increases the viscosity on the modes with the highest frequencies.
In a Quasi-3D simulation, this will affect both the Fourier and the spectral/hp expansions.
To activate them independently, use \inltt{SpectralVanishingViscositySpectralHP}
and \inltt{SpectralVanishingViscosityHomo1D}.
and \inltt{SpectralVanishingViscosityHomo1D}. \\
There are three spectral vanishing viscosity kernels available:
\begin{center}
\footnotesize
\begin{tabular}{lcc}
\toprule
{SVV Kernel} & {\texttt{SpectralVanishingViscosity}} \\
\midrule
Exponential Kernel & \texttt{True} \\
Power Kernel & \texttt{PowerKernel} \\
DG Kernel & \texttt{DGKernel} \\
\bottomrule
\end{tabular}
\end{center}
The Exponential kernel is based on the work of Maday et al. \cite{yvsiouei93},
its extension to 2D can be found in \cite{rosh06}. A diffusion coefficient can
be specified which defines the base magnitude of the viscosity; this parameter
is scaled by $h/p$. SVV viscosity is activated for expansion modes greater than
the product of the cut-off ratio and the expansion order. The Power kernel is a
smooth function with no cut-off frequency; it focusses on a narrower band of
higher expansion modes as the polynomial order increases. The cut-off ratio
parameter for the Power kernel corresponds to the power ratio, see Moura et al.
\cite{rospjo16}. The DG-Kernel is an attempt to match the dissipation of CG-SVV
to DG schemes of lower expansion orders. This kernel does not require any parameters
although the diffusion coefficient can still be modified.
\item \inltt{DEALIASING}: activates the 3/2 padding rule on the advection term
of a Quasi-3D simulation.
......@@ -856,7 +882,6 @@ stabilize the simulation. This method is based on the work of Kirby and Sherwin
\end{itemize}
\subsection{Parameters}
The following parameters can be specified in the \inltt{PARAMETERS} section of
the session file:
......@@ -869,7 +894,7 @@ the session file:
\item \inltt{MinSubSteps}: perform a minimum number of substeps in sub-stepping algorithm (default is 1)
\item \inltt{MaxSubSteps}: perform a maximum number of substeps in sub-stepping algorithm otherwise exit (default is 100)
\item \inltt{SVVCutoffRatio}: sets the ratio of Fourier frequency not affected by the SVV technique (default value = 0.75, i.e. the first 75\% of frequency are not damped)
\item \inltt{SVVDiffCoeff}: sets the SVV diffusion coefficient (default value = 0.1)
\item \inltt{SVVDiffCoeff}: sets the SVV diffusion coefficient (default value = 0.1 (Exponential and Power kernel), 1 (DG-Kernel))
\end{itemize}
\subsection{Womersley Boundary Condition}
......
......@@ -214,6 +214,7 @@ openany, % A chapter may start on either a recto or verso page.
}
\lstset{%
escapeinside={(*}{*)},%
breaklines=true,
}
\usepackage{tikz}
......
......@@ -162,13 +162,15 @@ possibly also Reynolds stresses) into single file;
\item \inltt{concatenate}: Concatenate a \nekpp binary output (.chk or .fld) field file into single file (deprecated);
\item \inltt{equispacedoutput}: Write data as equi-spaced output using simplices to represent the data for connecting points;
\item \inltt{extract}: Extract a boundary field;
\item \inltt{gradient}: Computes gradient of fields;
\item \inltt{homplane}: Extract a plane from 3DH1D expansions;
\item \inltt{homstretch}: Stretch a 3DH1D expansion by an integer factor;
\item \inltt{innerproduct}: take the inner product between one or a series of fields with another field (or series of fields).
\item \inltt{interpfield}: Interpolates one field to another, requires fromxml, fromfld to be defined;
\item \inltt{interppointdatatofld}: Interpolates given discrete data using a finite difference approximation to a fld file given an xml file;
\item \inltt{interppoints}: Interpolates a set of points to another, requires fromfld and fromxml to be defined, a line or plane of points can be defined;
\item \inltt{isocontour}: Extract an isocontour of ``fieldid'' variable and at value ``fieldvalue''. Optionally ``fieldstr'' can be specified for a string defiition or ``smooth'' for smoothing;
\item \inltt{interppoints}: Interpolates a field to a set of points. Requires fromfld, fromxml to be defined, and a topts, line, plane or box of target points;
\item \inltt{interpptstopts}: Interpolates a set of points to another. Requires a topts, line, plane or box of target points;
\item \inltt{isocontour}: Extract an isocontour of ``fieldid'' variable and at value ``fieldvalue''. Optionally ``fieldstr'' can be specified for a string definition or ``smooth'' for smoothing;
\item \inltt{jacobianenergy}: Shows high frequency energy of Jacobian;
\item \inltt{qualitymetric}: Evaluate a quality metric of the underlying mesh to show mesh quality;
\item \inltt{meanmode}: Extract mean mode (plane zero) of 3DH1D expansions;
......@@ -178,6 +180,7 @@ possibly also Reynolds stresses) into single file;
\item \inltt{scalargrad}: Computes scalar gradient field;
\item \inltt{scaleinputfld}: Rescale input field by a constant factor;
\item \inltt{shear}: Computes time-averaged shear stress metrics: TAWSS, OSI, transWSS, TAAFI, TACFI, WSSG;
\item \inltt{streamfunction}: Calculates stream function of a 2D incompressible flow.
\item \inltt{surfdistance}: Computes height of a prismatic boundary layer mesh and projects onto the surface (for e.g. $y^+$ calculation).
\item \inltt{vorticity}: Computes the vorticity field.
\item \inltt{wss}: Computes wall shear stress field.
......@@ -502,10 +505,14 @@ To interpolate discrete point data to a field, use the interppointdatatofld modu
\begin{lstlisting}[style=BashInputStyle]
FieldConvert -m interppointdatatofld:frompts=file1.pts file1.xml file1.fld
\end{lstlisting}
or alternatively for csv data:
\begin{lstlisting}[style=BashInputStyle]
FieldConvert -m interppointdatatofld:frompts=file1.csv file1.xml file1.fld
\end{lstlisting}
%
This command will interpolate the data from \inltt{file1.pts} to the mesh
This command will interpolate the data from \inltt{file1.pts} (\inltt{file1.csv}) to the mesh
and expansions defined in \inltt{file1.xml} and output the field to \inltt{file1.fld}.
The file \inltt{file.pts} is of the form:
The file \inltt{file.pts} must be of the form:
%
\begin{lstlisting}[style=XMLStyle]
<?xml version="1.0" encoding="utf-8" ?>
......@@ -526,11 +533,20 @@ the second one contains the $a$-values, the third the $b$-values and so on.
In case of $n$-dimensional data, the $n$ coordinates are specified in the first $n$
columns accordingly.
%
An equivalent csv file is:
\begin{lstlisting}[style=BashInputStyle]
# x, a, b, c
1.0000,-1.0000,1.0000,-0.7778
2.0000,-0.9798,0.9798,-0.7980
3.0000,-0.9596,0.9596,-0.8182
4.0000,-0.9394,0.9394,-0.8384
\end{lstlisting}
%
In order to interpolate 1D data to a $n$D field, specify the matching coordinate in
the output field using the \inltt{interpcoord} argument:
%
\begin{lstlisting}[style=BashInputStyle]
FieldConvert -m interppointdatatofld:frompts=1D-file1.pts:interppointdatatofld=1 \
FieldConvert -m interppointdatatofld:frompts=1D-file1.pts:interpcoord=1 \
3D-file1.xml 3D-file1.fld
\end{lstlisting}
%
......@@ -568,6 +584,15 @@ The format of the file \inltt{file2.pts} is of the same form as for the
</POINTS>
</NEKTAR>
\end{lstlisting}
Similar to the \textit{interppointdatatofld} module, the \inltt{.pts} file can
be interchanged with a \inltt{.csv} file:
\begin{lstlisting}[style=BashInputStyle]
# x, y
0.0,0.0
0.5,0.0
1.0,0.0
\end{lstlisting}
There are three optional arguments \inltt{clamptolowervalue},
\inltt{clamptouppervalue} and \inltt{defaultvalue} the first two clamp the
interpolation between these two values and the third defines the default
......@@ -610,6 +635,40 @@ and $cp0$ are not evaluated accordingly
\begin{notebox}
This module runs in parallel for the line, plane and box extraction of points.
\end{notebox}
%
%
%
\subsection{Interpolate a set of points to another: \textit{interpptstopts} module}
You can interpolate one set of points to another using the following command:
\begin{lstlisting}[style=BashInputStyle]
FieldConvert file1.pts -m interpptstopts:topts=file2.pts file2.dat
\end{lstlisting}
This command will interpolate the data in \inltt{file1.pts} to a new set
of points defined in \inltt{file2.pts} and output it to
\inltt{file2.dat}.
Similarly to the \textit{interppoints} module, the target point distribution
can also be specified using the \inltt{line}, \inltt{plane} or \inltt{box}
options. The optional arguments \inltt{clamptolowervalue},
\inltt{clamptouppervalue}, \inltt{defaultvalue} and \inltt{cp} are also
supported with the same meaning as in \textit{interppoints}.
One useful application for this module is with 3DH1D expansions, for which
currently the \textit{interppoints} module does not work. In this case, we can
use for example
\begin{lstlisting}[style=BashInputStyle]
FieldConvert file1.xml file1.fld -m interpptstopts:\
plane=npts1,npts2,x0,y0,z0,x1,y1,z1,x2,y2,z2,x3,y3,z3 \
file2.dat
\end{lstlisting}
With this usage, the \textit{equispacedoutput} module will be automatically
called to interpolate the field to a set of equispaced points in each element.
The result is then interpolated to a plane by the \textit{interpptstopts} module.
\begin{notebox}
This module does not work in parallel.
\end{notebox}
%
%
%
......@@ -759,7 +818,8 @@ point, the first, second, and third columns contains the
$x,y,z$-coordinate and subsequent columns contain the field values, in
this case the $p$-value. So in the general case of $n$-dimensional
data, the $n$ coordinates are specified in the first $n$ columns
accordingly followed by the field data.
accordingly followed by the field data. Alternatively, the \inltt{file.pts}
can be interchanged with a csv file.
The default argument is to use the equispaced (but potentially
collapsed) coordinates which can be obtained from the command.
......@@ -850,6 +910,26 @@ The argument \inltt{N} and \inltt{fromfld} are compulsory arguments that respect
The input \inltt{.fld} files are the outputs of the \textit{wss} module. If they do not contain the surface normals (an optional output of the \textit{wss} module), then the \textit{shear} module will not compute the last metric, |WSSG|.
%
%
%
\subsection{Stream function of a 2D incompressible flow: \textit{streamfunction} module}
The streamfunction module calculates the stream function of a 2D incompressible flow, by
solving the Poisson equation
\[
\nabla^2 \psi = -\omega
\]
where $\omega$ is the vorticity. Note that this module applies the same boundary conditions
specified for the y-direction velocity component \inltt{v} to the stream function,
which may not be the most appropriate choice.
To use this module, the user can run
\begin{lstlisting}[style=BashInputStyle]
FieldConvert -m streamfunction test.xml test.fld test-streamfunc.fld
\end{lstlisting}
where the file \inltt{test-streamfunc.fld} can be processed in a similar
way as described in section \ref{s:utilities:fieldconvert:sub:convert}.
%
%
......
......@@ -938,10 +938,20 @@ range of geometrical features.
For a full description of the GEO format the user should refer to Gmsh's
documentation. The following commands are currently supported:
\begin{itemize}
\item \inltt{//} (comments)
\item \inltt{//} (i.e. comments)
\item \inltt{Point}
\item \inltt{Line}
\item \inltt{Spline}
\item \inltt{Spline} (through points)
\item \inltt{BSpline} (i.e. a B\'{e}zier curve)
\item \inltt{Ellipse} (arc): as defined in Gmsh's OpenCASCADE kernel, the first
point defines the start of the arc, the second point the centre and the fourth
point the end. The third point is not used. The start point along with the centre
point form the major axis and the minor axis is then computed so that the end
point falls onto the arc. The major axis must always be greater than or equal to the
minor axis.
\item \inltt{Circle} (arc): the circle is a special case of the ellipse where the
third point is skipped. The distances between the start and end points and the
centre point must be equal or an error will be thrown.
\item \inltt{Line Loop}
\item \inltt{Plane Surface}
\end{itemize}
......@@ -950,7 +960,7 @@ At the present time, NekMesh does not support the full scripting capabilities of
GEO format. The used GEO files should be a straightforward succession of entity
creations (see list above). This should however allow for the creation of quite
a wide range of 2D geometries by transformation of arbitrary curves into generic
splines.
splines and arcs.
%%% Local Variables:
......
......@@ -41,6 +41,7 @@ SET(FieldUtilsHeaders
ProcessModules/ProcessPointDataToFld.h
ProcessModules/ProcessPrintFldNorms.h
ProcessModules/ProcessScaleInFld.h
ProcessModules/ProcessStreamFunction.h
ProcessModules/ProcessSurfDistance.h
ProcessModules/ProcessVorticity.h
ProcessModules/ProcessScalGrad.h
......@@ -95,6 +96,7 @@ SET(FieldUtilsSources
ProcessModules/ProcessScaleInFld.cpp
ProcessModules/ProcessVorticity.cpp
ProcessModules/ProcessScalGrad.cpp
ProcessModules/ProcessStreamFunction.cpp
ProcessModules/ProcessSurfDistance.cpp
ProcessModules/ProcessMultiShear.cpp
ProcessModules/ProcessWSS.cpp
......
......@@ -39,6 +39,7 @@ using namespace std;
#include <LibUtilities/BasicUtils/PtsField.h>
#include <LibUtilities/BasicUtils/PtsIO.h>
#include <LibUtilities/BasicUtils/CsvIO.h>
#include <tinyxml.h>
......@@ -53,7 +54,11 @@ ModuleKey InputPts::m_className[5] = {
GetModuleFactory().RegisterCreatorFunction(
ModuleKey(eInputModule, "pts"), InputPts::create, "Reads Pts file."),
GetModuleFactory().RegisterCreatorFunction(
ModuleKey(eInputModule, "pts.gz"), InputPts::create, "Reads Pts file.")
ModuleKey(eInputModule, "pts.gz"), InputPts::create, "Reads Pts file."),
GetModuleFactory().RegisterCreatorFunction(
ModuleKey(eInputModule, "csv"), InputPts::create, "Reads csv file."),
GetModuleFactory().RegisterCreatorFunction(
ModuleKey(eInputModule, "csv.gz"), InputPts::create, "Reads csv file."),
};
/**
......@@ -63,6 +68,7 @@ ModuleKey InputPts::m_className[5] = {
// Construct the points-input module. Registers the file extensions this
// module accepts: native .pts files and (added here) plain .csv files.
// NOTE(review): m_allowedFiles is presumably consulted by the module
// framework when matching input files to modules — confirm against
// InputModule's base-class handling.
InputPts::InputPts(FieldSharedPtr f) : InputModule(f)
{
m_allowedFiles.insert("pts");
m_allowedFiles.insert("csv");
}
/**
......@@ -79,10 +85,23 @@ void InputPts::Process(po::variables_map &vm)
{
string inFile = m_config["infile"].as<string>();
LibUtilities::PtsIOSharedPtr ptsIO =
MemoryManager<LibUtilities::PtsIO>::AllocateSharedPtr(m_f->m_comm);
ptsIO->Import(inFile, m_f->m_fieldPts);
// Determine appropriate field input
if (m_f->m_inputfiles.count("pts") != 0)
{
LibUtilities::CsvIOSharedPtr csvIO =
MemoryManager<LibUtilities::CsvIO>::AllocateSharedPtr(m_f->m_comm);
csvIO->Import(inFile, m_f->m_fieldPts);
}
else if (m_f->m_inputfiles.count("csv") != 0)
{
LibUtilities::PtsIOSharedPtr ptsIO =
MemoryManager<LibUtilities::PtsIO>::AllocateSharedPtr(m_f->m_comm);
ptsIO->Import(inFile, m_f->m_fieldPts);
}
else
{
ASSERTL0(false, "unknown input file type");
}
// save field names
for (int j = 0; j < m_f->m_fieldPts->GetNFields(); ++j)
......
......@@ -146,7 +146,7 @@ public:
/// Returns the output field
FIELD_UTILS_EXPORT LibUtilities::PtsFieldSharedPtr GetOutField() const;
/// Print statics of the interpolation weights
/// Returns if the weights have already been computed
FIELD_UTILS_EXPORT void PrintStatistics();
/// Sets a callback function which gets called every time the interpolation
......
......@@ -6,7 +6,7 @@
//
// The MIT License
//
// Copyright (c) 2016 Kilian Lackhove
// Copyright (c) 2017 Kilian Lackhove
// Copyright (c) 2006 Division of Applied Mathematics, Brown University (USA),
// Department of Aeronautics, Imperial College London (UK), and Scientific
// Computing and Imaging Institute, University of Utah (USA).
......@@ -40,14 +40,21 @@ using namespace std;
#include "OutputPts.h"
#include <LibUtilities/BasicUtils/FileSystem.h>
#include <LibUtilities/BasicUtils/PtsIO.h>
#include <LibUtilities/BasicUtils/CsvIO.h>
namespace Nektar
{
namespace FieldUtils
{
ModuleKey OutputPts::m_className = GetModuleFactory().RegisterCreatorFunction(
ModuleKey(eOutputModule, "pts"), OutputPts::create, "Writes a pts file.");
ModuleKey OutputPts::m_className[5] = {
GetModuleFactory().RegisterCreatorFunction(
ModuleKey(eOutputModule, "pts"), OutputPts::create, "Writes a pts file."),
GetModuleFactory().RegisterCreatorFunction(
ModuleKey(eOutputModule, "csv"), OutputPts::create, "Writes a csv file."),
};
OutputPts::OutputPts(FieldSharedPtr f) : OutputFileBase(f)
{
......@@ -62,8 +69,16 @@ void OutputPts::OutputFromPts(po::variables_map &vm)
// Extract the output filename and extension
string filename = m_config["outfile"].as<string>();
LibUtilities::PtsIO ptsIO(m_f->m_comm);
ptsIO.Write(filename, m_f->m_fieldPts);
if (boost::filesystem::path(filename).extension() == ".csv")
{
LibUtilities::CsvIO csvIO(m_f->m_comm);
csvIO.Write(filename, m_f->m_fieldPts);
}
else
{
LibUtilities::PtsIO ptsIO(m_f->m_comm);
ptsIO.Write(filename, m_f->m_fieldPts);
}
}
void OutputPts::OutputFromExp(po::variables_map &vm)
......@@ -126,3 +141,4 @@ fs::path OutputPts::GetFullOutName(std::string &filename,
}
}
......@@ -53,7 +53,7 @@ public:
{
return MemoryManager<OutputPts>::AllocateSharedPtr(f);
}
static ModuleKey m_className;
static ModuleKey m_className[];
OutputPts(FieldSharedPtr f);
virtual ~OutputPts();
......
......@@ -45,6 +45,8 @@ using namespace std;
#include <LibUtilities/BasicUtils/ParseUtils.hpp>
#include <LibUtilities/BasicUtils/PtsField.h>
#include <LibUtilities/BasicUtils/SharedArray.hpp>
#include <LibUtilities/BasicUtils/PtsIO.h>
#include <LibUtilities/BasicUtils/CsvIO.h>
#include <boost/math/special_functions/fpclassify.hpp>
namespace bg = boost::geometry;
......@@ -84,11 +86,25 @@ void ProcessInterpPointDataToFld::Process(po::variables_map &vm)
ASSERTL0( m_config["frompts"].as<string>().compare("NotSet") != 0,
"ProcessInterpPointDataToFld requires frompts parameter");
string inFile = m_config["frompts"].as<string>().c_str();
LibUtilities::CommSharedPtr c =
LibUtilities::GetCommFactory().CreateInstance("Serial", 0, 0);
LibUtilities::PtsIOSharedPtr ptsIO =
MemoryManager<LibUtilities::PtsIO>::AllocateSharedPtr(c);
ptsIO->Import(inFile, fieldPts);
if (boost::filesystem::path(inFile).extension() == ".pts")
{
LibUtilities::PtsIOSharedPtr ptsIO =
MemoryManager<LibUtilities::PtsIO>::AllocateSharedPtr(m_f->m_comm);
ptsIO->Import(inFile, fieldPts);
}
else if (boost::filesystem::path(inFile).extension() == ".csv")
{
LibUtilities::CsvIOSharedPtr csvIO =
MemoryManager<LibUtilities::CsvIO>::AllocateSharedPtr(m_f->m_comm);
csvIO->Import(inFile, fieldPts);
}
else
{
ASSERTL0(false, "unknown frompts file type");
}
int nFields = fieldPts->GetNFields();
ASSERTL0(nFields > 0, "No field values provided in input");
......
......@@ -45,6 +45,8 @@ using namespace std;
#include <LibUtilities/BasicUtils/ParseUtils.hpp>
#include <LibUtilities/BasicUtils/Progressbar.hpp>
#include <LibUtilities/BasicUtils/SharedArray.hpp>
#include <LibUtilities/BasicUtils/PtsIO.h>
#include <LibUtilities/BasicUtils/CsvIO.h>
#include <boost/lexical_cast.hpp>
#include <boost/math/special_functions/fpclassify.hpp>
......@@ -116,8 +118,15 @@ void ProcessInterpPoints::Process(po::variables_map &vm)
MemoryManager<SpatialDomains::DomainRange>::AllocateSharedPtr();
int coordim = m_f->m_fieldPts->GetDim();
int npts = m_f->m_fieldPts->GetNpoints();
std::vector<std::string> fieldNames = m_f->m_fieldPts->GetFieldNames();
for (auto it = fieldNames.begin(); it != fieldNames.end(); ++it)
{
m_f->m_fieldPts->RemoveField(*it);
}
Array<OneD, Array<OneD, NekDouble> > pts;
m_f->m_fieldPts->GetPts(pts);
rng->m_checkShape = false;
rng->m_zmin = -1;
rng->m_zmax = 1;
......@@ -216,14 +225,30 @@ void ProcessInterpPoints::CreateFieldPts(po::variables_map &vm)
int rank = m_f->m_comm->GetRank();
int nprocs = m_f->m_comm->GetSize();
// Check for command line point specification
if (m_config["topts"].as<string>().compare("NotSet") != 0)
{
string inFile = m_config["topts"].as<string>();
LibUtilities::PtsIOSharedPtr ptsIO =
MemoryManager<LibUtilities::PtsIO>::AllocateSharedPtr(m_f->m_comm);
if (boost::filesystem::path(inFile).extension() == ".pts")
{
LibUtilities::PtsIOSharedPtr ptsIO =
MemoryManager<LibUtilities::PtsIO>::AllocateSharedPtr(m_f->m_comm);
ptsIO->Import(inFile, m_f->m_fieldPts);
}
else if (boost::filesystem::path(inFile).extension() == ".csv")
{
LibUtilities::CsvIOSharedPtr csvIO =
MemoryManager<LibUtilities::CsvIO>::AllocateSharedPtr(m_f->m_comm);
csvIO->Import(inFile, m_f->m_fieldPts);
}
else
{
ASSERTL0(false, "unknown topts file type");
}
ptsIO->Import(inFile, m_f->m_fieldPts);
}
else if (m_config["line"].as<string>().compare("NotSet") != 0)
{
......@@ -432,7 +457,7 @@ void ProcessInterpPoints::InterpolateFieldToPts(
NekDouble clamp_up,
NekDouble def_value)
{
ASSERTL0(pts->GetNFields() >= field0.size(), "ptField has too few fields");
ASSERTL0(pts->GetNFields() == field0.size(), "ptField has too few fields");
int nfields = field0.size();
......
......@@ -43,6 +43,8 @@ using namespace std;
#include <LibUtilities/BasicUtils/ParseUtils.hpp>
#include <LibUtilities/BasicUtils/Progressbar.hpp>
#include <LibUtilities/BasicUtils/SharedArray.hpp>
#include <LibUtilities/BasicUtils/PtsIO.h>
#include <LibUtilities/BasicUtils/CsvIO.h>
#include <boost/lexical_cast.hpp>
#include <boost/math/special_functions/fpclassify.hpp>
......@@ -133,10 +135,24 @@ void ProcessInterpPtsToPts::CreateFieldPts(po::variables_map &vm)
{
string inFile = m_config["topts"].as<string>();
LibUtilities::PtsIOSharedPtr ptsIO =
MemoryManager<LibUtilities::PtsIO>::AllocateSharedPtr(m_f->m_comm);
if (boost::filesystem::path(inFile).extension() == ".pts")
{
LibUtilities::PtsIOSharedPtr ptsIO =
MemoryManager<LibUtilities::PtsIO>::AllocateSharedPtr(m_f->m_comm);
ptsIO->Import(inFile, m_f->m_fieldPts);
}
else if (boost::filesystem::path(inFile).extension() == ".csv")
{