Commit 074e68f6 authored by Spencer Sherwin

Merge branch 'master' into fix/iterative-rhs-magnitude

Conflicts:
	CHANGELOG.md
parents 6df82840 40452dc8
@@ -35,6 +35,7 @@ v4.4.0
support to the Helmholtz smoother used e.g. in FieldConvert's C0Projection
module (!714)
- Fix bug in the calculation of the RHS magnitude in CG solver (!721)
- Fix bug in CMake Homebrew and MacPorts detection for OS X (!729)
**ADRSolver:**
- Add a projection equation system for C^0 projections (!675)
@@ -56,6 +57,7 @@ v4.4.0
- Allow equi-spaced output for 1D and 2DH1D fields (!613)
- Update quality metric to include scaled Jacobian output (!695)
- Allow multiple XML files to be specified in InterpField module (!705)
- Fix issues with isocontour module (!719)
**NekMesh:**
- Modify curve module to allow for spline input (!628)
@@ -76,10 +78,12 @@ v4.4.0
- Add flag to `insertsurface` process for non-conforming geometries (!700)
- Bug fix to get two meshgen regression tests working (!700)
- Remove libANN in deference to boost::geometry (!703)
- 2D to 3D mesh extrusion module (!715)
- Add a mesh extract option to the linearise module to visualise the result
(!712)
- Refactor library to use NekMesh modules for CAD generation (!704)
- Add `varopti` process module to optimise meshes (!711)
- Add a mesh extract option to the linearise module to visualise the result
(!712)
- 2D to 3D mesh extrusion module (!715)
- Add new two-dimensional mesher from NACA code or step file (!720)
**FieldConvert:**
- Move all modules to a new library, FieldUtils, to support post-processing
@@ -70,13 +70,20 @@ IF (APPLE)
SET(CMAKE_LIBRARY_PATH ${MACPORTS_PREFIX}/lib ${CMAKE_LIBRARY_PATH})
SET(CMAKE_INCLUDE_PATH ${MACPORTS_PREFIX}/include ${CMAKE_INCLUDE_PATH})
MESSAGE(STATUS "Detected MacPorts installation: ${MACPORTS_PREFIX}")
ELSE()
UNSET(MACPORTS_PREFIX)
ENDIF()
IF (${DETECT_HOMEBREW} EQUAL 0)
SET(CMAKE_LIBRARY_PATH ${HOMEBREW_PREFIX}/lib ${CMAKE_LIBRARY_PATH})
SET(CMAKE_INCLUDE_PATH ${HOMEBREW_PREFIX}/include ${CMAKE_INCLUDE_PATH})
MESSAGE(STATUS "Detected Homebrew installation: ${HOMEBREW_PREFIX}")
ELSE()
UNSET(HOMEBREW_PREFIX)
ENDIF()
UNSET(DETECT_HOMEBREW)
UNSET(DETECT_MACPORTS)
ENDIF()
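For context, the `DETECT_HOMEBREW` and `DETECT_MACPORTS` result codes tested above are populated earlier in this file with `execute_process`. A minimal sketch of the Homebrew half, assuming detection via `brew --prefix` (the MacPorts branch is analogous; the exact commands in the real file may differ):

    EXECUTE_PROCESS(COMMAND brew --prefix
        RESULT_VARIABLE DETECT_HOMEBREW        # 0 when brew exists and succeeds
        OUTPUT_VARIABLE HOMEBREW_PREFIX        # e.g. /usr/local
        OUTPUT_STRIP_TRAILING_WHITESPACE)

A zero result code then selects the `IF (${DETECT_HOMEBREW} EQUAL 0)` branch above and prepends the prefix to the CMake search paths.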
# Attempt to retrieve git branch and SHA1 hash of current changeset.
@@ -24,16 +24,16 @@ if(NOT DEFINED OCE_DIR)
# The check for OSX needs to come first because UNIX evaluates to true on OSX
if(${CMAKE_SYSTEM_NAME} MATCHES "Darwin")
if(DEFINED MACPORTS_PREFIX)
find_package(OCE 0.17 QUIET HINTS ${MACPORTS_PREFIX}/Library/Frameworks)
find_package(OCE 0.15 QUIET HINTS ${MACPORTS_PREFIX}/Library/Frameworks)
elseif(DEFINED HOMEBREW_PREFIX)
find_package(OCE 0.17 QUIET HINTS ${HOMEBREW_PREFIX}/Cellar/oce/*)
find_package(OCE 0.15 QUIET HINTS ${HOMEBREW_PREFIX}/Cellar/oce/*)
endif()
elseif(UNIX)
set(OCE_DIR "/usr/local/share/cmake/")
endif()
endif()
find_package(OCE 0.17 QUIET)
find_package(OCE 0.15 QUIET)
if(OCE_FOUND)
message(STATUS "-- OpenCASCADE Community Edition has been found.")
set(OCC_INCLUDE_DIR ${OCE_INCLUDE_DIRS})
@@ -113,8 +113,8 @@ if(OCC_FOUND)
TKHLR
TKFeat
)
if(OCC_VERSION_STRING VERSION_LESS 6.8)
if(OCC_VERSION_STRING VERSION_LESS 6.7)
MESSAGE(SEND_ERROR "OCC version too low")
endif(OCC_VERSION_STRING VERSION_LESS 6.8)
endif(OCC_VERSION_STRING VERSION_LESS 6.7)
message(STATUS "-- Found OCE/OpenCASCADE with OCC version: ${OCC_VERSION_STRING}")
endif(OCC_FOUND)
@@ -83,7 +83,7 @@
pages = {293-301},
year = {1996},
}
@article{CoRaNa98,
author = {M. Courtemanche and R. J. Ramirez and S. Nattel},
title = {Ionic mechanisms underlying human atrial action potential properties: insights from a mathematical model},
@@ -103,7 +103,7 @@
pages = {1501-1526},
year = {1991},
}
@article{TuPa06,
author = {K. H. W. J. ten Tusscher and A. V. Panfilov},
title = {Alternans and spiral breakup in a human ventricular tissue model},
@@ -123,7 +123,7 @@
pages = {4331-51},
year = {2011},
}
@article{ShKa96,
title={Tetrahedral {\it hp} Finite Elements: Algorithms and Flow Simulations},
author={Sherwin, SJ and Karniadakis, G Em},
@@ -378,9 +378,9 @@ year={2011}
}
@article{GuSh03,
Author="J.L. Guermond and J. Shen",
title="Velocity-correction projection methods for incompressible flows",
journal="SIAM J. Numer.\ Anal.",
Author="J.L. Guermond and J. Shen",
title="Velocity-correction projection methods for incompressible flows",
journal="SIAM J. Numer.\ Anal.",
volume=41,
pages = "112--134",
year=2003
@@ -460,3 +460,18 @@ year={2011}
pages = {1079-1097},
}
@inproceedings{TuPeMo16,
abstract = {The generation of sufficiently high quality unstructured high-order meshes remains a significant obstacle in the adoption of high-order methods. However, there is little consensus on which approach is the most robust, fastest and produces the 'best' meshes. We aim to provide a route to investigate this question, by examining popular high-order mesh generation methods in the context of an efficient variational framework for the generation of curvilinear meshes. By considering previous works in a variational form, we are able to compare their characteristics and study their robustness. Alongside a description of the theory and practical implementation details, including an efficient multi-threading parallelisation strategy, we demonstrate the effectiveness of the framework, showing how it can be used for both mesh quality optimisation and untangling of invalid meshes.},
author = {Turner, M and Peir{\'{o}}, J and Moxey, D},
booktitle = {25th International Meshing Roundtable},
doi = {10.1016/j.proeng.2016.11.069},
issn = {18777058},
keywords = {energy functional,high-order mesh generation,numerical optimization,variational mesh generation},
pages = {340--352},
title = {{A Variational Framework for High-Order Mesh Generation}},
url = {http://linkinghub.elsevier.com/retrieve/pii/S1877705816333781},
volume = {163},
year = {2016}
}
@@ -69,7 +69,7 @@ format by issuing the command
\begin{lstlisting}[style=BashInputStyle]
FieldConvert in.fld out.fld:fld:format=Hdf5
\end{lstlisting}
%
%
\section{Range option \textit{-r}}
The FieldConvert range option \inltt{-r} allows the user to specify
a sub-range of the mesh (computational domain) by using an
@@ -121,7 +121,7 @@ possibly also Reynolds stresses) into single file;
\item \inltt{extract}: Extract a boundary field;
\item \inltt{homplane}: Extract a plane from 3DH1D expansions;
\item \inltt{homstretch}: Stretch a 3DH1D expansion by an integer factor;
\item \inltt{innerproduct}: Take the inner product between one or a series of fields with another field (or series of fields);
\item \inltt{interpfield}: Interpolates one field to another, requires fromxml, fromfld to be defined;
\item \inltt{interppointdatatofld}: Interpolates given discrete data using a finite difference approximation to a fld file given an xml file;
\item \inltt{interppoints}: Interpolates a set of points to another, requires fromfld and fromxml to be defined, a line or plane of points can be defined;
@@ -130,7 +130,7 @@ possibly also Reynolds stresses) into single file;
\item \inltt{qualitymetric}: Evaluate a quality metric of the underlying mesh to show mesh quality;
\item \inltt{meanmode}: Extract mean mode (plane zero) of 3DH1D expansions;
\item \inltt{pointdatatofld}: Given discrete data at quadrature points
project them onto an expansion basis and output fld file;
\item \inltt{printfldnorms}: Print L2 and LInf norms to stdout;
\item \inltt{scalargrad}: Computes scalar gradient field;
\item \inltt{scaleinputfld}: Rescale input field by a constant factor;
@@ -217,7 +217,7 @@ new field. To use this we simply run
In this case, we have produced a Tecplot file which contains the mesh and a
variable that contains the composite ID. To assist in boundary identification,
the input file \inlsh{mesh.xml} should be a surface XML file that can be
obtained through the \mc \inltt{extract} module (see section
obtained through the \nm \inltt{extract} module (see section
\ref{s:utilities:nekmesh:extract}).
\subsection{Sum two .fld files: \textit{addFld} module}
@@ -234,8 +234,8 @@ which multiply the values of a given .fld file by a constant \inltt{value}.
is the associated session file, \inltt{file2.fld} is the .fld file which
is summed to \inltt{file1.fld} and finally \inltt{file3.fld} is the output
which contains the sum of the two .fld files.
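For reference, a typical invocation follows the option pattern of the other modules; this is a sketch only, so the exact option names should be verified against the full guide:
\begin{lstlisting}[style=BashInputStyle]
FieldConvert -m addfld:fromfld=file1.fld:scale=value file1.xml file2.fld \
     file3.fld
\end{lstlisting}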
\inltt{file3.fld} can be processed in a similar way as described
in section \ref{s:utilities:fieldconvert:sub:convert} to visualise
the result either in Tecplot, Paraview or VisIt.
%
%
@@ -249,8 +249,8 @@ use the \inltt{combineAvg} module of FieldConvert
file3.fld
\end{lstlisting}
%
\inltt{file3.fld} can be processed in a similar way as described
in section \ref{s:utilities:fieldconvert:sub:convert} to visualise
the result either in Tecplot, Paraview or VisIt.
%
%
@@ -325,8 +325,8 @@ of interest. Finally to process the surface file one can use
FieldConvert test-b0.xml test-b0.fld test-b0.dat
\end{lstlisting}
%
This will generate a Tecplot output if a .dat file
is specified as the last argument. A .vtu extension will produce
a Paraview or VisIt output.
%
%
@@ -348,25 +348,25 @@ to visualise the result either in Tecplot, Paraview or VisIt.
To obtain a 2D expansion containing one of the planes of a
3DH1D field file, use the command:
\begin{lstlisting}[style=BashInputStyle]
FieldConvert -m homplane:planeid=value file.xml file.fld file-plane.fld
\end{lstlisting}
If the option \inltt{wavespace} is used, the Fourier coefficients
corresponding to \inltt{planeid} are obtained. The command in this case is:
\begin{lstlisting}[style=BashInputStyle]
FieldConvert -m homplane:wavespace:planeid=value file.xml \
file.fld file-plane.fld
\end{lstlisting}
The output file \inltt{file-plane.fld} can be processed in a similar
way as described in section \ref{s:utilities:fieldconvert:sub:convert}
to visualise it either in Tecplot or in Paraview.
\subsection{Stretch a 3DH1D expansion: \textit{homstretch} module}
To stretch a 3DH1D expansion in the z-direction, use the command:
\begin{lstlisting}[style=BashInputStyle]
FieldConvert -m homstretch:factor=value file.xml file.fld file-stretch.fld
\end{lstlisting}
The number of modes in the resulting field can be chosen using the command-line
@@ -374,7 +374,7 @@ parameter \inltt{output-points-hom-z}. Note that the output for
this module should always be a \inltt{.fld} file and this should not
be used in combination with other modules using a single command.
The output file \inltt{file-stretch.fld} can be processed in a similar
way as described in section \ref{s:utilities:fieldconvert:sub:convert}
to visualise it either in Tecplot or in Paraview.
@@ -392,7 +392,7 @@ determine the inner product of these fields. The input option
\inltt{fromfld} must therefore be specified in this module.
An optional argument for this module is \inltt{fields}, which allows you to
specify the fields that you wish to use for the inner product, i.e.
\begin{lstlisting}[style=BashInputStyle]
FieldConvert -m innerproduct:fromfld=file1.fld:fields=''0,1,2'' file2.xml \
file2.fld out.stdout
@@ -412,7 +412,7 @@ will take the inner product between files named
field1\_0.fld, field1\_1.fld, field1\_2.fld and field1\_3.fld with
respect to field2.fld.
Analogously, including the option \inltt{allfromflds}, i.e.
\begin{lstlisting}[style=BashInputStyle]
FieldConvert -m innerproduct:fromfld=file1.fld:multifldids=''0-3'':\
allfromflds file2.xml file2.fld out.stdout
@@ -424,7 +424,7 @@ the unique inner products are evaluated so if four from fields are
given, only the related triangular number $4\times5/2=10$ of inner
products is evaluated.
This option can be run in parallel.
%
%
@@ -544,7 +544,7 @@ $(x0,y0)$ to $(x1,y1)$ which can also be used in 3D by specifying $(x0,y0,z0)$
to $(x1,y1,z1)$.
An extraction of a plane of points can also be specified by
\begin{lstlisting}[style=BashInputStyle]
FieldConvert -m interppoints:fromxml=file1.xml:fromfld=file1.fld:\
plane=npts1,npts2,x0,y0,z0,x1,y1,z1,x2,y2,z2,x3,y3,z3
\end{lstlisting}
@@ -553,13 +553,13 @@ direction and $(x0,y0,z0)$, $(x1,y1,z1)$, $(x2,y2,z2)$ and $(x3,y3,z3)$
define the plane of points specified in a clockwise or anticlockwise direction.
In addition an extraction of a box of points can also be specified by
\begin{lstlisting}[style=BashInputStyle]
FieldConvert -m interppoints:fromxml=file1.xml:fromfld=file1.fld:\
box=npts1,npts2,npts3,xmin,xmax,ymin,ymax,zmin,zmax
\end{lstlisting}
where \inltt{npts1,npts2,npts3} is the number of equispaced points in each
direction and $(xmin,ymin,zmin)$ and $(xmax,ymax,zmax)$
define the limits of the box of points.
For the plane and box interpolation there is an additional optional
argument \inltt{cp=p0,q} which adds to the interpolated fields the value of
@@ -568,7 +568,7 @@ pressure and $q$ is the free stream dynamic pressure. If the input
does not contain a field ``p'' or a velocity field ``u,v,w'', then $cp$
and $cp0$ are not evaluated.
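As an illustration, the \inltt{cp} option is simply appended to the plane syntax shown earlier (all values here are placeholders):
\begin{lstlisting}[style=BashInputStyle]
FieldConvert -m interppoints:fromxml=file1.xml:fromfld=file1.fld:\
    plane=npts1,npts2,x0,y0,z0,x1,y1,z1,x2,y2,z2,x3,y3,z3:cp=p0,q
\end{lstlisting}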
%
\begin{notebox}
This module runs in parallel for the plane and box extraction of points. In this case a series of .dat files are generated that can be concatenated. Other options do not run in parallel.
\end{notebox}
%
@@ -611,7 +611,7 @@ have these as separate options.
In addition to the \inltt{smooth} or \inltt{globalcondense} options
you can specify \inltt{removesmallcontour}=100 which will remove
separate isocontours of less than 100 triangles.
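For example, these options are appended to an isocontour extraction; the options selecting the contour itself are abbreviated here as \inltt{...}:
\begin{lstlisting}[style=BashInputStyle]
FieldConvert -m isocontour:...:globalcondense:removesmallcontour=100 \
    test.xml test.fld test-iso.dat
\end{lstlisting}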
\begin{notebox}
Currently this option is only set up for triangles, quadrilaterals,
@@ -633,7 +633,7 @@ keep.
The output file \inltt{jacenergy.fld} can be processed in a similar
way as described in section \ref{s:utilities:fieldconvert:sub:convert}
to visualise the result either in Tecplot, Paraview or VisIt.
\subsection{Calculate mesh quality: \textit{qualitymetric} module}
@@ -675,11 +675,11 @@ Two quality metrics are implemented that produce scalar fields $Q$:
To obtain a 2D expansion containing the mean mode (plane zero in Fourier space) of a
3DH1D field file, use the command:
\begin{lstlisting}[style=BashInputStyle]
FieldConvert -m meanmode file.xml file.fld file-mean.fld
\end{lstlisting}
The output file \inltt{file-mean.fld} can be processed in a similar
way as described in section \ref{s:utilities:fieldconvert:sub:convert}
to visualise the result either in Tecplot, Paraview or VisIt.
%
@@ -697,7 +697,7 @@ FieldConvert --noequispaced -m pointdatatofld file.pts file.xml file.fld
This command will read in the points provided in the \inltt{file.pts}
and assume these are given at the same quadrature distribution as the
mesh and expansions defined in \inltt{file.xml} and output the field
to \inltt{file.fld}. If the points do not match, an error will be reported.
The file \inltt{file.pts}, which is assumed to be given by an interpolation from another source, is of the form:
@@ -720,7 +720,7 @@ point, the first, second, and third columns contain the
$x,y,z$-coordinate and subsequent columns contain the field values, in
this case the $p$-value. So in the general case of $n$-dimensional
data, the $n$ coordinates are specified in the first $n$ columns
accordingly followed by the field data.
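A minimal three-dimensional sketch of such a file with a single field $p$ (the layout is illustrative and should be checked against the full guide):
\begin{lstlisting}[style=BashInputStyle]
<?xml version="1.0" encoding="utf-8" ?>
<NEKTAR>
  <POINTS DIM="3" FIELDS="p">
    0.0 0.0 0.0 1.05
    0.1 0.0 0.0 1.08
  </POINTS>
</NEKTAR>
\end{lstlisting}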
The default argument is to use the equispaced (but potentially
collapsed) coordinates which can be obtained from the command.
@@ -755,7 +755,7 @@ this option.
\subsection{Print L2 and LInf norms: \textit{printfldnorms} module}
\begin{lstlisting}[style=BashInputStyle]
FieldConvert -m printfldnorms test.xml test.fld out.stdout
\end{lstlisting}
@@ -920,7 +920,7 @@ replacing \inltt{<nprocs>} with the number of processors. For the
\inltt{.dat} and \inltt{.plt} outputs the current version will produce
a single output file. However, it is also sometimes useful to produce
multiple output files, one for each partition, and this
can be done by using the \inltt{writemultiplefiles} option, i.e.
\begin{lstlisting}[style=BashInputStyle]
mpirun -np <nprocs> FieldConvert test.xml test.fld \
test.dat:dat:writemultiplefiles
@@ -962,7 +962,7 @@ FieldConvert --nprocs 10 --procid 2 \
This call will only therefore consider the interpolation process across one
partition (namely, partition 2). To create the full interpolated field requires
a loop over each of the partitions, which, in a bash shell can be run as
\begin{lstlisting}[style=BashInputStyle]
for n in `seq 0 9`; do
FieldConvert --nprocs 10 --procid $n \
-m interpfield:fromxml=file1.xml:fromfld=file1.fld \
@@ -975,7 +975,7 @@ of the different parallel partitions in files with names \inltt{P0000000.fld},
parallel field file. However, the \inltt{Info.xml} file, which contains the
information about which elements lie in each partition, is missing. This can be
generated by using the command
\begin{lstlisting}[style=BashInputStyle]
FieldConvert --nprocs 10 file2.xml file2.fld/Info.xml:info
\end{lstlisting}
Note the final \inltt{:info} extension on the last argument is necessary to tell
@@ -988,7 +988,7 @@ input/output XML files.
Another approach to serially processing a large file is to initially process the
file into multiple partitions. This can be done with the \inltt{--part-only}
option. So the command
\begin{lstlisting}[style=BashInputStyle]
FieldConvert --part-only 10 file.xml file.fld
\end{lstlisting}
will partition the mesh into 10 partitions and write each partition into a
@@ -998,7 +998,7 @@ partitioned XML files \inltt{P0000000.xml}, \inltt{P0000001.xml}, \dots,
There is also a \inltt{--part-only-overlapping} option, which can be run in the
same fashion.
\begin{lstlisting}[style=BashInputStyle]
FieldConvert --part-only-overlapping 10 file.xml file.fld
\end{lstlisting}
In this mode, the mesh is partitioned into 10 partitions in a similar manner,
@@ -190,7 +190,10 @@ state of the solution fields at a given timestep. This can subsequently be used
for restarting the simulation or examining time-dependent behaviour. This
produces a sequence of files, by default named \inltt{session\_*.chk}, where
\inltt{*} is replaced by a counter. The initial condition is written to
\inltt{session\_0.chk}. Existing files are not overwritten but renamed: the
first backup becomes \inltt{session\_0.bak0.chk}, and if that file already
exists as well, the next free \inltt{session\_0.bak*.chk} name is used, and
so on.
\begin{notebox}
This functionality is equivalent to setting the \inltt{IO\_CheckSteps}
#SET(GraphSources
# GraphExample.cpp)
SET(MemoryManagerSources
MemoryManager.cpp)
SET(PartitionAnalyseSources
PartitionAnalyse.cpp)
SET(FoundationSources
FoundationDemo.cpp)
SET(NodalDemoSources NodalDemo.cpp)
SET(TimeIntegrationDemoSources
@@ -151,10 +151,14 @@ int main(int argc, char *argv[])
{
util = new NodalUtilPrism(order, r, s, t);
}
else if(shape == eQuadrilateral)
{
util = new NodalUtilQuad(order, r, s);
}
ASSERTL1(util, "Unknown shape type!");
const int nPoints = r.num_elements();
const int dim = shape == eTriangle ? 2 : 3;
const int dim = (shape == eTriangle || shape == eQuadrilateral) ? 2 : 3;
if (vm.count("integral"))
{
@@ -175,6 +179,9 @@ int main(int argc, char *argv[])
exact = -0.5 * (sin(1.0) + cos(1.0) + M_E * M_E *
(sin(1.0) - cos(1.0))) / M_E;
break;
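// The quadrilateral reference value below is consistent with integrating
// exp(x)*cos(y) over the biunit square [-1,1]^2, i.e. (e - 1/e) * 2*sin(1);
// the integrand is inferred from the reference values, not stated here.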
case eQuadrilateral:
exact = 2.0 * (M_E - 1.0 / M_E) * sin(1.0);
break;
case eTetrahedron:
exact = 1.0 / M_E - 1.0 / M_E / M_E / M_E;
break;
@@ -107,8 +107,9 @@ void InputDat::Process(po::variables_map &vm)
while (!datFile.eof())
{
getline(datFile, line);
if (line.find("VARIABLES") != string::npos)
string linetest = line;
boost::to_upper(linetest);
if (linetest.find("VARIABLES") != string::npos)
{
std::size_t pos = line.find('=');
pos++;
@@ -138,10 +139,9 @@ void InputDat::Process(po::variables_map &vm)
while (!datFile.eof())
{
getline(datFile, line);
if ((line.find("ZONE") != string::npos) ||
(line.find("Zone") != string::npos) ||
(line.find("zone") != string::npos))
string linetest = line;
boost::to_upper(linetest);
if ((linetest.find("ZONE") != string::npos))
{
ReadTecplotFEBlockZone(datFile, line, pts, ptsConn);
}
@@ -795,16 +795,20 @@ void OutputTecplot::WriteTecplotConnectivity(std::ofstream &outfile)
}
else
{
int cnt = 1;
for (int i = 0; i < m_conn.size(); ++i)
{
const int nConn = m_conn[i].num_elements();
for (int j = 0; j < nConn; ++j)
for (int j = 0; j < nConn; ++j,++cnt)
{
outfile << m_conn[i][j] + 1 << " ";
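// Break the connectivity list every 1000 entries so the ASCII
// Tecplot output avoids very long lines.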
if (!(cnt % 1000))
{
outfile << std::endl;
}
}
outfile << endl;
}
outfile << endl;
if (m_oneOutputFile && m_f->m_comm->GetRank() == 0)
{
@@ -814,7 +818,6 @@ void OutputTecplot::WriteTecplotConnectivity(std::ofstream &outfile)
{
Array<OneD, int> conn(m_rankConnSizes[n]);
m_f->m_comm->Recv(n, conn);
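// Indices received from rank n are shifted by the accumulated
// offset and made 1-based for Tecplot.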
for (int j = 0; j < conn.num_elements(); ++j)
{
outfile << conn[j] + offset + 1 << " ";
@@ -131,6 +131,11 @@ void ProcessIsoContour::Process(po::variables_map &vm)
if(m_f->m_fieldPts.get()) // field points were already read from a .dat file and can be used directly
{
if(rank == 0)
{
cout << "Process read iso from Field Pts" << endl;
}
SetupIsoFromFieldPts(iso);
}
else // extract isocontour from field
@@ -196,6 +201,10 @@ void ProcessIsoContour::Process(po::variables_map &vm)
bool globalcondense = m_config["globalcondense"].m_beenSet;
if(globalcondense)
{
if(rank == 0)
{
cout << "Process global condense ..." << endl;
}
int nfields = m_f->m_fieldPts->GetNFields() + m_f->m_fieldPts->GetDim();
IsoSharedPtr g_iso = MemoryManager<Iso>::AllocateSharedPtr(nfields-3);
@@ -832,13 +841,21 @@ void Iso::GlobalCondense(vector<IsoSharedPtr> &iso, bool verbose)
{
for(id1 = 0; id1 < iso[i]->m_nvert; ++id1)
{
inPoints.push_back(PointPair(BPoint( iso[i]->m_x[id1],
iso[i]->m_y[id1],
iso[i]->m_z[id1]), id2));
global_to_unique_map[id2]=id2;
global_to_iso_map[id2] = make_pair(i,id1);
id2++;
}
}
if(verbose)
{
cout << "Process building tree ..." << endl;
}
//Build tree
bgi::rtree<PointPair, bgi::rstar<16> > rtree;
rtree.insert(inPoints.begin(), inPoints.end());
@@ -851,49 +868,58 @@
{
if(verbose)
{
prog = LibUtilities::PrintProgressbar(i,m_nvert,"Nearest verts",prog);
prog = LibUtilities::PrintProgressbar(i,m_nvert,
"Nearest verts",prog);
}
BPoint queryPoint = inPoints[i].first;
// find points within the distance box
std::vector<PointPair> result;
rtree.query(bgi::nearest(queryPoint, 100), std::back_inserter(result));
WARNINGL1(result.size() < 100,"Failed to find less than 100 neighbouring points");
id1 = 0;
unique_index_found = false;
int nptsfound = 0;
WARNINGL1(result.size() > 0,"Failed to find any nearest point");
for(id1 = 0; id1 < result.size(); ++id1)
// check to see if point has already been reset to a lower
// unique value
if(global_to_unique_map[i] < unique_index) // do nothing
{
}
else
{
if(bg::distance(queryPoint, result[id1].first)<SQ_PNT_TOL)
// find nearest 10 points within the distance box
std::vector<PointPair> result;
rtree.query(bgi::nearest(queryPoint, 10), std::back_inserter(result));
//see if any values have unique value already
set<int> samept;
set<int>::iterator it;
int new_index = -1;
for(id1 = 0; id1 < result.size(); ++id1)
{
id2 = result[id1].second;