minor changes after MPI merge

parent 9fb8abb166
commit e8ee35791f
@@ -26,7 +26,7 @@ bool pFlow::processorBoundarySphereParticles::acceleration(const timeInfo &ti, c
     auto I = Particles().I().BoundaryField(thisIndex).neighborProcField().deviceView();
     auto cf = Particles().contactForce().BoundaryField(thisIndex).neighborProcField().deviceView();
     auto ct = Particles().contactTorque().BoundaryField(thisIndex).neighborProcField().deviceView();
-    auto acc = Particles().accelertion().BoundaryField(thisIndex).neighborProcField().deviceView();
+    auto acc = Particles().acceleration().BoundaryField(thisIndex).neighborProcField().deviceView();
     auto rAcc = Particles().rAcceleration().BoundaryField(thisIndex).neighborProcField().deviceView();

     Kokkos::parallel_for(
@@ -16,7 +16,7 @@ template class pFlow::MPI::dataIOMPI<pFlow::uint32x3>;

 template class pFlow::MPI::dataIOMPI<pFlow::uint64>;

-template class pFlow::MPI::dataIOMPI<pFlow::size_t>;
+//template class pFlow::MPI::dataIOMPI<pFlow::size_t>;

 template class pFlow::MPI::dataIOMPI<pFlow::real>;
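Note on the hunk above: `pFlow::size_t` and `pFlow::uint64` are typically the same 64-bit unsigned type on LP64 builds, so keeping both explicit instantiations of `dataIOMPI` would likely define the identical specialization twice; commenting one out is the usual fix. A minimal self-contained sketch of the clash, using standard types rather than pFlow's aliases:

    #include <cstddef>
    #include <cstdint>

    template<typename T>
    struct dataIO { T value{}; };   // stand-in for pFlow's dataIOMPI

    template struct dataIO<std::uint64_t>;
    // On an LP64 target std::size_t aliases the same type, so a second
    // explicit instantiation would be a redefinition and fail to compile:
    // template struct dataIO<std::size_t>;

    static_assert(sizeof(std::size_t) == sizeof(std::uint64_t),
                  "this sketch assumes an LP64 target");

    int main() { return 0; }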
@@ -24,9 +24,9 @@ Licence:
 #include "scatteredMasterDistribute.hpp"
 #include "scatteredMasterDistributeChar.hpp"

-pFlow::MPI::MPISimulationDomain::MPISimulationDomain(systemControl& control)
+pFlow::MPI::MPISimulationDomain::MPISimulationDomain(systemControl& control, real maxBSphere)
 :
-    simulationDomain(control),
+    simulationDomain(control, maxBSphere),
     communication_(pFlowProcessors()),
     subDomainsAll_(pFlowProcessors()),
     numPointsAll_(pFlowProcessors()),
@@ -61,7 +61,7 @@ public:

     TypeInfo("simulationDomain<MPI>");

-    explicit MPISimulationDomain(systemControl& control);
+    explicit MPISimulationDomain(systemControl& control, real maxBSphere);

     ~MPISimulationDomain() final = default;
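The constructor now takes a `maxBSphere` argument (by its name, the maximum bounding-sphere size of the particles) and forwards it to the `simulationDomain` base, so every call site gains a parameter. A minimal sketch of the forwarding pattern, with `systemControl` omitted and all types reduced to stand-ins:

    #include <iostream>

    using real = double;

    struct simulationDomain {                      // base now stores the extra value
        explicit simulationDomain(real maxBSphere) : maxBSphere_(maxBSphere) {}
        real maxBSphere_;
    };

    struct MPISimulationDomain : simulationDomain {
        explicit MPISimulationDomain(real maxBSphere)
            : simulationDomain(maxBSphere) {}      // forward to the base, as in the diff
    };

    int main() {
        MPISimulationDomain dom(0.005);            // illustrative value
        std::cout << dom.maxBSphere_ << "\n";
    }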
@@ -47,7 +47,7 @@ pFlow::MPI::processorBoundaryField<T, MemorySpace>::updateBoundary(
 )
 {
 #ifndef BoundaryModel1
-    if(!this->boundary().performBoundarytUpdate())
+    if(!this->boundary().performBoundaryUpdate())
         return true;
 #endif
@@ -128,21 +128,19 @@ const typename pFlow::MPI::processorBoundaryField<T, MemorySpace>::

 template<class T, class MemorySpace>
 bool pFlow::MPI::processorBoundaryField<T, MemorySpace>::hearChanges(
-    real t,
-    real dt,
-    uint32 iter,
+    const timeInfo & ti,
     const message& msg,
     const anyList& varList
 )
 {
-    BoundaryFieldType::hearChanges(t,dt,iter, msg,varList);
     if(msg.equivalentTo(message::BNDR_PROC_SIZE_CHANGED))
     {
-        auto newProcSize = varList.getObject<uint32>("size");
+        auto newProcSize = varList.getObject<uint32>(
+            message::eventName(message::BNDR_PROC_SIZE_CHANGED));
         neighborProcField_.resize(newProcSize);
     }
-
-    if(msg.equivalentTo(message::BNDR_PROCTRANSFER_SEND))
+    else if(msg.equivalentTo(message::BNDR_PROCTRANSFER_SEND))
     {
         const auto& indices = varList.getObject<uint32Vector_D>(
             message::eventName(message::BNDR_PROCTRANSFER_SEND)
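Two things happen in this hunk: the loose `(real t, real dt, uint32 iter)` triple collapses into one `const timeInfo&`, and the resize payload is no longer fetched under the ad-hoc key `"size"` but under the event's own name via `message::eventName(...)`, the same key the producer registers with `msg.addAndName(...)` in the `boundaryProcessor` hunk further down. A self-contained sketch of that key convention, with a plain map standing in for pFlow's `anyList`:

    #include <cstdint>
    #include <iostream>
    #include <map>
    #include <string>

    using uint32 = std::uint32_t;

    // identity stand-in for message::eventName(...)
    std::string eventName(const std::string& ev) { return ev; }

    int main() {
        std::map<std::string, uint32> varList;     // plays the role of anyList

        // producer side: store the payload under the event's canonical name
        varList[eventName("BNDR_PROC_SIZE_CHANGED")] = 128u;

        // consumer side (hearChanges): look it up under the same key, so the
        // string can never drift out of sync between producer and consumer
        uint32 newProcSize = varList.at(eventName("BNDR_PROC_SIZE_CHANGED"));
        std::cout << "resize neighbor field to " << newProcSize << "\n";
    }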
@@ -169,7 +167,6 @@ bool pFlow::MPI::processorBoundaryField<T, MemorySpace>::hearChanges(
         thisFieldInNeighbor_.sendData(pFlowProcessors(),transferData);
     }

-
     }
     else if(msg.equivalentTo(message::BNDR_PROCTRANSFER_RECIEVE))
     {
@@ -182,30 +179,38 @@ bool pFlow::MPI::processorBoundaryField<T, MemorySpace>::hearChanges(
     {

         uint32 numRecieved = neighborProcField_.waitBufferForUse();
-        if(msg.equivalentTo(message::CAP_CHANGED))
+        if(numRecieved == 0u)
         {
-            auto newCap = varList.getObject<uint32>(
-                message::eventName(message::CAP_CHANGED));
-            this->internal().field().reserve(newCap);
-        }
-        if(msg.equivalentTo(message::SIZE_CHANGED))
-        {
-            auto newSize = varList.getObject<uint32>(
-                message::eventName(message::SIZE_CHANGED));
-            this->internal().field().resize(newSize);
+            return true;
         }

-        const auto& indices = varList.getObject<uint32IndexContainer>(
-            message::eventName(message::ITEM_INSERT));
+        if(msg.equivalentTo(message::RANGE_CHANGED))
+        {
+            auto newRange = varList.getObject<rangeU32>(
+                message::eventName(message::RANGE_CHANGED));
+            this->internal().field().resize(newRange.end());
+        }

-        this->internal().field().insertSetElement(indices, neighborProcField_.buffer().deviceView());
+        if(msg.equivalentTo(message::ITEMS_INSERT))
+        {
+            const auto& indices = varList.getObject<uint32IndexContainer>(
+                message::eventName(message::ITEMS_INSERT));

-        return true;
+            this->internal().field().insertSetElement(
+                indices,
+                neighborProcField_.buffer().deviceView());
+        }
+    }
+    else
+    {
+        if(!BoundaryFieldType::hearChanges(ti, msg,varList) )
+        {
+            return false;
+        }
     }

     return true;
 }
 template <class T, class MemorySpace>
 void pFlow::MPI::processorBoundaryField<T, MemorySpace>::sendBackData() const
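The reworked receive path first early-outs when nothing arrived, then handles `RANGE_CHANGED` (resize to `newRange.end()`) and `ITEMS_INSERT` (scatter the received buffer) itself, and delegates every other event to `BoundaryFieldType::hearChanges(ti, msg, varList)`. The two checks are deliberately independent `if`s rather than an `else if` chain, which suggests one message can carry several event flags at once; the following self-contained sketch imitates that with plain bit flags (pFlow's real `message` class is richer than this):

    #include <cstdint>
    #include <iostream>

    enum Event : std::uint32_t {
        RANGE_CHANGED = 1u << 0,
        ITEMS_INSERT  = 1u << 1,
    };

    bool equivalentTo(std::uint32_t msg, Event e) { return (msg & e) != 0u; }

    int main() {
        // one message can announce a resize AND an insertion, so the handler
        // must test each flag independently instead of chaining else-ifs
        std::uint32_t msg = RANGE_CHANGED | ITEMS_INSERT;
        if (equivalentTo(msg, RANGE_CHANGED)) std::cout << "resize the field first\n";
        if (equivalentTo(msg, ITEMS_INSERT))  std::cout << "then insert received items\n";
    }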
@@ -91,9 +91,7 @@ public:
     }

     bool hearChanges(
-        real t,
-        real dt,
-        uint32 iter,
+        const timeInfo & ti,
         const message& msg,
         const anyList& varList
     ) override;
@@ -83,15 +83,15 @@ pFlow::MPI::boundaryProcessor::beforeIteration(
     else if(step == 2 )
     {

 #ifdef BoundaryModel1
         callAgain = true;
 #else
-        if(!performBoundarytUpdate())
+        if(!performBoundaryUpdate())
         {
             callAgain = false;
             return true;
         }
 #endif

         thisNumPoints_ = size();
@@ -136,7 +136,7 @@ pFlow::MPI::boundaryProcessor::beforeIteration(

         varList.emplaceBack(msg.addAndName(message::BNDR_PROC_SIZE_CHANGED), neighborProcNumPoints_);

-        if( !notify(ti.iter(), ti.t(), ti.dt(), msg, varList) )
+        if( !notify(ti, msg, varList) )
         {
             fatalErrorInFunction;
             callAgain = false;
@@ -343,8 +343,9 @@ bool pFlow::MPI::boundaryProcessor::transferData(
     neighborProcPoints_.waitBufferForUse();
     internal().insertPointsOnly(neighborProcPoints_.buffer(), msg, varList);

-    const auto& indices = varList.getObject<uint32IndexContainer>(message::eventName(message::ITEM_INSERT));
+    const auto& indices = varList.getObject<uint32IndexContainer>(message::eventName(message::ITEMS_INSERT));

+    // creates a view (does not copy data)
     auto indView = deviceViewType1D<uint32>(indices.deviceView().data(), indices.deviceView().size());

     uint32Vector_D newIndices("newIndices", indView);
@@ -356,7 +357,7 @@ bool pFlow::MPI::boundaryProcessor::transferData(
         return false;
     }

-    const auto ti = internal().time().TimeInfo();
+    const auto& ti = internal().time().TimeInfo();
     if(!notify(ti, msg, varList))
     {
         fatalErrorInFunction;
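Dropping the copy in favour of `const auto&` matters if `TimeInfo()` hands back a reference to a stored record: `auto` deduces a value type and copies, while `auto&` binds to the original. A self-contained illustration of the deduction difference (`Clock` and `Stamp` are made-up names):

    #include <iostream>

    struct Stamp { double t, dt; int iter; };

    struct Clock {
        Stamp s{1.0, 0.01, 100};
        const Stamp& info() const { return s; }    // returns a reference
    };

    int main() {
        Clock clock;
        const auto  byValue = clock.info();   // deduces Stamp: makes a copy
        const auto& byRef   = clock.info();   // binds to the member: no copy
        clock.s.iter = 101;
        std::cout << byValue.iter << " vs " << byRef.iter << "\n";   // 100 vs 101
    }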
|
@ -114,8 +114,8 @@ public:
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
|
|
||||||
fatalErrorInFunction<<"Event"<< msg.eventNames()<<"with code "<< msg <<
|
fatalErrorInFunction<<"Event "<< msg.eventNames()<<" with code "<< msg <<
|
||||||
" is not handled in boundaryField."<<endl;
|
" is not handled in boundaryField "<< name()<<endl;
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@@ -223,7 +223,7 @@ bool pFlow::internalField<T, MemorySpace>:: hearChanges
     else
     {
         fatalErrorInFunction<<"hear changes in internal field is not processing "<<
-        message::eventName(message::RANGE_CHANGED)<<
+        msg.eventNames()<<
         " event with message code "<< msg<<endl;
         return false;
     }
@@ -25,8 +25,8 @@ template class pFlow::dataIORegular<pFlow::uint32x3>;
 template class pFlow::dataIO<pFlow::uint64>;
 template class pFlow::dataIORegular<pFlow::uint64>;

-template class pFlow::dataIO<size_t>;
-template class pFlow::dataIORegular<size_t>;
+//template class pFlow::dataIO<size_t>;
+//template class pFlow::dataIORegular<size_t>;

 template class pFlow::dataIO<pFlow::real>;
 template class pFlow::dataIORegular<pFlow::real>;
@@ -246,7 +246,7 @@ public:

     /// Is this iter the right time for updating bounday list
     inline
-    bool performBoundarytUpdate()const
+    bool performBoundaryUpdate()const
     {
         return updateTime_;
     }
@@ -60,7 +60,7 @@ bool pFlow::boundaryExit::beforeIteration
     {
         callAgain = false;

-        if( !performBoundarytUpdate())
+        if( !performBoundaryUpdate())
         {
             return true;
         }
@@ -60,7 +60,7 @@ bool pFlow::boundaryPeriodic::beforeIteration(
         return true;
     }
     //output<<this->thisBoundaryIndex()<<" ->"<<ti.iter()<<" update called\n";
-    if(!performBoundarytUpdate())
+    if(!performBoundaryUpdate())
     {
         return true;
     }
@@ -1,18 +1,33 @@
 #!/bin/bash
+cd ${0%/*} || exit 1 # Run from this directory

-rm -rf build/ include/ lib/
-mkdir build
+# Source the configurations - if there's a ./configurations file
+[ -f ./configurations ] && source ./configurations
+
+# Set environment variables to ensure shared library creation
+export CFLAGS="-fPIC"
+export CXXFLAGS="-fPIC"
+export FCFLAGS="-fPIC"

+# Create build directory
+mkdir -p build
 cd build

-../configure \
---prefix=$HOME/PhasicFlow/phasicFlow-v-1.0/thirdParty/Zoltan/ \
---with-gnumake \
---with-id-type=uint
---disable-tests
---disable-examples
-
-make everything -j4
-
+# Run configure with shared library options
+echo "Running configure with options to build shared library..."
+../configure --prefix=$PWD/.. --enable-shared --disable-static
+
+# Run make and install
+echo "Building and installing Zoltan..."
 make install

-cd ../
+# Convert static to shared library if static library exists and shared doesn't
+echo "Checking for static library and converting to shared if needed..."
+if [ -f "$PWD/../lib/libzoltan.a" ] && [ ! -f "$PWD/../lib/libzoltan.so" ]; then
+    echo "Converting static library to shared library..."
+    cd $PWD/../lib
+    gcc -shared -o libzoltan.so -Wl,--whole-archive libzoltan.a -Wl,--no-whole-archive
+    echo "Shared library created as libzoltan.so"
+fi
+
+echo "Build completed"
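The rewritten script targets a shared Zoltan build in three ways: `-fPIC` makes every compiled object relocatable, `--enable-shared --disable-static` asks configure for a `.so` directly, and the fallback `gcc -shared ... -Wl,--whole-archive libzoltan.a -Wl,--no-whole-archive` repacks an already-built static archive into a shared object. The `--whole-archive` flag is what makes that fallback work: it forces the linker to pull in every member of the archive, where it would otherwise drop objects that nothing in the link references yet.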