minor changes after MPI merge

Hamidreza 2025-05-19 13:53:34 +03:30
parent 9fb8abb166
commit e8ee35791f
14 changed files with 82 additions and 63 deletions

View File

@@ -26,7 +26,7 @@ bool pFlow::processorBoundarySphereParticles::acceleration(const timeInfo &ti, c
 auto I = Particles().I().BoundaryField(thisIndex).neighborProcField().deviceView();
 auto cf = Particles().contactForce().BoundaryField(thisIndex).neighborProcField().deviceView();
 auto ct = Particles().contactTorque().BoundaryField(thisIndex).neighborProcField().deviceView();
-auto acc = Particles().accelertion().BoundaryField(thisIndex).neighborProcField().deviceView();
+auto acc = Particles().acceleration().BoundaryField(thisIndex).neighborProcField().deviceView();
 auto rAcc = Particles().rAcceleration().BoundaryField(thisIndex).neighborProcField().deviceView();
 Kokkos::parallel_for(

View File

@@ -16,7 +16,7 @@ template class pFlow::MPI::dataIOMPI<pFlow::uint32x3>;
 template class pFlow::MPI::dataIOMPI<pFlow::uint64>;
-template class pFlow::MPI::dataIOMPI<pFlow::size_t>;
+//template class pFlow::MPI::dataIOMPI<pFlow::size_t>;
 template class pFlow::MPI::dataIOMPI<pFlow::real>;
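
The size_t instantiation is presumably disabled because, on the 64-bit build targeted here, it resolves to the same underlying type as the pFlow::uint64 instantiation right above it, which would make the second explicit instantiation a redefinition; that reading is an assumption, the commit itself gives no reason. A self-contained sketch of the collision, with a hypothetical dataIOStub in place of the pFlow template:

// duplicate-instantiation sketch -- dataIOStub is a stand-in, not pFlow::MPI::dataIOMPI
#include <cstdint>
#include <type_traits>

template<typename T>
class dataIOStub
{
public:
    T value{};
};

// Explicit instantiation for the fixed-width 64-bit type.
template class dataIOStub<std::uint64_t>;

// On many 64-bit toolchains std::size_t is the very same type as the 64-bit
// unsigned integer above; a second explicit instantiation would then be a
// redefinition, so it is guarded out here (the commit simply comments the line).
#if SIZE_MAX != UINT64_MAX   // crude guard: only instantiate when the widths already differ
template class dataIOStub<std::size_t>;
#endif

int main()
{
    // 0 when size_t and uint64_t are the same type on this platform, 1 otherwise.
    return std::is_same_v<std::size_t, std::uint64_t> ? 0 : 1;
}

The same reasoning would apply to the dataIO/dataIORegular pair commented out further down in this commit.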

View File

@@ -24,9 +24,9 @@ Licence:
 #include "scatteredMasterDistribute.hpp"
 #include "scatteredMasterDistributeChar.hpp"
-pFlow::MPI::MPISimulationDomain::MPISimulationDomain(systemControl& control)
+pFlow::MPI::MPISimulationDomain::MPISimulationDomain(systemControl& control, real maxBSphere)
 :
-simulationDomain(control),
+simulationDomain(control, maxBSphere),
 communication_(pFlowProcessors()),
 subDomainsAll_(pFlowProcessors()),
 numPointsAll_(pFlowProcessors()),
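
The extra constructor argument is only forwarded to the simulationDomain base class (the header change in the next file matches). A minimal sketch of the forwarding pattern with reduced stand-in types; treating maxBSphere as the maximum bounding-sphere size of the particles is an assumption:

// forwarding-constructor sketch -- simplified stand-ins, not the real pFlow classes
using real = double;            // pFlow::real is configurable; double is assumed here
struct systemControl {};        // empty stand-in

class simulationDomainSketch
{
public:
    simulationDomainSketch(systemControl&, real maxBSphere)
      : maxBSphere_(maxBSphere)
    {}
    real maxBSphere() const { return maxBSphere_; }
private:
    real maxBSphere_;
};

class MPISimulationDomainSketch : public simulationDomainSketch
{
public:
    // The derived constructor only forwards the extra argument to the base,
    // mirroring the change above.
    MPISimulationDomainSketch(systemControl& control, real maxBSphere)
      : simulationDomainSketch(control, maxBSphere)
    {}
};

int main()
{
    systemControl control;
    MPISimulationDomainSketch dom(control, 0.005); // e.g. a 5 mm bounding sphere
    return dom.maxBSphere() > 0 ? 0 : 1;
}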

View File

@@ -61,7 +61,7 @@ public:
 TypeInfo("simulationDomain<MPI>");
-explicit MPISimulationDomain(systemControl& control);
+explicit MPISimulationDomain(systemControl& control, real maxBSphere);
 ~MPISimulationDomain() final = default;

View File

@@ -47,7 +47,7 @@ pFlow::MPI::processorBoundaryField<T, MemorySpace>::updateBoundary(
 )
 {
 #ifndef BoundaryModel1
-if(!this->boundary().performBoundarytUpdate())
+if(!this->boundary().performBoundaryUpdate())
 return true;
 #endif
@@ -128,21 +128,19 @@ const typename pFlow::MPI::processorBoundaryField<T, MemorySpace>::
 template<class T, class MemorySpace>
 bool pFlow::MPI::processorBoundaryField<T, MemorySpace>::hearChanges(
-real t,
-real dt,
-uint32 iter,
+const timeInfo & ti,
 const message& msg,
 const anyList& varList
 )
 {
-BoundaryFieldType::hearChanges(t,dt,iter, msg,varList);
 if(msg.equivalentTo(message::BNDR_PROC_SIZE_CHANGED))
 {
-auto newProcSize = varList.getObject<uint32>("size");
+auto newProcSize = varList.getObject<uint32>(
+message::eventName(message::BNDR_PROC_SIZE_CHANGED));
 neighborProcField_.resize(newProcSize);
 }
-if(msg.equivalentTo(message::BNDR_PROCTRANSFER_SEND))
+else if(msg.equivalentTo(message::BNDR_PROCTRANSFER_SEND))
 {
 const auto& indices = varList.getObject<uint32Vector_D>(
 message::eventName(message::BNDR_PROCTRANSFER_SEND)
@@ -169,7 +167,6 @@ bool pFlow::MPI::processorBoundaryField<T, MemorySpace>::hearChanges(
 thisFieldInNeighbor_.sendData(pFlowProcessors(),transferData);
 }
 }
 else if(msg.equivalentTo(message::BNDR_PROCTRANSFER_RECIEVE))
 {
@@ -182,30 +179,38 @@ bool pFlow::MPI::processorBoundaryField<T, MemorySpace>::hearChanges(
 {
 uint32 numRecieved = neighborProcField_.waitBufferForUse();
-if(msg.equivalentTo(message::CAP_CHANGED))
+if(numRecieved == 0u)
 {
-auto newCap = varList.getObject<uint32>(
-message::eventName(message::CAP_CHANGED));
-this->internal().field().reserve(newCap);
+return true;
 }
-if(msg.equivalentTo(message::SIZE_CHANGED))
+if(msg.equivalentTo(message::RANGE_CHANGED))
 {
-auto newSize = varList.getObject<uint32>(
-message::eventName(message::SIZE_CHANGED));
-this->internal().field().resize(newSize);
+auto newRange = varList.getObject<rangeU32>(
+message::eventName(message::RANGE_CHANGED));
+this->internal().field().resize(newRange.end());
 }
-const auto& indices = varList.getObject<uint32IndexContainer>(
-message::eventName(message::ITEM_INSERT));
-this->internal().field().insertSetElement(indices, neighborProcField_.buffer().deviceView());
-return true;
+if(msg.equivalentTo(message::ITEMS_INSERT))
+{
+const auto& indices = varList.getObject<uint32IndexContainer>(
+message::eventName(message::ITEMS_INSERT));
+this->internal().field().insertSetElement(
+indices,
+neighborProcField_.buffer().deviceView());
+}
 }
+else
+{
+if(!BoundaryFieldType::hearChanges(ti, msg,varList) )
+{
+return false;
+}
+}
+return true;
 }
 template <class T, class MemorySpace>
 void pFlow::MPI::processorBoundaryField<T, MemorySpace>::sendBackData() const
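
In the new form, the time quantities arrive bundled in a single timeInfo argument, the processor-specific events are handled in this override, and every other message is delegated to the base-class hearChanges. Below is a compilable sketch of that dispatch shape; every *Stub type and the reduced event set are simplified stand-ins, not the real pFlow interfaces, and the fallback branch only prints where the real code calls the base handler.

// dispatch sketch -- the *Stub types are stand-ins, not the pFlow interfaces
#include <cstdint>
#include <iostream>
#include <string>
#include <unordered_map>
#include <vector>

using uint32 = std::uint32_t;

struct timeInfoStub             // bundles t, dt and iter, as the new signature does
{
    double t;
    double dt;
    uint32 iter;
};

enum class eventCode { BNDR_PROC_SIZE_CHANGED, RANGE_CHANGED, ITEMS_INSERT };

struct messageStub
{
    eventCode ev;
    bool equivalentTo(eventCode e) const { return ev == e; }
    static std::string eventName(eventCode e)
    {
        switch (e)
        {
            case eventCode::BNDR_PROC_SIZE_CHANGED: return "BNDR_PROC_SIZE_CHANGED";
            case eventCode::RANGE_CHANGED:          return "RANGE_CHANGED";
            case eventCode::ITEMS_INSERT:           return "ITEMS_INSERT";
        }
        return {};
    }
};

struct anyListStub              // values are looked up by event name, as in the diff
{
    std::unordered_map<std::string, uint32> values;
    uint32 getObject(const std::string& name) const { return values.at(name); }
};

// Handle processor-specific events locally; hand anything else to a fallback
// (the real code calls the base-class hearChanges there instead).
bool hearChangesSketch(const timeInfoStub& ti, const messageStub& msg,
                       const anyListStub& varList, std::vector<double>& neighborField)
{
    if (msg.equivalentTo(eventCode::BNDR_PROC_SIZE_CHANGED))
    {
        auto newSize = varList.getObject(
            messageStub::eventName(eventCode::BNDR_PROC_SIZE_CHANGED));
        neighborField.resize(newSize);
        return true;
    }
    // ... further branches would handle the processor-transfer events ...
    std::cout << "iter " << ti.iter << ": passing event to the fallback handler\n";
    return true;
}

int main()
{
    std::vector<double> neighborField;
    anyListStub vars;
    vars.values[messageStub::eventName(eventCode::BNDR_PROC_SIZE_CHANGED)] = 8;

    timeInfoStub ti{0.1, 1.0e-5, 100};
    hearChangesSketch(ti, messageStub{eventCode::BNDR_PROC_SIZE_CHANGED}, vars, neighborField);
    std::cout << "neighbor field size: " << neighborField.size() << "\n";
    return 0;
}

Folding t, dt and iter into one object is also what lets the hearChanges and notify signatures elsewhere in this commit shrink to (ti, msg, varList).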

View File

@@ -91,9 +91,7 @@ public:
 }
 bool hearChanges(
-real t,
-real dt,
-uint32 iter,
+const timeInfo & ti,
 const message& msg,
 const anyList& varList
 ) override;

View File

@@ -83,15 +83,15 @@ pFlow::MPI::boundaryProcessor::beforeIteration(
 else if(step == 2 )
 {
-#ifdef BoundaryModel1
-callAgain = true;
-#else
-if(!performBoundarytUpdate())
-{
-callAgain = false;
-return true;
-}
-#endif
+#ifdef BoundaryModel1
+callAgain = true;
+#else
+if(!performBoundaryUpdate())
+{
+callAgain = false;
+return true;
+}
+#endif
 thisNumPoints_ = size();
@@ -136,7 +136,7 @@ pFlow::MPI::boundaryProcessor::beforeIteration(
 varList.emplaceBack(msg.addAndName(message::BNDR_PROC_SIZE_CHANGED), neighborProcNumPoints_);
-if( !notify(ti.iter(), ti.t(), ti.dt(), msg, varList) )
+if( !notify(ti, msg, varList) )
 {
 fatalErrorInFunction;
 callAgain = false;
@@ -343,8 +343,9 @@ bool pFlow::MPI::boundaryProcessor::transferData(
 neighborProcPoints_.waitBufferForUse();
 internal().insertPointsOnly(neighborProcPoints_.buffer(), msg, varList);
-const auto& indices = varList.getObject<uint32IndexContainer>(message::eventName(message::ITEM_INSERT));
+const auto& indices = varList.getObject<uint32IndexContainer>(message::eventName(message::ITEMS_INSERT));
+// creates a view (does not copy data)
 auto indView = deviceViewType1D<uint32>(indices.deviceView().data(), indices.deviceView().size());
 uint32Vector_D newIndices("newIndices", indView);
@@ -356,7 +357,7 @@ bool pFlow::MPI::boundaryProcessor::transferData(
 return false;
 }
-const auto ti = internal().time().TimeInfo();
+const auto& ti = internal().time().TimeInfo();
 if(!notify(ti, msg, varList))
 {
 fatalErrorInFunction;
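
The last change in this file swaps const auto for const auto&, so the timeInfo returned by TimeInfo() is no longer copied before being handed to notify(ti, msg, varList). A small stand-alone illustration; the stub types and the by-reference return of TimeInfo() are assumptions:

// const-reference binding sketch -- stand-in types, not the pFlow Time classes
#include <iostream>

struct timeInfoStub
{
    double t;
    double dt;
    unsigned iter;
};

class timeStub
{
public:
    // Assumed here to return by const reference; binding the result to
    // `const auto&` then avoids a copy. If it returned by value instead,
    // the const reference would still be safe, because it extends the
    // temporary's lifetime to that of the reference.
    const timeInfoStub& TimeInfo() const { return info_; }
private:
    timeInfoStub info_{0.2, 1.0e-5, 200};
};

int main()
{
    timeStub time;
    const auto& ti = time.TimeInfo();   // no copy, as in the changed line above
    std::cout << ti.iter << "\n";
    return 0;
}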

View File

@@ -114,8 +114,8 @@ public:
 return true;
 }
-fatalErrorInFunction<<"Event"<< msg.eventNames()<<"with code "<< msg <<
-" is not handled in boundaryField."<<endl;
+fatalErrorInFunction<<"Event "<< msg.eventNames()<<" with code "<< msg <<
+" is not handled in boundaryField "<< name()<<endl;
 return false;
 }

View File

@@ -223,7 +223,7 @@ bool pFlow::internalField<T, MemorySpace>:: hearChanges
 else
 {
 fatalErrorInFunction<<"hear changes in internal field is not processing "<<
-message::eventName(message::RANGE_CHANGED)<<
+msg.eventNames()<<
 " event with message code "<< msg<<endl;
 return false;
 }

View File

@@ -25,8 +25,8 @@ template class pFlow::dataIORegular<pFlow::uint32x3>;
 template class pFlow::dataIO<pFlow::uint64>;
 template class pFlow::dataIORegular<pFlow::uint64>;
-template class pFlow::dataIO<size_t>;
-template class pFlow::dataIORegular<size_t>;
+//template class pFlow::dataIO<size_t>;
+//template class pFlow::dataIORegular<size_t>;
 template class pFlow::dataIO<pFlow::real>;
 template class pFlow::dataIORegular<pFlow::real>;

View File

@@ -246,7 +246,7 @@ public:
 /// Is this iter the right time for updating bounday list
 inline
-bool performBoundarytUpdate()const
+bool performBoundaryUpdate()const
 {
 return updateTime_;
 }

View File

@@ -60,7 +60,7 @@ bool pFlow::boundaryExit::beforeIteration
 {
 callAgain = false;
-if( !performBoundarytUpdate())
+if( !performBoundaryUpdate())
 {
 return true;
 }

View File

@@ -60,7 +60,7 @@ bool pFlow::boundaryPeriodic::beforeIteration(
 return true;
 }
 //output<<this->thisBoundaryIndex()<<" ->"<<ti.iter()<<" update called\n";
-if(!performBoundarytUpdate())
+if(!performBoundaryUpdate())
 {
 return true;
 }

View File

@@ -1,18 +1,33 @@
 #!/bin/bash
 cd ${0%/*} || exit 1 # Run from this directory
-rm -rf build/ include/ lib/
-mkdir build
+# Source the configurations - if there's a ./configurations file
+[ -f ./configurations ] && source ./configurations
+# Set environment variables to ensure shared library creation
+export CFLAGS="-fPIC"
+export CXXFLAGS="-fPIC"
+export FCFLAGS="-fPIC"
+# Create build directory
+mkdir -p build
 cd build
-../configure \
---prefix=$HOME/PhasicFlow/phasicFlow-v-1.0/thirdParty/Zoltan/ \
---with-gnumake \
---with-id-type=uint
---disable-tests
---disable-examples
+# Run configure with shared library options
+echo "Running configure with options to build shared library..."
+../configure --prefix=$PWD/.. --enable-shared --disable-static
-make everything -j4
+# Run make and install
+echo "Building and installing Zoltan..."
 make install
 cd ../
+# Convert static to shared library if static library exists and shared doesn't
+echo "Checking for static library and converting to shared if needed..."
+if [ -f "$PWD/../lib/libzoltan.a" ] && [ ! -f "$PWD/../lib/libzoltan.so" ]; then
+echo "Converting static library to shared library..."
+cd $PWD/../lib
+gcc -shared -o libzoltan.so -Wl,--whole-archive libzoltan.a -Wl,--no-whole-archive
+echo "Shared library created as libzoltan.so"
+fi
+echo "Build completed"