A few fixes for MPI

main.cpp:
	1. 	Check return value of ParseFDTDSetup and exit if false
	2. 	Use exit instead of return. The two are almost identical, but
		in my OpenMPI installation the process with the highest rank
		segfaults at the end when using return; this does not happen
		with exit. Probably a C++ cleanup problem (destructors).
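
		A minimal sketch of one plausible mechanism (hypothetical class
		name, not the real openEMS code): returning from main() still runs
		the destructors of main()'s local objects after MPI::Finalize(),
		while exit() skips them.

		#include <mpi.h>
		#include <cstdlib>

		struct Solver
		{
			~Solver() { /* may still touch MPI-related resources */ }
		};

		int main(int argc, char* argv[])
		{
			MPI::Init(argc, argv);
			Solver solver;   // automatic object local to main()
			// ... run the simulation ...
			MPI::Finalize();
			// return 0;     // would run ~Solver() *after* Finalize()
			exit(0);         // skips destructors of automatic objects
		}
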
openems.cpp:
	Give Parse_XML_FDTDSetup a deterministic return value.
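
	Background (simplified sketch, not the real parser): control flowing
	off the end of a non-void function is undefined behavior, so the
	caller in main.cpp could see a garbage value.

	bool Parse_XML_FDTDSetup_sketch(/* ... */)
	{
		// ... read the optional FDTD attributes ...
		return true;   // without this line the result is indeterminate
	}
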
openems_fdtd_mpi.cpp:
	1.	Remove the word "only" in an error message because there can
		also be too many processes.
	2.	Fix the indexing variables for SetSplitPos in SetupMPI. Otherwise
		more than one split results in an out-of-range exception and
		unexpected behavior.
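
		A standalone sketch of the failure (hypothetical split positions,
		mimicking m_SplitNumber with a different number of entries per
		direction):

		#include <cstdio>
		#include <vector>

		int main()
		{
			// direction 0 has more split positions than directions 1 and 2
			std::vector<unsigned int> split[3] = { {10, 20, 30}, {15}, {25} };

			for (std::size_t i = 0; i < split[0].size(); ++i)
				for (std::size_t j = 0; j < split[1].size(); ++j)
					for (std::size_t k = 0; k < split[2].size(); ++k)
					{
						// the old code indexed all three directions with i, so
						// split[1].at(i) and split[2].at(i) throw
						// std::out_of_range as soon as i exceeds their size
						std::printf("split pos: %u %u %u\n",
						            split[0].at(i), split[1].at(j), split[2].at(k));
					}
			return 0;
		}
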
RunOpenEMS_MPI.m:
	Apply Settings.MPI.GlobalArgs also to multi-host scenarios.
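
	With this change a hypothetical Settings.MPI.GlobalArgs value such as
	'--mca btl tcp,self' also reaches the multi-host call, i.e. the
	assembled command becomes roughly
	mpiexec --mca btl tcp,self -host <HostList> -n <NrProc> -wdir <work_path> <Binary> <Sim_File> ...
	instead of silently dropping the global arguments.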
Branch: pull/23/head
Author: Ubuntu
Date: 2016-12-01 09:57:08 +00:00
Parent: ed33316dcf
Commit: 4cffaa5c53
4 changed files with 12 additions and 9 deletions

openems_fdtd_mpi.cpp

@@ -210,7 +210,7 @@ bool openEMS_FDTD_MPI::SetupMPI()
if (numProcs!=m_NumProc)
{
if (m_MyID==0)
-cerr << "openEMS_FDTD_MPI::SetupMPI: Error: Requested splits require " << numProcs << " processes, but only " << m_NumProc << " were found! Exit! " << endl;
+cerr << "openEMS_FDTD_MPI::SetupMPI: Error: Requested splits require " << numProcs << " processes, but " << m_NumProc << " were found! Exit! " << endl;
exit(10);
}
@@ -256,8 +256,8 @@ bool openEMS_FDTD_MPI::SetupMPI()
grid->AddDiscLine(2, m_Original_Grid->GetLine(2,n) );
m_MPI_Op->SetSplitPos(0,m_SplitNumber[0].at(i));
-m_MPI_Op->SetSplitPos(1,m_SplitNumber[1].at(i));
-m_MPI_Op->SetSplitPos(2,m_SplitNumber[2].at(i));
+m_MPI_Op->SetSplitPos(1,m_SplitNumber[1].at(j));
+m_MPI_Op->SetSplitPos(2,m_SplitNumber[2].at(k));
if (i>0)
m_MPI_Op->SetNeighborDown(0,procTable[i-1][j][k]);

main.cpp

@@ -68,8 +68,12 @@ int main(int argc, char *argv[])
}
int EC = FDTD.ParseFDTDSetup(argv[1]);
+if(!EC) {
+cerr << "openEMS - ParseFDTDSetup failed." << endl;
+exit(1);
+}
EC = FDTD.SetupFDTD();
-if (EC) return EC;
+if (EC) exit(EC);
FDTD.RunFDTD();
#ifdef MPI_SUPPORT
@@ -77,5 +81,5 @@ int main(int argc, char *argv[])
MPI::Finalize();
#endif
-return 0;
+exit(0);
}

RunOpenEMS_MPI.m

@@ -87,7 +87,7 @@ end
if isfield(Settings.MPI,'Hosts')
disp(['Running remote openEMS_MPI in working dir: ' work_path]);
-[status] = system(['mpiexec -host ' HostList ' -n ' int2str(NrProc) ' -wdir ' work_path ' ' Settings.MPI.Binary ' ' Sim_File ' ' opts ' ' append_unix]);
+[status] = system(['mpiexec ' Settings.MPI.GlobalArgs ' -host ' HostList ' -n ' int2str(NrProc) ' -wdir ' work_path ' ' Settings.MPI.Binary ' ' Sim_File ' ' opts ' ' append_unix]);
else
disp('Running local openEMS_MPI');
[status] = system(['mpiexec ' Settings.MPI.GlobalArgs ' -n ' int2str(NrProc) ' ' Settings.MPI.Binary ' ' Sim_File ' ' opts ' ' append_unix]);

openems.cpp

@@ -418,13 +418,11 @@ bool openEMS::SetupProcessing()
}
if (CylinderCoords)
proc->SetMeshType(Processing::CYLINDRICAL_MESH);
-if ((pb->GetProbeType()==1) || (pb->GetProbeType()==3))
+if ((pb->GetProbeType()==1) || (pb->GetProbeType()==3) || (pb->GetProbeType()==11))
{
proc->SetDualTime(true);
proc->SetDualMesh(true);
}
-if (pb->GetProbeType()==11)
-proc->SetDualTime(true);
proc->SetProcessInterval(Nyquist/m_OverSampling);
if (pb->GetStartTime()>0 || pb->GetStopTime()>0)
proc->SetProcessStartStopTime(pb->GetStartTime(), pb->GetStopTime());
@@ -821,6 +819,7 @@ bool openEMS::Parse_XML_FDTDSetup(TiXmlElement* FDTD_Opts)
this->SetTimeStep(dhelp);
if (FDTD_Opts->QueryDoubleAttribute("TimeStepFactor",&dhelp)==TIXML_SUCCESS)
this->SetTimeStepFactor(dhelp);
+return true;
}
void openEMS::SetGaussExcite(double f0, double fc)