G4MPImanager.hh
//
// ********************************************************************
// * License and Disclaimer                                           *
// *                                                                  *
// * The  Geant4 software  is  copyright of the Copyright Holders  of *
// * the Geant4 Collaboration.  It is provided  under  the terms  and *
// * conditions of the Geant4 Software License,  included in the file *
// * LICENSE and available at  http://cern.ch/geant4/license .  These *
// * include a list of copyright holders.                             *
// *                                                                  *
// * Neither the authors of this software system, nor their employing *
// * institutes,nor the agencies providing financial support for this *
// * work  make  any representation or  warranty, express or implied, *
// * regarding  this  software system or assume any liability for its *
// * use.  Please see the license in the file  LICENSE  and URL above *
// * for the full disclaimer and the limitation of liability.         *
// *                                                                  *
// * This  code  implementation is the result of  the  scientific and *
// * technical work of the GEANT4 collaboration.                      *
// * By using,  copying,  modifying or  distributing the software (or *
// * any work based  on the software)  you  agree  to acknowledge its *
// * use  in  resulting  scientific  publications,  and indicate your *
// * acceptance of all terms of the Geant4 Software license.          *
// ********************************************************************

#ifndef G4MPI_MANAGER_H
#define G4MPI_MANAGER_H

#include "mpi.h"
#include <fstream>
#include <pthread.h>
#include "globals.hh"

#define DISALLOW_COPY_AND_ASSIGN(TypeName) \
  TypeName(const TypeName&); \
  void operator=(const TypeName&)
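// DISALLOW_COPY_AND_ASSIGN is the classic pre-C++11 non-copyable idiom:
// it declares the copy constructor and copy assignment operator without
// defining them, so any attempt to copy the class fails at compile or
// link time. (Since C++11 the same effect is usually spelled "= delete".)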

class G4MPImessenger;
class G4MPIsession;
class G4MPIstatus;
class G4VMPIseedGenerator;
class G4VMPIextraWorker;

class G4MPImanager {
public:
  // MPI master rank
  enum { kRANK_MASTER = 0 };

  enum { // MPI tag
    kTAG_G4COMMAND = 100,
    kTAG_G4STATUS = 200,
    kTAG_G4SEED = 300,
    kTAG_DATA = 1000,
    kTAG_HISTO = 1001,
    kTAG_RUN = 1002,
    kTAG_CMDSCR = 1003,
    kTAG_NTUPLE = 1004
  };

  G4MPImanager(int nof_extra_workers = 0);
  G4MPImanager(int argc, char** argv, int nof_extra_workers = 0);
  ~G4MPImanager();

  static G4MPImanager* GetManager();

  // set/get methods
  G4MPIsession* GetMPIsession() const;

  G4int GetVerbose() const;
  void SetVerbose(G4int iverbose);

  G4int GetTotalSize() const;  // get size of all ranks
  G4int GetActiveSize() const; // get size of ranks where RunBeamOn is called
  G4int GetRank() const;

  G4bool IsMaster() const;
  G4bool IsSlave() const;
  G4bool IsExtraWorker() const;

  G4bool IsInitMacro() const;
  const G4String& GetInitFileName() const;

  G4bool IsBatchMode() const;
  const G4String& GetMacroFileName() const;

  void SetMasterWeight(G4double aweight);
  G4double GetMasterWeight() const;

  void SetExtraWorker(G4VMPIextraWorker* extraWorker);
  G4VMPIextraWorker* GetExtraWorker() const;

  G4VMPIseedGenerator* GetSeedGenerator() const;

  // MPI methods
  G4String BcastCommand(const G4String& command);
  void ShowStatus();
  void ShowSeeds();
  void SetSeed(G4int inode, G4long seed);
  void WaitBeamOn();

  // methods for MPI environment
  void DistributeSeeds();
  void ExecuteMacroFile(const G4String& fname, G4bool qbatch=false);
  G4bool CheckThreadStatus();
  void ExecuteThreadCommand(const G4String& command);
  void ExecuteBeamOnThread(const G4String& command);
  void JoinBeamOnThread();
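  // Note: in the Geant4 MPI session design, a beamOn issued through
  // ExecuteBeamOnThread() runs in a separate pthread (thread_id_ below),
  // so the rank stays responsive to further commands; JoinBeamOnThread()
  // and WaitBeamOn() are the corresponding synchronization points.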

  void BeamOn(G4int nevent, G4bool qdivide=true);
  void Print(const G4String& message);

  // misc
  void ShowHelp() const;

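  // Note on the accessors below: the command communicator is exposed via
  // the C++ MPI binding (MPI::Intracomm), while the communicators used
  // for ntuple merging are exposed via the plain C binding (MPI_Comm).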
  const MPI::Intracomm* GetComm() const { return &COMM_G4COMMAND_; }
  const MPI_Comm* GetProcessingComm() const { return &processing_comm_; }
  const MPI_Comm* GetCollectingComm() const { return &collecting_comm_; }
  const MPI_Comm* GetAllComm() const { return &all_comm_; }
private:
  DISALLOW_COPY_AND_ASSIGN(G4MPImanager);

  // internal use
  void Initialize();
  void ParseArguments(G4int argc, char** argv);
  void UpdateStatus();

  static G4MPImanager* g4mpi_;
  G4MPImessenger* messenger_;
  G4MPIsession* session_;
  G4VMPIextraWorker* extra_worker_;

  // seed generator
  G4VMPIseedGenerator* seed_generator_;

  G4MPIstatus* status_; // status for each node

  G4int verbose_;

  // MPI rank
  G4bool is_master_;
  G4bool is_slave_;
  G4bool is_extra_worker_;
  G4int rank_;
  G4int size_;       // processing comm size
  G4int world_size_; // world comm size

  // MPI communicator (when no extra ranks)
  MPI::Intracomm COMM_G4COMMAND_;
  // MPI communicator (processing ranks - if ntuple merging)
  MPI_Comm processing_comm_;
  // MPI communicator (collecting ranks - if ntuple merging)
  MPI_Comm collecting_comm_;
  // MPI communicator (all ranks - if ntuple merging)
  MPI_Comm all_comm_;
  // Interim data - need to be freed
  MPI_Group world_group_;
  MPI_Group processing_group_;
  MPI_Group collecting_group_;
  MPI_Group all_group_;

  // cout/cerr control
  G4bool qfcout_;
  std::ofstream fscout_;

  // init/macro file
  G4bool qinitmacro_;
  G4String init_file_name_;
  G4bool qbatchmode_;
  G4String macro_file_name_;

  // for beamOn
  pthread_t thread_id_;

  // parallel parameters
  G4double master_weight_;
  G4int nof_extra_workers_;
};

// ====================================================================
inline G4MPIsession* G4MPImanager::GetMPIsession() const
{
  return session_;
}

inline G4int G4MPImanager::GetVerbose() const
{
  return verbose_;
}

inline void G4MPImanager::SetVerbose(G4int iverbose)
{
  G4int lv = iverbose;
  if( iverbose > 1 ) lv = 1;
  if( iverbose < 0 ) lv = 0;

  verbose_ = lv;
  return;
}

inline G4int G4MPImanager::GetRank() const
{
  return rank_;
}

inline G4int G4MPImanager::GetTotalSize() const
{
  return world_size_;
}

inline G4int G4MPImanager::GetActiveSize() const
{
  return size_;
}

inline G4bool G4MPImanager::IsMaster() const
{
  return is_master_;
}

inline G4bool G4MPImanager::IsSlave() const
{
  return is_slave_;
}

inline G4bool G4MPImanager::IsExtraWorker() const
{
  return is_extra_worker_;
}

inline G4bool G4MPImanager::IsInitMacro() const
{
  return qinitmacro_;
}

inline const G4String& G4MPImanager::GetInitFileName() const
{
  return init_file_name_;
}

inline G4bool G4MPImanager::IsBatchMode() const
{
  return qbatchmode_;
}

inline const G4String& G4MPImanager::GetMacroFileName() const
{
  return macro_file_name_;
}

inline void G4MPImanager::SetMasterWeight(G4double aweight)
{
  master_weight_ = aweight;

  if( aweight < 0. ) master_weight_ = 0.;
  if( aweight > 1. ) master_weight_ = 1.;
}

inline G4double G4MPImanager::GetMasterWeight() const
{
  return master_weight_;
}

inline G4VMPIextraWorker* G4MPImanager::GetExtraWorker() const
{
  return extra_worker_;
}

inline G4VMPIseedGenerator* G4MPImanager::GetSeedGenerator() const
{
  return seed_generator_;
}

#endif
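
For orientation, here is a minimal sketch of how this manager is typically driven, modeled on the Geant4 MPI examples (e.g. exMPI01). The run-manager setup lines are placeholders for user code (DetectorConstruction, PhysicsList and ActionInitialization are hypothetical names); the G4MPImanager and G4MPIsession calls are the ones declared above.

#include "G4MPImanager.hh"
#include "G4MPIsession.hh"
#include "G4RunManager.hh"

int main(int argc, char** argv)
{
  // Construct the MPI manager first: it initializes MPI and sets up
  // the rank/communicator state used by everything else.
  G4MPImanager* g4MPI = new G4MPImanager(argc, argv);
  g4MPI->SetVerbose(1);

  // The MPI session replaces the usual interactive session; commands
  // entered on the master rank are broadcast to the others (BcastCommand).
  G4MPIsession* session = g4MPI->GetMPIsession();

  G4RunManager* runManager = new G4RunManager;
  // runManager->SetUserInitialization(new DetectorConstruction);   // user code
  // runManager->SetUserInitialization(new PhysicsList);            // user code
  // runManager->SetUserInitialization(new ActionInitialization);   // user code
  runManager->Initialize();

  // Runs macros or an interactive loop on all ranks; /run/beamOn is
  // divided across ranks (see BeamOn's qdivide flag and the master weight).
  session->SessionStart();

  delete runManager;
  delete g4MPI;   // finalizes MPI
  return 0;
}

In batch use, a macro passed on the command line is executed across the ranks (see ExecuteMacroFile and IsBatchMode above) instead of an interactive loop, and GetRank()/IsMaster() are the natural guards for rank-specific work such as writing output files.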