#ifdef _usingEP
#include "ep_lib.hpp"
#include <mpi.h>
#include "ep_declaration.hpp"
#include "ep_mpi.hpp"

using namespace std;

extern std::map<std::pair<int, int>, MPI_Group* > * tag_group_map;

extern std::map<int, std::pair<ep_lib::MPI_Comm*, std::pair<int, int> > > * tag_comm_map;

extern MPI_Group MPI_GROUP_WORLD;

namespace ep_lib
{
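  // Endpoint-aware counterpart of MPI_Intercomm_create: if local_comm is a
  // plain MPI communicator we defer to the MPI path; otherwise we decide, by
  // comparing the two leaders' MPI ranks in peer_comm, whether the endpoint
  // implementation can be used (i.e. the leaders live in distinct MPI processes).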

  int MPI_Intercomm_create(MPI_Comm local_comm, int local_leader, MPI_Comm peer_comm, int remote_leader, int tag, MPI_Comm *newintercomm)
  {
    if(!local_comm->is_ep) return MPI_Intercomm_create_mpi(local_comm, local_leader, peer_comm, remote_leader, tag, newintercomm);

    int ep_rank = local_comm->ep_comm_ptr->size_rank_info[0].first;

    // check if local leaders are in the same mpi proc
    // by checking their mpi_rank in peer_comm

    int mpi_rank_of_leader[2];

    if(ep_rank == local_leader)
    {
      mpi_rank_of_leader[0] = peer_comm->ep_comm_ptr->size_rank_info[2].first;
      mpi_rank_of_leader[1] = peer_comm->ep_rank_map->at(remote_leader).second;
    }

    MPI_Bcast(mpi_rank_of_leader, 2, MPI_INT, local_leader, local_comm);

    if(mpi_rank_of_leader[0] != mpi_rank_of_leader[1])
    {
      Debug("calling MPI_Intercomm_create_endpoint\n");
      return MPI_Intercomm_create_endpoint(local_comm, local_leader, peer_comm, remote_leader, tag, newintercomm);
    }
    else
    {
      printf("local leaders are in the same MPI process. Routine not yet implemented\n");
      MPI_Abort(local_comm, 0);
      return 0; // not reached: MPI_Abort does not return
    }
  }
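  // Endpoint path: the intercommunicator is built at the endpoint level.
  // Local leaders first exchange sizes and ranks over peer_comm, a subset of
  // MPI processes then creates an MPI-level intercommunicator and merges it,
  // and the merged communicator is finally split back into endpoints.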
  int MPI_Intercomm_create_endpoint(MPI_Comm local_comm, int local_leader, MPI_Comm peer_comm, int remote_leader, int tag, MPI_Comm *newintercomm)
  {
    int ep_rank, ep_rank_loc, mpi_rank;
    int ep_size, num_ep, mpi_size;

    ep_rank     = local_comm->ep_comm_ptr->size_rank_info[0].first;
    ep_rank_loc = local_comm->ep_comm_ptr->size_rank_info[1].first;
    mpi_rank    = local_comm->ep_comm_ptr->size_rank_info[2].first;
    ep_size     = local_comm->ep_comm_ptr->size_rank_info[0].second;
    num_ep      = local_comm->ep_comm_ptr->size_rank_info[1].second;
    mpi_size    = local_comm->ep_comm_ptr->size_rank_info[2].second;

    //////////////////////////////////////////////////////////////////////////////////////////////////////////////////
    // step 1 : local leaders exchange ep_size, leader_rank_in_peer, leader_rank_in_peer_mpi, leader_rank_in_world. //
    //          local leaders bcast results to all ep in local_comm                                                 //
    //////////////////////////////////////////////////////////////////////////////////////////////////////////////////

    bool is_local_leader = (ep_rank == local_leader);

| 71 | |
---|
| 72 | int local_leader_rank_in_peer; |
---|
| 73 | int local_leader_rank_in_peer_mpi; |
---|
| 74 | int local_leader_rank_in_world; |
---|
| 75 | |
---|
| 76 | int remote_ep_size; |
---|
| 77 | int remote_leader_rank_in_peer; |
---|
| 78 | int remote_leader_rank_in_peer_mpi; |
---|
| 79 | int remote_leader_rank_in_world; |
---|
| 80 | |
---|
| 81 | int send_quadruple[4]; |
---|
| 82 | int recv_quadruple[4]; |
---|
| 83 | |
---|
| 84 | |
---|
| 85 | if(is_local_leader) |
---|
| 86 | { |
---|
| 87 | MPI_Comm_rank(peer_comm, &local_leader_rank_in_peer); |
---|
| 88 | ::MPI_Comm_rank(to_mpi_comm(peer_comm->mpi_comm), &local_leader_rank_in_peer_mpi); |
---|
| 89 | ::MPI_Comm_rank(to_mpi_comm(MPI_COMM_WORLD->mpi_comm), &local_leader_rank_in_world); |
---|
| 90 | |
---|
| 91 | send_quadruple[0] = ep_size; |
---|
| 92 | send_quadruple[1] = local_leader_rank_in_peer; |
---|
| 93 | send_quadruple[2] = local_leader_rank_in_peer_mpi; |
---|
| 94 | send_quadruple[3] = local_leader_rank_in_world; |
---|
| 95 | |
---|
| 96 | MPI_Status status; |
---|
| 97 | MPI_Sendrecv(send_quadruple, 4, MPI_INT, remote_leader, tag, recv_quadruple, 4, MPI_INT, remote_leader, tag, peer_comm, &status); |
---|
| 98 | |
---|
| 99 | remote_ep_size = recv_quadruple[0]; |
---|
| 100 | remote_leader_rank_in_peer = recv_quadruple[1]; |
---|
| 101 | remote_leader_rank_in_peer_mpi = recv_quadruple[2]; |
---|
| 102 | remote_leader_rank_in_world = recv_quadruple[3]; |
---|
| 103 | |
---|
| 104 | } |
---|
| 105 | |
---|
| 106 | MPI_Bcast(send_quadruple, 4, MPI_INT, local_leader, local_comm); |
---|
| 107 | MPI_Bcast(recv_quadruple, 4, MPI_INT, local_leader, local_comm); |
---|
| 108 | |
---|
| 109 | if(!is_local_leader) |
---|
| 110 | { |
---|
| 111 | local_leader_rank_in_peer = send_quadruple[1]; |
---|
| 112 | local_leader_rank_in_peer_mpi = send_quadruple[2]; |
---|
| 113 | local_leader_rank_in_world = send_quadruple[3]; |
---|
| 114 | |
---|
| 115 | remote_ep_size = recv_quadruple[0]; |
---|
| 116 | remote_leader_rank_in_peer = recv_quadruple[1]; |
---|
| 117 | remote_leader_rank_in_peer_mpi = recv_quadruple[2]; |
---|
| 118 | remote_leader_rank_in_world = recv_quadruple[3]; |
---|
| 119 | } |
---|
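    // From here on, every endpoint in local_comm knows the remote side's
    // endpoint count and the peer/MPI/world ranks of both leaders.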

    ///////////////////////////////////////////////////////////////////
    // step 2 : gather ranks in world for both local and remote comm //
    ///////////////////////////////////////////////////////////////////

    int rank_in_world;
    ::MPI_Comm_rank(to_mpi_comm(MPI_COMM_WORLD->mpi_comm), &rank_in_world);

    int *ranks_in_world_local  = new int[ep_size];
    int *ranks_in_world_remote = new int[remote_ep_size];

    MPI_Allgather(&rank_in_world, 1, MPI_INT, ranks_in_world_local, 1, MPI_INT, local_comm);

    if(is_local_leader)
    {
      MPI_Status status;
      MPI_Sendrecv(ranks_in_world_local, ep_size, MPI_INT, remote_leader, tag, ranks_in_world_remote, remote_ep_size, MPI_INT, remote_leader, tag, peer_comm, &status);
    }

    MPI_Bcast(ranks_in_world_remote, remote_ep_size, MPI_INT, local_leader, local_comm);
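    // ranks_in_world_local[i] / ranks_in_world_remote[i] now give, for each
    // endpoint i of either communicator, the world rank of the MPI process
    // hosting it.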

    //////////////////////////////////////////////////////////////
    // step 3 : determine the priority and ownership of each ep //
    //////////////////////////////////////////////////////////////

    bool priority = local_leader_rank_in_peer < remote_leader_rank_in_peer;

    int ownership = 1;
    if(rank_in_world == remote_leader_rank_in_world) ownership = 0;
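    // ownership == 1 means this MPI process creates the endpoint handles of
    // the new intercommunicator itself; the process hosting the remote leader
    // defers (ownership == 0), presumably so that a process that appears on
    // both sides creates its handles only once.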

    //////////////////////////////////////////////////////
    // step 4 : extract local_comm and create intercomm //
    //////////////////////////////////////////////////////

    bool is_involved = is_local_leader || (ep_rank_loc == 0 && rank_in_world != local_leader_rank_in_world);
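    // The MPI-level work below is done by one representative endpoint per MPI
    // process: the local leader, plus the first endpoint (ep_rank_loc == 0)
    // of every process other than the leader's.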

    if(is_involved)
    {
      ::MPI_Group local_group;
      ::MPI_Group extracted_group;
      ::MPI_Comm extracted_comm;

      ::MPI_Comm_group(to_mpi_comm(local_comm->mpi_comm), &local_group);

      int *ownership_list = new int[mpi_size];
      int *mpi_rank_list  = new int[mpi_size];

      ::MPI_Allgather(&ownership, 1, to_mpi_type(MPI_INT), ownership_list, 1, to_mpi_type(MPI_INT), to_mpi_comm(local_comm->mpi_comm));
      ::MPI_Allgather(&mpi_rank,  1, to_mpi_type(MPI_INT), mpi_rank_list,  1, to_mpi_type(MPI_INT), to_mpi_comm(local_comm->mpi_comm));

      int n = 0;
      for(int i=0; i<mpi_size; i++)
      {
        n += ownership_list[i];
      }

      int *new_mpi_rank_list = new int[n];
      int j = 0;
      for(int i=0; i<mpi_size; i++)
      {
        if(ownership_list[i] != 0)
        {
          new_mpi_rank_list[j++] = mpi_rank_list[i];
        }
      }

      ::MPI_Group_incl(local_group, n, new_mpi_rank_list, &extracted_group);

      ::MPI_Comm_create(to_mpi_comm(local_comm->mpi_comm), extracted_group, &extracted_comm);

      ::MPI_Comm mpi_inter_comm;

      int local_leader_rank_in_extracted_comm;

      if(is_local_leader)
      {
        ::MPI_Comm_rank(extracted_comm, &local_leader_rank_in_extracted_comm);
      }

      ::MPI_Bcast(&local_leader_rank_in_extracted_comm, 1, to_mpi_type(MPI_INT), local_comm->ep_rank_map->at(local_leader).second, to_mpi_comm(local_comm->mpi_comm));

      ::MPI_Comm *intracomm = new ::MPI_Comm;
      bool is_real_involved = ownership && extracted_comm != to_mpi_comm(MPI_COMM_NULL->mpi_comm);

      if(is_real_involved)
      {
        #pragma omp critical (_mpi_call)
        ::MPI_Intercomm_create(extracted_comm, local_leader_rank_in_extracted_comm, to_mpi_comm(peer_comm->mpi_comm), remote_leader_rank_in_peer_mpi, tag, &mpi_inter_comm);
        ::MPI_Intercomm_merge(mpi_inter_comm, !priority, intracomm);
      }
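      // The order of the two groups in the merged intracomm is controlled by
      // the "high" flag of MPI_Intercomm_merge: the side whose leader has the
      // lower rank in peer_comm (priority == true) passes high = false and is
      // placed first.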

      ////////////////////////////////////
      // step 5 : determine new num_ep  //
      ////////////////////////////////////

      int num_ep_count = 0;

      for(int i=0; i<ep_size; i++)
      {
        if(rank_in_world == ranks_in_world_local[i])
          num_ep_count++;
      }

      for(int i=0; i<remote_ep_size; i++)
      {
        if(rank_in_world == ranks_in_world_remote[i])
          num_ep_count++;
      }
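      // num_ep_count is the number of endpoints this MPI process hosts in the
      // new intercommunicator: its endpoints in local_comm plus any endpoints
      // of the remote communicator living in the same process.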

      /////////////////////////////////////////////////////////
      // step 6 : create endpoints from the merged intracomm //
      /////////////////////////////////////////////////////////

      if(is_real_involved)
      {
        MPI_Comm *ep_comm;
        MPI_Info info;
        MPI_Comm_create_endpoints(intracomm, num_ep_count, info, ep_comm);

        #pragma omp critical (write_to_tag_list)
        intercomm_list.push_back(make_pair(make_pair(tag, min(local_leader_rank_in_world, remote_leader_rank_in_world)), make_pair(ep_comm, make_pair(num_ep_count, 0))));
        #pragma omp flush

        ::MPI_Comm_free(intracomm);
        delete intracomm;
      }
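      // intercomm_list is a registry shared by the threads of this MPI
      // process: entries are keyed by (tag, min of the two leaders' world
      // ranks) and carry the endpoint handle array together with a
      // (total, consumed) counter pair.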

      delete[] ownership_list;
      delete[] mpi_rank_list;
      delete[] new_mpi_rank_list;
    }

    int repeated = 0;
    for(int i=0; i<remote_ep_size; i++)
    {
      if(rank_in_world == ranks_in_world_remote[i])
        repeated++;
    }

    int new_ep_rank_loc = ownership==1 ? ep_rank_loc : ep_rank_loc + repeated;
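    // When this process did not create the handles itself (ownership == 0),
    // the remote communicator's endpoints presumably occupy the first
    // `repeated` slots of the handle array, so the local endpoints are
    // shifted by that amount.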

    MPI_Barrier_local(local_comm);

    #pragma omp flush
    #pragma omp critical (read_from_intercomm_list)
    {
      bool flag = true;
      while(flag)
      {
        for(std::list<std::pair<std::pair<int, int>, std::pair<MPI_Comm*, std::pair<int, int> > > >::iterator iter = intercomm_list.begin(); iter != intercomm_list.end(); iter++)
        {
          if(iter->first == make_pair(tag, min(local_leader_rank_in_world, remote_leader_rank_in_world)))
          {
            *newintercomm = iter->second.first[new_ep_rank_loc];

            iter->second.second.second++;

            if(iter->second.second.first == iter->second.second.second)
              intercomm_list.erase(iter);

            flag = false;
            break;
          }
        }
      }
    }
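    // Each endpoint picks its own handle out of the registered array; the
    // last consumer (consumed == total) removes the entry from the list.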

    ///////////////////////////////////////////////////////
    // step 7 : create inter_rank_map of the intercomm   //
    ///////////////////////////////////////////////////////

    (*newintercomm)->is_intercomm = true;

    (*newintercomm)->inter_rank_map = new INTER_RANK_MAP;

    int rank_info[2];
    rank_info[0] = ep_rank;
    rank_info[1] = (*newintercomm)->ep_comm_ptr->size_rank_info[0].first;

    int *local_rank_info  = new int[2*ep_size];
    int *remote_rank_info = new int[2*remote_ep_size];

    MPI_Allgather(rank_info, 2, MPI_INT, local_rank_info, 2, MPI_INT, local_comm);

    if(is_local_leader)
    {
      MPI_Status status;
      MPI_Sendrecv(local_rank_info, 2*ep_size, MPI_INT, remote_leader, tag, remote_rank_info, 2*remote_ep_size, MPI_INT, remote_leader, tag, peer_comm, &status);
    }

    MPI_Bcast(remote_rank_info, 2*remote_ep_size, MPI_INT, local_leader, local_comm);

    for(int i=0; i<remote_ep_size; i++)
    {
      (*newintercomm)->inter_rank_map->insert(make_pair(remote_rank_info[2*i], remote_rank_info[2*i+1]));
    }
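    // inter_rank_map maps each remote endpoint's rank in its own communicator
    // to that endpoint's rank in the new intercommunicator.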

    (*newintercomm)->ep_comm_ptr->size_rank_info[0] = local_comm->ep_comm_ptr->size_rank_info[0];

    delete[] local_rank_info;
    delete[] remote_rank_info;
    delete[] ranks_in_world_local;
    delete[] ranks_in_world_remote;

    return MPI_SUCCESS;
  }


  int MPI_Intercomm_create_mpi(MPI_Comm local_comm, int local_leader, MPI_Comm peer_comm, int remote_leader, int tag, MPI_Comm *newintercomm)
  {
    printf("MPI_Intercomm_create_mpi not yet implemented\n");
    MPI_Abort(local_comm, 0);
    return 0; // not reached: MPI_Abort does not return
  }

}
#endif