| Line | Branch | Exec | Source |
|---|---|---|---|
| 1 | #include "baldin_a_my_scatter/mpi/include/ops_mpi.hpp" | ||
| 2 | |||
| 3 | #include <mpi.h> | ||
| 4 | |||
| 5 | #include <algorithm> | ||
| 6 | #include <cstddef> | ||
| 7 | #include <cstring> | ||
| 8 | #include <vector> | ||
| 9 | |||
| 10 | #include "baldin_a_my_scatter/common/include/common.hpp" | ||
| 11 | |||
| 12 | namespace baldin_a_my_scatter { | ||
| 13 | |||
| 14 | 38 | BaldinAMyScatterMPI::BaldinAMyScatterMPI(const InType &in) { | |
| 15 | SetTypeOfTask(GetStaticTypeOfTask()); | ||
| 16 | GetInput() = in; | ||
| 17 | 38 | } | |
| 18 | |||
| 19 | 38 | bool BaldinAMyScatterMPI::ValidationImpl() { | |
| 20 | const auto &[sendbuf, sendcount, sendtype, recvbuf, recvcount, recvtype, root, comm] = GetInput(); | ||
| 21 | |||
| 22 | 3/6 | 38 | if (sendcount <= 0 || sendcount != recvcount || root < 0) { |
| 23 | return false; | ||
| 24 | } | ||
| 25 | 1/2 | 38 | if (sendtype != recvtype) { |
| 26 | return false; | ||
| 27 | } | ||
| 28 | |||
| 29 | auto is_sup_type = [](MPI_Datatype type) -> bool { | ||
| 30 | 5/6 | 38 | return (type == MPI_INT || type == MPI_FLOAT || type == MPI_DOUBLE); |
| 31 | }; | ||
| 32 | |||
| 33 | return is_sup_type(sendtype); | ||
| 34 | } | ||
| 35 | |||
| 36 | 38 | bool BaldinAMyScatterMPI::PreProcessingImpl() { | |
| 37 | 38 | int root = std::get<6>(GetInput()); | |
| 38 | |||
| 39 | 38 | int world_size = 0; | |
| 40 | 38 | MPI_Comm_size(std::get<7>(GetInput()), &world_size); | |
| 41 | |||
| 42 | // if root is out of range, wrap it back into the communicator | ||
| 43 | 2/2 | 38 | if (root >= world_size) { |
| 44 | 14 | std::get<6>(GetInput()) = root % world_size; | |
| 45 | } | ||
| 46 | |||
| 47 | 38 | return true; | |
| 48 | } | ||
| 49 | |||
| 50 | namespace { | ||
| 51 | |||
| 52 | MPI_Aint GetDataTypeExtent(MPI_Datatype type) { | ||
| 53 | 38 | MPI_Aint lb = 0; | |
| 54 | 38 | MPI_Aint extent = 0; | |
| 55 | 38 | MPI_Type_get_extent(type, &lb, &extent); | |
| 56 | 38 | return extent; | |
| 57 | } | ||
| 58 | |||
| 59 | int VirtualToRealRank(int v_rank, int root, int size) { | ||
| 60 | 38 | return (v_rank + root) % size; | |
| 61 | } | ||
| 62 | |||
| 63 | int CalculateSubtreeSize(int v_dest, int mask, int size) { | ||
| 64 | 1/2 | 19 | return std::min(v_dest + mask, size); |
| 65 | } | ||
| 66 | |||
| 67 | int CalculateInitialMask(int size) { | ||
| 68 | int mask = 1; | ||
| 69 | 2/2 | 76 | while (mask < size) { |
| 70 | 38 | mask <<= 1; | |
| 71 | } | ||
| 72 | 38 | return mask >> 1; | |
| 73 | } | ||
| 74 | |||
| 75 | 19 | void PrepareRootBuffer(const void *sendbuf, int size, int root, int count, MPI_Aint extent, std::vector<char> &buffer) { | |
| 76 | 19 | size_t total_bytes = static_cast<size_t>(size) * count * extent; | |
| 77 | 19 | size_t chunk_bytes = static_cast<size_t>(count) * extent; | |
| 78 | |||
| 79 | 19 | buffer.resize(total_bytes); | |
| 80 | |||
| 81 | const char *send_ptr = static_cast<const char *>(sendbuf); | ||
| 82 | char *tmp_ptr = buffer.data(); | ||
| 83 | |||
| 84 | // Shift logic: [root...end] -> beginning, [0...root] -> end | ||
| 85 | 19 | size_t first_part_bytes = (size - root) * chunk_bytes; | |
| 86 | 19 | size_t second_part_bytes = root * chunk_bytes; | |
| 87 | |||
| 88 | 19 | std::memcpy(tmp_ptr, send_ptr + second_part_bytes, first_part_bytes); | |
| 89 | 19 | std::memcpy(tmp_ptr + first_part_bytes, send_ptr, second_part_bytes); | |
| 90 | 19 | } | |
| 91 | |||
| 92 | } // namespace | ||
| 93 | |||
| 94 | 38 | bool BaldinAMyScatterMPI::RunImpl() { | |
| 95 | auto &input = GetInput(); | ||
| 96 | const auto &[sendbuf, sendcount, sendtype, recvbuf, recvcount, recvtype, root, comm] = input; | ||
| 97 | |||
| 98 | 38 | int rank = 0; | |
| 99 | 38 | int size = 0; | |
| 100 | 38 | MPI_Comm_rank(comm, &rank); | |
| 101 | 38 | MPI_Comm_size(comm, &size); | |
| 102 | |||
| 103 | 2/2 | 38 | MPI_Aint extent = GetDataTypeExtent(rank == root ? sendtype : recvtype); |
| 104 | |||
| 105 | 38 | std::vector<char> temp_buffer; | |
| 106 | const char *curr_buf_ptr = nullptr; | ||
| 107 | |||
| 108 | // --- STAGE 1: Preparation (root only) --- | ||
| 109 | 2/2 | 38 | if (rank == root) { |
| 110 | 1/2 | 19 | PrepareRootBuffer(sendbuf, size, root, sendcount, extent, temp_buffer); |
| 111 | curr_buf_ptr = temp_buffer.data(); | ||
| 112 | } | ||
| 113 | |||
| 114 | 38 | int v_rank = (rank - root + size) % size; | |
| 115 | int mask = CalculateInitialMask(size); | ||
| 116 | |||
| 117 | // --- STAGE 2: Tree-based distribution (see the sketch after the table) --- | ||
| 118 | 2/2 | 76 | while (mask > 0) { |
| 119 | // If this process is a sender at this level | ||
| 120 | 2/2 | 38 | if (v_rank % (2 * mask) == 0) { |
| 121 | 19 | int v_dest = v_rank + mask; | |
| 122 | |||
| 123 | 1/2 | 19 | if (v_dest < size) { |
| 124 | int subtree_size = CalculateSubtreeSize(v_dest, mask, size); | ||
| 125 | 19 | int count_to_send = (subtree_size - v_dest) * recvcount; | |
| 126 | |||
| 127 | 19 | size_t offset_bytes = static_cast<size_t>(v_dest - v_rank) * recvcount * extent; | |
| 128 | 19 | int real_dest = VirtualToRealRank(v_dest, root, size); | |
| 129 | |||
| 130 | 2/4 | 19 | MPI_Send(curr_buf_ptr + offset_bytes, count_to_send, (rank == root ? sendtype : recvtype), real_dest, 0, comm); |
| 131 | } | ||
| 132 | } | ||
| 133 | |||
| 134 | // If this process is a receiver | ||
| 135 | 1/2 | 19 | else if (v_rank % (2 * mask) == mask) { |
| 136 | 19 | int v_source = v_rank - mask; | |
| 137 | 1/2 | 19 | int real_source = VirtualToRealRank(v_source, root, size); |
| 138 | |||
| 139 | int subtree_end = CalculateSubtreeSize(v_rank, mask, size); | ||
| 140 | 19 | int count_to_recv = (subtree_end - v_rank) * recvcount; | |
| 141 | |||
| 142 | 19 | size_t bytes_to_recv = static_cast<size_t>(count_to_recv) * extent; | |
| 143 | 1/2 | 19 | temp_buffer.resize(bytes_to_recv); |
| 144 | |||
| 145 | 1/2 | 19 | MPI_Recv(temp_buffer.data(), count_to_recv, recvtype, real_source, 0, comm, MPI_STATUS_IGNORE); |
| 146 | |||
| 147 | // From here on, work with the received buffer | ||
| 148 | curr_buf_ptr = temp_buffer.data(); | ||
| 149 | } | ||
| 150 | |||
| 151 | 38 | mask >>= 1; | |
| 152 | } | ||
| 153 | |||
| 154 | // --- STAGE 3: Copy into the user buffer --- | ||
| 155 | 2/4 | 38 | if (recvbuf != MPI_IN_PLACE && curr_buf_ptr != nullptr) { |
| 156 | // Copy only this process's own share (recvcount) | ||
| 157 | 38 | std::memcpy(recvbuf, curr_buf_ptr, recvcount * extent); | |
| 158 | } | ||
| 159 | 1/2 | 38 | GetOutput() = recvbuf; |
| 160 | 38 | return true; | |
| 161 | } | ||
| 162 | |||
| 163 | 38 | bool BaldinAMyScatterMPI::PostProcessingImpl() { | |
| 164 | 38 | return true; | |
| 165 | } | ||
| 166 | |||
| 167 | } // namespace baldin_a_my_scatter | ||
| 168 |
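
The two `memcpy` calls in `PrepareRootBuffer` rotate the root's send buffer so that the chunk owned by `root` comes first, i.e. the buffer becomes ordered by virtual rank. Below is a minimal standalone sketch of that rotation using `int` chunks instead of raw MPI datatypes; `size = 4`, `root = 2`, and `count = 2` are assumptions chosen purely for illustration.

```cpp
#include <cstdio>
#include <cstring>
#include <vector>

int main() {
  const int size = 4;   // assumed number of processes
  const int root = 2;   // assumed root rank
  const int count = 2;  // elements per process
  const std::vector<int> sendbuf = {0, 0, 1, 1, 2, 2, 3, 3};  // chunk i belongs to rank i

  std::vector<int> rotated(sendbuf.size());
  const size_t chunk = count;                       // elements per chunk
  const size_t first_part = (size - root) * chunk;  // chunks [root, size)
  const size_t second_part = root * chunk;          // chunks [0, root)

  // [root...end] -> front, [0...root) -> back, mirroring the two memcpy calls above.
  std::memcpy(rotated.data(), sendbuf.data() + second_part, first_part * sizeof(int));
  std::memcpy(rotated.data() + first_part, sendbuf.data(), second_part * sizeof(int));

  for (int v : rotated) {
    std::printf("%d ", v);  // prints: 2 2 3 3 0 0 1 1
  }
  std::printf("\n");
  return 0;
}
```

After the rotation, the chunk at virtual position `v` belongs to real rank `(v + root) % size`, which is exactly what `VirtualToRealRank` computes during the send phase.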
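
The `while (mask > 0)` loop in `RunImpl` pairs senders and receivers by halving a mask that starts at the highest power of two strictly below the communicator size; each send forwards the whole subtree rooted at `v_dest = v_rank + mask`. The following MPI-free rehearsal of that index arithmetic prints the resulting send schedule; `size = 5` and `root = 2` are assumptions chosen only to make the schedule visible.

```cpp
#include <algorithm>
#include <cstdio>

int main() {
  const int size = 5;  // assumed communicator size for the walkthrough
  const int root = 2;  // assumed root rank

  // CalculateInitialMask: smallest power of two >= size, shifted right once.
  int mask = 1;
  while (mask < size) {
    mask <<= 1;
  }
  mask >>= 1;  // size = 5 -> mask = 4

  for (int rank = 0; rank < size; ++rank) {
    const int v_rank = (rank - root + size) % size;  // rotate so that root becomes virtual rank 0
    for (int m = mask; m > 0; m >>= 1) {
      if (v_rank % (2 * m) == 0) {
        const int v_dest = v_rank + m;
        if (v_dest < size) {
          const int subtree_end = std::min(v_dest + m, size);   // CalculateSubtreeSize
          const int chunks = subtree_end - v_dest;               // blocks forwarded in one message
          const int real_dest = (v_dest + root) % size;          // VirtualToRealRank
          std::printf("rank %d (v%d) sends %d chunk(s) to rank %d (v%d) at mask %d\n",
                      rank, v_rank, chunks, real_dest, v_dest, m);
        }
      }
    }
  }
  return 0;
}
```

For these values the root (rank 2) sends one chunk to rank 1, two chunks to rank 4, and one chunk to rank 3, after which rank 4 forwards one chunk to rank 0: the log(p)-depth binomial tree the implementation relies on.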
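
In terms of observable results, the task is intended to match the contract of the standard collective scatter: the root's buffer is split into equal blocks and block *i* ends up in rank *i*'s receive buffer. For reference, the equivalent call through `MPI_Scatter` is sketched below; the counts and datatype are assumptions for illustration, and a test could compare this output against the custom implementation.

```cpp
#include <mpi.h>

#include <cstdio>
#include <vector>

int main(int argc, char **argv) {
  MPI_Init(&argc, &argv);

  int rank = 0;
  int size = 0;
  MPI_Comm_rank(MPI_COMM_WORLD, &rank);
  MPI_Comm_size(MPI_COMM_WORLD, &size);

  const int root = 0;
  const int count = 4;  // elements per process (assumed)
  std::vector<int> sendbuf;
  if (rank == root) {
    // Only the root needs a populated send buffer.
    sendbuf.resize(static_cast<size_t>(size) * count);
    for (size_t i = 0; i < sendbuf.size(); ++i) {
      sendbuf[i] = static_cast<int>(i);
    }
  }

  std::vector<int> recvbuf(count, 0);
  MPI_Scatter(sendbuf.data(), count, MPI_INT, recvbuf.data(), count, MPI_INT, root, MPI_COMM_WORLD);

  std::printf("rank %d got first element %d\n", rank, recvbuf[0]);

  MPI_Finalize();
  return 0;
}
```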