GCC Code Coverage Report


Directory: ./
File: tasks/kondrashova_v_sum_col_mat/mpi/src/ops_mpi.cpp
Date: 2026-01-27 01:59:34
                Exec   Total   Coverage
Lines:             0      73       0.0%
Functions:         0       7       0.0%
Branches:          0      48       0.0%

Line Branch Exec Source
1 #include "kondrashova_v_sum_col_mat/mpi/include/ops_mpi.hpp"
2
3 #include <mpi.h>
4
5 #include <algorithm>
6 #include <cstddef>
7 #include <limits>
8 #include <vector>
9
10 #include "kondrashova_v_sum_col_mat/common/include/common.hpp"
11
12 namespace kondrashova_v_sum_col_mat {
13
14 KondrashovaVSumColMatMPI::KondrashovaVSumColMatMPI(const InType &in) {
15 SetTypeOfTask(GetStaticTypeOfTask());
16 GetInput() = in;
17 GetOutput().clear();
18 }
19
20 bool KondrashovaVSumColMatMPI::ValidationImpl() {
21 int rank = 0;
22 MPI_Comm_rank(MPI_COMM_WORLD, &rank);
23
24 int is_valid = 0;
25
26 if (rank == 0) {
27 is_valid = static_cast<int>(
28 (GetInput().size() >= 2) && GetOutput().empty() && (GetInput()[0] > 0) && (GetInput()[1] > 0) &&
29 (GetInput()[0] <= std::numeric_limits<int>::max() / GetInput()[1]) &&
30 (GetInput().size() == (2 + (static_cast<size_t>(GetInput()[0]) * static_cast<size_t>(GetInput()[1])))));
31 }
32
33 MPI_Bcast(&is_valid, 1, MPI_INT, 0, MPI_COMM_WORLD);
34
35 return static_cast<bool>(is_valid);
36 }
37
38 bool KondrashovaVSumColMatMPI::PreProcessingImpl() {
39 int rank = 0;
40 MPI_Comm_rank(MPI_COMM_WORLD, &rank);
41
42 if (rank == 0) {
43 rows_ = GetInput()[0];
44 cols_ = GetInput()[1];
45 }
46
47 MPI_Bcast(&rows_, 1, MPI_INT, 0, MPI_COMM_WORLD);
48 MPI_Bcast(&cols_, 1, MPI_INT, 0, MPI_COMM_WORLD);
49
50 if (rank == 0) {
51 GetOutput().assign(static_cast<size_t>(cols_), 0);
52 }
53
54 return true;
55 }
56
57 namespace {
58
59 void ComputeLocalCols(int rank, int ost, int cols_on_proc, int &first_col, int &end_col) {
60 if (rank < ost) {
61 first_col = rank * (cols_on_proc + 1);
62 end_col = first_col + (cols_on_proc + 1);
63 } else {
64 first_col = (ost * (cols_on_proc + 1)) + ((rank - ost) * cols_on_proc);
65 end_col = first_col + cols_on_proc;
66 }
67 }
68
69 void ComputeLocalSums(const std::vector<int> &matrix, std::vector<int> &local_sums, int rows, int cols, int first_col) {
70 const int local_cols = static_cast<int>(local_sums.size());
71 for (int col = 0; col < local_cols; col++) {
72 const int global_col = first_col + col;
73 for (int row = 0; row < rows; row++) {
74 local_sums[col] += matrix[(row * cols) + global_col];
75 }
76 }
77 }
78
79 void GatherSums(std::vector<int> &local_sums, int first_col, int local_cols, int rank, int size,
80 std::vector<int> &output) {
81 if (rank == 0) {
82 std::ranges::copy(local_sums, output.begin() + first_col);
83
84 for (int src = 1; src < size; src++) {
85 int src_first = 0;
86 int src_local = 0;
87
88 MPI_Recv(&src_first, 1, MPI_INT, src, 1, MPI_COMM_WORLD, MPI_STATUS_IGNORE);
89 MPI_Recv(&src_local, 1, MPI_INT, src, 2, MPI_COMM_WORLD, MPI_STATUS_IGNORE);
90
91 MPI_Recv(output.data() + src_first, src_local, MPI_INT, src, 3, MPI_COMM_WORLD, MPI_STATUS_IGNORE);
92 }
93
94 } else {
95 MPI_Send(&first_col, 1, MPI_INT, 0, 1, MPI_COMM_WORLD);
96 MPI_Send(&local_cols, 1, MPI_INT, 0, 2, MPI_COMM_WORLD);
97
98 MPI_Send(local_sums.data(), local_cols, MPI_INT, 0, 3, MPI_COMM_WORLD);
99 }
100 }
101
102 } // namespace
103
104 bool KondrashovaVSumColMatMPI::RunImpl() {
105 int rank = 0;
106 int size = 0;
107 MPI_Comm_rank(MPI_COMM_WORLD, &rank);
108 MPI_Comm_size(MPI_COMM_WORLD, &size);
109
110 const int cols_on_proc = cols_ / size;
111 const int ost = cols_ % size;
112
113 int first_col = 0;
114 int end_col = 0;
115 ComputeLocalCols(rank, ost, cols_on_proc, first_col, end_col);
116
117 const int local_cols = end_col - first_col;
118
119 std::vector<int> matrix;
120 if (rank == 0) {
121 matrix.assign(GetInput().begin() + 2, GetInput().end());
122 } else {
123 matrix.resize(static_cast<size_t>(rows_) * static_cast<size_t>(cols_));
124 }
125
126 MPI_Bcast(matrix.data(), rows_ * cols_, MPI_INT, 0, MPI_COMM_WORLD);
127
128 std::vector<int> local_sums(static_cast<size_t>(local_cols), 0);
129 ComputeLocalSums(matrix, local_sums, rows_, cols_, first_col);
130 GatherSums(local_sums, first_col, local_cols, rank, size, GetOutput());
131
132 return true;
133 }
134
135 bool KondrashovaVSumColMatMPI::PostProcessingImpl() {
136 int rank = 0;
137 MPI_Comm_rank(MPI_COMM_WORLD, &rank);
138
139 if (rank == 0) {
140 return !GetOutput().empty();
141 }
142 return true;
143 }
144
145 } // namespace kondrashova_v_sum_col_mat
146
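Note: the listing above was never executed (0.0% line coverage), so as a quick sanity reference the following standalone sketch computes the same result serially, assuming the flattened input layout implied by the source (in[0] = rows, in[1] = cols, then rows*cols values in row-major order starting at offset 2). The name ColumnSumsReference is illustrative only and is not part of the task's API.

// Hypothetical serial reference for the column-sum task; assumes the
// flattened input layout used by KondrashovaVSumColMatMPI.
#include <cstddef>
#include <iostream>
#include <vector>

std::vector<int> ColumnSumsReference(const std::vector<int> &in) {
  const int rows = in[0];
  const int cols = in[1];
  std::vector<int> sums(static_cast<std::size_t>(cols), 0);
  for (int row = 0; row < rows; ++row) {
    for (int col = 0; col < cols; ++col) {
      // Matrix data starts at offset 2 and is stored row-major.
      sums[static_cast<std::size_t>(col)] +=
          in[2 + (static_cast<std::size_t>(row) * cols) + col];
    }
  }
  return sums;
}

int main() {
  // 2x3 matrix {1 2 3; 4 5 6} -> column sums 5 7 9.
  const std::vector<int> input = {2, 3, 1, 2, 3, 4, 5, 6};
  for (int s : ColumnSumsReference(input)) {
    std::cout << s << ' ';
  }
  std::cout << '\n';
}

The MPI implementation should produce the same output vector on rank 0 for any input that passes ValidationImpl; ranks that own zero columns (more processes than columns) contribute nothing to the gather.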