#include <iostream>
#include <stdlib.h>
#include <mpi.h>
#include <time.h>

using std::cout;
using std::endl;

// Hand-rolled equivalent of MPI_Gather for int data: every non-root rank sends
// its chunk to root, and root copies its own chunk plus the received ones into
// recv_data in rank order. send_datatype, recv_datatype and communicator are
// accepted for signature compatibility but the body assumes MPI_INT and
// MPI_COMM_WORLD. (This helper is defined but never called from main.)
int mpi_Gather(int* send_data, int send_count, MPI_Datatype send_datatype, int* recv_data, int recv_count, MPI_Datatype recv_datatype, int root, MPI_Comm communicator)
{
    int myid, numprocs;
    int* tmp = new int[recv_count];
    MPI_Status status;
    MPI_Comm_rank(MPI_COMM_WORLD, &myid);
    MPI_Comm_size(MPI_COMM_WORLD, &numprocs);

    int index = 0;
    if (myid != root)
    {
        MPI_Send(send_data, send_count, MPI_INT, root, 99, MPI_COMM_WORLD);
    }
    else
    {
        // Root's own contribution goes first.
        for (int i = 0; i < send_count; i++)
        {
            recv_data[index++] = send_data[i];
        }
        // Then one chunk from every other rank, in rank order.
        for (int i = 1; i < numprocs; i++)
        {
            MPI_Recv(tmp, recv_count, MPI_INT, i, 99, MPI_COMM_WORLD, &status);
            // Index into tmp instead of advancing the pointer itself, so tmp
            // still points at the start of the buffer for the next MPI_Recv
            // and can be freed afterwards.
            for (int j = 0; j < recv_count; j++)
            {
                recv_data[index++] = tmp[j];
            }
        }
    }
    delete[] tmp;
    return 0;
}
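
// Illustration only (an added sketch, not part of the original paste): how the
// hand-rolled mpi_Gather above might be called, assuming each rank contributes
// CHUNK ints and rank 0 collects them. example_gather and CHUNK are
// hypothetical names; main below does not use this helper.
void example_gather(int rank, int numprocs)
{
    const int CHUNK = 4;
    int local[CHUNK];
    for (int i = 0; i < CHUNK; i++) {
        local[i] = rank * CHUNK + i;               // each rank fills its own chunk
    }
    // Only the root needs a receive buffer large enough for all chunks.
    int* all = (rank == 0) ? new int[CHUNK * numprocs] : NULL;
    mpi_Gather(local, CHUNK, MPI_INT, all, CHUNK, MPI_INT, 0, MPI_COMM_WORLD);
    if (rank == 0) {
        delete[] all;
    }
}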

int main(int argc, char **argv) {
    int size, rank;
    int rcvData1;
    int rcvData2;
    int *global_array = NULL;
    MPI_Status status;

    MPI_Init(&argc, &argv);
    MPI_Comm_size(MPI_COMM_WORLD, &size);
    MPI_Comm_rank(MPI_COMM_WORLD, &rank);
    srand(time(NULL));

    // Ranks 0..a-1 are the "A" processes that each hold a pair of values;
    // ranks a..a+b-1 are the "B" comparator processes between them.
    // Since b = a + 1, the program expects an odd number of ranks (size = 2a + 1).
    int a = (size - 1) / 2;
    int b = a + 1;
    int tableSize = a;
    // Initialization: rank 0 fills global_array with random values in [1, 100]
    // and sends one value to each of the other A processes.
    if (rank == 0) {
        global_array = new int[tableSize];

        for (int i = 0; i < tableSize; i++) {
            global_array[i] = rand() % 100 + 1;
            int liczba = global_array[i];
            if (i == 0) {
                continue;               // rank 0 keeps global_array[0] for itself
            }
            MPI_Send(&liczba, 1, MPI_INT, i, 99, MPI_COMM_WORLD);
        }
        rcvData1 = global_array[0];
        rcvData2 = global_array[0];
        cout << "Process A" << rank + 1 << " received: " << rcvData1 << " and: " << rcvData2 << endl;
    }

    // The other A processes receive their single value and duplicate it into the pair.
    if (rank < a && rank != 0)
    {
        MPI_Recv(&rcvData1, 1, MPI_INT, 0, 99, MPI_COMM_WORLD, &status);
        rcvData2 = rcvData1;
        cout << "Process A" << rank + 1 << " received: " << rcvData1 << " and: " << rcvData2 << endl;
    }
    // End of initialization

    // Main phase: for 2a rounds, every A process sends its pair to the two
    // neighbouring B comparators (smaller value to the left one), and every B
    // process sends the values back, again smaller to the lower-ranked A, so
    // small values migrate towards A process 0.
    for (int i = 0; i < a * 2; i++) {
        if (rank < a) {
            if (rcvData1 < rcvData2) {
                MPI_Send(&rcvData1, 1, MPI_INT, tableSize + rank, 99, MPI_COMM_WORLD);
                MPI_Send(&rcvData2, 1, MPI_INT, tableSize + rank + 1, 99, MPI_COMM_WORLD);
                cout << "sending to process B" << rank << " and to B" << rank + 1 << endl;
            }
            else {
                MPI_Send(&rcvData1, 1, MPI_INT, tableSize + rank + 1, 99, MPI_COMM_WORLD);
                MPI_Send(&rcvData2, 1, MPI_INT, tableSize + rank, 99, MPI_COMM_WORLD);
                cout << "sending to process B" << rank << " and to B" << rank + 1 << endl;
            }
            MPI_Recv(&rcvData1, 1, MPI_INT, tableSize + rank, 99, MPI_COMM_WORLD, &status);
            MPI_Recv(&rcvData2, 1, MPI_INT, tableSize + rank + 1, 99, MPI_COMM_WORLD, &status);
            cout << "In this iteration, process A" << rank + 1 << " received: " << rcvData1 << " and: " << rcvData2 << endl;
        }
        if (rank >= a)
        {
            // First B comparator: paired only with A process 0.
            if (rank == a)
            {
                MPI_Recv(&rcvData1, 1, MPI_INT, 0, 99, MPI_COMM_WORLD, &status);
                cout << "Process B" << rank - a << " received: " << rcvData1 << endl;
                MPI_Send(&rcvData1, 1, MPI_INT, 0, 99, MPI_COMM_WORLD);
            }
            // Last B comparator: paired only with A process a-1.
            else if (rank == a + b - 1)
            {
                MPI_Recv(&rcvData1, 1, MPI_INT, a - 1, 99, MPI_COMM_WORLD, &status);
                cout << "Process B" << rank - a << " received: " << rcvData1 << endl;
                MPI_Send(&rcvData1, 1, MPI_INT, a - 1, 99, MPI_COMM_WORLD);
            }
            // Middle B comparators: compare the values from the two adjacent A
            // processes and return the smaller one to the lower-ranked A.
            else
            {
                MPI_Recv(&rcvData1, 1, MPI_INT, rank - a - 1, 99, MPI_COMM_WORLD, &status);
                MPI_Recv(&rcvData2, 1, MPI_INT, rank - a, 99, MPI_COMM_WORLD, &status);
                cout << "Process B" << rank - a << " received: " << rcvData1 << " and: " << rcvData2 << endl;
                if (rcvData1 < rcvData2) {
                    MPI_Send(&rcvData1, 1, MPI_INT, rank - a - 1, 99, MPI_COMM_WORLD);
                    MPI_Send(&rcvData2, 1, MPI_INT, rank - a, 99, MPI_COMM_WORLD);
                }
                else {
                    MPI_Send(&rcvData1, 1, MPI_INT, rank - a, 99, MPI_COMM_WORLD);
                    MPI_Send(&rcvData2, 1, MPI_INT, rank - a - 1, 99, MPI_COMM_WORLD);
                }
            }
        }
    }
    // After the rounds, every A process other than rank 0 reports its final
    // rcvData1 back to rank 0, which reassembles the array and prints it.
    if (rank < a && rank != 0) {
        MPI_Send(&rcvData1, 1, MPI_INT, 0, 99, MPI_COMM_WORLD);
    }
    if (rank == 0) {
        for (int i = 1; i < a; i++) {
            MPI_Recv(&global_array[i], 1, MPI_INT, i, 99, MPI_COMM_WORLD, &status);
        }
        global_array[0] = rcvData1;
        cout << "Sorted table" << endl;
        for (int i = 0; i < tableSize; i++) {
            cout << global_array[i] << endl;
        }
        delete[] global_array;
    }

    MPI_Finalize();
    return 0;
}
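
Build/run note (an addition, assuming an MPI installation such as Open MPI or MPICH): compile with something like mpicxx sort.cpp -o sort and launch with mpirun -np 7 ./sort (the file name is hypothetical). Since a = (size - 1) / 2 and the B ranks occupy a..2a, the program expects an odd number of ranks; with 7 ranks it sorts a 3-element array.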