Getting undesired behavior when sending and receiving messages with MPI


I'm exploring MPI in C++ and wanted to parallelize the creation of a picture of the Mandelbrot set in the PPM format. Each worker process builds its part of the image and sends it back to the main process, which receives it as MPI_CHAR. This is the code:

#include "mpi.h"
#include <iostream>
#include <string>
#include <fstream>
#include <complex>

using namespace std;

int mandelbrot(int x, int y, int width, int height, int max)  {
    complex<float> point((float) (y - height/2.0) * 4.0/width, (float) (x - width/2.0) * 4.0/width);
    complex<float> z(0, 0);
    int iteration = 0;  // signed, to match the comparison with max and the return type

    while (abs(z) < 4 && iteration < max) {
           z = z * z + point;
           iteration++;
    }
    return iteration;
}

int main(int argc, char **argv) {
  int numprocs;
  int myid;
  const int buff_size = 404270; // fixed receive buffer, sized for a 200x200 image
  char buff[buff_size];
  int i;

  MPI_Status stat;

  MPI_Init(&argc,&argv);
  MPI_Comm_size(MPI_COMM_WORLD,&numprocs);
  MPI_Comm_rank(MPI_COMM_WORLD,&myid);

  int width = 200, height = 200, max_iter = 1000;

  if (myid == 0) {

    ofstream image("mandel.ppm");
    image << "P3\n" << width << " " << height << " 255\n";

    for(i=1; i < numprocs; i++) {
      MPI_Probe(i, 0, MPI_COMM_WORLD, &stat);
      int length;
      MPI_Get_count(&stat, MPI_CHAR, &length);
      MPI_Recv(buff, length, MPI_CHAR, i, 0, MPI_COMM_WORLD, MPI_STATUS_IGNORE);
      image << buff;
    }

  } else {

    stringstream ss;
    // worker ranks 1, 2, ..., numprocs-1 each render one horizontal band
    // (this assumes height is divisible by numprocs-1)
    int part = height/(numprocs-1), start = (myid - 1) * part, end = part * myid;
    printf("%d -> %d\n", start, end);

    for (int row = start; row < end; row++) {
        for (int col = 0; col < width; col++) {

            int iteration = mandelbrot(row, col, width, height, max_iter);

            if (row == start) ss << 255 << ' ' << 255 << ' ' << 255 << "\n";
            else if (iteration < max_iter) ss << iteration * 255 << ' ' << iteration * 20 << ' ' << iteration * 5 << "\n";
            else ss << 0 << ' ' << 0 << ' ' << 0 << "\n";
        }
    }

    printf("\n sizeof = %d\n", ss.str().length());
    MPI_Send(ss.str().c_str(), ss.str().length(), MPI_CHAR, 0, 0, MPI_COMM_WORLD);
  }

  MPI_Finalize();

  return 0;  
}

Code compilation:

$ mpic++ -std=c++0x mpi.mandel.cpp -o mpi.mandel

Running with 3 processes (the main process plus worker ranks 1 and 2):

$ mpirun -np 3 ./mpi.mandel

Resulting PPM pictures when running with 3, 4, and 5 processes:

[output images for 3, 4, and 5 processes]

It seems that the point-to-point send/receive communication mixes up the results when more than 3 processes send their MPI_CHAR parts to the main process. How can I avoid this behavior?


There is 1 answer below.


It works when the buffer buff is created with the same length as the received message and null-terminated by hand. MPI_Recv does not append a terminating '\0', so streaming the old fixed-size buffer with image << buff read past the end of each message into leftover bytes:

.
.
   for (int i = 1; i < numprocs; i++) {
      MPI_Probe(i, 0, MPI_COMM_WORLD, &stat);
      int length;
      MPI_Get_count(&stat, MPI_CHAR, &length);   // exact size of the pending message
      printf("\nfrom %d <<-- %d (stat.MPI_SOURCE=%d) Receiving %d chars\n", myid, i, stat.MPI_SOURCE, length);
      char buff[length + 1];                     // one extra byte for the terminator
      MPI_Recv(buff, length, MPI_CHAR, i, 0, MPI_COMM_WORLD, MPI_STATUS_IGNORE);
      buff[length] = '\0';                       // MPI_Recv does not null-terminate
      image << buff;
   }
.
.

Thus, we no longer need the declarations int buff_size = 404270; and char buff[buff_size]; at the beginning.
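
As a side note, char buff[length + 1] is a variable-length array, a GCC/Clang extension rather than standard C++. A minimal portable sketch of the same receive loop, reusing the numprocs, stat, and image variables from above, sizes a std::string instead; no terminator is needed because operator<< then writes exactly length characters:

   for (int i = 1; i < numprocs; i++) {
      MPI_Probe(i, 0, MPI_COMM_WORLD, &stat);
      int length;
      MPI_Get_count(&stat, MPI_CHAR, &length);
      string part(length, '\0');        // exact-size buffer, contiguous storage since C++11
      MPI_Recv(&part[0], length, MPI_CHAR, i, 0, MPI_COMM_WORLD, MPI_STATUS_IGNORE);
      image << part;                    // writes exactly `length` chars, no '\0' required
   }

Probing rank i explicitly also keeps the bands in rank order in the output file, no matter which worker finishes first.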

[corrected output images for 3, 4, and 5 processes]