Calculations in GPU.js giving different results in CPU and GPU modes


I'm trying to use GPU.js to draw the Mandelbrot set.

A problem I'm having, though, is that the math inside the kernel function gives different results when run in CPU mode vs. GPU mode. Here's the kernel function and supporting code:

function inlineTestPoint(initialReal, initialImag, maxIterations) {
  let currentReal = initialReal;
  let currentImag = initialImag;

  let i = 0;
  for (;i < maxIterations; i++) {
    const curRealSq = (currentReal * currentReal);
    const curImagSq = (currentImag * currentImag);
    if (curRealSq + curImagSq <= 4) {
      currentImag = (2 * currentReal * currentImag) + initialImag;
      currentReal = (curRealSq - curImagSq) + initialReal;
    } else {
      break;
    }
  }

  return i;
}

function mapRange(n, sourceMin, sourceMax, targetMin, targetMax) {
  const sourceRatio = (n - sourceMin) / (sourceMax - sourceMin);

  return (sourceRatio * (targetMax - targetMin)) + targetMin;
}

const canvasWidth = 1000;
const canvasHeight = 1000;

const gpu = new GPU({mode: "gpu"});
gpu.addFunction(mapRange, {argumentTypes: ['Float', 'Float', 'Float', 'Float', 'Float'], returnType: 'Float'});
gpu.addFunction(inlineTestPoint, {source: inlineTestPoint, argumentTypes: ['Float', 'Float', 'Float'], returnType: 'Integer'});
const kernel = gpu.createKernel(function(canvasWidth,canvasHeight) {
  const canvasX = this.thread.x;
  const canvasY = this.thread.y;
  const real = mapRange(canvasX, 0, canvasWidth, -2, 2);
  const imag = mapRange(canvasY, 0, canvasHeight, -2, 2);

  const iters = inlineTestPoint(real, imag, 200);
  this.color(iters / 50, iters / 100, iters / 200);
  //return [real, imag, iters];
})
  .setGraphical(true)
  .setLoopMaxIterations(200)
  .setOutput([canvasWidth, canvasHeight]);

kernel(canvasWidth, canvasHeight);

When run in CPU mode, iters for each pixel is what I expect (verified using the breakpoints that are available in CPU mode).

In GPU mode, though, iters is always 0. I can see that by switching graphical mode off and returning iters from the kernel function. It's the same math, though, so I don't understand why running it on the CPU vs. the GPU makes any difference. This is also proving very difficult to debug, since the only way to debug is to return values from the kernel.
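
Here's roughly the debug variant I mean, with graphical mode off so the kernel just returns iters per pixel (a sketch, assuming the same two functions have already been registered with addFunction):

// Debug variant: non-graphical kernel that returns the iteration count per pixel.
const debugKernel = gpu.createKernel(function(canvasWidth, canvasHeight) {
  const real = mapRange(this.thread.x, 0, canvasWidth, -2, 2);
  const imag = mapRange(this.thread.y, 0, canvasHeight, -2, 2);
  return inlineTestPoint(real, imag, 200);
})
  .setLoopMaxIterations(200)
  .setOutput([canvasWidth, canvasHeight]);

console.log(debugKernel(canvasWidth, canvasHeight)); // GPU mode prints all zeros; CPU mode prints the expected counts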

What could cause the results to change?

1 Answer

The cause appears to have been that the arguments to the extra functions were being treated as integers and truncated. CPU kernels and GPU kernels seem to infer types differently, which, in my case, caused the arguments to mapRange to be treated as integers. Or at least, that's my guess; I can only infer it by specifying different types for the functions and seeing what makes it start working.
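
One way to see how integer treatment could produce iters of 0 everywhere: if the generated shader divides integers inside mapRange (GLSL truncates int / int), then the ratio is 0 for every pixel, every thread maps to the point (-2, -2), and that point escapes on the very first iteration. This is just plain JavaScript emulating that guess, not what GPU.js actually generates:

// Hypothetical emulation of the guess: mapRange with the division truncated
// the way GLSL int / int would be.
function mapRangeIntDiv(n, sourceMin, sourceMax, targetMin, targetMax) {
  const sourceRatio = Math.trunc((n - sourceMin) / (sourceMax - sourceMin)); // 0 for any n below sourceMax
  return (sourceRatio * (targetMax - targetMin)) + targetMin;
}

// Every pixel column 0..999 maps to -2, so every thread tests (-2, -2),
// which fails the curRealSq + curImagSq <= 4 check immediately and returns iters === 0.
console.log(mapRangeIntDiv(500, 0, 1000, -2, 2)); // -2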

The minimal fix was to add argumentTypes to the call to addFunction for mapRange, but I ended up adding types for both functions just to be safe:

gpu.addFunction(mapRange, {argumentTypes: ['Float', 'Float', 'Float', 'Float', 'Float'], returnType: 'Float'});
gpu.addFunction(inlineTestPoint, {source: inlineTestPoint, argumentTypes: ['Float', 'Float', 'Float'], returnType: 'Integer'});

And this fixed it.

If you're running into issues like this, I recommend explicitly setting the types. I'll be adding them for all functions from now on regardless, just to avoid this problem altogether.
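
For reference, createKernel itself also appears to accept an argumentTypes setting (going by the GPU.js README), so the kernel's own arguments can be typed explicitly too. I haven't needed that for this bug, but a sketch would look like:

// Untested sketch: explicitly typing the kernel's own arguments as well,
// assuming createKernel accepts the argumentTypes setting described in the GPU.js README.
const typedKernel = gpu.createKernel(function(canvasWidth, canvasHeight) {
  const real = mapRange(this.thread.x, 0, canvasWidth, -2, 2);
  const imag = mapRange(this.thread.y, 0, canvasHeight, -2, 2);
  const iters = inlineTestPoint(real, imag, 200);
  this.color(iters / 50, iters / 100, iters / 200);
}, {
  argumentTypes: { canvasWidth: 'Float', canvasHeight: 'Float' },
  graphical: true,
  loopMaxIterations: 200,
  output: [canvasWidth, canvasHeight],
});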