A while back I had to generate an image with the normals of a spheroid.
Normal maps store a normalized vector in each pixel, which is very useful for calculating lighting effects in 3D engines.
A normalized vector is simply a vector whose length is exactly 1.
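To make that concrete, this is the mapping we will use later to store such a vector in a pixel: each component in [-1, 1] becomes a byte in [0, 255]. The encodeNormal helper here is just for illustration and is not used in the rest of the code.
// map each component from [-1, 1] to [0, 255]; ImageData clamps automatically,
// here we clamp explicitly so the example stands on its own
function encodeNormal(v) {
  return v.map(function (c) {
    return Math.min(255, Math.round(c * 128) + 128);
  });
}
console.log(encodeNormal([0, 0, 1])); // [128, 128, 255]: the light blue typical of flat areas in normal maps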
How is it done?
First, start with a canvas.
<canvas id="canvas" width="256" height="256"></canvas>
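Note that the script has to run after this element exists in the page, for example by placing the script tag after the canvas or by waiting for DOMContentLoaded. A minimal sketch of the latter:
// run the drawing code once the DOM is ready, so getElementById can find the canvas
document.addEventListener('DOMContentLoaded', function () {
  // all the JavaScript from this post goes here
});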
And define some basic helper functions
function distance(xa, ya, xb, yb) {
  return Math.sqrt((xa - xb) * (xa - xb) + (ya - yb) * (ya - yb));
}

function normalizeVector(vector) {
  // a normalized vector is a vector of length 1.0
  // we get the length of 1 by dividing the whole
  // vector by its magnitude
  // the magnitude of a vector V is the distance from (0, 0, 0) to V
  var length = Math.sqrt(
    vector[0] * vector[0] +
    vector[1] * vector[1] +
    vector[2] * vector[2]
  );
  if (length != 0.0) {
    vector[0] = vector[0] / length;
    vector[1] = vector[1] / length;
    vector[2] = vector[2] / length;
  }
  return vector;
}
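A couple of quick checks of these helpers (just a usage sketch; the expected values follow from the 3-4-5 triangle):
console.log(distance(0, 0, 3, 4));       // 5
console.log(normalizeVector([3, 4, 0])); // [0.6, 0.8, 0], length is now 1
console.log(normalizeVector([0, 0, 0])); // [0, 0, 0], the zero vector is left untouched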
Then set up access to the canvas and the image data we are going to draw into.
const element = document.getElementById('canvas');
const canvas = element.getContext('2d');
const width = element.width;
const height = element.height;
const radius = Math.min(width, height) / 2.0;
const picture = canvas.createImageData(width, height);
var pointer = 0;
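It helps to keep the layout of picture.data in mind: it is a Uint8ClampedArray holding four bytes (R, G, B, A) per pixel, stored row by row from the top-left corner. The sequential pointer below relies on that ordering; the equivalent random-access formula would be something like this (pixelIndex is a hypothetical helper, not used later):
// byte offset of pixel (x, y) in picture.data: rows first, then columns, four channels per pixel
function pixelIndex(x, y) {
  return (y * width + x) * 4;
}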
The main loop
// for each pixel, row by row: y is the outer loop because picture.data
// is stored top to bottom, left to right, and pointer only moves forward
for (var y = 0; y < height; y++) {
  for (var x = 0; x < width; x++) {
    var distanceToCenter = distance(x, y, radius, radius);
    var altitude = Math.sqrt(
      radius * radius * 4 - distanceToCenter * distanceToCenter
    );
    // altitude is NaN when the pixel is more than 2 * radius away
    // from the center, i.e. outside the spheroid
    if (isNaN(altitude)) altitude = -radius;
    var v = normalizeVector([
      radius - x,
      radius - y,
      altitude
    ]);
    // map each component from vector space [-1, 1] to pixel space [0, 255],
    // so 0.0 lands on 128 (a component of exactly 1.0 gives 256,
    // which ImageData clamps to 255)
    picture.data[pointer++] = (v[0] * 128) + 128;
    picture.data[pointer++] = (v[1] * 128) + 128;
    picture.data[pointer++] = (v[2] * 128) + 128;
    picture.data[pointer++] = 255;
  }
}
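A quick sanity check of the math, assuming the 256 x 256 canvas above so that radius = 128: since altitude² + distanceToCenter² = (2 * radius)² whenever altitude is real, the vector [radius - x, radius - y, altitude] always has length exactly 2 * radius on this canvas, so the normalization never degenerates. For example, at the rightmost point of the circle:
// at (x, y) = (256, 128): distanceToCenter = 128 and altitude = 128 * sqrt(3)
console.log(normalizeVector([128 - 256, 128 - 128, Math.sqrt(4 * 128 * 128 - 128 * 128)]));
// → [-0.5, 0, ~0.866], a normal tilted 30 degrees away from straight up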
Finally, write the image to the canvas
canvas.putImageData(picture, 0, 0);
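If you want the result as a file rather than pixels on the page, the canvas element can be exported directly. A small sketch (the link element and the normal-map.png filename are just illustrative):
// download the generated normal map as a PNG
const link = document.createElement('a');
link.download = 'normal-map.png';
link.href = element.toDataURL('image/png');
link.click();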