I've been using three.js to make a project that creates a point-material object which can then be interacted with. I'm trying to map the pixels of an image to the vertices of a buffer geometry so it shows the image as a set of points (like a point cloud). The image in question is a map of the earth (downscaled to 106 x 53 pixels).
I'm doing this by drawing the image on a canvas, extracting the image data, setting the colour from the image data (based on pixel coordinates) and then setting the colour attribute of my geometry (in this case a sphere buffer geometry). My question is where am I going wrong with the mapping?
This is code for extracting colours and placing them in an array for the geometry:
// Build one RGB triple per sphere vertex by sampling the matching image pixel.
// NOTE(review): SphereBufferGeometry lays its vertices out row by row (top
// row first, each row running left to right), so the image must be walked
// with y in the OUTER loop and x in the INNER loop — the original x-outer
// order transposed the image, which is why the globe looked scrambled.
let colors = [];
let color = new THREE.Color();
let positionAttribute = g.attributes.position; //g being geometry

// Read the whole canvas once up front; one getImageData(x, y, 1, 1) call
// per pixel (as before) forces thousands of expensive GPU/CPU readbacks.
const ctx = earthCanvas.getContext("2d");
const pixels = ctx.getImageData(0, 0, img.width, img.height).data;

for (let y = 0; y < img.height; y++) {
  for (let x = 0; x < img.width; x++) {
    const i = (y * img.width + x) * 4; // RGBA: 4 bytes per pixel
    // Pad to 6 hex digits so e.g. pure blue becomes "#0000ff", not "#ff".
    const hex = "#" + ("000000" + rgbToHex(pixels[i], pixels[i + 1], pixels[i + 2])).slice(-6);
    color.set(hex);
    colors.push(color.r, color.g, color.b);
  }
}
g.setAttribute("color", new THREE.Float32BufferAttribute(colors, 3));
Which results in this happening:
Is there any way to make this look like earth as a globe? Am I just placing the coordinates of the pixels wrong?
The code for the geometry itself looks like this:
// Sphere whose vertex grid matches the 106 x 53 image exactly.
// SphereBufferGeometry(r, w, h) creates (w + 1) * (h + 1) grid vertices,
// so 105 x 52 segments -> 106 * 53 = 5618 vertices, one per image pixel.
// (The original 104 x 52 gave only 105 * 53 = 5565 vertices — fewer than
// there are pixels — so the colors drifted further out of register on
// every row.)
g = new THREE.SphereBufferGeometry(3, 105, 52);
count = g.attributes.position.count;
console.log(count);
g.center();
let pointShape = new THREE.TextureLoader().load("./models/particle.png");
m = new THREE.PointsMaterial({
  size: pointSize,
  map: pointShape,
  vertexColors: true,
  //blending: THREE.AdditiveBlending,
  transparent: true, // needed for the sprite texture's alpha (and opacity) to apply
  depthTest: false,
  opacity: 1
});
And the html and JS for the canvas looks like this:
// Load `source` into the (module-global) img element and, once it has
// loaded, paint it onto `canvas`, scaling it to the canvas dimensions.
function drawImageSource(source, canvas) {
  img = new Image();
  img.addEventListener("load", function () {
    // Copy the full source rect onto the full canvas rect.
    const ctx = canvas.getContext("2d");
    ctx.drawImage(img, 0, 0, img.width, img.height, 0, 0, canvas.width, canvas.height);
  });
  // Set before src so the request is made with CORS headers; otherwise
  // getImageData() on the canvas would later throw a tainted-canvas error.
  img.crossOrigin = "Anonymous";
  img.src = source;
}
<div id="canvasDiv">
  <canvas id="earthCanvas" width="106" height="53" hidden></canvas>
</div>
Any help would be appreciated, thanks!
The link to the code sandbox project is here: https://codesandbox.io/s/small-hill-mvhgc?file=/src/index.js:5956-6389 (I apologise for messy code it's a prototype)
CodePudding user response:
I would go with a different approach: modify the existing `PointsMaterial` via `.onBeforeCompile()`, and pass a texture in a uniform.
body{
overflow:hidden;
margin:0;
}
<script type="module">
import * as THREE from "https://cdn.skypack.dev/[email protected]";
import {OrbitControls} from "https://cdn.skypack.dev/[email protected]/examples/jsm/controls/OrbitControls";
// Standard scene / camera / renderer boilerplate.
const scene = new THREE.Scene();
const camera = new THREE.PerspectiveCamera(60, innerWidth / innerHeight, 1, 1000);
camera.position.set(0, 0, 10);

const renderer = new THREE.WebGLRenderer();
renderer.setSize(innerWidth, innerHeight);
document.body.appendChild(renderer.domElement);

const controls = new OrbitControls(camera, renderer.domElement);

// Dense lat/long grid: one point per degree of longitude and latitude.
const geometry = new THREE.SphereGeometry(4, 360, 180);

const material = new THREE.PointsMaterial({
  size: 0.05,
  onBeforeCompile: (shader) => {
    // Hand the texture to the patched shader through a uniform.
    shader.uniforms.tex = {value: new THREE.TextureLoader().load("https://threejs.org/examples/textures/uv_grid_opengl.jpg")};

    // Vertex stage: declare a varying and forward the geometry's UVs.
    shader.vertexShader = `
varying vec2 vUv;
${shader.vertexShader}
`.replace(
      `#include <begin_vertex>`,
      `#include <begin_vertex>
vUv = uv;
`
    );
    //console.log(shader.vertexShader);

    // Fragment stage: sample the texture per point instead of using the
    // material's flat diffuse color (diffuse still acts as a tint).
    shader.fragmentShader = `
uniform sampler2D tex;
varying vec2 vUv;
${shader.fragmentShader}
`.replace(
      `vec4 diffuseColor = vec4( diffuse, opacity );`,
      `
vec3 col = texture2D(tex, vUv).rgb;
col *= diffuse;
vec4 diffuseColor = vec4( col, opacity );`
    );
    //console.log(shader.fragmentShader);
  }
});

const points = new THREE.Points(geometry, material);
scene.add(points);

renderer.setAnimationLoop(() => {
  renderer.render(scene, camera);
});
</script>