Merge branch 'sandcastle-v2' into nested-gallery

This commit is contained in:
jjspace 2025-06-06 13:31:02 -04:00
commit e1955f9b56
No known key found for this signature in database
GPG Key ID: F2EE53A25EF6F396
86 changed files with 9773 additions and 295 deletions

View File

@ -11,10 +11,10 @@ jobs:
pull-requests: write
steps:
- uses: actions/checkout@v4
- name: install node 20
- name: install node 22
uses: actions/setup-node@v4
with:
node-version: '20'
node-version: '22'
- name: install npm packages
working-directory: ./.github/actions/check-for-CLA
run: npm install

View File

@ -26,10 +26,10 @@ jobs:
DEPLOYED_URL: https://ci-builds.cesium.com/cesium/${{ github.ref_name }}/
steps:
- uses: actions/checkout@v4
- name: install node 20
- name: install node 22
uses: actions/setup-node@v4
with:
node-version: '20'
node-version: '22'
- name: npm install
run: npm install
- name: set the version in package.json

View File

@ -12,10 +12,10 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: install node 20
- name: install node 22
uses: actions/setup-node@v4
with:
node-version: '20'
node-version: '22'
- name: npm install
run: npm install
- name: lint *.js
@ -33,10 +33,10 @@ jobs:
BRANCH: ${{ github.ref_name }}
steps:
- uses: actions/checkout@v4
- name: install node 20
- name: install node 22
uses: actions/setup-node@v4
with:
node-version: '20'
node-version: '22'
- name: npm install
run: npm install
- name: build
@ -50,10 +50,10 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: install node 20
- name: install node 22
uses: actions/setup-node@v4
with:
node-version: '20'
node-version: '22'
- name: npm install
run: npm install
- name: release build
@ -62,14 +62,14 @@ jobs:
run: npm run test -- --browsers ChromeHeadless --failTaskOnError --webgl-stub --release --suppressPassed
- name: cloc
run: npm run cloc
node-18:
node-20:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: install node 18
- name: install node 20
uses: actions/setup-node@v4
with:
node-version: '18'
node-version: '20'
- name: npm install
run: npm install
- name: release build

View File

@ -8,10 +8,10 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: install node 20
- name: install node 22
uses: actions/setup-node@v4
with:
node-version: '20'
node-version: '22'
- name: npm install
run: npm install
- name: lint *.js
@ -33,10 +33,10 @@ jobs:
GITHUB_SHA: ${{ github.sha }}
steps:
- uses: actions/checkout@v4
- name: install node 20
- name: install node 22
uses: actions/setup-node@v4
with:
node-version: '20'
node-version: '22'
- name: npm install
run: npm install
- name: build website release

View File

@ -186,6 +186,25 @@ a.linkButton:hover {
overflow: auto !important;
}
.feedback {
width: 250px;
.future-banner {
display: flex;
justify-content: center;
align-items: stretch;
text-align: center;
flex-direction: column;
h3 {
margin: 0 1em;
}
p {
margin: 1em 1.5em;
}
}
}
.claro .dijitTabContainerTop-tabs .dijitTabChecked .dijitTabContent {
background-position: 0 -103px;
}

View File

@ -0,0 +1,91 @@
<!doctype html>
<html lang="en">
<head>
<meta charset="utf-8" />
<meta http-equiv="X-UA-Compatible" content="IE=edge" />
<meta
name="viewport"
content="width=device-width, initial-scale=1, maximum-scale=1, minimum-scale=1, user-scalable=no"
/>
<meta
name="description"
content="Drape 2D raster imagery with roads and labels on a 3D Tiles reality mesh dataset streamed from the iTwin Platform."
/>
<meta name="cesium-sandcastle-labels" content="Beginner, Showcases, 3D Tiles" />
<title>Cesium Demo</title>
<script type="text/javascript" src="../Sandcastle-header.js"></script>
<script type="module" src="../load-cesium-es6.js"></script>
</head>
<body class="sandcastle-loading" data-sandcastle-bucket="bucket-requirejs.html">
<style>
@import url(../templates/bucket.css);
</style>
<div id="cesiumContainer" class="fullSize"></div>
<div id="loadingOverlay"><h1>Loading...</h1></div>
<div id="toolbar"></div>
<script id="cesium_sandcastle_script">
window.startup = async function (Cesium) {
"use strict";
//Sandcastle_Begin
// Generate a share key for access to an iTwin without OAuth
// https://developer.bentley.com/apis/access-control-v2/operations/create-itwin-share/
Cesium.ITwinPlatform.defaultShareKey =
"eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpVHdpbklkIjoiNTM1YTI0YTMtOWIyOS00ZTIzLWJiNWQtOWNlZGI1MjRjNzQzIiwiaWQiOiI2NTEwMzUzMi02MmU3LTRmZGQtOWNlNy1iODIxYmEyMmI5NjMiLCJleHAiOjE3NzcwNTU4MTh9.Q9MgsWWkc6bb1zHUJ7ahZjxPtaTWEjpNvRln7NS3faM";
const viewer = new Cesium.Viewer("cesiumContainer", {
timeline: false,
animation: false,
sceneModePicker: false,
baseLayerPicker: false,
});
viewer.scene.skyAtmosphere.show = true;
const iTwinId = "535a24a3-9b29-4e23-bb5d-9cedb524c743";
const realityMeshId = "85897090-3bcc-470b-bec7-20bb639cc1b9";
const tileset = await Cesium.ITwinData.createTilesetForRealityDataId(
iTwinId,
realityMeshId,
);
viewer.scene.primitives.add(tileset);
tileset.maximumScreenSpaceError = 2;
// Create the imagery layer for Bing Maps (labels only)
const labelImageryLayer = Cesium.ImageryLayer.fromProviderAsync(
Cesium.IonImageryProvider.fromAssetId(2411391),
);
tileset.imageryLayers.add(labelImageryLayer);
Sandcastle.addToggleButton(
"Show labels",
labelImageryLayer.show,
function (checked) {
labelImageryLayer.show = checked;
},
);
// Look at Philadelphia
viewer.scene.camera.setView({
destination: new Cesium.Cartesian3(
1252289.5782535905,
-4732887.700120302,
4075105.3952877373,
),
orientation: new Cesium.HeadingPitchRoll(
6.130466027267037,
-1.1315283015826818,
6.2831472551984575,
),
});
//Sandcastle_End
Sandcastle.finishedLoading();
};
if (typeof Cesium !== "undefined") {
window.startupCalled = true;
window.startup(Cesium).catch((error) => {
"use strict";
console.error(error);
});
}
</script>
</body>
</html>

Binary file not shown.

After

Width:  |  Height:  |  Size: 19 KiB

View File

@ -139,7 +139,7 @@
const customShader = new Cesium.CustomShader({
fragmentShaderText: `void fragmentMain(FragmentInput fsInput, inout czm_modelMaterial material)
{
vec3 voxelNormal = normalize(czm_normal * fsInput.voxel.surfaceNormal);
vec3 voxelNormal = fsInput.attributes.normalEC;
float diffuse = max(0.0, dot(voxelNormal, czm_lightDirectionEC));
float lighting = 0.5 + 0.5 * diffuse;

View File

@ -42,6 +42,8 @@
geocoder: false,
animation: false,
timeline: false,
projectionPicker: true,
sceneModePicker: false,
});
viewer.extend(Cesium.viewerVoxelInspectorMixin);

View File

@ -0,0 +1,658 @@
<!doctype html>
<html lang="en">
<head>
<meta charset="utf-8" />
<meta http-equiv="X-UA-Compatible" content="IE=edge" />
<meta
name="viewport"
content="width=device-width, initial-scale=1, maximum-scale=1, minimum-scale=1, user-scalable=no"
/>
<meta
name="description"
content="Rendering Volume Cloud with Texture3D and Custom GLSL. Transplanted from Three.js"
/>
<meta name="cesium-sandcastle-labels" content="Development" />
<title>Cesium Demo</title>
<script type="text/javascript" src="../Sandcastle-header.js"></script>
<script
type="text/javascript"
src="../../../Build/CesiumUnminified/Cesium.js"
nomodule
></script>
<script type="module" src="../load-cesium-es6.js"></script>
</head>
<body class="sandcastle-loading" data-sandcastle-bucket="bucket-requirejs.html">
<style>
@import url(../templates/bucket.css);
</style>
<div id="cesiumContainer" class="fullSize"></div>
<div id="loadingOverlay">
<h1>Loading...</h1>
</div>
<div id="toolbar"></div>
<script id="cesium_sandcastle_script">
window.startup = async function (Cesium) {
"use strict";
//Sandcastle_Begin
// ImprovedNoise from Three.js
// https://github.com/mrdoob/three.js/blob/dev/examples/jsm/math/ImprovedNoise.js
const lerp = Cesium.Math.lerp;
const _p = [
151, 160, 137, 91, 90, 15, 131, 13, 201, 95, 96, 53, 194, 233, 7, 225, 140, 36,
103, 30, 69, 142, 8, 99, 37, 240, 21, 10, 23, 190, 6, 148, 247, 120, 234, 75, 0,
26, 197, 62, 94, 252, 219, 203, 117, 35, 11, 32, 57, 177, 33, 88, 237, 149, 56,
87, 174, 20, 125, 136, 171, 168, 68, 175, 74, 165, 71, 134, 139, 48, 27, 166,
77, 146, 158, 231, 83, 111, 229, 122, 60, 211, 133, 230, 220, 105, 92, 41, 55,
46, 245, 40, 244, 102, 143, 54, 65, 25, 63, 161, 1, 216, 80, 73, 209, 76, 132,
187, 208, 89, 18, 169, 200, 196, 135, 130, 116, 188, 159, 86, 164, 100, 109,
198, 173, 186, 3, 64, 52, 217, 226, 250, 124, 123, 5, 202, 38, 147, 118, 126,
255, 82, 85, 212, 207, 206, 59, 227, 47, 16, 58, 17, 182, 189, 28, 42, 223, 183,
170, 213, 119, 248, 152, 2, 44, 154, 163, 70, 221, 153, 101, 155, 167, 43, 172,
9, 129, 22, 39, 253, 19, 98, 108, 110, 79, 113, 224, 232, 178, 185, 112, 104,
218, 246, 97, 228, 251, 34, 242, 193, 238, 210, 144, 12, 191, 179, 162, 241, 81,
51, 145, 235, 249, 14, 239, 107, 49, 192, 214, 31, 181, 199, 106, 157, 184, 84,
204, 176, 115, 121, 50, 45, 127, 4, 150, 254, 138, 236, 205, 93, 222, 114, 67,
29, 24, 72, 243, 141, 128, 195, 78, 66, 215, 61, 156, 180,
];
for (let i = 0; i < 256; i++) {
_p[256 + i] = _p[i];
}
function fade(t) {
return t * t * t * (t * (t * 6 - 15) + 10);
}
function grad(hash, x, y, z) {
const h = hash & 15;
const u = h < 8 ? x : y,
v = h < 4 ? y : h === 12 || h === 14 ? x : z;
return ((h & 1) === 0 ? u : -u) + ((h & 2) === 0 ? v : -v);
}
/**
* A utility class providing a 3D noise function.
*
* The code is based on [IMPROVED NOISE]{@link https://cs.nyu.edu/~perlin/noise/}
* by Ken Perlin, 2002.
*
* @three_import import { ImprovedNoise } from 'three/addons/math/ImprovedNoise.js';
*/
class ImprovedNoise {
/**
* Returns a noise value for the given parameters.
*
* @param {number} x - The x coordinate.
* @param {number} y - The y coordinate.
* @param {number} z - The z coordinate.
* @return {number} The noise value.
*/
noise(x, y, z) {
const floorX = Math.floor(x),
floorY = Math.floor(y),
floorZ = Math.floor(z);
const X = floorX & 255,
Y = floorY & 255,
Z = floorZ & 255;
x -= floorX;
y -= floorY;
z -= floorZ;
const xMinus1 = x - 1,
yMinus1 = y - 1,
zMinus1 = z - 1;
const u = fade(x),
v = fade(y),
w = fade(z);
const A = _p[X] + Y,
AA = _p[A] + Z,
AB = _p[A + 1] + Z,
B = _p[X + 1] + Y,
BA = _p[B] + Z,
BB = _p[B + 1] + Z;
return lerp(
lerp(
lerp(grad(_p[AA], x, y, z), grad(_p[BA], xMinus1, y, z), u),
lerp(grad(_p[AB], x, yMinus1, z), grad(_p[BB], xMinus1, yMinus1, z), u),
v,
),
lerp(
lerp(
grad(_p[AA + 1], x, y, zMinus1),
grad(_p[BA + 1], xMinus1, y, zMinus1),
u,
),
lerp(
grad(_p[AB + 1], x, yMinus1, zMinus1),
grad(_p[BB + 1], xMinus1, yMinus1, zMinus1),
u,
),
v,
),
w,
);
}
}
// End ImprovedNoise from Three.js
// GeometryPrimitive
const {
Cartesian3,
destroyObject,
DrawCommand,
VertexArray,
GeometryPipeline,
Matrix4,
} = Cesium;
/**
* Custom Primitive for Geometry
*/
class GeometryPrimitive {
/**
*
* @param {*} options
* @param {*} options.modelMatrix
* @param {*} options.vertexShaderSource
* @param {*} options.fragmentShaderSource
* @param {*} options.uniformMap
* @param {*} options.renderState
* @param {*} options.pass
*/
constructor(geometry, options) {
this.options = options;
this.geometry = geometry;
}
/**
*
* @param {*} frameState
*/
update(frameState) {
if (Cesium.defined(this._drawCommand)) {
frameState.commandList.push(this._drawCommand);
return;
}
if (this.geometry.constructor.createGeometry) {
this.geometry = this.geometry.constructor.createGeometry(this.geometry);
}
const context = frameState.context;
const attributeLocations = GeometryPipeline.createAttributeLocations(
this.geometry,
);
const vertexArray = VertexArray.fromGeometry({
context: context,
geometry: this.geometry,
attributeLocations,
});
// calculate boundingSphere
const boundingSphere = this.geometry.boundingSphere;
boundingSphere.center = Matrix4.multiplyByPoint(
this.options.modelMatrix,
boundingSphere.center,
new Cartesian3(),
);
boundingSphere.radius = 1000000;
this._boundingSphereWC = [boundingSphere];
const shaderProgram = Cesium.ShaderProgram.fromCache({
context: context,
attributeLocations,
vertexShaderSource: this.options.vertexShaderSource,
fragmentShaderSource: this.options.fragmentShaderSource,
});
this._drawCommand = new DrawCommand({
owner: this,
boundingVolume: boundingSphere,
primitiveType: this.geometry.primitiveType,
vertexArray: vertexArray,
shaderProgram,
...this.options,
});
}
destroy() {
return destroyObject(this);
}
isDestroyed() {
return false;
}
}
// GeometryPrimitive end
function makeTexture3D(context) {
const size = 100;
const dataLength = size * size * size;
const data = new Uint8Array(dataLength);
let i = 0;
const scale = 0.05;
const perlin = new ImprovedNoise();
let vector = new Cesium.Cartesian3();
const halfSize = Cesium.Cartesian3.fromElements(
size / 2,
size / 2,
size / 2,
new Cesium.Cartesian3(),
);
for (let z = 0; z < size; z++) {
for (let y = 0; y < size; y++) {
for (let x = 0; x < size; x++) {
vector = Cesium.Cartesian3.fromElements(x, y, z, vector);
vector = Cesium.Cartesian3.subtract(vector, halfSize, vector);
vector = Cesium.Cartesian3.divideByScalar(vector, size, vector);
const d = 1.0 - Cesium.Cartesian3.magnitude(vector);
const tv =
(128 +
128 * perlin.noise((x * scale) / 1.5, y * scale, (z * scale) / 1.5)) *
d *
d;
data[i] = tv;
i++;
}
}
}
return new Cesium.Texture3D({
context: context,
width: size,
height: size,
depth: size,
flipY: false,
pixelFormat: Cesium.PixelFormat.RED,
pixelDatatype: Cesium.PixelDatatype.UNSIGNED_BYTE,
source: {
arrayBufferView: data,
width: size,
height: size,
depth: size,
},
sampler: new Cesium.Sampler({
minificationFilter: Cesium.TextureMinificationFilter.LINEAR,
magnificationFilter: Cesium.TextureMagnificationFilter.LINEAR,
}),
});
}
const vertexShader = /* glsl */ `
in vec3 position3DHigh;
in vec3 position3DLow;
in vec3 normal;
in vec2 st;
in float batchId;
out vec3 vOrigin;
out vec3 vDirection;
out vec3 vPosition;
vec4 translateRelativeToEye(vec3 high, vec3 low) {
vec3 highDifference = high - czm_encodedCameraPositionMCHigh;
if(length(highDifference) == 0.0f) {
highDifference = vec3(0);
}
vec3 lowDifference = low - czm_encodedCameraPositionMCLow;
return vec4(highDifference + lowDifference, 1.0f);
}
void main()
{
vec4 p = translateRelativeToEye(position3DHigh, position3DLow);
vOrigin = czm_encodedCameraPositionMCHigh + czm_encodedCameraPositionMCLow;
vec3 modelPosition = position3DHigh + position3DLow;
vPosition = modelPosition;
vDirection = modelPosition - vOrigin;
gl_Position = czm_modelViewProjectionRelativeToEye * p;
}`;
const fragmentShader = /* glsl */ `
precision highp float;
precision highp sampler3D;
in vec3 vOrigin;
in vec3 vDirection;
// https://github.com/mrdoob/three.js/blob/dev/examples/webgl_volume_cloud.html
uniform vec3 base;
uniform sampler3D map;
uniform float threshold;
uniform float range;
uniform float opacity;
uniform float steps;
uniform float frame;
uint wang_hash(uint seed)
{
seed = (seed ^ 61u) ^ (seed >> 16u);
seed *= 9u;
seed = seed ^ (seed >> 4u);
seed *= 0x27d4eb2du;
seed = seed ^ (seed >> 15u);
return seed;
}
float randomFloat(inout uint seed)
{
return float(wang_hash(seed)) / 4294967296.;
}
vec2 hitBox( vec3 orig, vec3 dir ) {
const vec3 box_min = vec3( - 0.5 );
const vec3 box_max = vec3( 0.5 );
vec3 inv_dir = 1.0 / dir;
vec3 tmin_tmp = ( box_min - orig ) * inv_dir;
vec3 tmax_tmp = ( box_max - orig ) * inv_dir;
vec3 tmin = min( tmin_tmp, tmax_tmp );
vec3 tmax = max( tmin_tmp, tmax_tmp );
float t0 = max( tmin.x, max( tmin.y, tmin.z ) );
float t1 = min( tmax.x, min( tmax.y, tmax.z ) );
return vec2( t0, t1 );
}
float sample1( vec3 p ) {
return texture( map, p ).r;
}
float shading( vec3 coord ) {
float step = 0.01;
return sample1( coord + vec3( - step ) ) - sample1( coord + vec3( step ) );
}
vec4 linearToSRGB( in vec4 value ) {
return vec4( mix( pow( value.rgb, vec3( 0.41666 ) ) * 1.055 - vec3( 0.055 ), value.rgb * 12.92, vec3( lessThanEqual( value.rgb, vec3( 0.0031308 ) ) ) ), value.a );
}
void main() {
vec3 rayDir = normalize( vDirection );
vec2 bounds = hitBox( vOrigin, rayDir );
if ( bounds.x > bounds.y ) discard;
bounds.x = max( bounds.x, 0.0 );
vec3 p = vOrigin + bounds.x * rayDir;
vec3 inc = 1.0 / abs( rayDir );
float delta = min( inc.x, min( inc.y, inc.z ) );
delta /= steps;
// Nice little seed from
// https://blog.demofox.org/2020/05/25/casual-shadertoy-path-tracing-1-basic-camera-diffuse-emissive/
uint seed = uint( gl_FragCoord.x ) * uint( 1973 ) + uint( gl_FragCoord.y ) * uint( 9277 ) + uint( frame ) * uint( 26699 );
vec3 size = vec3( textureSize( map, 0 ) );
float randNum = randomFloat( seed ) * 2.0 - 1.0;
p += rayDir * randNum * ( 1.0 / size );
vec4 ac = vec4( base, 0.0 );
for ( float t = bounds.x; t < bounds.y; t += delta ) {
float d = sample1( p + 0.5 );
d = smoothstep( threshold - range, threshold + range, d ) * opacity;
float col = shading( p + 0.5 ) * 3.0 + ( ( p.x + p.y ) * 0.25 ) + 0.2;
ac.rgb += ( 1.0 - ac.a ) * d * col;
ac.a += ( 1.0 - ac.a ) * d;
if ( ac.a >= 0.95 ) break;
p += rayDir * delta;
}
vec4 color = linearToSRGB( ac );
color = czm_gammaCorrect( color );
if ( color.a == 0.0 ) discard;
out_FragColor = color;
}
`;
const viewer = new Cesium.Viewer("cesiumContainer", {
orderIndependentTranslucency: true,
});
const texture3D = makeTexture3D(viewer.scene.context);
texture3D.generateMipmap();
const boxSideLength = 1.0;
const zoomScale = 10000;
const centerPoint = Cesium.Cartesian3.fromDegrees(
113,
33,
(boxSideLength / 0.5) * zoomScale,
);
const modelMatrix = Cesium.Transforms.eastNorthUpToFixedFrame(centerPoint);
const boxSize = new Cesium.Cartesian3(
boxSideLength,
boxSideLength,
boxSideLength,
);
const renderState = Cesium.RenderState.fromCache({
depthMask: false,
blending: {
enabled: true,
color: {
red: 0.0,
green: 0.0,
blue: 0.0,
alpha: 0.0,
},
},
depthTest: {
enabled: true,
func: Cesium.DepthFunction.LESS_OR_EQUAL,
},
cull: {
enabled: true,
face: Cesium.CullFace.FRONT,
},
});
const zoomMat = Cesium.Matrix4.fromScale(
new Cesium.Cartesian3(zoomScale, zoomScale, zoomScale),
new Cesium.Matrix4(),
);
Cesium.Matrix4.multiply(modelMatrix, zoomMat, modelMatrix);
const halfBox = Cesium.Cartesian3.multiplyByScalar(
boxSize,
0.5,
new Cesium.Cartesian3(),
);
const negHalfBox = Cesium.Cartesian3.negate(halfBox, new Cesium.Cartesian3());
const boxGeometry = new Cesium.BoxGeometry({
minimum: negHalfBox,
maximum: halfBox,
});
const uniforms = {
base: new Cesium.Color(0.1912, 0.2542, 0.3515, 0),
map: texture3D,
opacity: 0.25,
range: 0.1,
steps: 100,
frame: 0,
threshold: 0.25,
};
window.uniforms = uniforms;
const cmdUniforms = {};
for (const key in uniforms) {
if (key) {
cmdUniforms[key] = function () {
return uniforms[key];
};
}
}
const primitive = new GeometryPrimitive(boxGeometry, {
uniformMap: cmdUniforms,
vertexShaderSource: vertexShader,
fragmentShaderSource: fragmentShader,
renderState: renderState,
modelMatrix,
pass: Cesium.Pass.TRANSLUCENT,
});
viewer.scene.primitives.add(primitive);
const cameraState = {
destination: centerPoint,
orientation: {
heading: 4.159717744111784,
pitch: -0.4648127266675117,
roll: boxSideLength * zoomScale * 2,
},
duration: 0,
};
// viewer.camera.flyTo(camState);
const hpr = new Cesium.HeadingPitchRange(
cameraState.orientation.heading,
cameraState.orientation.pitch,
boxSideLength * zoomScale * 2,
);
viewer.camera.lookAt(cameraState.destination, hpr);
// create params control UI
function createSlider(
toolbar,
{ labelText, min, max, step, defaultValue, callback },
) {
const container = document.createElement("div");
container.style.cssText = `
display: flex;
align-items: center;
gap: 10px;
margin-bottom: 8px;
`;
const label = document.createElement("label");
label.textContent = labelText;
label.style.cssText = `
color: #ffffff;
font-family: Arial, sans-serif;
min-width: 60px;
font-size: 14px;
`;
const slider = document.createElement("input");
slider.type = "range";
slider.min = min;
slider.max = max;
slider.step = step;
slider.value = defaultValue;
slider.style.cssText = `
width: 100px;
cursor: pointer;
accent-color: #4CAF50;
`;
slider.addEventListener("input", (e) => callback(e.target.value));
const valueDisplay = document.createElement("span");
valueDisplay.textContent = defaultValue;
valueDisplay.style.cssText = `
color: #ffffff;
font-family: monospace;
width: 30px;
text-align: right;
`;
slider.addEventListener("input", (e) => {
valueDisplay.textContent = parseFloat(e.target.value).toFixed(2);
});
container.appendChild(label);
container.appendChild(slider);
container.appendChild(valueDisplay);
toolbar.appendChild(container);
return slider;
}
function createUi() {
const toolbar = document.getElementById("toolbar");
toolbar.style.cssText = `
position: fixed;
top: 10px;
left: 10px;
padding: 15px 20px 5px 20px;
background: rgba(40, 40, 40, 0.85);
border-radius: 12px;
box-shadow: 0 4px 12px rgba(0, 0, 0, 0.3);
backdrop-filter: blur(5px);
z-index: 1000;
`;
createSlider(toolbar, {
labelText: "opacity",
min: 0,
max: 1,
step: 0.01,
defaultValue: uniforms.opacity,
callback: (val) => {
uniforms.opacity = parseFloat(val);
},
});
createSlider(toolbar, {
labelText: "range",
min: 0,
max: 1,
step: 0.01,
defaultValue: uniforms.range,
callback: (val) => {
uniforms.range = parseFloat(val);
},
});
createSlider(toolbar, {
labelText: "steps",
min: 20,
max: 200,
step: 1,
defaultValue: uniforms.steps,
callback: (val) => {
uniforms.steps = parseFloat(val);
},
});
createSlider(toolbar, {
labelText: "threshold",
min: 0,
max: 1,
step: 0.01,
defaultValue: uniforms.threshold,
callback: (val) => {
uniforms.threshold = parseFloat(val);
},
});
}
createUi();
//Sandcastle_End
};
if (typeof Cesium !== "undefined") {
window.startupCalled = true;
window.startup(Cesium).catch((error) => {
"use strict";
console.error(error);
});
Sandcastle.finishedLoading();
}
</script>
</body>
</html>

Binary file not shown.

After

Width:  |  Height:  |  Size: 93 KiB

View File

@ -305,33 +305,66 @@
<div
id="bottomPanel"
class="bottomPanel"
data-dojo-type="dijit.layout.TabContainer"
data-dojo-type="dijit.layout.BorderContainer"
data-dojo-props="region: 'bottom', splitter: true"
>
<div
id="innerPanel"
class="bottomPanel"
data-dojo-type="dijit.layout.TabContainer"
data-dojo-props="title: 'Gallery', nested: true"
data-dojo-props="region: 'center', splitter: false"
>
<div
id="searchContainer"
class="galleryContainer"
data-dojo-type="dijit.layout.ContentPane"
data-dojo-props="title: 'Search Results'"
id="innerPanel"
class="bottomPanel"
data-dojo-type="dijit.layout.TabContainer"
data-dojo-props="title: 'Gallery', nested: true"
>
<div id="searchResults" class="demosContainer">
<div id="searchDemos" class="demos"></div>
<div
id="searchContainer"
class="galleryContainer"
data-dojo-type="dijit.layout.ContentPane"
data-dojo-props="title: 'Search Results'"
>
<div id="searchResults" class="demosContainer">
<div id="searchDemos" class="demos"></div>
</div>
</div>
</div>
<div
id="logContainer"
data-dojo-type="dijit.layout.ContentPane"
data-dojo-props="title: 'Console'"
>
<div class="logContainer">
<div id="logOutput"></div>
</div>
</div>
</div>
<div
id="logContainer"
data-dojo-type="dijit.layout.ContentPane"
data-dojo-props="title: 'Console'"
class="feedback"
data-dojo-type="dijit.layout.TabContainer"
data-dojo-props="region: 'right'"
>
<div class="logContainer">
<div id="logOutput"></div>
<div
class="future-banner"
data-dojo-type="dijit.layout.ContentPane"
data-dojo-props="title: 'Feedback'"
>
<h3>Help needed</h3>
<p>
We're currently gathering user feedback to make Sandcastle even
better.
</p>
<button data-dojo-type="dijit/form/Button" type="button">
Please share your thoughts!
<script
type="dojo/on"
data-dojo-event="click"
data-dojo-args="evt"
>
window.open("https://community.cesium.com/t/upgrading-sandcastle-we-need-your-input/39715/", "_blank")
</script>
</button>
</div>
</div>
</div>

View File

@ -1,5 +1,31 @@
# Change Log
## 1.130 - 2025-06-02
### @cesium/engine
#### Breaking Changes :mega:
- The `FragmentInput` struct for voxel shaders has been updated to be more consistent with the `CustomShader` documentation. Remaining differences in `CustomShader` usage between `VoxelPrimitive` and `Cesium3DTileset` or `Model` are now documented in the Custom Shader Guide. [#12636](https://github.com/CesiumGS/cesium/pull/12636). Key changes include:
- The non-standard position attributes `fsInput.voxel.positionUv`, `fsInput.voxel.positionShapeUv`, and `fsInput.voxel.positionLocal` have been removed, and replaced by a single eye coordinate position `fsInput.attributes.positionEC`.
- The normal in model coordinates `fsInput.voxel.surfaceNormal` has been replaced by a normal in eye coordinates `fsInput.attributes.normalEC`. Example:
```glsl
// Replace this:
// vec3 voxelNormal = normalize(czm_normal * fsInput.voxel.surfaceNormal);
// with this:
vec3 voxelNormal = fsInput.attributes.normalEC;
```
#### Additions :tada:
- Add basic support for draping imagery on 3D Tiles. [#12567](https://github.com/CesiumGS/cesium/pull/12567)
- Add support for 3D Textures and add Volume Cloud sandcastle example. [#12611](https://github.com/CesiumGS/cesium/pull/12611)
#### Fixes :wrench:
- Fixed voxel rendering with orthographic cameras. [#12629](https://github.com/CesiumGS/cesium/pull/12629)
## 1.129 - 2025-05-01
### @cesium/engine

View File

@ -39,7 +39,7 @@
## Get the Code
- Setup Git if it isn't already ([link](https://help.github.com/articles/set-up-git/#platform-all)).
- [Setup Git](https://help.github.com/articles/set-up-git/#platform-all) if it isn't already.
- New to git or need a refresher? Now's a good time to learn! [Easy tutorials here.](https://guides.github.com/)
- Make sure your SSH keys are configured ([linux](https://help.github.com/articles/generating-ssh-keys#platform-linux) | [mac](https://help.github.com/articles/generating-ssh-keys#platform-mac) | [windows](https://help.github.com/articles/generating-ssh-keys#platform-windows)).
- Double-check your settings for name and email: `git config --get-regexp user.*`.
@ -59,7 +59,7 @@
Prerequisites:
- Install [Node.js](http://nodejs.org/) on your system. Building Cesium requires Node 18.x or newer.
- Install [Node.js](http://nodejs.org/) on your system. Building Cesium requires Node 20.x or newer.
Cesium uses [npm modules](https://docs.npmjs.com/getting-started/what-is-npm) for development, so after syncing, you need to run `npm install` from the Cesium root directory:

View File

@ -148,7 +148,7 @@ for more info.
will checkout pull request [#3941](https://github.com/CesiumGS/cesium/pull/3941).
More details can be found [here](https://cli.github.com/manual/gh_pr_checkout).
More details can be found in the [GitHub CLI manual](https://cli.github.com/manual/gh_pr_checkout).
#### Using git

View File

@ -128,7 +128,7 @@ A few more naming conventions are introduced below along with their design patte
## Linting
For syntax and style guidelines, we use the ESLint recommended settings (the list of rules can be found [here](http://eslint.org/docs/rules/)) as a base and extend it with additional rules via a shared config Node module, [eslint-config-cesium](https://www.npmjs.com/package/eslint-config-cesium). This package is maintained as a part of the Cesium repository and is also used throughout the Cesium ecosystem. For an up to date list of which rules are enabled, look in [index.js](https://github.com/CesiumGS/eslint-config-cesium/blob/main/index.js), [browser.js](https://github.com/CesiumGS/eslint-config-cesium/blob/main/browser.js), and [node.js](https://github.com/CesiumGS/eslint-config-cesium/blob/main/node.js). Below are listed some specific rules to keep in mind
For syntax and style guidelines, we use the ESLint recommended settings as a base and extend it with additional rules (see the [list of all rules](http://eslint.org/docs/rules/)) via a shared config Node module, [eslint-config-cesium](https://www.npmjs.com/package/eslint-config-cesium). This package is maintained as a part of the Cesium repository and is also used throughout the Cesium ecosystem. For an up to date list of which rules are enabled, look in [index.js](https://github.com/CesiumGS/eslint-config-cesium/blob/main/index.js), [browser.js](https://github.com/CesiumGS/eslint-config-cesium/blob/main/browser.js), and [node.js](https://github.com/CesiumGS/eslint-config-cesium/blob/main/node.js). Below are listed some specific rules to keep in mind
**General rules:**

View File

@ -8,8 +8,8 @@
| 4/1/2025 | `@ggetz` |
| 5/1/2025 | `@jjspace` |
| 6/2/2025 | `@jjhembd` |
| 7/1/2025 | `@lukemckinstry` |
| 8/1/2025 | `@jjspace` |
| 7/1/2025 | `@jjspace` |
| 8/1/2025 | `@lukemckinstry` |
| 9/1/2025 | `@ggetz` |
| 10/1/2025 | `@jjhembd` |
| 11/3/2025 | `@lukemckinstry` |

View File

@ -60,8 +60,8 @@ const customShader = new Cesium.CustomShader({
## Applying A Custom Shader
Custom shaders can be applied to either 3D Tiles or a `Model` as
follows:
Custom shaders can be applied to either 3D Tiles, a `Model`, or
a `VoxelPrimitive` as follows:
```js
const customShader = new Cesium.CustomShader(/* ... */);
@ -69,17 +69,28 @@ const customShader = new Cesium.CustomShader(/* ... */);
// Applying to all tiles in a tileset.
const tileset = await Cesium.Cesium3DTileset.fromUrl(
"http://example.com/tileset.json", {
customShader: customShader
customShader: customShader,
});
viewer.scene.primitives.add(tileset);
// Applying to a model directly
const model = await Cesium.Model.fromGltfAsync({
url: "http://example.com/model.gltf",
customShader: customShader
customShader: customShader,
});
// Applying to a VoxelPrimitive
const provider = await Cesium.Cesium3DTilesVoxelProvider.fromUrl(
"http://example.com/tileset.json",
);
const voxelPrimitive = new Cesium.VoxelPrimitive({
provider: provider,
customShader: customShader,
});
```
Voxels only support a subset of custom shader functionality. See [Using custom shaders for voxel rendering](#using-custom-shaders-for-voxel-rendering).
## Uniforms
Custom Shaders currently supports the following uniform types:
@ -229,7 +240,7 @@ struct FragmentInput {
};
```
## Attributes Struct
## `Attributes` Struct
The `Attributes` struct is dynamically generated given the variables used in
the custom shader and the attributes available in the primitive to render.
@ -871,3 +882,19 @@ even when `lightingModel` is `LightingModel.UNLIT`.
When `scene.highDynamicRange` is `false`, the final computed color
(after custom shaders and lighting) is converted to `sRGB`.
## Using custom shaders for voxel rendering
Voxel rendering uses a subset of custom shader functionality.
The supplied shader is executed in the fragment shader _only_. (If a `vertexShaderText` is supplied, it will be ignored.)
The supplied fragment shader is executed at each step of the raymarching through the voxel.
The final rendered color at a given pixel will be an alpha-blended composition of the shader executions at all steps along the ray.
The `FragmentInput` struct in a voxel shader has some differences with other custom shaders:
- `Attributes`: Only `positionEC` and `normalEC` properties are supported.
- `FeatureIds` is not present in voxel shaders.
- `Metadata` is fully supported.
- `MetadataClass` is not present.
- `MetadataStatistics`: The only supported statistics are `min` and `max`.

View File

@ -0,0 +1,55 @@
# UnitSquare
A unit square, in different flavors.
All files describe a square (0,0,0)-(1,1,0), with positions, normals,
and texture coordinates. The normals are always (0,0,1). The texture
coordinates are the (x,1-y) components of the vertex positions (i.e.
the (x,y) coordinates, but flipped vertically). The texture is a
simple 128x128 pixel dummy PNG texture.
The files that start with `unitSquare11x11` consist of a regular grid
of 11x11 = 121 vertices, representing 200 triangles, stored with
`mode=GL_TRIANGLES`.
The flavors of this file are
- `unitSquare11x11_plain.glb`: Just the plain glTF, without compression
or quantization, storing all attributes in their floating point
representation, in non-interleaved accessors
- `unitSquare11x11_plain_interleaved.glb`: The same as `plain`, but
storing the data in interleaved accessors
- `unitSquare11x11_draco.glb`: The `plain` version, draco-compressed
- `unitSquare11x11_meshopt.glb`: The `plain` version, meshopt-compressed
- `unitSquare11x11_quantized.glb`: The `plain` file, with quantization, storing
the result in non-interleaved accessors
- `unitSquare11x11_quantized_interleaved.glb`: The same as `quantized`, but storing
the result in interleaved accessors
- `unitSquare11x11_unsignedShortTexCoords.glb`: The same as `plain`, but with the
texture coordinates stored as a `GL_UNSIGNED_SHORT (normalized)` accessor,
in non-interleaved accessors
- `unitSquare11x11_unsignedShortTexCoords_interleaved.glb`: The same as
`unsignedShortTexCoords`, stored with interleaved accessors
- `unitSquare11x11_unsignedShortTexCoords_quantized.glb`: The same as
`unsignedShortTexCoords`, with additional quantization, stored in
non-interleaved accessors
- `unitSquare11x11_unsignedShortTexCoords_quantized_interleaved.glb`: The same as
`unsignedShortTexCoords_quantized`, but stored with interleaved accessors
The other representations aim at different primitive modes:
- `unitSquare11x2_triangleStrip.glb`: Contains 11x2 vertices, 11 in x-direction
and 2 in y-direction, storing the triangles in `mode=GL_TRIANGLE_STRIP`
- `unitSquare5_triangleFan.glb`: Contains 5 vertices, one at (0.5, 0.5) and
one at each corner of the unit square, representing the triangles in
`mode=GL_TRIANGLE_FAN`
Other representations aim at further structural tests
- `unitSquare_fourPrimitives_plain.glb`: A unit square given as a single mesh
that consists of four primitives, each with 3x3 vertices.
Most basic versions of the files have been created with a custom snippet
based on JglTF. The compressed, interleaved, and quantized versions have
been created with `glTF-Transform` (via https://gltf.report/).
The quantization in all cases was `{ quantizePosition: 14, quantizeNormal: 10 }`.

View File

@ -0,0 +1,31 @@
const viewer = new Cesium.Viewer("cesiumContainer");

// Load the tileset and add it to the scene. Bounding volumes are drawn
// so the tile structure is visible while debugging.
const tileset = viewer.scene.primitives.add(
  await Cesium.Cesium3DTileset.fromUrl("http://localhost:8003/tileset.json", {
    debugShowBoundingVolume: true,
  })
);

// Place the tileset at a fixed position on the globe and scale it up
// uniformly so it is easy to see.
const position = Cesium.Cartesian3.fromDegrees(-75.152408, 39.946975, 20);
const enuFrame = Cesium.Transforms.eastNorthUpToFixedFrame(position);
tileset.modelMatrix = Cesium.Matrix4.multiplyByUniformScale(
  enuFrame,
  15.0,
  new Cesium.Matrix4()
);

// Fly to the tileset with a small heading/pitch offset so the whole
// tileset is in view.
viewer.zoomTo(
  tileset,
  new Cesium.HeadingPitchRange(
    Cesium.Math.toRadians(-22.5),
    Cesium.Math.toRadians(-22.5),
    250.0
  )
);

View File

@ -0,0 +1,370 @@
{
"asset": {
"version": "1.1"
},
"geometricError": 4096,
"root": {
"refine": "REPLACE",
"transform": [
1, 0, 0, 0,
0, 0,-1, 0,
0, 1, 0, 0,
0, 0, 0, 1
],
"boundingVolume": {
"box": [
3.5,
0,
2.5,
3.5,
0,
0,
0,
0,
0,
0,
0,
2.5
]
},
"geometricError": 1024,
"children": [
{
"transform": [
1, 0, 0, 0,
0, 1, 0, 0,
0, 0, 1, 0,
0, 0, 0, 1
],
"boundingVolume": {
"box": [
0.5,
0,
0.5,
0.5,
0,
0,
0,
0,
0,
0,
0,
0.5
]
},
"geometricError": 512,
"content": {
"uri": "unitSquare11x11_draco.glb"
}
},
{
"transform": [
1, 0, 0, 0,
0, 1, 0, 0,
0, 0, 1, 0,
2, 0, 0, 1
],
"boundingVolume": {
"box": [
0.5,
0,
0.5,
0.5,
0,
0,
0,
0,
0,
0,
0,
0.5
]
},
"geometricError": 512,
"content": {
"uri": "unitSquare11x11_meshopt.glb"
}
},
{
"transform": [
1, 0, 0, 0,
0, 1, 0, 0,
0, 0, 1, 0,
4, 0, 0, 1
],
"boundingVolume": {
"box": [
0.5,
0,
0.5,
0.5,
0,
0,
0,
0,
0,
0,
0,
0.5
]
},
"geometricError": 512,
"content": {
"uri": "unitSquare11x11_plain.glb"
}
},
{
"transform": [
1, 0, 0, 0,
0, 1, 0, 0,
0, 0, 1, 0,
6, 0, 0, 1
],
"boundingVolume": {
"box": [
0.5,
0,
0.5,
0.5,
0,
0,
0,
0,
0,
0,
0,
0.5
]
},
"geometricError": 512,
"content": {
"uri": "unitSquare11x11_plain_interleaved.glb"
}
},
{
"transform": [
1, 0, 0, 0,
0, 1, 0, 0,
0, 0, 1, 0,
0, 0, 2, 1
],
"boundingVolume": {
"box": [
0.5,
0,
0.5,
0.5,
0,
0,
0,
0,
0,
0,
0,
0.5
]
},
"geometricError": 512,
"content": {
"uri": "unitSquare11x11_quantized.glb"
}
},
{
"transform": [
1, 0, 0, 0,
0, 1, 0, 0,
0, 0, 1, 0,
2, 0, 2, 1
],
"boundingVolume": {
"box": [
0.5,
0,
0.5,
0.5,
0,
0,
0,
0,
0,
0,
0,
0.5
]
},
"geometricError": 512,
"content": {
"uri": "unitSquare11x11_quantized_interleaved.glb"
}
},
{
"transform": [
1, 0, 0, 0,
0, 1, 0, 0,
0, 0, 1, 0,
4, 0, 2, 1
],
"boundingVolume": {
"box": [
0.5,
0,
0.5,
0.5,
0,
0,
0,
0,
0,
0,
0,
0.5
]
},
"geometricError": 512,
"content": {
"uri": "unitSquare11x11_unsignedShortTexCoords.glb"
}
},
{
"transform": [
1, 0, 0, 0,
0, 1, 0, 0,
0, 0, 1, 0,
6, 0, 2, 1
],
"boundingVolume": {
"box": [
0.5,
0,
0.5,
0.5,
0,
0,
0,
0,
0,
0,
0,
0.5
]
},
"geometricError": 512,
"content": {
"uri": "unitSquare11x11_unsignedShortTexCoords_interleaved.glb"
}
},
{
"transform": [
1, 0, 0, 0,
0, 1, 0, 0,
0, 0, 1, 0,
0, 0, 4, 1
],
"boundingVolume": {
"box": [
0.5,
0,
0.5,
0.5,
0,
0,
0,
0,
0,
0,
0,
0.5
]
},
"geometricError": 512,
"content": {
"uri": "unitSquare11x11_unsignedShortTexCoords_quantized.glb"
}
},
{
"transform": [
1, 0, 0, 0,
0, 1, 0, 0,
0, 0, 1, 0,
2, 0, 4, 1
],
"boundingVolume": {
"box": [
0.5,
0,
0.5,
0.5,
0,
0,
0,
0,
0,
0,
0,
0.5
]
},
"geometricError": 512,
"content": {
"uri": "unitSquare11x11_unsignedShortTexCoords_quantized_interleaved.glb"
}
},
{
"transform": [
1, 0, 0, 0,
0, 1, 0, 0,
0, 0, 1, 0,
4, 0, 4, 1
],
"boundingVolume": {
"box": [
0.5,
0,
0.5,
0.5,
0,
0,
0,
0,
0,
0,
0,
0.5
]
},
"geometricError": 512,
"content": {
"uri": "unitSquare11x2_triangleStrip.glb"
}
},
{
"transform": [
1, 0, 0, 0,
0, 1, 0, 0,
0, 0, 1, 0,
6, 0, 4, 1
],
"boundingVolume": {
"box": [
0.5,
0,
0.5,
0.5,
0,
0,
0,
0,
0,
0,
0,
0.5
]
},
"geometricError": 512,
"content": {
"uri": "unitSquare5_triangleFan.glb"
}
}
]
}
}

View File

@ -0,0 +1,22 @@
{
"asset" : {
"version" : "1.1"
},
"geometricError" : 65536.0,
"root" : {
"refine": "REPLACE",
"transform": [
1, 0, 0, 0,
0, 0,-1, 0,
0, 1, 0, 0,
0, 0, 0, 1
],
"boundingVolume" : {
"box" : [ 0.5, 0.0, 0.5, 0.5, 0.0, 0.0, 0.0, 0.001, 0.0, 0.0, 0.0, 0.5 ]
},
"geometricError" : 1.0,
"content": {
"uri": "unitSquare_fourPrimitives_plain.glb"
}
}
}

View File

@ -0,0 +1,252 @@
import { test, expect } from "./test.js";
// Visual regression test: a single-tile procedural voxel primitive rendered
// with a custom shader, viewed through the default perspective camera.
test("renders procedural voxel in perspective camera", async ({
  cesiumPage,
}) => {
  await cesiumPage.goto();

  await cesiumPage.page.evaluate(() => {
    const viewer = new Cesium.Viewer("cesiumContainer", {
      baseLayer: Cesium.ImageryLayer.fromProviderAsync(
        Cesium.TileMapServiceImageryProvider.fromUrl(
          Cesium.buildModuleUrl("Assets/Textures/NaturalEarthII"),
        ),
      ),
      baseLayerPicker: false,
      geocoder: false,
      animation: false,
      timeline: false,
      sceneModePicker: false,
      homeButton: false,
      navigationHelpButton: false,
    });

    const { scene, camera } = viewer;
    camera.setView({
      destination: new Cesium.Cartesian3(
        20463166.456674013,
        24169216.80790143,
        15536221.507601531,
      ),
      orientation: new Cesium.HeadingPitchRoll(
        6.283185307179586,
        -1.5680902263173198,
        0,
      ),
    });

    // Scale the unit voxel shape up to globe size.
    const globalTransform = Cesium.Matrix4.fromScale(
      Cesium.Cartesian3.fromElements(
        Cesium.Ellipsoid.WGS84.maximumRadius,
        Cesium.Ellipsoid.WGS84.maximumRadius,
        Cesium.Ellipsoid.WGS84.maximumRadius,
      ),
    );

    // Minimal provider: one 8x8x8 tile of procedurally generated VEC4 colors.
    function ProceduralSingleTileVoxelProvider(shape) {
      this.shape = shape;
      this.minBounds = Cesium.VoxelShapeType.getMinBounds(shape).clone();
      this.maxBounds = Cesium.VoxelShapeType.getMaxBounds(shape).clone();
      this.dimensions = new Cesium.Cartesian3(8, 8, 8);
      this.names = ["color"];
      this.types = [Cesium.MetadataType.VEC4];
      this.componentTypes = [Cesium.MetadataComponentType.FLOAT32];
      this.globalTransform = globalTransform;
    }

    const scratchColor = new Cesium.Color();

    ProceduralSingleTileVoxelProvider.prototype.requestData = function (
      options,
    ) {
      if (options.tileLevel >= 1) {
        // Reject with an Error (not a bare string) so stack traces and
        // instanceof Error checks work for consumers of this rejection.
        return Promise.reject(new Error("No tiles available beyond level 0"));
      }

      const dimensions = this.dimensions;
      const voxelCount = dimensions.x * dimensions.y * dimensions.z;
      const type = this.types[0];
      const channelCount = Cesium.MetadataType.getComponentCount(type);
      const dataColor = new Float32Array(voxelCount * channelCount);

      // Seed the RNG deterministically so the rendered result is stable
      // across runs (required for screenshot comparison).
      const randomSeed = dimensions.y * dimensions.x + dimensions.x;
      Cesium.Math.setRandomNumberSeed(randomSeed);
      const hue = Cesium.Math.nextRandomNumber();

      for (let z = 0; z < dimensions.z; z++) {
        for (let y = 0; y < dimensions.y; y++) {
          const indexZY = z * dimensions.y * dimensions.x + y * dimensions.x;
          for (let x = 0; x < dimensions.x; x++) {
            const lerperX = x / (dimensions.x - 1);
            const lerperY = y / (dimensions.y - 1);
            const lerperZ = z / (dimensions.z - 1);
            const h = hue + lerperX * 0.5 - lerperY * 0.3 + lerperZ * 0.2;
            const s = 1.0 - lerperY * 0.2;
            const v = 0.5 + 2.0 * (lerperZ - 0.5) * 0.2;
            const color = Cesium.Color.fromHsl(h, s, v, 1.0, scratchColor);
            const index = (indexZY + x) * channelCount;
            dataColor[index + 0] = color.red;
            dataColor[index + 1] = color.green;
            dataColor[index + 2] = color.blue;
            dataColor[index + 3] = 0.75;
          }
        }
      }

      const content = Cesium.VoxelContent.fromMetadataArray([dataColor]);
      return Promise.resolve(content);
    };

    const provider = new ProceduralSingleTileVoxelProvider(
      Cesium.VoxelShapeType.BOX,
    );

    const customShader = new Cesium.CustomShader({
      fragmentShaderText: `void fragmentMain(FragmentInput fsInput, inout czm_modelMaterial material)
{
    material.diffuse = fsInput.metadata.color.rgb;
    float transparency = 1.0 - fsInput.metadata.color.a;
    // To mimic light scattering, use exponential decay
    float thickness = fsInput.voxel.travelDistance * 16.0;
    material.alpha = 1.0 - pow(transparency, thickness);
}`,
    });

    scene.primitives.add(new Cesium.VoxelPrimitive({ provider, customShader }));
  });

  // Freeze time so the screenshot is deterministic, then let rendering settle.
  await cesiumPage.page.clock.pauseAt(new Date("2023-12-25T14:00:00"));
  await cesiumPage.page.waitForLoadState("networkidle");
  await cesiumPage.page.clock.runFor(1000);

  await expect(cesiumPage.page).toHaveScreenshot();
});
// Visual regression test: the same procedural voxel primitive as the
// perspective test, but rendered through an orthographic frustum.
test("renders procedural voxel in orthographic camera", async ({
  cesiumPage,
}) => {
  await cesiumPage.goto();

  await cesiumPage.page.evaluate(() => {
    const viewer = new Cesium.Viewer("cesiumContainer", {
      baseLayer: Cesium.ImageryLayer.fromProviderAsync(
        Cesium.TileMapServiceImageryProvider.fromUrl(
          Cesium.buildModuleUrl("Assets/Textures/NaturalEarthII"),
        ),
      ),
      baseLayerPicker: false,
      geocoder: false,
      animation: false,
      timeline: false,
      sceneModePicker: false,
      homeButton: false,
      navigationHelpButton: false,
    });

    const { scene, camera } = viewer;
    camera.setView({
      destination: new Cesium.Cartesian3(
        20463166.456674013,
        24169216.80790143,
        15536221.507601531,
      ),
      orientation: new Cesium.HeadingPitchRoll(
        6.283185307179586,
        -1.5680902263173198,
        0,
      ),
    });
    // Switch to an orthographic projection before rendering.
    camera.switchToOrthographicFrustum();

    // Scale the unit voxel shape up to globe size.
    const globalTransform = Cesium.Matrix4.fromScale(
      Cesium.Cartesian3.fromElements(
        Cesium.Ellipsoid.WGS84.maximumRadius,
        Cesium.Ellipsoid.WGS84.maximumRadius,
        Cesium.Ellipsoid.WGS84.maximumRadius,
      ),
    );

    // Minimal provider: one 8x8x8 tile of procedurally generated VEC4 colors.
    function ProceduralSingleTileVoxelProvider(shape) {
      this.shape = shape;
      this.minBounds = Cesium.VoxelShapeType.getMinBounds(shape).clone();
      this.maxBounds = Cesium.VoxelShapeType.getMaxBounds(shape).clone();
      this.dimensions = new Cesium.Cartesian3(8, 8, 8);
      this.names = ["color"];
      this.types = [Cesium.MetadataType.VEC4];
      this.componentTypes = [Cesium.MetadataComponentType.FLOAT32];
      this.globalTransform = globalTransform;
    }

    const scratchColor = new Cesium.Color();

    ProceduralSingleTileVoxelProvider.prototype.requestData = function (
      options,
    ) {
      if (options.tileLevel >= 1) {
        // Reject with an Error (not a bare string) so stack traces and
        // instanceof Error checks work for consumers of this rejection.
        return Promise.reject(new Error("No tiles available beyond level 0"));
      }

      const dimensions = this.dimensions;
      const voxelCount = dimensions.x * dimensions.y * dimensions.z;
      const type = this.types[0];
      const channelCount = Cesium.MetadataType.getComponentCount(type);
      const dataColor = new Float32Array(voxelCount * channelCount);

      // Seed the RNG deterministically so the rendered result is stable
      // across runs (required for screenshot comparison).
      const randomSeed = dimensions.y * dimensions.x + dimensions.x;
      Cesium.Math.setRandomNumberSeed(randomSeed);
      const hue = Cesium.Math.nextRandomNumber();

      for (let z = 0; z < dimensions.z; z++) {
        for (let y = 0; y < dimensions.y; y++) {
          const indexZY = z * dimensions.y * dimensions.x + y * dimensions.x;
          for (let x = 0; x < dimensions.x; x++) {
            const lerperX = x / (dimensions.x - 1);
            const lerperY = y / (dimensions.y - 1);
            const lerperZ = z / (dimensions.z - 1);
            const h = hue + lerperX * 0.5 - lerperY * 0.3 + lerperZ * 0.2;
            const s = 1.0 - lerperY * 0.2;
            const v = 0.5 + 2.0 * (lerperZ - 0.5) * 0.2;
            const color = Cesium.Color.fromHsl(h, s, v, 1.0, scratchColor);
            const index = (indexZY + x) * channelCount;
            dataColor[index + 0] = color.red;
            dataColor[index + 1] = color.green;
            dataColor[index + 2] = color.blue;
            dataColor[index + 3] = 0.75;
          }
        }
      }

      const content = Cesium.VoxelContent.fromMetadataArray([dataColor]);
      return Promise.resolve(content);
    };

    const provider = new ProceduralSingleTileVoxelProvider(
      Cesium.VoxelShapeType.BOX,
    );

    const customShader = new Cesium.CustomShader({
      fragmentShaderText: `void fragmentMain(FragmentInput fsInput, inout czm_modelMaterial material)
{
    material.diffuse = fsInput.metadata.color.rgb;
    float transparency = 1.0 - fsInput.metadata.color.a;
    // To mimic light scattering, use exponential decay
    float thickness = fsInput.voxel.travelDistance * 16.0;
    material.alpha = 1.0 - pow(transparency, thickness);
}`,
    });

    scene.primitives.add(new Cesium.VoxelPrimitive({ provider, customShader }));
  });

  // Freeze time so the screenshot is deterministic, then let rendering settle.
  await cesiumPage.page.clock.pauseAt(new Date("2023-12-25T14:00:00"));
  await cesiumPage.page.waitForLoadState("networkidle");
  await cesiumPage.page.clock.runFor(1000);

  await expect(cesiumPage.page).toHaveScreenshot();
});

View File

@ -12,7 +12,7 @@
"license": [
"BSD-3-Clause"
],
"version": "2.7.60",
"version": "2.7.62",
"url": "https://www.npmjs.com/package/@zip.js/zip.js"
},
{
@ -44,7 +44,7 @@
"license": [
"Apache-2.0"
],
"version": "3.2.5",
"version": "3.2.6",
"url": "https://www.npmjs.com/package/dompurify",
"notes": "dompurify is available as both MPL-2.0 OR Apache-2.0"
},
@ -158,7 +158,7 @@
"license": [
"BSD-3-Clause"
],
"version": "7.5.0",
"version": "7.5.3",
"url": "https://www.npmjs.com/package/protobufjs"
},
{
@ -166,7 +166,7 @@
"license": [
"MIT"
],
"version": "3.0.1",
"version": "4.0.1",
"url": "https://www.npmjs.com/package/rbush"
},
{

View File

@ -1,6 +1,6 @@
{
"name": "cesium",
"version": "1.129.0",
"version": "1.130.0",
"description": "CesiumJS is a JavaScript library for creating 3D globes and 2D maps in a web browser without a plugin.",
"homepage": "http://cesium.com/cesiumjs/",
"license": "Apache-2.0",
@ -51,8 +51,8 @@
"./Specs/**/*"
],
"dependencies": {
"@cesium/engine": "^17.0.0",
"@cesium/widgets": "^12.0.0"
"@cesium/engine": "^18.0.0",
"@cesium/widgets": "^12.1.0"
},
"devDependencies": {
"@cesium/eslint-config": "^12.0.0",
@ -92,7 +92,7 @@
"karma-safari-launcher": "^1.0.0",
"karma-sourcemap-loader": "^0.4.0",
"karma-spec-reporter": "^0.0.36",
"markdownlint-cli": "^0.44.0",
"markdownlint-cli": "^0.45.0",
"merge-stream": "^2.0.0",
"mkdirp": "^3.0.1",
"node-fetch": "^3.2.10",
@ -100,11 +100,11 @@
"prettier": "3.5.3",
"prismjs": "^1.28.0",
"request": "^2.79.0",
"rimraf": "^5.0.0",
"rimraf": "^6.0.1",
"tsd-jsdoc": "^2.5.0",
"typescript": "^5.3.2",
"typescript-eslint": "^8.30.1",
"yargs": "^17.0.1"
"yargs": "^18.0.0"
},
"scripts": {
"prepare": "gulp prepare && husky && node scripts/isCI.js || playwright install --with-deps",
@ -147,7 +147,7 @@
"cspell": "npx cspell lint -c .vscode/cspell.json --unique --no-progress"
},
"engines": {
"node": ">=18.18.0"
"node": ">=20.19.0"
},
"lint-staged": {
"*.{js,cjs,mjs,ts,tsx,css,html}": [

View File

@ -4,7 +4,7 @@ import Resource from "./Resource.js";
let defaultTokenCredit;
const defaultAccessToken =
"eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJqdGkiOiJjNjVkZDYxOC01NmEwLTQ1ZmEtOGE2ZS1lYWUyODM4ZWQxYzQiLCJpZCI6MjU5LCJpYXQiOjE3NDYxMTA0Njl9.uyhPpCQKB1dodfbqTx0ZUPOLhnrSXd-qWixDxc4GYXk";
"eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJqdGkiOiI4ZTlhZTM1MC1lNjM0LTRkNTQtYTE3OC02NWI0YjQ3NTAxNzgiLCJpZCI6MjU5LCJpYXQiOjE3NDg4ODUxMzV9.8mGFxgmp1QW0MIdArET4EVn5c7DKlt_HHA_Gnnu7eF4";
/**
* Default settings for accessing the Cesium ion API.
*

View File

@ -392,6 +392,29 @@ PixelFormat.textureSizeInBytes = function (
);
};
/**
 * Computes the number of bytes needed to store a 3D texture with the given
 * pixel format, pixel datatype, and dimensions.
 * @private
 */
PixelFormat.texture3DSizeInBytes = function (
  pixelFormat,
  pixelDatatype,
  width,
  height,
  depth,
) {
  let componentCount = PixelFormat.componentsLength(pixelFormat);
  // Packed datatypes store every component in a single element.
  if (PixelDatatype.isPacked(pixelDatatype)) {
    componentCount = 1;
  }
  const bytesPerTexel =
    componentCount * PixelDatatype.sizeInBytes(pixelDatatype);
  return width * height * depth * bytesPerTexel;
};
/**
* @private
*/
@ -499,6 +522,19 @@ PixelFormat.toInternalFormat = function (pixelFormat, pixelDatatype, context) {
}
}
if (pixelDatatype === PixelDatatype.UNSIGNED_BYTE) {
switch (pixelFormat) {
case PixelFormat.RGBA:
return WebGLConstants.RGBA8;
case PixelFormat.RGB:
return WebGLConstants.RGB8;
case PixelFormat.RG:
return WebGLConstants.RG8;
case PixelFormat.RED:
return WebGLConstants.R8;
}
}
return pixelFormat;
};

View File

@ -0,0 +1,568 @@
import Cartesian3 from "../Core/Cartesian3.js";
import Check from "../Core/Check.js";
import createGuid from "../Core/createGuid.js";
import Frozen from "../Core/Frozen.js";
import defined from "../Core/defined.js";
import destroyObject from "../Core/destroyObject.js";
import DeveloperError from "../Core/DeveloperError.js";
import PixelFormat from "../Core/PixelFormat.js";
import ContextLimits from "./ContextLimits.js";
import MipmapHint from "./MipmapHint.js";
import PixelDatatype from "./PixelDatatype.js";
import Sampler from "./Sampler.js";
import TextureMagnificationFilter from "./TextureMagnificationFilter.js";
import TextureMinificationFilter from "./TextureMinificationFilter.js";
/**
* @typedef {object} Texture3D.ConstructorOptions
*
* @property {Context} context
* @property {object} [source] The source for texel values to be loaded into the texture3D.
* @property {PixelFormat} [pixelFormat=PixelFormat.RGBA] The format of each pixel, i.e., the number of components it has and what they represent.
* @property {PixelDatatype} [pixelDatatype=PixelDatatype.UNSIGNED_BYTE] The data type of each pixel.
* @property {boolean} [flipY=true] If true, the source values will be read as if the y-axis is inverted (y=0 at the top).
* @property {boolean} [skipColorSpaceConversion=false] If true, color space conversions will be skipped when reading the texel values.
* @property {Sampler} [sampler] Information about how to sample the texture3D.
* @property {number} [width] The pixel width of the texture3D. If not supplied, must be available from the source.
* @property {number} [height] The pixel height of the texture3D. If not supplied, must be available from the source.
* @property {number} [depth] The pixel depth of the texture3D. If not supplied, must be available from the source.
* @property {boolean} [preMultiplyAlpha] If true, the alpha channel will be multiplied into the other channels.
* @property {string} [id] A unique identifier for the texture3D. If this is not given, then a GUID will be created.
*
* @private
*/
/**
* A wrapper for a {@link https://developer.mozilla.org/en-US/docs/Web/API/WebGLTexture|WebGLTexture}
* to abstract away the verbose GL calls associated with setting up a texture3D.
*
* @alias Texture3D
* @constructor
*
* @param {Texture3D.ConstructorOptions} options
* @private
*/
function Texture3D(options) {
  options = options ?? Frozen.EMPTY_OBJECT;
  //>>includeStart('debug', pragmas.debug);
  Check.defined("options.context", options.context);
  //>>includeEnd('debug');
  const {
    context,
    source,
    pixelFormat = PixelFormat.RGBA,
    pixelDatatype = PixelDatatype.UNSIGNED_BYTE,
    flipY = true,
    skipColorSpaceConversion = false,
    sampler = new Sampler(),
  } = options;
  // 3D textures require WebGL2 entry points (texImage3D/texStorage3D).
  if (!context.webgl2) {
    throw new DeveloperError(
      "WebGL1 does not support texture3D. Please use a WebGL2 context.",
    );
  }
  // Dimensions may be given explicitly, or fall back to the source's fields.
  let { width, height, depth } = options;
  if (defined(source)) {
    // Make sure we are using the element's intrinsic width and height where available
    if (!defined(width)) {
      width = source.width;
    }
    if (!defined(height)) {
      height = source.height;
    }
    // depth is not used for 2D textures, but is required for 3D textures
    if (!defined(depth)) {
      depth = source.depth;
    }
  }
  // Use premultiplied alpha for opaque textures should perform better on Chrome:
  // http://media.tojicode.com/webglCamp4/#20
  const preMultiplyAlpha =
    options.preMultiplyAlpha ||
    pixelFormat === PixelFormat.RGB ||
    pixelFormat === PixelFormat.LUMINANCE;
  const internalFormat = PixelFormat.toInternalFormat(
    pixelFormat,
    pixelDatatype,
    context,
  );
  const isCompressed = PixelFormat.isCompressedFormat(internalFormat);
  //>>includeStart('debug', pragmas.debug);
  // Validate dimensions and format/datatype combinations (debug builds only).
  if (!defined(width) || !defined(height) || !defined(depth)) {
    throw new DeveloperError(
      "options requires a source field to create an initialized texture3D or width, height and depth fields to create a blank texture3D.",
    );
  }
  Check.typeOf.number.greaterThan("width", width, 0);
  if (width > ContextLimits.maximumTextureSize) {
    throw new DeveloperError(
      `Width must be less than or equal to the maximum texture3D size (${ContextLimits.maximumTextureSize}). Check maximumTextureSize.`,
    );
  }
  Check.typeOf.number.greaterThan("height", height, 0);
  if (height > ContextLimits.maximumTextureSize) {
    throw new DeveloperError(
      `Height must be less than or equal to the maximum texture3D size (${ContextLimits.maximumTextureSize}). Check maximumTextureSize.`,
    );
  }
  Check.typeOf.number.greaterThan("depth", depth, 0);
  if (depth > ContextLimits.maximumTextureSize) {
    throw new DeveloperError(
      `Depth must be less than or equal to the maximum texture3D size (${ContextLimits.maximumTextureSize}). Check maximumTextureSize.`,
    );
  }
  if (!PixelFormat.validate(pixelFormat)) {
    throw new DeveloperError("Invalid options.pixelFormat.");
  }
  if (!isCompressed && !PixelDatatype.validate(pixelDatatype)) {
    throw new DeveloperError("Invalid options.pixelDatatype.");
  }
  if (
    pixelFormat === PixelFormat.DEPTH_COMPONENT &&
    pixelDatatype !== PixelDatatype.UNSIGNED_SHORT &&
    pixelDatatype !== PixelDatatype.UNSIGNED_INT
  ) {
    throw new DeveloperError(
      "When options.pixelFormat is DEPTH_COMPONENT, options.pixelDatatype must be UNSIGNED_SHORT or UNSIGNED_INT.",
    );
  }
  if (
    pixelFormat === PixelFormat.DEPTH_STENCIL &&
    pixelDatatype !== PixelDatatype.UNSIGNED_INT_24_8
  ) {
    throw new DeveloperError(
      "When options.pixelFormat is DEPTH_STENCIL, options.pixelDatatype must be UNSIGNED_INT_24_8.",
    );
  }
  if (pixelDatatype === PixelDatatype.FLOAT && !context.floatingPointTexture) {
    throw new DeveloperError(
      "When options.pixelDatatype is FLOAT, this WebGL implementation must support the OES_texture_float extension. Check context.floatingPointTexture.",
    );
  }
  if (
    pixelDatatype === PixelDatatype.HALF_FLOAT &&
    !context.halfFloatingPointTexture
  ) {
    throw new DeveloperError(
      "When options.pixelDatatype is HALF_FLOAT, this WebGL implementation must support the OES_texture_half_float extension. Check context.halfFloatingPointTexture.",
    );
  }
  if (PixelFormat.isDepthFormat(pixelFormat)) {
    if (defined(source)) {
      throw new DeveloperError(
        "When options.pixelFormat is DEPTH_COMPONENT or DEPTH_STENCIL, source cannot be provided.",
      );
    }
    if (!context.depthTexture) {
      throw new DeveloperError(
        "When options.pixelFormat is DEPTH_COMPONENT or DEPTH_STENCIL, this WebGL implementation must support WEBGL_depth_texture. Check context.depthTexture.",
      );
    }
  }
  if (isCompressed) {
    throw new DeveloperError(
      "Texture3D does not currently support compressed formats.",
    );
  }
  //>>includeEnd('debug');
  const gl = context._gl;
  const sizeInBytes = PixelFormat.texture3DSizeInBytes(
    pixelFormat,
    pixelDatatype,
    width,
    height,
    depth,
  );
  this._id = options.id ?? createGuid();
  this._context = context;
  this._textureFilterAnisotropic = context._textureFilterAnisotropic;
  this._textureTarget = gl.TEXTURE_3D;
  this._texture = gl.createTexture();
  this._internalFormat = internalFormat;
  this._pixelFormat = pixelFormat;
  this._pixelDatatype = pixelDatatype;
  this._width = width;
  this._height = height;
  this._depth = depth;
  this._dimensions = new Cartesian3(width, height, depth);
  this._hasMipmap = false;
  this._sizeInBytes = sizeInBytes;
  this._preMultiplyAlpha = preMultiplyAlpha;
  this._flipY = flipY;
  this._initialized = false;
  this._sampler = undefined;
  this._sampler = sampler;
  // Apply the sampler's filtering and wrap state to the GL texture object.
  setupSampler(this, sampler);
  gl.activeTexture(gl.TEXTURE0);
  gl.bindTexture(this._textureTarget, this._texture);
  if (defined(source)) {
    if (skipColorSpaceConversion) {
      gl.pixelStorei(gl.UNPACK_COLORSPACE_CONVERSION_WEBGL, gl.NONE);
    } else {
      gl.pixelStorei(
        gl.UNPACK_COLORSPACE_CONVERSION_WEBGL,
        gl.BROWSER_DEFAULT_WEBGL,
      );
    }
    // Only typed-array sources are supported for 3D textures.
    if (!defined(source.arrayBufferView)) {
      throw new DeveloperError(
        "For Texture3D, options.source.arrayBufferView must be defined",
      );
    }
    loadBufferSource(this, source);
    this._initialized = true;
  } else {
    // No source: reserve GPU memory without uploading any texel data.
    loadNull(this);
  }
  gl.bindTexture(this._textureTarget, null);
}
/**
 * Upload texel data (level 0 plus any pre-computed mip levels) from typed
 * arrays into a texture3D, using immutable texStorage3D storage.
 *
 * @param {Texture3D} texture3D The texture3D to which texel values will be loaded.
 * @param {object} source The source for texel values; must provide an
 *   arrayBufferView, and may provide a mipLevels array of typed arrays.
 *
 * @private
 */
function loadBufferSource(texture3D, source) {
  const context = texture3D._context;
  const gl = context._gl;
  const target = texture3D._textureTarget;
  const internalFormat = texture3D._internalFormat;
  const width = texture3D.width;
  const height = texture3D.height;
  const depth = texture3D.depth;
  const pixelFormat = texture3D.pixelFormat;
  const pixelDatatype = texture3D.pixelDatatype;
  // Tell GL how rows are padded for this format/datatype/row-width combination.
  const unpackAlignment = PixelFormat.alignmentInBytes(
    pixelFormat,
    pixelDatatype,
    width,
  );
  gl.pixelStorei(gl.UNPACK_ALIGNMENT, unpackAlignment);
  gl.pixelStorei(gl.UNPACK_PREMULTIPLY_ALPHA_WEBGL, false);
  gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, false);
  const arrayBufferView = source.arrayBufferView;
  if (texture3D.flipY) {
    console.warn("texture3D.flipY is not supported.");
  }
  const mipLevels = source.mipLevels;
  const levels = mipLevels && mipLevels.length ? mipLevels.length + 1 : 1;
  // Allocate immutable storage for the full mip chain, then fill level 0.
  gl.texStorage3D(target, levels, internalFormat, width, height, depth);
  gl.texSubImage3D(
    target,
    0,
    0,
    0,
    0,
    width,
    height,
    depth,
    pixelFormat,
    PixelDatatype.toWebGLConstant(pixelDatatype, context),
    arrayBufferView,
  );
  if (levels > 1) {
    // Fill the remaining levels, halving each dimension per level.
    let mipWidth = width;
    let mipHeight = height;
    let mipDepth = depth;
    for (const [i, mipData] of mipLevels.entries()) {
      mipWidth = nextMipSize(mipWidth);
      mipHeight = nextMipSize(mipHeight);
      mipDepth = nextMipSize(mipDepth);
      gl.texSubImage3D(
        target,
        i + 1,
        0,
        0,
        0,
        mipWidth,
        mipHeight,
        mipDepth,
        pixelFormat,
        PixelDatatype.toWebGLConstant(pixelDatatype, context),
        mipData,
      );
    }
  }
}
/**
 * Compute a dimension of the image for the next mip level.
 *
 * Each level halves the previous dimension (rounding down) and is clamped
 * to a minimum of 1 texel so the mip chain terminates at 1x1x1.
 *
 * @param {number} currentSize The size of the current mip level.
 * @returns {number} The size of the next mip level.
 *
 * @private
 */
function nextMipSize(currentSize) {
  // Math.floor already yields an integer here; the previous `| 0` was
  // redundant and would additionally truncate values to signed 32 bits.
  return Math.max(Math.floor(currentSize / 2), 1);
}
/**
 * Reserve GPU memory for a texture3D without uploading any texel data.
 *
 * @param {Texture3D} texture3D The texture3D to be initialized with null values.
 *
 * @private
 */
function loadNull(texture3D) {
  const context = texture3D._context;
  const gl = context._gl;
  const webglDatatype = PixelDatatype.toWebGLConstant(
    texture3D._pixelDatatype,
    context,
  );
  gl.texImage3D(
    texture3D._textureTarget,
    0,
    texture3D._internalFormat,
    texture3D._width,
    texture3D._height,
    texture3D._depth,
    0,
    texture3D._pixelFormat,
    webglDatatype,
    null,
  );
}
/**
 * Factory wrapper around the Texture3D constructor, provided so that
 * texture creation can be replaced with a mock/spy in tests.
 *
 * @param {Texture3D.ConstructorOptions} options
 * @returns {Texture3D}
 * @private
 */
Texture3D.create = (options) => new Texture3D(options);
Object.defineProperties(Texture3D.prototype, {
  /**
   * A unique id for the texture3D
   * @memberof Texture3D.prototype
   * @type {string}
   * @readonly
   * @private
   */
  id: {
    get: function () {
      return this._id;
    },
  },
  /**
   * The sampler to use when sampling this texture3D.
   * Create a sampler by calling {@link Sampler}. If this
   * parameter is not specified, a default sampler is used. The default sampler clamps texture3D
   * coordinates in both directions, uses linear filtering for both magnification and minification,
   * and uses a maximum anisotropy of 1.0.
   * @memberof Texture3D.prototype
   * @type {Sampler}
   * @private
   */
  sampler: {
    get: function () {
      return this._sampler;
    },
    set: function (sampler) {
      setupSampler(this, sampler);
      this._sampler = sampler;
    },
  },
  /**
   * The format of each pixel, i.e., the number of components it has and what they represent.
   * @memberof Texture3D.prototype
   * @type {PixelFormat}
   * @readonly
   * @private
   */
  pixelFormat: {
    get: function () {
      return this._pixelFormat;
    },
  },
  /**
   * The data type of each pixel.
   * @memberof Texture3D.prototype
   * @type {PixelDatatype}
   * @readonly
   * @private
   */
  pixelDatatype: {
    get: function () {
      return this._pixelDatatype;
    },
  },
  /**
   * The width, height, and depth of the texture3D, in pixels.
   * @memberof Texture3D.prototype
   * @type {Cartesian3}
   * @readonly
   * @private
   */
  dimensions: {
    get: function () {
      return this._dimensions;
    },
  },
  /**
   * True if the alpha channel is multiplied into the other channels.
   * @memberof Texture3D.prototype
   * @type {boolean}
   * @readonly
   * @private
   */
  preMultiplyAlpha: {
    get: function () {
      return this._preMultiplyAlpha;
    },
  },
  /**
   * The flipY value supplied at construction. NOTE(review): flipY is stored
   * but not applied when loading 3D texture data; see loadBufferSource.
   * @memberof Texture3D.prototype
   * @type {boolean}
   * @readonly
   * @private
   */
  flipY: {
    get: function () {
      return this._flipY;
    },
  },
  /**
   * The pixel width of the texture3D.
   * @memberof Texture3D.prototype
   * @type {number}
   * @readonly
   * @private
   */
  width: {
    get: function () {
      return this._width;
    },
  },
  /**
   * The pixel height of the texture3D.
   * @memberof Texture3D.prototype
   * @type {number}
   * @readonly
   * @private
   */
  height: {
    get: function () {
      return this._height;
    },
  },
  /**
   * The pixel depth of the texture3D.
   * @memberof Texture3D.prototype
   * @type {number}
   * @readonly
   * @private
   */
  depth: {
    get: function () {
      return this._depth;
    },
  },
  /**
   * The GPU memory used by this texture3D, in bytes. When mipmaps have been
   * generated, the full mip chain is estimated as 8/7 of the base size.
   * @memberof Texture3D.prototype
   * @type {number}
   * @readonly
   * @private
   */
  sizeInBytes: {
    get: function () {
      if (this._hasMipmap) {
        return Math.floor((this._sizeInBytes * 8) / 7);
      }
      return this._sizeInBytes;
    },
  },
  /**
   * The WebGL texture target (gl.TEXTURE_3D).
   * @memberof Texture3D.prototype
   * @readonly
   * @private
   */
  _target: {
    get: function () {
      return this._textureTarget;
    },
  },
});
/**
 * Apply a sampler's filtering and wrap settings to a texture3D's GL state,
 * overriding filters that the context or pixel format cannot support.
 *
 * @param {Texture3D} texture3D The texture3D to be sampled by this sampler
 * @param {Sampler} sampler Information about how to sample the texture3D
 * @private
 */
function setupSampler(texture3D, sampler) {
  let minFilter = sampler.minificationFilter;
  let magFilter = sampler.magnificationFilter;
  const usesMipmap =
    minFilter === TextureMinificationFilter.NEAREST_MIPMAP_NEAREST ||
    minFilter === TextureMinificationFilter.NEAREST_MIPMAP_LINEAR ||
    minFilter === TextureMinificationFilter.LINEAR_MIPMAP_NEAREST ||
    minFilter === TextureMinificationFilter.LINEAR_MIPMAP_LINEAR;
  const context = texture3D._context;
  const pixelFormat = texture3D._pixelFormat;
  const pixelDatatype = texture3D._pixelDatatype;

  // float textures only support nearest filtering unless the linear extensions are supported
  const needsNearest =
    (pixelDatatype === PixelDatatype.FLOAT && !context.textureFloatLinear) ||
    (pixelDatatype === PixelDatatype.HALF_FLOAT &&
      !context.textureHalfFloatLinear);
  if (needsNearest) {
    // override the sampler's settings, preserving mipmapping if requested
    minFilter = usesMipmap
      ? TextureMinificationFilter.NEAREST_MIPMAP_NEAREST
      : TextureMinificationFilter.NEAREST;
    magFilter = TextureMagnificationFilter.NEAREST;
  }

  // WebGL 2 depth texture3D only support nearest filtering. See section 3.8.13 OpenGL ES 3 spec
  if (PixelFormat.isDepthFormat(pixelFormat)) {
    minFilter = TextureMinificationFilter.NEAREST;
    magFilter = TextureMagnificationFilter.NEAREST;
  }

  const gl = context._gl;
  const target = texture3D._textureTarget;
  gl.activeTexture(gl.TEXTURE0);
  gl.bindTexture(target, texture3D._texture);
  gl.texParameteri(target, gl.TEXTURE_MIN_FILTER, minFilter);
  gl.texParameteri(target, gl.TEXTURE_MAG_FILTER, magFilter);
  gl.texParameteri(target, gl.TEXTURE_WRAP_S, sampler.wrapS);
  gl.texParameteri(target, gl.TEXTURE_WRAP_T, sampler.wrapT);
  if (defined(texture3D._textureFilterAnisotropic)) {
    gl.texParameteri(
      target,
      texture3D._textureFilterAnisotropic.TEXTURE_MAX_ANISOTROPY_EXT,
      sampler.maximumAnisotropy,
    );
  }
  gl.bindTexture(target, null);
}
/**
* @param {MipmapHint} [hint=MipmapHint.DONT_CARE] optional.
* @private
* @exception {DeveloperError} Cannot call generateMipmap when the texture3D pixel format is DEPTH_COMPONENT or DEPTH_STENCIL.
* @exception {DeveloperError} Cannot call generateMipmap when the texture3D pixel format is a compressed format.
* @exception {DeveloperError} hint is invalid.
* @exception {DeveloperError} This texture3D's width must be a power of two to call generateMipmap() in a WebGL1 context.
* @exception {DeveloperError} This texture3D's height must be a power of two to call generateMipmap() in a WebGL1 context.
* @exception {DeveloperError} This texture3D was destroyed, i.e., destroy() was called.
*/
Texture3D.prototype.generateMipmap = function (hint) {
  hint = hint ?? MipmapHint.DONT_CARE;
  //>>includeStart('debug', pragmas.debug);
  // Depth and compressed formats cannot be mipmapped by the GL.
  if (PixelFormat.isDepthFormat(this._pixelFormat)) {
    throw new DeveloperError(
      "Cannot call generateMipmap when the texture3D pixel format is DEPTH_COMPONENT or DEPTH_STENCIL.",
    );
  }
  if (PixelFormat.isCompressedFormat(this._pixelFormat)) {
    throw new DeveloperError(
      "Cannot call generateMipmap with a compressed pixel format.",
    );
  }
  if (!MipmapHint.validate(hint)) {
    throw new DeveloperError("hint is invalid.");
  }
  //>>includeEnd('debug');
  // Remember that mip storage exists so sizeInBytes can account for it.
  this._hasMipmap = true;
  const gl = this._context._gl;
  const target = this._textureTarget;
  gl.hint(gl.GENERATE_MIPMAP_HINT, hint);
  gl.activeTexture(gl.TEXTURE0);
  gl.bindTexture(target, this._texture);
  gl.generateMipmap(target);
  gl.bindTexture(target, null);
};
/**
 * Returns true if this object was destroyed; otherwise, false.
 *
 * @returns {boolean} Always <code>false</code> from this implementation;
 * presumably {@link Texture3D#destroy} (via <code>destroyObject</code>)
 * replaces the instance's functions so that a destroyed instance reports
 * <code>true</code> — confirm against <code>destroyObject</code>.
 *
 * @see Texture3D#destroy
 */
Texture3D.prototype.isDestroyed = function () {
  return false;
};
/**
 * Destroys the WebGL resources held by this 3D texture and marks the
 * object as destroyed.
 *
 * @returns {undefined} The return value of <code>destroyObject</code>.
 *
 * @see Texture3D#isDestroyed
 */
Texture3D.prototype.destroy = function () {
  const gl = this._context._gl;
  gl.deleteTexture(this._texture);
  return destroyObject(this);
};
export default Texture3D;

View File

@ -24,6 +24,7 @@ function createUniform(gl, activeUniform, uniformName, location) {
case gl.FLOAT_VEC4:
return new UniformFloatVec4(gl, activeUniform, uniformName, location);
case gl.SAMPLER_2D:
case gl.SAMPLER_3D:
case gl.SAMPLER_CUBE:
return new UniformSampler(gl, activeUniform, uniformName, location);
case gl.INT:

View File

@ -39,6 +39,7 @@ function createUniformArray(gl, activeUniform, uniformName, locations) {
locations,
);
case gl.SAMPLER_2D:
case gl.SAMPLER_3D:
case gl.SAMPLER_CUBE:
return new UniformArraySampler(gl, activeUniform, uniformName, locations);
case gl.INT:

View File

@ -4,7 +4,7 @@ import Resource from "../Core/Resource.js";
let defaultTokenCredit;
const defaultAccessToken =
"AAPTxy8BH1VEsoebNVZXo8HurEOF051kAEKlhkOhBEc9BmRwxo7rIWoXrG5RJkAAlrdL0n_Ha7z7WbgHT-DpRd_LxLSeba-Kg8HsK3kJVUF2iiOVOza8OZ-DIdJzjyBZ9IIc5W0JagV3wyuZ9CKE3RHRSADOw-9FbeeUyfJA8iS2mKcK8768q4g11vlHUzVYFhWoKctRWWeG3Amwm6ez2n3vMc1fTSqDdbFP2O6x5eslQDE.AT1_SOxgo20E";
"AAPTxy8BH1VEsoebNVZXo8HurEOF051kAEKlhkOhBEc9BmTP0IZoWRBEdmOPutLUJ2JcHWgott0sjyyyJ6-Nu6O_MLmmkiswXG4qzrOMMu0N8VU2LUbsVXxF7KR041TgbHlgZPBYpt4LZXFH4jpS6NIBFm_JqZPzxLgFhbAYSs4yh_vDo1I7Qxr9dPzWiIPm5KHQ99Hs2uH4JH4356FxozccoIsuX2H4LyTBRUZ36FqFikI.AT1_FcZPNruQ";
/**
* Default options for accessing the ArcGIS image tile service.
*

View File

@ -62,6 +62,7 @@ import Cesium3DTilesetBaseTraversal from "./Cesium3DTilesetBaseTraversal.js";
import Cesium3DTilesetSkipTraversal from "./Cesium3DTilesetSkipTraversal.js";
import Ray from "../Core/Ray.js";
import DynamicEnvironmentMapManager from "./DynamicEnvironmentMapManager.js";
import ImageryLayerCollection from "./ImageryLayerCollection.js";
/**
* @typedef {Object} Cesium3DTileset.ConstructorOptions
@ -119,6 +120,7 @@ import DynamicEnvironmentMapManager from "./DynamicEnvironmentMapManager.js";
* @property {boolean} [enableCollision=false] When <code>true</code>, enables collisions for camera or CPU picking. While this is <code>true</code> the camera will be prevented from going below the tileset surface if {@link ScreenSpaceCameraController#enableCollisionDetection} is true.
* @property {boolean} [projectTo2D=false] Whether to accurately project the tileset to 2D. If this is true, the tileset will be projected accurately to 2D, but it will use more memory to do so. If this is false, the tileset will use less memory and will still render in 2D / CV mode, but its projected positions may be inaccurate. This cannot be set after the tileset has been created.
* @property {boolean} [enablePick=false] Whether to allow collision and CPU picking with <code>pick</code> when using WebGL 1. If using WebGL 2 or above, this option will be ignored. If using WebGL 1 and this is true, the <code>pick</code> operation will work correctly, but it will use more memory to do so. If running with WebGL 1 and this is false, the model will use less memory, but <code>pick</code> will always return <code>undefined</code>. This cannot be set after the tileset has loaded.
* @property {boolean} [asynchronouslyLoadImagery=false] Whether loading imagery that is draped over the tileset should be done asynchronously. If this is <code>true</code>, then tile content will be displayed with its original texture until the imagery texture is loaded. If this is <code>false</code>, then the tile content will not be displayed until the imagery is ready.
* @property {string} [debugHeatmapTilePropertyName] The tile variable to colorize as a heatmap. All rendered tiles will be colorized relative to each other's specified variable value.
* @property {boolean} [debugFreezeFrame=false] For debugging only. Determines if only the tiles from last frame should be used for rendering.
* @property {boolean} [debugColorizeTiles=false] For debugging only. When true, assigns a random color to each tile.
@ -347,6 +349,60 @@ function Cesium3DTileset(options) {
this._vectorKeepDecodedPositions =
options.vectorKeepDecodedPositions ?? false;
/**
* The collection of <code>ImageryLayer</code> objects providing 2D georeferenced
* image data that will be rendered over the tileset.
*
* @private
* @type {ImageryLayerCollection}
* @readonly
*/
this._imageryLayers = new ImageryLayerCollection(this);
/**
* A counter that will be increased for each modification of the
* imagery layers (i.e. for each layerAdded, layerRemoved,
* layerMoved, or layerShownOrHidden event). This can be used
* by the <code>ModelImagery</code> class to detect changes in
* the imagery, and trigger the appropriate updates.
*
* @private
*/
this._imageryLayersModificationCounter = 0;
/**
* A listener that will be attached to the layerAdded, layerRemoved,
* layerMoved, and layerShownOrHidden events of the imagery layers,
* and increment the imagery layers modification counter for each
* event.
*
* @private
* @readonly
*/
this._imageryLayersListener = () => {
this._imageryLayersModificationCounter++;
};
// Attach the imagery layers listener to all events of
// the imagery layers collection
this.imageryLayers.layerAdded.addEventListener(this._imageryLayersListener);
this.imageryLayers.layerRemoved.addEventListener(this._imageryLayersListener);
this.imageryLayers.layerMoved.addEventListener(this._imageryLayersListener);
this.imageryLayers.layerShownOrHidden.addEventListener(
this._imageryLayersListener,
);
/**
* Whether loading imagery that is draped over the tileset should be
* done asynchronously. If this is <code>true</code>, then tile content
* will be displayed with its original texture until the imagery texture
* is loaded. If this is <code>false</code>, then the tile content will
* not be displayed until the imagery is ready.
*
* @private
*/
this._asynchronouslyLoadImagery = options.asynchronouslyLoadImagery ?? false;
/**
* Preload tiles when <code>tileset.show</code> is <code>false</code>. Loads tiles as if the tileset is visible but does not render them.
*
@ -1097,6 +1153,69 @@ Object.defineProperties(Cesium3DTileset.prototype, {
},
},
/**
* The collection of <code>ImageryLayer</code> objects providing 2D georeferenced
* image data that will be rendered over the tileset.
*
* The imagery will be draped over glTF, B3DM, PNTS, or GeoJSON tile content.
*
* @see ImageryLayer
*
* @memberof Cesium3DTileset.prototype
* @readonly
* @type {ImageryLayerCollection}
*
* @experimental This feature is not final and is subject to change without Cesium's standard deprecation policy.
*
* @example
* // Drape Bing Maps Aerial imagery over the tileset
* const imageryProvider = await Cesium.createWorldImageryAsync({
* style: Cesium.IonWorldImageryStyle.AERIAL,
* });
* const imageryLayer = new ImageryLayer(imageryProvider);
* tileset.imageryLayers.add(imageryLayer);
*/
imageryLayers: {
get: function () {
return this._imageryLayers;
},
},
/**
* The modification counter of the imagery layers.
*
* This is incremented for each modification (layerAdded, layerMoved,
* layerRemoved, layerShownOrHidden) of the imagery layers, and can
* be used <b>internally</b> (by <code>ModelPrimitiveImagery</code>)
* to trigger updates whenever the collection of imagery layers
* changes.
*
* @memberof Cesium3DTileset.prototype
* @readonly
* @type {number}
* @private
*/
imageryLayersModificationCounter: {
get: function () {
return this._imageryLayersModificationCounter;
},
},
/**
* Whether loading imagery that is draped over the tileset should be
* done asynchronously.
*
* @memberof Cesium3DTileset.prototype
* @readonly
* @type {boolean}
* @private
*/
asynchronouslyLoadImagery: {
get: function () {
return this._asynchronouslyLoadImagery;
},
},
/**
* Gets the tileset's properties dictionary object, which contains metadata about per-feature properties.
* <p>
@ -3338,6 +3457,8 @@ Cesium3DTileset.prototype.updateForPass = function (
Check.typeOf.object("tilesetPassState", tilesetPassState);
//>>includeEnd('debug');
this.imageryLayers._update();
const pass = tilesetPassState.pass;
if (
(pass === Cesium3DTilePass.PRELOAD &&
@ -3484,6 +3605,23 @@ Cesium3DTileset.prototype.destroy = function () {
}
this._environmentMapManager = undefined;
if (!this._imageryLayers.isDestroyed()) {
this.imageryLayers.layerAdded.removeEventListener(
this._imageryLayersListener,
);
this.imageryLayers.layerRemoved.removeEventListener(
this._imageryLayersListener,
);
this.imageryLayers.layerMoved.removeEventListener(
this._imageryLayersListener,
);
this.imageryLayers.layerShownOrHidden.removeEventListener(
this._imageryLayersListener,
);
this._imageryLayers.destroy();
}
this._imageryLayers = undefined;
return destroyObject(this);
};

View File

@ -409,17 +409,16 @@ GlobeSurfaceShaderSet.prototype.getShaderProgram = function (options) {
u_dayTextureTexCoordsRectangle[${i}],\n\
u_dayTextureTranslationAndScale[${i}],\n\
${applyAlpha ? `u_dayTextureAlpha[${i}]` : "1.0"},\n\
${applyDayNightAlpha ? `u_dayTextureNightAlpha[${i}]` : "1.0"},\n${
applyDayNightAlpha ? `u_dayTextureDayAlpha[${i}]` : "1.0"
},\n${applyBrightness ? `u_dayTextureBrightness[${i}]` : "0.0"},\n\
${applyDayNightAlpha ? `u_dayTextureNightAlpha[${i}]` : "1.0"},\n\
${applyDayNightAlpha ? `u_dayTextureDayAlpha[${i}]` : "1.0"},\n\
${applyBrightness ? `u_dayTextureBrightness[${i}]` : "0.0"},\n\
${applyContrast ? `u_dayTextureContrast[${i}]` : "0.0"},\n\
${applyHue ? `u_dayTextureHue[${i}]` : "0.0"},\n\
${applySaturation ? `u_dayTextureSaturation[${i}]` : "0.0"},\n\
${applyGamma ? `u_dayTextureOneOverGamma[${i}]` : "0.0"},\n\
${applySplit ? `u_dayTextureSplit[${i}]` : "0.0"},\n\
${colorToAlpha ? `u_colorsToAlpha[${i}]` : "vec4(0.0)"},\n\
nightBlend\
);\n`;
nightBlend\);\n`;
if (hasImageryLayerCutout) {
computeDayColor +=
"\

View File

@ -127,7 +127,7 @@ import TileImagery from "./TileImagery.js";
/**
* An imagery layer that displays tiled image data from a single imagery provider
* on a {@link Globe}.
* on a {@link Globe} or {@link Cesium3DTileset}.
*
* @alias ImageryLayer
* @constructor
@ -135,8 +135,10 @@ import TileImagery from "./TileImagery.js";
* @param {ImageryProvider} [imageryProvider] The imagery provider to use.
* @param {ImageryLayer.ConstructorOptions} [options] An object describing initialization options
*
* @see ImageryLayer.fromProviderAsync
* @see ImageryLayer.fromWorldImagery
* @see {@link ImageryLayer.fromProviderAsync} for creating an imagery layer from an asynchronous imagery provider.
* @see {@link ImageryLayer.fromWorldImagery} for creating an imagery layer for Cesium ion's default global base imagery layer.
* @see {@link Scene#imageryLayers} for adding an imagery layer to the globe.
* @see {@link Cesium3DTileset#imageryLayers} for adding an imagery layer to a 3D tileset.
*
* @example
* // Add an OpenStreetMaps layer
@ -155,6 +157,19 @@ import TileImagery from "./TileImagery.js";
* const imageryLayer = Cesium.ImageryLayer.fromProviderAsync(Cesium.IonImageryProvider.fromAssetId(3812));
* imageryLayer.alpha = 0.5;
* scene.imageryLayers.add(imageryLayer);
*
* @example
* // Drape Bing Maps Aerial imagery over a 3D tileset
* const tileset = await Cesium.Cesium3DTileset.fromUrl(
* "http://localhost:8002/tilesets/Seattle/tileset.json"
* );
* scene.primitives.add(tileset);
*
* const imageryProvider = await Cesium.createWorldImageryAsync({
* style: Cesium.IonWorldImageryStyle.AERIAL,
* });
* const imageryLayer = new ImageryLayer(imageryProvider);
* tileset.imageryLayers.add(imageryLayer);
*/
function ImageryLayer(imageryProvider, options) {
this._imageryProvider = imageryProvider;
@ -663,7 +678,7 @@ ImageryLayer.prototype.getImageryRectangle = function () {
*
* @private
*
* @param {Tile} tile The terrain tile.
* @param {QuadtreeTile} tile The terrain tile.
* @param {TerrainProvider|undefined} terrainProvider The terrain provider associated with the terrain tile.
* @param {number} insertionPoint The position to insert new skeletons before in the tile's imagery list.
* @returns {boolean} true if this layer overlaps any portion of the terrain tile; otherwise, false.

View File

@ -7,11 +7,12 @@ import Rectangle from "../Core/Rectangle.js";
import ImageryLayer from "./ImageryLayer.js";
/**
* An ordered collection of imagery layers.
* An ordered collection of imagery layers for rendering raster imagery on a {@link Globe} or {@link Cesium3DTileset}.
*
* @alias ImageryLayerCollection
* @constructor
*
* @see {@link Scene#imageryLayers} for manipulating imagery layers on the globe.
* @see {@link Cesium3DTileset#imageryLayers} for manipulating imagery layers on a 3D tileset.
* @demo {@link https://sandcastle.cesium.com/index.html?src=Imagery%20Adjustment.html|Cesium Sandcastle Imagery Adjustment Demo}
* @demo {@link https://sandcastle.cesium.com/index.html?src=Imagery%20Layers%20Manipulation.html|Cesium Sandcastle Imagery Manipulation Demo}
*/

View File

@ -0,0 +1,108 @@
/**
 * Internal class for texture coordinate and index range computations.
 *
 * Describes an axis-aligned rectangle by its minimum (left/bottom) and
 * maximum (right/top) corner coordinates.
 *
 * @private
 */
class CartesianRectangle {
  /**
   * Creates a new instance
   *
   * @param {number} [minX=0] The minimum x-coordinate
   * @param {number} [minY=0] The minimum y-coordinate
   * @param {number} [maxX=0] The maximum x-coordinate
   * @param {number} [maxY=0] The maximum y-coordinate
   */
  constructor(minX, minY, maxX, maxY) {
    this._left = minX ?? 0.0;
    this._bottom = minY ?? 0.0;
    this._right = maxX ?? 0.0;
    this._top = maxY ?? 0.0;
  }

  /**
   * The minimum x-coordinate
   *
   * @returns {number} The coordinate
   */
  get minX() {
    return this._left;
  }

  set minX(value) {
    this._left = value;
  }

  /**
   * The minimum y-coordinate
   *
   * @returns {number} The coordinate
   */
  get minY() {
    return this._bottom;
  }

  set minY(value) {
    this._bottom = value;
  }

  /**
   * The maximum x-coordinate
   *
   * @returns {number} The coordinate
   */
  get maxX() {
    return this._right;
  }

  set maxX(value) {
    this._right = value;
  }

  /**
   * The maximum y-coordinate
   *
   * @returns {number} The coordinate
   */
  get maxY() {
    return this._top;
  }

  set maxY(value) {
    this._top = value;
  }

  /**
   * Returns whether this rectangle contains the given coordinates,
   * using the default containment check, which includes the
   * minimum point, but excludes the maximum point
   *
   * @param {number} x The x-coordinate
   * @param {number} y The y-coordinate
   * @returns {boolean} The result
   */
  contains(x, y) {
    const insideX = x >= this._left && x < this._right;
    const insideY = y >= this._bottom && y < this._top;
    return insideX && insideY;
  }

  /**
   * Returns whether this rectangle contains the given coordinates,
   * excluding the border
   *
   * @param {number} x The x-coordinate
   * @param {number} y The y-coordinate
   * @returns {boolean} The result
   */
  containsExclusive(x, y) {
    const insideX = x > this._left && x < this._right;
    const insideY = y > this._bottom && y < this._top;
    return insideX && insideY;
  }

  /**
   * Returns whether this rectangle contains the given coordinates,
   * including the border
   *
   * @param {number} x The x-coordinate
   * @param {number} y The y-coordinate
   * @returns {boolean} The result
   */
  containsInclusive(x, y) {
    const insideX = x >= this._left && x <= this._right;
    const insideY = y >= this._bottom && y <= this._top;
    return insideX && insideY;
  }
}
export default CartesianRectangle;

View File

@ -0,0 +1,25 @@
/**
 * A snapshot of the values that affect the appearance of an
 * <code>ImageryLayer</code>.
 *
 * This is used in the <code>ModelImagery</code> to detect changes in
 * the imagery settings: The <code>ModelImagery</code> stores one
 * instance per imagery layer. During the <code>update</code>
 * call, it checks whether any of the settings was changed.
 * If this is the case, the draw commands of the model are reset.
 *
 * @private
 */
class ImageryConfiguration {
  /**
   * Captures the current appearance settings of the given layer.
   *
   * @param {ImageryLayer} imageryLayer The layer whose settings are copied
   */
  constructor(imageryLayer) {
    const { alpha, brightness, contrast, hue, saturation, gamma, colorToAlpha } =
      imageryLayer;
    this.alpha = alpha;
    this.brightness = brightness;
    this.contrast = contrast;
    this.hue = hue;
    this.saturation = saturation;
    this.gamma = gamma;
    this.colorToAlpha = colorToAlpha;
  }
}
export default ImageryConfiguration;

View File

@ -0,0 +1,488 @@
import defined from "../../Core/defined.js";
import Rectangle from "../../Core/Rectangle.js";
import CartesianRectangle from "./CartesianRectangle.js";
const imageryBoundsScratch = new Rectangle();
const overlappedRectangleScratch = new Rectangle();
const clippedRectangleScratch = new Rectangle();
const nativeInputRectangleScratch = new Rectangle();
const nativeImageryBoundsScratch = new Rectangle();
const nativeClippedImageryBoundsScratch = new Rectangle();
/**
 * A class containing information about a piece of imagery.
 *
 * This represents the result of computing the imagery tiles that
 * are covered by a given <code>Rectangle</code> (and which part
 * of that imagery is covered, in terms of texture coordinates).
 *
 * This class represents a plain structure, without member functions.
 * Instances are created with the <code>createImageryCoverages</code>
 * function.
 *
 * The instances are used by the <code>ModelPrimitiveImagery</code>, to
 * represent the imagery tiles that are covered by the cartographic
 * bounding rectangle of the primitive positions.
 *
 * Implementation note for ImageryCoverage:
 *
 * Some of the static functions in this class have been extracted from
 * <code>ImageryLayer.prototype._createTileImagerySkeletons</code>
 * See https://github.com/CesiumGS/cesium/blob/5eaa2280f495d8f300d9e1f0497118c97aec54c8/packages/engine/Source/Scene/ImageryLayer.js#L700
 * An instance of this class roughly corresponds to the <code>TileImagery</code>
 * that is created there.
 *
 * @private
 */
class ImageryCoverage {
  /**
   * Creates a new instance.
   *
   * @param {number} x x-coordinate of the imagery tile
   * @param {number} y y-coordinate of the imagery tile
   * @param {number} level level of the imagery tile
   * @param {CartesianRectangle} textureCoordinateRectangle The texture coordinate
   * rectangle from the imagery tile that is covered
   * @param {Imagery} imagery The imagery
   */
  constructor(x, y, level, textureCoordinateRectangle, imagery) {
    this._x = x;
    this._y = y;
    this._level = level;
    // Stored by reference; see the textureCoordinateRectangle getter
    this._textureCoordinateRectangle = textureCoordinateRectangle;
    this._imagery = imagery;
  }

  /**
   * The x-coordinate of the imagery tile, typically correlated with longitude
   *
   * @type {number}
   * @readonly
   */
  get x() {
    return this._x;
  }

  /**
   * The y-coordinate of the imagery tile, typically correlated with latitude
   *
   * @type {number}
   * @readonly
   */
  get y() {
    return this._y;
  }

  /**
   * The level of the imagery tile
   *
   * @type {number}
   * @readonly
   */
  get level() {
    return this._level;
  }

  /**
   * The texture coordinate range that is covered from the
   * imagery tile.
   *
   * This is a <code>CartesianRectangle</code> that contains the
   * (minU, minV, maxU, maxV) coordinate range.
   *
   * Clients may not modify the returned instance.
   *
   * @type {CartesianRectangle}
   * @readonly
   */
  get textureCoordinateRectangle() {
    return this._textureCoordinateRectangle;
  }

  /**
   * Returns the imagery
   *
   * @type {Imagery}
   * @readonly
   */
  get imagery() {
    return this._imagery;
  }

  /**
   * Computes the <code>ImageryCoverage</code> objects that describe the imagery
   * tiles and the respective texture coordinates that are covered by the given
   * input rectangle in the given imagery data.
   *
   * The given imagery level will be clamped if necessary, to be in the valid
   * range for the imagery provider of the given imagery layer.
   *
   * Returns an empty array when the layer is hidden (show === false).
   *
   * @param {Rectangle} inputRectangle The input rectangle (e.g. tile bounds)
   * @param {ImageryLayer} imageryLayer The imagery layer
   * @param {number} inputImageryLevel The level for which the imagery coverage
   * should be computed.
   * @returns {ImageryCoverage[]} The objects describing the covered imagery
   * and the respective texture coordinates
   */
  static createImageryCoverages(
    inputRectangle,
    imageryLayer,
    inputImageryLevel,
  ) {
    if (!imageryLayer.show) {
      return [];
    }
    const imageryProvider = imageryLayer.imageryProvider;
    const imageryLevel = ImageryCoverage._clampImageryLevel(
      imageryProvider,
      inputImageryLevel,
    );
    // Compute the range, in integer coordinates, of imagery
    // tiles that are covered by the input rectangle
    const imageryBounds = Rectangle.intersection(
      imageryProvider.rectangle,
      imageryLayer.rectangle,
      imageryBoundsScratch,
    );
    const imageryTilingScheme = imageryProvider.tilingScheme;
    const imageryRange = ImageryCoverage._computeImageryRange(
      inputRectangle,
      imageryBounds,
      imageryTilingScheme,
      imageryLevel,
    );
    // Convert the input rectangle and the imagery bounds into
    // the native coordinate system of the tiling scheme
    const nativeInputRectangle = nativeInputRectangleScratch;
    imageryTilingScheme.rectangleToNativeRectangle(
      inputRectangle,
      nativeInputRectangle,
    );
    // NOTE(review): nativeImageryBounds is computed here but never read
    // below — possibly a leftover from the extraction out of
    // _createTileImagerySkeletons; confirm before removing.
    const nativeImageryBounds = nativeImageryBoundsScratch;
    imageryTilingScheme.rectangleToNativeRectangle(
      imageryBounds,
      nativeImageryBounds,
    );
    // A function that returns an imagery rectangle, based on (x, y, level),
    // clipped to the imagery bounds (or undefined if there is no intersection
    // between the imagery rectangle and the bounds).
    // Note: the returned value is a module-level scratch Rectangle, so it
    // is only valid until the next invocation of this function.
    const computeClippedImageryRectangle = (x, y, level) => {
      const localImageryRectangle = imageryTilingScheme.tileXYToRectangle(
        x,
        y,
        level,
      );
      const localClippedImageryRectangle = Rectangle.intersection(
        localImageryRectangle,
        imageryBounds,
        clippedRectangleScratch,
      );
      if (!defined(localClippedImageryRectangle)) {
        return undefined;
      }
      const nativeClippedImageryBounds = nativeClippedImageryBoundsScratch;
      imageryTilingScheme.rectangleToNativeRectangle(
        localClippedImageryRectangle,
        nativeClippedImageryBounds,
      );
      return nativeClippedImageryBounds;
    };
    const imageryCoverages = ImageryCoverage._computeImageryCoverages(
      imageryLayer,
      imageryRange,
      imageryLevel,
      nativeInputRectangle,
      computeClippedImageryRectangle,
    );
    return imageryCoverages;
  }

  /**
   * Validate the given imagery level against the constraints of the
   * given imagery provider.
   *
   * This will clamp the given level to be in the range
   * <code>[minimumLevel, maximumLevel)</code> that is
   * defined by the given imagery provider (and cut off
   * any fractional part that the input may have)
   *
   * NOTE(review): the clamp uses <code>maximumLevel - 1</code>, so the
   * provider's maximumLevel itself is excluded (consistent with the
   * half-open range documented above, and harmless when maximumLevel is
   * undefined, since Infinity - 1 === Infinity). Confirm this exclusion
   * is intended — verify against _createTileImagerySkeletons, which
   * appears to allow maximumLevel inclusively.
   *
   * @param {ImageryProvider} imageryProvider The imagery provider
   * @param {number} imageryLevel The imagery level
   * @returns {number} The validated level
   */
  static _clampImageryLevel(imageryProvider, imageryLevel) {
    const minimumLevel = imageryProvider.minimumLevel ?? 0;
    const maximumLevel =
      imageryProvider.maximumLevel ?? Number.POSITIVE_INFINITY;
    const clampedImageryLevel = Math.min(
      maximumLevel - 1,
      Math.max(minimumLevel, imageryLevel),
    );
    const validImageryLevel = Math.floor(clampedImageryLevel);
    return validImageryLevel;
  }

  /**
   * Compute the rectangle describing the range of imagery that is covered
   * with the given rectangle.
   *
   * This will compute a rectangle with integer coordinates that describe
   * the X/Y coordinates of the imagery that is overlapped by the given
   * input rectangle, based on the given imagery rectangle.
   *
   * Both the minimum and maximum tile coordinates of the result are
   * inclusive.
   *
   * Extracted from _createTileImagerySkeletons.
   *
   * @param {Rectangle} inputRectangle The input rectangle
   * @param {Rectangle} imageryBounds The imagery bounds
   * @param {TilingScheme} imageryTilingScheme The tiling scheme
   * @param {number} imageryLevel The imagery level
   * @returns {CartesianRectangle} The rectangle
   */
  static _computeImageryRange(
    inputRectangle,
    imageryBounds,
    imageryTilingScheme,
    imageryLevel,
  ) {
    const overlappedRectangle = ImageryCoverage._computeOverlappedRectangle(
      inputRectangle,
      imageryBounds,
    );
    // Tile Y grows southward, so the northwest corner yields the
    // minimum (x, y) and the southeast corner the maximum (x, y)
    const northwestTileCoordinates = imageryTilingScheme.positionToTileXY(
      Rectangle.northwest(overlappedRectangle),
      imageryLevel,
    );
    const southeastTileCoordinates = imageryTilingScheme.positionToTileXY(
      Rectangle.southeast(overlappedRectangle),
      imageryLevel,
    );
    const result = new CartesianRectangle();
    result.minX = northwestTileCoordinates.x;
    result.minY = northwestTileCoordinates.y;
    result.maxX = southeastTileCoordinates.x;
    result.maxY = southeastTileCoordinates.y;
    // As extracted from _createTileImagerySkeletons:
    // If the southeast corner of the rectangle lies very close to the north or west side
    // of the southeast tile, we don't actually need the southernmost or easternmost
    // tiles.
    // Similarly, if the northwest corner of the rectangle lies very close to the south or east side
    // of the northwest tile, we don't actually need the northernmost or westernmost tiles.
    // We define "very close" as being within 1/512 of the width of the tile.
    const veryCloseX = inputRectangle.width / 512.0;
    const veryCloseY = inputRectangle.height / 512.0;
    const northwestTileRectangle = imageryTilingScheme.tileXYToRectangle(
      result.minX,
      result.minY,
      imageryLevel,
    );
    const deltaNorth = Math.abs(
      northwestTileRectangle.south - inputRectangle.north,
    );
    if (deltaNorth < veryCloseY && result.minY < result.maxY) {
      ++result.minY;
    }
    const deltaWest = Math.abs(
      northwestTileRectangle.east - inputRectangle.west,
    );
    if (deltaWest < veryCloseX && result.minX < result.maxX) {
      ++result.minX;
    }
    const southeastTileRectangle = imageryTilingScheme.tileXYToRectangle(
      result.maxX,
      result.maxY,
      imageryLevel,
    );
    const deltaSouth = Math.abs(
      southeastTileRectangle.north - inputRectangle.south,
    );
    if (deltaSouth < veryCloseY && result.maxY > result.minY) {
      --result.maxY;
    }
    const deltaEast = Math.abs(
      southeastTileRectangle.west - inputRectangle.east,
    );
    if (deltaEast < veryCloseX && result.maxX > result.minX) {
      --result.maxX;
    }
    return result;
  }

  /**
   * Clamp the given input rectangle to the given clamp rectangle.
   *
   * If the input rectangle is completely above/below or left/right
   * of the clamp rectangle, then the north/south or east/west
   * of the clamp rectangle will be used in the result
   * (producing a degenerate, zero-extent edge in that dimension).
   *
   * @param {Rectangle} input The input rectangle
   * @param {Rectangle} clamp The clamping rectangle
   * @param {Rectangle} [result] The result
   * @returns {Rectangle} The result
   */
  static _clampRectangle(input, clamp, result) {
    if (!defined(result)) {
      result = new Rectangle();
    }
    if (input.south >= clamp.north) {
      result.north = result.south = clamp.north;
    } else if (input.north <= clamp.south) {
      result.north = result.south = clamp.south;
    } else {
      result.south = Math.max(input.south, clamp.south);
      result.north = Math.min(input.north, clamp.north);
    }
    if (input.west >= clamp.east) {
      result.west = result.east = clamp.east;
    } else if (input.east <= clamp.west) {
      result.west = result.east = clamp.west;
    } else {
      result.west = Math.max(input.west, clamp.west);
      result.east = Math.min(input.east, clamp.east);
    }
    return result;
  }

  /**
   * Compute overlap between the given input rectangle, and the given
   * bounds that have been obtained from the imagery provider.
   *
   * Falls back to clamping when the rectangles do not intersect, so the
   * result is always a defined Rectangle (it may be degenerate).
   * The result is stored in a module-level scratch Rectangle.
   *
   * @param {Rectangle} inputRectangle The input
   * @param {Rectangle} imageryBounds The imagery bounds
   * @returns {Rectangle} The rectangle
   */
  static _computeOverlappedRectangle(inputRectangle, imageryBounds) {
    const overlappedRectangle = Rectangle.intersection(
      inputRectangle,
      imageryBounds,
      overlappedRectangleScratch,
    );
    if (defined(overlappedRectangle)) {
      return overlappedRectangle;
    }
    return ImageryCoverage._clampRectangle(
      inputRectangle,
      imageryBounds,
      overlappedRectangleScratch,
    );
  }

  /**
   * Computes the <code>ImageryCoverage</code> objects that describe the imagery and
   * the texture coordinates that are contained in the given range of
   * imagery tile coordinates, referring to the given input rectangle.
   *
   * @param {ImageryLayer} imageryLayer The imagery layer
   * @param {CartesianRectangle} imageryRange The range of imagery tile coordinates
   * (both minimum and maximum are inclusive)
   * @param {number} imageryLevel The imagery level
   * @param {Rectangle} nativeInputRectangle The input rectangle, in coordinates
   * that are native for the tiling scheme
   * @param {Function} computeClippedImageryRectangle A function that returns
   * an imagery rectangle, based on (x, y, level), clipped to the imagery bounds
   * (or undefined if there is no intersection between the imagery rectangle
   * and the bounds)
   * @returns {ImageryCoverage[]} The objects describing the covered imagery
   * and the respective texture coordinates
   */
  static _computeImageryCoverages(
    imageryLayer,
    imageryRange,
    imageryLevel,
    nativeInputRectangle,
    computeClippedImageryRectangle,
  ) {
    const imageryCoverages = [];
    for (let i = imageryRange.minX; i <= imageryRange.maxX; i++) {
      // Skip the whole column when the tile at (i, maxY) does not
      // intersect the imagery bounds.
      // NOTE(review): only row maxY is tested here before skipping the
      // entire column — confirm that this matches the column-overlap
      // check in _createTileImagerySkeletons.
      const clippedImageryRectangleU = computeClippedImageryRectangle(
        i,
        imageryRange.maxY,
        imageryLevel,
      );
      if (!defined(clippedImageryRectangleU)) {
        continue;
      }
      for (let j = imageryRange.minY; j <= imageryRange.maxY; j++) {
        const clippedImageryRectangleV = computeClippedImageryRectangle(
          i,
          j,
          imageryLevel,
        );
        if (!defined(clippedImageryRectangleV)) {
          continue;
        }
        // Texture coordinates of this imagery tile relative to the
        // (native) input rectangle; a fresh CartesianRectangle is
        // created here because `undefined` is passed as the result
        const textureCoordinateRectangle =
          ImageryCoverage._localizeToCartesianRectangle(
            clippedImageryRectangleV,
            nativeInputRectangle,
            undefined,
          );
        // Note: The getImageryFromCache function will create the whole "chain"
        // of ancestor imageries, up to the root, and increases the reference
        // counter for each of them, even though it is not called
        // getImageryFromCacheAndCreateAllAncestorsAndAddReferences.
        // There is currently no way to have a single imagery, because
        // somewhere in TileImagery, the parent is assumed to be present.
        const imagery = imageryLayer.getImageryFromCache(i, j, imageryLevel);
        const imageryCoverage = new ImageryCoverage(
          i,
          j,
          imageryLevel,
          textureCoordinateRectangle,
          imagery,
        );
        imageryCoverages.push(imageryCoverage);
      }
    }
    return imageryCoverages;
  }

  /**
   * Compute the coordinates of the first rectangle relative to the
   * second rectangle.
   *
   * The result will describe the bounds of the first rectangle
   * in coordinates that are relative to the (south,west) and
   * (width, height) of the second rectangle. This is suitable
   * for describing the texture coordinates of the first
   * rectangle within the second one.
   *
   * The result will be stored in the given result parameter, or
   * in a new rectangle if the result was undefined.
   *
   * @param {Rectangle} rectangleA The first rectangle
   * @param {Rectangle} rectangleB The second rectangle
   * @param {CartesianRectangle} [result] The result
   * @returns {CartesianRectangle} The result
   */
  static _localizeToCartesianRectangle(rectangleA, rectangleB, result) {
    if (!defined(result)) {
      result = new CartesianRectangle();
    }
    const invX = 1.0 / rectangleB.width;
    const invY = 1.0 / rectangleB.height;
    result.minX = (rectangleA.west - rectangleB.west) * invX;
    result.minY = (rectangleA.south - rectangleB.south) * invY;
    result.maxX = (rectangleA.east - rectangleB.west) * invX;
    result.maxY = (rectangleA.north - rectangleB.south) * invY;
    return result;
  }
}
export default ImageryCoverage;

View File

@ -0,0 +1,31 @@
/**
 * A class containing a set of flags indicating which parts of an
 * <code>ImageryLayer</code> need to be processed.
 *
 * This is used in the <code>ImageryPipelineStage</code> to decide the
 * structure of the function that blends the imagery texture information
 * with the previous pixels.
 *
 * Each flag indicates that at least one of the <code>ImageryLayer</code> objects
 * that are part of the input did <b>not</b> have the default value that
 * was defined via the corresponding <code>ImageryLayer.DEFAULT_...</code>.
 *
 * Note that the type of the flags can be <code>boolean</code> or
 * <code>number</code>. Users should check for these flags having
 * a 'truthy' or 'falsy' value.
 *
 * @private
 */
class ImageryFlags {
  /**
   * Creates an instance with every flag cleared (false).
   */
  constructor() {
    Object.assign(this, {
      alpha: false,
      brightness: false,
      contrast: false,
      hue: false,
      saturation: false,
      gamma: false,
      colorToAlpha: false,
    });
  }
}
export default ImageryFlags;

View File

@ -0,0 +1,60 @@
/**
 * A structure summarizing the input for the shader that drapes imagery
 * over 3D Tiles, as part of the <code>ImageryPipelineStage</code>.
 *
 * The <code>ImageryPipelineStage</code> receives the primitive and its
 * <code>ModelPrimitiveImagery</code> objects. These objects provide the
 * <code>ImageryCoverage</code> information, indicating the set of imagery
 * tiles that are covered by the primitive.
 *
 * The <code>ImageryPipelineStage</code> uses the <code>ImageryCoverage</code>
 * to fetch the <code>Imagery</code> object and its texture for the
 * (x, y, level) of each coverage, computes the texture translation and
 * scale, and the covered texture coordinate rectangle of that imagery
 * texture. That information is summarized in an instance of this class,
 * to later be passed to the shader via uniforms.
 *
 * @private
 */
class ImageryInput {
  /**
   * Creates a new instance
   *
   * @param {ImageryLayer} imageryLayer The imagery layer
   * @param {Texture} texture The texture from the imagery
   * @param {Cartesian4} textureTranslationAndScale The translation and
   * scale that have to be applied to the texture so that it is properly
   * draped on the primitive, stored with (x, y) being the translation
   * and (z, w) being the scale. (Two separate Cartesian2 objects would
   * be clearer, but a single Cartesian4 follows the design choice that
   * was originally made in GlobeFS.glsl, aiming at fewer uniforms)
   * @param {Cartesian4} textureCoordinateRectangle The bounding
   * rectangle (in texture coordinates). This directly corresponds to
   * <code>ImageryCoverage.textureCoordinateRectangle</code>, converted
   * into a Cartesian4 for consumption in the shader
   * @param {number} imageryTexCoordAttributeSetIndex The "set index" of
   * the texture coordinate attribute that should be used, i.e. the index
   * for accessing
   * <code>a_imagery_texCoord_${imageryTexCoordAttributeSetIndex}</code>
   * in the shader
   */
  constructor(
    imageryLayer,
    texture,
    textureTranslationAndScale,
    textureCoordinateRectangle,
    imageryTexCoordAttributeSetIndex,
  ) {
    // Plain data holder: store all inputs as equally-named properties
    Object.assign(this, {
      imageryLayer,
      texture,
      textureTranslationAndScale,
      textureCoordinateRectangle,
      imageryTexCoordAttributeSetIndex,
    });
  }
}
export default ImageryInput;

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,84 @@
import Check from "../../Core/Check.js";
/**
 * A collection of cartographic positions (and their bounding rectangle)
 * that have been computed from cartesian positions, for one specific
 * ellipsoid.
 *
 * This is used in the <code>ModelPrimitiveImagery</code> class: it caches
 * the positions of the primitive, mapped to the ellipsoid that was used
 * in one of the imagery layers. This avoids recomputing the transform of
 * the primitive POSITION attribute values into ECEF, and the subsequent
 * conversion of these positions into cartographic positions.
 *
 * @private
 */
class MappedPositions {
  /**
   * Creates a new instance
   *
   * @param {Iterable<Cartographic>} cartographicPositions The positions
   * @param {number} numPositions The number of positions
   * @param {Rectangle} cartographicBoundingRectangle The bounding
   * rectangle of the positions
   * @param {Ellipsoid} ellipsoid The ellipsoid
   */
  constructor(
    cartographicPositions,
    numPositions,
    cartographicBoundingRectangle,
    ellipsoid,
  ) {
    //>>includeStart('debug', pragmas.debug);
    Check.defined("cartographicPositions", cartographicPositions);
    Check.typeOf.number.greaterThanOrEquals("numPositions", numPositions, 0);
    Check.defined(
      "cartographicBoundingRectangle",
      cartographicBoundingRectangle,
    );
    Check.defined("ellipsoid", ellipsoid);
    //>>includeEnd('debug');
    // The constructor arguments are immutable as far as this class is
    // concerned; they are only handed out via the getters below.
    this._ellipsoid = ellipsoid;
    this._numPositions = numPositions;
    this._cartographicPositions = cartographicPositions;
    this._cartographicBoundingRectangle = cartographicBoundingRectangle;
  }
  /**
   * Returns the cartographic positions
   *
   * @returns {Iterable<Cartographic>} The positions
   */
  get cartographicPositions() {
    return this._cartographicPositions;
  }
  /**
   * Returns the number of positions
   *
   * @returns {number} The number of positions
   */
  get numPositions() {
    return this._numPositions;
  }
  /**
   * Returns the cartographic bounding rectangle
   *
   * @returns {Rectangle} The rectangle
   */
  get cartographicBoundingRectangle() {
    return this._cartographicBoundingRectangle;
  }
  /**
   * Returns the ellipsoid for which these positions have been created
   *
   * @returns {Ellipsoid} The ellipsoid
   */
  get ellipsoid() {
    return this._ellipsoid;
  }
}
export default MappedPositions;

View File

@ -43,6 +43,7 @@ import oneTimeWarning from "../../Core/oneTimeWarning.js";
import PntsLoader from "./PntsLoader.js";
import StyleCommandsNeeded from "./StyleCommandsNeeded.js";
import pickModel from "./pickModel.js";
import ModelImagery from "./ModelImagery.js";
/**
* <div class="notice">
@ -382,6 +383,8 @@ function Model(options) {
}
this._clippingPolygonsState = 0; // If this value changes, the shaders need to be regenerated.
this._modelImagery = new ModelImagery(this);
this._lightColor = Cartesian3.clone(options.lightColor);
this._imageBasedLighting = defined(options.imageBasedLighting)
@ -1375,6 +1378,28 @@ Object.defineProperties(Model.prototype, {
},
},
/**
* If this model is part of a <code>Model3DTileContent</code> of a tileset,
* then this will return the <code>ImageryLayerCollection</code>
* of that tileset. Otherwise, <code>undefined</code> is returned.
*
* @memberof Model.prototype
* @type {ImageryLayerCollection|undefined}
* @readonly
* @private
*/
imageryLayers: {
get: function () {
if (defined(this._content)) {
const tileset = this._content.tileset;
if (defined(tileset)) {
return tileset.imageryLayers;
}
}
return undefined;
},
},
/**
* The directional light color when shading the model. When <code>undefined</code> the scene's light color is used instead.
* <p>
@ -1928,6 +1953,19 @@ Model.prototype.update = function (frameState) {
return;
}
const modelImagery = this._modelImagery;
modelImagery.update(frameState);
if (!modelImagery.ready) {
// If the imagery loading should not happen asynchronously,
// then do not let the model count as 'ready' until the
// modelImagery is 'ready'
const asynchronouslyLoadImagery =
this._content?.tileset?._asynchronouslyLoadImagery ?? false;
if (!asynchronouslyLoadImagery) {
return;
}
}
updateFeatureTableId(this);
updateStyle(this);
updateFeatureTables(this, frameState);

View File

@ -0,0 +1,403 @@
import Check from "../../Core/Check.js";
import defined from "../../Core/defined.js";
import destroyObject from "../../Core/destroyObject.js";
import DeveloperError from "../../Core/DeveloperError.js";
import ImageryConfiguration from "./ImageryConfiguration.js";
import ModelPrimitiveImagery from "./ModelPrimitiveImagery.js";
/**
 * A class managing the draping of imagery on a <code>Model</code>.
 *
 * An instance of this class is created in the Model constructor. It will
 * create the data structures that carry the information that is required
 * for mapping imagery textures on model primitives.
 *
 * It offers two functions for managing the lifecycle of this draping process:
 *
 * The <code>update</code> function is called from the <code>Model.update</code>
 * function in each frame. It will create one <code>ModelPrimitiveImagery</code>
 * instance for each primitive that appears in the model, and call the
 * <code>update</code> function of these instances, respectively.
 *
 * The <code>ready</code> getter will be used to determine whether the
 * draping computations are done, and the update process of the <code>Model</code>
 * can continue, eventually causing the <code>model.ready</code> flag to
 * become <code>true</code>. The model imagery counts as "ready" when all
 * the imagery layers of the model are <code>ready</code>, and all the
 * <code>ModelPrimitiveImagery</code> instances are <code>ready</code>.
 *
 * @private
 */
class ModelImagery {
  /**
   * Creates a new instance
   *
   * @param {Model} model The model
   * @throws {DeveloperError} If the model is not defined
   */
  constructor(model) {
    //>>includeStart('debug', pragmas.debug);
    Check.defined("model", model);
    //>>includeEnd('debug');
    /**
     * The model that this instance was created for.
     *
     * @type {Model}
     * @readonly
     * @private
     */
    this._model = model;
    /**
     * One <code>ModelPrimitiveImagery</code> for each primitive
     * that appears in the model.
     *
     * Initially, this is <code>undefined</code>. When the <code>update</code>
     * function is called and all imagery layers that are associated with the
     * model are <code>ready</code>, this is initialized with one instance
     * of a <code>ModelPrimitiveImagery</code> per runtime primitive (i.e. one for
     * each <code>model.sceneGraph._runtimeNodes[n]._runtimePrimitives[p]</code>)
     *
     * @type {ModelPrimitiveImagery[]|undefined}
     * @private
     */
    this._modelPrimitiveImageries = undefined;
    /**
     * One <code>ImageryConfiguration</code> object for each <code>ImageryLayer</code>
     * that is associated with the model.
     *
     * This is used for determining whether the configuration (relevant property
     * values) of an imagery layer has been changed since the previous
     * <code>update</code> call, which should cause the draw commands of the
     * model to be reset.
     *
     * @type {ImageryConfiguration[]}
     * @private
     */
    this._imageryConfigurations = [];
  }
  /**
   * The update function that is called from <code>Model.update</code> in
   * each frame.
   *
   * This checks whether the imagery layer objects that are associated
   * with the model are all <code>ready</code>. If they are not yet
   * ready, then nothing is done.
   *
   * Otherwise, this just calls the <code>update</code> function of
   * the <code>_modelPrimitiveImageries</code> (creating them if they had
   * not been created yet).
   *
   * @param {FrameState} frameState The frame state
   */
  update(frameState) {
    //>>includeStart('debug', pragmas.debug);
    Check.defined("frameState", frameState);
    //>>includeEnd('debug');
    if (!this._hasImagery) {
      return;
    }
    if (!this._allImageryLayersReady) {
      return;
    }
    if (!defined(this._modelPrimitiveImageries)) {
      this._modelPrimitiveImageries = this._createModelPrimitiveImageries();
    }
    this._updateModelPrimitiveImageries(frameState);
    this._checkForModifiedImageryConfigurations();
  }
  /**
   * Creates the <code>ModelPrimitiveImagery</code> array that contains
   * one <code>ModelPrimitiveImagery</code> for each primitive that is
   * contained in the model.
   *
   * @returns {ModelPrimitiveImagery[]} The model primitive imageries
   * @private
   */
  _createModelPrimitiveImageries() {
    const model = this._model;
    const runtimeNodesAndPrimitives = this._collectRuntimeNodesAndPrimitives();
    const modelPrimitiveImageries = [];
    const length = runtimeNodesAndPrimitives.length;
    for (let i = 0; i < length; i++) {
      const { runtimeNode, runtimePrimitive } = runtimeNodesAndPrimitives[i];
      const modelPrimitiveImagery = new ModelPrimitiveImagery(
        model,
        runtimeNode,
        runtimePrimitive,
      );
      // The primitive keeps a back-reference so that pipeline stages
      // can access the imagery information for the primitive
      runtimePrimitive.primitive.modelPrimitiveImagery = modelPrimitiveImagery;
      modelPrimitiveImageries.push(modelPrimitiveImagery);
    }
    return modelPrimitiveImageries;
  }
  /**
   * Computes all runtime nodes and primitives of the model.
   *
   * This is just the array that contains a
   * <code>{ runtimeNode, runtimePrimitive }</code>
   * for each
   * <code>model.sceneGraph._runtimeNodes[n]._runtimePrimitives[p]</code>.
   *
   * @returns {object[]} The runtime nodes and primitives
   * @private
   */
  _collectRuntimeNodesAndPrimitives() {
    const model = this._model;
    const sceneGraph = model.sceneGraph;
    const runtimeNodes = sceneGraph._runtimeNodes;
    const runtimeNodesAndPrimitives = [];
    for (let i = 0; i < runtimeNodes.length; i++) {
      const runtimeNode = runtimeNodes[i];
      // Runtime node slots may be sparse/undefined
      if (!defined(runtimeNode)) {
        continue;
      }
      for (let j = 0; j < runtimeNode.runtimePrimitives.length; j++) {
        const runtimePrimitive = runtimeNode.runtimePrimitives[j];
        runtimeNodesAndPrimitives.push({
          runtimeNode: runtimeNode,
          runtimePrimitive: runtimePrimitive,
        });
      }
    }
    return runtimeNodesAndPrimitives;
  }
  /**
   * Just calls <code>update</code> on each <code>ModelPrimitiveImagery</code>
   * as part of the <code>update</code> of this class.
   *
   * @param {FrameState} frameState The frame state
   * @throws {DeveloperError} If the modelPrimitiveImageries have not
   * been created yet
   * @private
   */
  _updateModelPrimitiveImageries(frameState) {
    //>>includeStart('debug', pragmas.debug);
    Check.defined("frameState", frameState);
    //>>includeEnd('debug');
    if (!defined(this._modelPrimitiveImageries)) {
      throw new DeveloperError(
        "The modelPrimitiveImageries have not been created",
      );
    }
    const modelPrimitiveImageries = this._modelPrimitiveImageries;
    const length = modelPrimitiveImageries.length;
    for (let i = 0; i < length; i++) {
      const modelPrimitiveImagery = modelPrimitiveImageries[i];
      modelPrimitiveImagery.update(frameState);
    }
  }
  /**
   * Destroy and delete all <code>ModelPrimitiveImagery</code> instances
   * if they already have been created.
   *
   * @private
   */
  _deleteModelPrimitiveImageries() {
    const modelPrimitiveImageries = this._modelPrimitiveImageries;
    if (!defined(modelPrimitiveImageries)) {
      return;
    }
    const length = modelPrimitiveImageries.length;
    for (let i = 0; i < length; i++) {
      const modelPrimitiveImagery = modelPrimitiveImageries[i];
      modelPrimitiveImagery.destroy();
    }
    // Assign undefined instead of using the `delete` operator: this keeps
    // the object shape stable, and restores exactly the "not created yet"
    // state that `update` and `_allModelPrimitiveImageriesReady` check for
    this._modelPrimitiveImageries = undefined;
  }
  /**
   * Returns whether this instance is "ready".
   *
   * This means that all imagery layers that are associated with the model
   * are <code>ready</code>, and all <code>ModelPrimitiveImagery</code>
   * instances are <code>ready</code>.
   *
   * When this is <code>true</code>, then the mapping computations are
   * complete and the structures containing the mapping information have
   * been initialized. Otherwise, subsequent calls to <code>update</code>
   * will perform the necessary computation until this getter eventually
   * returns <code>true</code>.
   *
   * @returns {boolean} Whether this instance is "ready"
   */
  get ready() {
    // A model without imagery has nothing to compute
    if (!this._hasImagery) {
      return true;
    }
    if (!this._allImageryLayersReady) {
      return false;
    }
    if (!this._allModelPrimitiveImageriesReady) {
      return false;
    }
    return true;
  }
  /**
   * Returns whether the model has imagery layers associated with it.
   *
   * @private
   */
  get _hasImagery() {
    const model = this._model;
    const imageryLayers = model.imageryLayers;
    return defined(imageryLayers) && imageryLayers.length > 0;
  }
  /**
   * Returns whether all imagery layers that are associated with the
   * model are <code>ready</code>.
   *
   * If the model does not have imagery, then this always returns
   * <code>true</code>. Otherwise, it returns whether each imagery
   * layer is <code>ready</code>.
   *
   * @private
   */
  get _allImageryLayersReady() {
    if (!this._hasImagery) {
      return true;
    }
    const imageryLayers = this._model.imageryLayers;
    const length = imageryLayers.length;
    for (let i = 0; i < length; i++) {
      const imageryLayer = imageryLayers.get(i);
      if (!imageryLayer.ready) {
        return false;
      }
    }
    return true;
  }
  /**
   * Returns whether all <code>ModelPrimitiveImagery</code> instances
   * are <code>ready</code>.
   *
   * @private
   */
  get _allModelPrimitiveImageriesReady() {
    const modelPrimitiveImageries = this._modelPrimitiveImageries;
    // Not created yet counts as "not ready"
    if (!defined(modelPrimitiveImageries)) {
      return false;
    }
    const length = modelPrimitiveImageries.length;
    for (let i = 0; i < length; i++) {
      const modelPrimitiveImagery = modelPrimitiveImageries[i];
      if (!modelPrimitiveImagery.ready) {
        return false;
      }
    }
    return true;
  }
  /**
   * Check whether any of the settings of any imagery layer (like alpha
   * or hue) has been changed since the last call to the <code>update</code>
   * function.
   *
   * If this is the case, the draw commands of the model will be reset.
   *
   * @private
   */
  _checkForModifiedImageryConfigurations() {
    if (this._imageryConfigurationsModified()) {
      this._updateImageryConfigurations();
      const model = this._model;
      model.resetDrawCommands();
    }
  }
  /**
   * Returns whether any setting of an imagery layer (like alpha or hue) has
   * been changed since the last time the <code>ImageryConfiguration</code>
   * objects have been updated.
   *
   * @returns {boolean} Whether there was a modification
   * @private
   */
  _imageryConfigurationsModified() {
    const model = this._model;
    const imageryLayers = model.imageryLayers;
    const imageryConfigurations = this._imageryConfigurations;
    if (imageryLayers.length !== imageryConfigurations.length) {
      return true;
    }
    // The properties that are snapshotted in the ImageryConfiguration
    // objects, and whose modification requires resetting draw commands
    const propertyNames = [
      "alpha",
      "brightness",
      "contrast",
      "hue",
      "saturation",
      "gamma",
      "colorToAlpha",
    ];
    for (let i = 0; i < imageryLayers.length; i++) {
      const imageryLayer = imageryLayers.get(i);
      const imageryConfiguration = imageryConfigurations[i];
      for (const propertyName of propertyNames) {
        // Note: colorToAlpha is intentionally compared with strict
        // (reference) equality, matching the stored snapshot value
        if (imageryLayer[propertyName] !== imageryConfiguration[propertyName]) {
          return true;
        }
      }
    }
    return false;
  }
  /**
   * Create one <code>ImageryConfiguration</code> object for each imagery
   * layer that appears in the model, and store them as the
   * <code>_imageryConfigurations</code>.
   *
   * @private
   */
  _updateImageryConfigurations() {
    const model = this._model;
    const imageryLayers = model.imageryLayers;
    const imageryConfigurations = this._imageryConfigurations;
    imageryConfigurations.length = imageryLayers.length;
    for (let i = 0; i < imageryLayers.length; i++) {
      const imageryLayer = imageryLayers.get(i);
      imageryConfigurations[i] = new ImageryConfiguration(imageryLayer);
    }
  }
  /**
   * Returns whether this object was destroyed.
   *
   * If this object was destroyed, calling any function other than
   * <code>isDestroyed</code> will result in a {@link DeveloperError}.
   *
   * @returns {boolean} Whether this object was destroyed
   */
  isDestroyed() {
    return false;
  }
  /**
   * Destroys this object and all its resources.
   */
  destroy() {
    if (this.isDestroyed()) {
      return;
    }
    this._deleteModelPrimitiveImageries();
    return destroyObject(this);
  }
}
export default ModelImagery;

View File

@ -0,0 +1,489 @@
import defined from "../../Core/defined.js";
import Cartesian2 from "../../Core/Cartesian2.js";
import Cartesian3 from "../../Core/Cartesian3.js";
import Matrix4 from "../../Core/Matrix4.js";
import Rectangle from "../../Core/Rectangle.js";
import Cartographic from "../../Core/Cartographic.js";
import BoundingRectangle from "../../Core/BoundingRectangle.js";
import ComponentDatatype from "../../Core/ComponentDatatype.js";
import Check from "../../Core/Check.js";
import AttributeType from "../AttributeType.js";
import ModelReader from "./ModelReader.js";
import VertexAttributeSemantic from "../VertexAttributeSemantic.js";
/**
 * A class for computing the texture coordinates of imagery that is
 * supposed to be mapped on a <code>ModelComponents.Primitive</code>.
 *
 * All methods are static; this class is a namespace for the individual
 * steps of the mapping computation.
 *
 * @private
 */
class ModelImageryMapping {
  /**
   * Creates a typed array that contains texture coordinates for
   * the given <code>MappedPositions</code>, using the given
   * projection.
   *
   * This will be a typed array that contains the texture coordinates
   * that result from projecting the given positions with the given
   * projection, and normalizing them to their bounding rectangle.
   *
   * @param {MappedPositions} mappedPositions The positions
   * @param {MapProjection} projection The projection that should be used
   * @returns {TypedArray} The result
   */
  static createTextureCoordinatesForMappedPositions(
    mappedPositions,
    projection,
  ) {
    //>>includeStart('debug', pragmas.debug);
    Check.defined("mappedPositions", mappedPositions);
    Check.defined("projection", projection);
    //>>includeEnd('debug');
    const cartographicPositions = mappedPositions.cartographicPositions;
    const cartographicBoundingRectangle =
      mappedPositions.cartographicBoundingRectangle;
    const numPositions = mappedPositions.numPositions;
    return ModelImageryMapping._createTextureCoordinates(
      cartographicPositions,
      numPositions,
      cartographicBoundingRectangle,
      projection,
    );
  }
  /**
   * Creates a typed array that contains texture coordinates for
   * a primitive with the given positions, using the given
   * projection.
   *
   * This will be a typed array of size <code>numPositions*2</code>
   * that contains the texture coordinates that result from
   * projecting the given positions with the given projection,
   * and normalizing them to the given bounding rectangle.
   *
   * @param {Iterable<Cartographic>} cartographicPositions The
   * cartographic positions
   * @param {number} numPositions The number of positions (vertices)
   * @param {Rectangle} cartographicBoundingRectangle The bounding
   * rectangle of the cartographic positions
   * @param {MapProjection} projection The projection that should be used
   * @returns {TypedArray} The result
   * @private
   */
  static _createTextureCoordinates(
    cartographicPositions,
    numPositions,
    cartographicBoundingRectangle,
    projection,
  ) {
    //>>includeStart('debug', pragmas.debug);
    Check.defined("cartographicPositions", cartographicPositions);
    Check.typeOf.number.greaterThanOrEquals("numPositions", numPositions, 0);
    Check.defined(
      "cartographicBoundingRectangle",
      cartographicBoundingRectangle,
    );
    Check.defined("projection", projection);
    //>>includeEnd('debug');
    // Convert the bounding `Rectangle`(!) of the cartographic positions
    // into a `BoundingRectangle`(!) using the given projection
    const boundingRectangle = new BoundingRectangle();
    BoundingRectangle.fromRectangle(
      cartographicBoundingRectangle,
      projection,
      boundingRectangle,
    );
    // Compute the projected positions, using the given projection.
    // Note that all the intermediate steps here are lazy iterables
    // that share a single scratch object; the values are only fully
    // materialized in the final typed-array conversion below.
    const projectedPositions = ModelImageryMapping.createProjectedPositions(
      cartographicPositions,
      projection,
    );
    // Relativize the projected positions into the bounding rectangle
    // to obtain texture coordinates
    const texCoords = ModelImageryMapping.computeTexCoords(
      projectedPositions,
      boundingRectangle,
    );
    // Convert the texture coordinates into a typed array
    const texCoordsTypedArray =
      ModelImageryMapping.createTypedArrayFromCartesians2(
        numPositions,
        texCoords,
      );
    return texCoordsTypedArray;
  }
  /**
   * Creates the `ModelComponents.Attribute` for the texture coordinates
   * for a primitive
   *
   * This will create an attribute with
   * - semantic: VertexAttributeSemantic.TEXCOORD
   * - type: AttributeType.VEC2
   * - count: mappedPositions.numPositions
   * that contains the texture coordinates for the given mapped positions,
   * after they are projected using the given projection, normalized to
   * their bounding rectangle.
   *
   * @param {MappedPositions} mappedPositions The mapped positions,
   * providing the cartographic positions and their bounding rectangle
   * @param {MapProjection} projection The projection that should be used
   * @returns {ModelComponents.Attribute} The new attribute
   */
  static createTextureCoordinatesAttributeForMappedPositions(
    mappedPositions,
    projection,
  ) {
    //>>includeStart('debug', pragmas.debug);
    Check.defined("mappedPositions", mappedPositions);
    Check.defined("projection", projection);
    //>>includeEnd('debug');
    // Create the typed array that contains the texture coordinates
    const texCoordsTypedArray =
      ModelImageryMapping.createTextureCoordinatesForMappedPositions(
        mappedPositions,
        projection,
      );
    // Create an attribute from the texture coordinates typed array
    const texCoordAttribute =
      ModelImageryMapping.createTexCoordAttribute(texCoordsTypedArray);
    return texCoordAttribute;
  }
  /**
   * Create an iterable that provides the cartographic positions
   * of the given POSITION attribute, based on the given ellipsoid
   *
   * The resulting iterable is lazy and reuses scratch objects: each
   * iteration step yields the same (mutated) `Cartographic` instance.
   *
   * @param {ModelComponents.Attribute} primitivePositionAttribute
   * The "POSITION" attribute of the primitive.
   * @param {Matrix4} primitivePositionTransform The full transform of the primitive
   * @param {Ellipsoid} ellipsoid The ellipsoid that should be used
   * @returns {Iterable<Cartographic>} The iterable over `Cartographic` objects
   */
  static createCartographicPositions(
    primitivePositionAttribute,
    primitivePositionTransform,
    ellipsoid,
  ) {
    //>>includeStart('debug', pragmas.debug);
    Check.defined("primitivePositionAttribute", primitivePositionAttribute);
    Check.defined("primitivePositionTransform", primitivePositionTransform);
    Check.defined("ellipsoid", ellipsoid);
    //>>includeEnd('debug');
    // Extract the positions as a typed array
    const typedArray = ModelReader.readAttributeAsTypedArray(
      primitivePositionAttribute,
    );
    // Create an iterable over the positions
    const type = primitivePositionAttribute.type;
    const numComponents = AttributeType.getNumberOfComponents(type);
    const positions =
      ModelImageryMapping.createIterableCartesian3FromTypedArray(
        typedArray,
        numComponents,
      );
    // Compute the positions after they are transformed with the given matrix
    const transformedPositions = ModelImageryMapping.transformCartesians3(
      positions,
      primitivePositionTransform,
    );
    // Compute the cartographic positions for the given ellipsoid
    const cartographicPositions = ModelImageryMapping.transformToCartographic(
      transformedPositions,
      ellipsoid,
    );
    return cartographicPositions;
  }
  /**
   * Creates an iterable over `Cartesian3` objects from the given
   * typed array.
   *
   * The resulting iterable will always return the same `Cartesian3`
   * object. Clients should not store and modify this object.
   *
   * @param {TypedArray} typedArray The typed array
   * @param {number} stride The stride between two consecutive
   * `Cartesian3` elements in the given array. Must be at least 3.
   * @returns {Iterable<Cartesian3>} The iterable
   */
  static createIterableCartesian3FromTypedArray(typedArray, stride) {
    //>>includeStart('debug', pragmas.debug);
    Check.defined("typedArray", typedArray);
    Check.typeOf.number.greaterThanOrEquals("stride", stride, 3);
    //>>includeEnd('debug');
    // A single scratch object that is re-filled and yielded in
    // each iteration step, to avoid per-vertex allocations
    const cartesian = new Cartesian3();
    const numElements = typedArray.length / stride;
    const result = {
      [Symbol.iterator]: function* () {
        for (let i = 0; i < numElements; i++) {
          cartesian.x = typedArray[i * stride + 0];
          cartesian.y = typedArray[i * stride + 1];
          cartesian.z = typedArray[i * stride + 2];
          yield cartesian;
        }
      },
    };
    return result;
  }
  /**
   * Creates a new iterable that applies the given mapper to the given iterable.
   *
   * The mapping is lazy: the mapper is invoked while iterating, not
   * when this function is called.
   *
   * @param {Iterable} iterable The input iterable
   * @param {Function} mapper The mapper
   * @returns {Iterable} The mapped iterable
   */
  static map(iterable, mapper) {
    //>>includeStart('debug', pragmas.debug);
    Check.defined("iterable", iterable);
    Check.defined("mapper", mapper);
    //>>includeEnd('debug');
    const result = {
      [Symbol.iterator]: function* () {
        for (const element of iterable) {
          yield mapper(element);
        }
      },
    };
    return result;
  }
  /**
   * Computes the bounding rectangle of the given cartographic positions,
   * stores it in the given result, and returns it.
   *
   * If the given result is `undefined`, a new rectangle will be created
   * and returned.
   *
   * Note: if the iterable is empty, the result contains infinite
   * values (no positions to bound).
   *
   * @param {Iterable<Cartographic>} cartographicPositions The cartographics
   * @param {Rectangle} [result] The result
   * @returns {Rectangle} The result
   */
  static computeCartographicBoundingRectangle(cartographicPositions, result) {
    //>>includeStart('debug', pragmas.debug);
    Check.defined("cartographicPositions", cartographicPositions);
    //>>includeEnd('debug');
    if (!defined(result)) {
      result = new Rectangle();
    }
    // One could store these directly in the result, but that would
    // violate the constraint of the PI-related ranges that the
    // Rectangle properties are expected to stay within while the
    // bounds are still being accumulated.
    let north = Number.NEGATIVE_INFINITY;
    let south = Number.POSITIVE_INFINITY;
    let east = Number.NEGATIVE_INFINITY;
    let west = Number.POSITIVE_INFINITY;
    for (const cartographicPosition of cartographicPositions) {
      north = Math.max(north, cartographicPosition.latitude);
      south = Math.min(south, cartographicPosition.latitude);
      east = Math.max(east, cartographicPosition.longitude);
      west = Math.min(west, cartographicPosition.longitude);
    }
    result.north = north;
    result.south = south;
    result.east = east;
    result.west = west;
    return result;
  }
  /**
   * Creates a new iterable that provides `Cartesian3` objects that
   * are created by transforming the `Cartesian3` objects of the
   * given iterable with the given matrix.
   *
   * The resulting iterable will always return the same `Cartesian3`
   * object. Clients should not store and modify this object.
   *
   * @param {Iterable<Cartesian3>} positions The positions
   * @param {Matrix4} matrix The matrix
   * @returns {Iterable<Cartesian3>} The transformed cartesians
   */
  static transformCartesians3(positions, matrix) {
    //>>includeStart('debug', pragmas.debug);
    Check.defined("positions", positions);
    Check.defined("matrix", matrix);
    //>>includeEnd('debug');
    // Scratch object that is re-filled in each iteration step
    const transformedPosition = new Cartesian3();
    const transformedPositions = ModelImageryMapping.map(positions, (p) => {
      Matrix4.multiplyByPoint(matrix, p, transformedPosition);
      return transformedPosition;
    });
    return transformedPositions;
  }
  /**
   * Creates a new iterable that provides `Cartographic` objects that
   * are created by converting the given `Cartesian3` objects to
   * cartographics, based on the given ellipsoid.
   *
   * The resulting iterable will always return the same `Cartographic`
   * object. Clients should not store and modify this object.
   *
   * @param {Iterable<Cartesian3>} positions The positions
   * @param {Ellipsoid} ellipsoid The ellipsoid
   * @returns {Iterable<Cartographic>} The cartographic positions
   */
  static transformToCartographic(positions, ellipsoid) {
    //>>includeStart('debug', pragmas.debug);
    Check.defined("positions", positions);
    Check.defined("ellipsoid", ellipsoid);
    //>>includeEnd('debug');
    // Scratch object that is re-filled in each iteration step
    const cartographicPosition = new Cartographic();
    const cartographicPositions = ModelImageryMapping.map(positions, (p) => {
      // Note: This will not yield valid results for p=(0,0,0).
      // But there is no sensible cartographic position for
      // that, so simply accept the unspecified output here.
      ellipsoid.cartesianToCartographic(p, cartographicPosition);
      return cartographicPosition;
    });
    return cartographicPositions;
  }
  /**
   * Creates an iterable over the results of applying the given projection
   * to the given cartographic positions.
   *
   * The resulting iterable will always return the same `Cartesian3`
   * object. Clients should not store and modify this object.
   *
   * @param {Iterable<Cartographic>} cartographicPositions The cartographic
   * positions
   * @param {MapProjection} projection The projection to use
   * @returns {Iterable<Cartesian3>} The projected positions
   */
  static createProjectedPositions(cartographicPositions, projection) {
    //>>includeStart('debug', pragmas.debug);
    Check.defined("cartographicPositions", cartographicPositions);
    Check.defined("projection", projection);
    //>>includeEnd('debug');
    // Scratch object that is re-filled in each iteration step
    const projectedPosition = new Cartesian3();
    const projectedPositions = ModelImageryMapping.map(
      cartographicPositions,
      (c) => {
        projection.project(c, projectedPosition);
        return projectedPosition;
      },
    );
    return projectedPositions;
  }
  /**
   * Computes the texture coordinates for the given positions, relative
   * to the given bounding rectangle.
   *
   * This will make the x/y coordinates of the given cartesians relative
   * to the given bounding rectangle and clamp them to [0,0]-[1,1].
   *
   * NOTE: This could be broken down into
   * 1. mapping to 2D
   * 2. relativizing for the bounding rectangle
   * 3. clamping to [0,0]-[1,1]
   *
   * @param {Iterable<Cartesian3>} positions The positions
   * @param {BoundingRectangle} boundingRectangle The rectangle
   * @returns {Iterable<Cartesian2>} The texture coordinates
   */
  static computeTexCoords(positions, boundingRectangle) {
    //>>includeStart('debug', pragmas.debug);
    Check.defined("positions", positions);
    Check.defined("boundingRectangle", boundingRectangle);
    //>>includeEnd('debug');
    // Scratch object that is re-filled in each iteration step
    const texCoord = new Cartesian2();
    const invSizeX = 1.0 / boundingRectangle.width;
    const invSizeY = 1.0 / boundingRectangle.height;
    const texCoords = ModelImageryMapping.map(positions, (p) => {
      const uRaw = (p.x - boundingRectangle.x) * invSizeX;
      const vRaw = (p.y - boundingRectangle.y) * invSizeY;
      const u = Math.min(Math.max(uRaw, 0.0), 1.0);
      const v = Math.min(Math.max(vRaw, 0.0), 1.0);
      texCoord.x = u;
      texCoord.y = v;
      return texCoord;
    });
    return texCoords;
  }
  /**
   * Creates a new typed array from the given `Cartesian2` objects.
   *
   * This materializes the (possibly lazy) iterable into a
   * `Float32Array` of size `numElements * 2`.
   *
   * @param {number} numElements The number of elements
   * @param {Iterable<Cartesian2>} elements The elements
   * @returns {TypedArray} The typed array
   */
  static createTypedArrayFromCartesians2(numElements, elements) {
    //>>includeStart('debug', pragmas.debug);
    Check.typeOf.number.greaterThanOrEquals("numElements", numElements, 0);
    Check.defined("elements", elements);
    //>>includeEnd('debug');
    const typedArray = new Float32Array(numElements * 2);
    let index = 0;
    for (const element of elements) {
      typedArray[index * 2 + 0] = element.x;
      typedArray[index * 2 + 1] = element.y;
      index++;
    }
    return typedArray;
  }
  /**
   * Create a new texture coordinates attribute from the given data.
   *
   * This will create an attribute with
   * - semantic: VertexAttributeSemantic.TEXCOORD
   * - type: AttributeType.VEC2
   * - count: texCoordsTypedArray.length / 2
   * that contains the data from the given typed array.
   *
   * @param {TypedArray} texCoordsTypedArray The typed array
   * @returns {ModelComponents.Attribute} The attribute
   */
  static createTexCoordAttribute(texCoordsTypedArray) {
    //>>includeStart('debug', pragmas.debug);
    Check.defined("texCoordsTypedArray", texCoordsTypedArray);
    //>>includeEnd('debug');
    const texCoordAttribute = {
      name: "Imagery Texture Coordinates",
      semantic: VertexAttributeSemantic.TEXCOORD,
      setIndex: 0,
      componentDatatype: ComponentDatatype.FLOAT,
      type: AttributeType.VEC2,
      normalized: false,
      count: texCoordsTypedArray.length / 2,
      min: undefined,
      max: undefined,
      constant: new Cartesian2(0, 0),
      quantization: undefined,
      typedArray: texCoordsTypedArray,
      byteOffset: 0,
      byteStride: undefined,
    };
    return texCoordAttribute;
  }
}
export default ModelImageryMapping;

View File

@ -0,0 +1,972 @@
import defined from "../../Core/defined.js";
import DeveloperError from "../../Core/DeveloperError.js";
import Matrix4 from "../../Core/Matrix4.js";
import Check from "../../Core/Check.js";
import destroyObject from "../../Core/destroyObject.js";
import ImageryState from "../ImageryState.js";
import ImageryCoverage from "./ImageryCoverage.js";
import ModelImageryMapping from "./ModelImageryMapping.js";
import ModelUtility from "./ModelUtility.js";
import MappedPositions from "./MappedPositions.js";
import Buffer from "../../Renderer/Buffer.js";
import BufferUsage from "../../Renderer/BufferUsage.js";
/**
 * A class managing the draping of imagery on a single model primitive.
 *
 * The <code>ModelImagery</code> class creates one instance of this
 * class for each primitive that appears in the model.
 *
 * It is responsible for computing
 * - the mapped (cartographic) positions of the primitive
 * - the imagery tiles that are covered by these mapped positions
 * - the texture coordinates (attributes) that correspond to these mapped positions
 *
 * @private
 */
class ModelPrimitiveImagery {
  /**
   * Creates a new instance
   *
   * @param {Model} model The model
   * @param {ModelRuntimeNode} runtimeNode The node that the primitive is attached to
   * @param {ModelRuntimePrimitive} runtimePrimitive The primitive
   * @throws {DeveloperError} If any argument is not defined
   */
  constructor(model, runtimeNode, runtimePrimitive) {
    //>>includeStart('debug', pragmas.debug);
    Check.defined("model", model);
    Check.defined("runtimeNode", runtimeNode);
    Check.defined("runtimePrimitive", runtimePrimitive);
    //>>includeEnd('debug');

    /**
     * The model that this instance was created for.
     *
     * @type {Model}
     * @readonly
     * @private
     */
    this._model = model;

    /**
     * The node that the primitive is attached to
     *
     * @type {ModelRuntimeNode}
     * @readonly
     * @private
     */
    this._runtimeNode = runtimeNode;

    /**
     * The primitive that this instance was created for.
     *
     * @type {ModelRuntimePrimitive}
     * @readonly
     * @private
     */
    this._runtimePrimitive = runtimePrimitive;

    /**
     * The <code>MappedPositions</code> objects, one for each ellipsoid
     * of one of the imagery layers
     *
     * These objects are just plain structures that summarize the
     * cartographic positions of the primitive for one specific
     * ellipsoid
     *
     * @type {MappedPositions[]|undefined}
     * @private
     */
    this._mappedPositionsPerEllipsoid = undefined;

    /**
     * The last <code>model.modelMatrix</code> for which the mapped
     * positions have been computed.
     *
     * This is used for detecting changes in the model matrix that
     * make it necessary to re-compute the mapped positions.
     *
     * @type {Matrix4}
     * @readonly
     * @private
     */
    this._mappedPositionsModelMatrix = new Matrix4();

    /**
     * The value that the <code>Cesium3DTileset.imageryLayersModificationCounter</code>
     * had during the last update call. This is used for triggering updates when the
     * imagery layer collection in the tileset changes.
     *
     * @type {number}
     * @private
     */
    this._lastImageryLayersModificationCounter = 0;

    /**
     * The texture coordinate attributes, one for each projection.
     *
     * This contains one <code>ModelComponents.Attribute</code> for each
     * unique projection that is used in the imagery layers. These
     * texture coordinate attributes are computed based on the mapped
     * positions for the respective ellipsoid of that projection.
     *
     * @type {ModelComponents.Attribute[]|undefined}
     * @private
     */
    this._imageryTexCoordAttributesPerProjection = undefined;

    /**
     * The current imagery layers.
     *
     * This is initialized when the _coveragesPerLayer are computed,
     * and tracked so that the reference counters of the imageries
     * can be decreased when the coverages per layer are deleted.
     *
     * @type {ImageryLayer[]|undefined}
     * @private
     */
    this._currentImageryLayers = undefined;

    /**
     * Information about the imagery tiles that are covered by the positions
     * of the primitive.
     *
     * This is computed in the <code>update</code> function, based on the
     * mapped positions of the primitive. After this computation,
     * <code>_coveragesPerLayer[layerIndex]</code> is an array that contains
     * the <code>ImageryCoverage</code> objects that describe the imagery
     * tiles that are covered, including their texture coordinate rectangle.
     *
     * @type {ImageryCoverage[][]|undefined}
     * @private
     */
    this._coveragesPerLayer = undefined;

    /**
     * A flag indicating whether all imagery objects that are covered
     * are "ready".
     *
     * This is initially <code>false</code>. During the calls to the
     * <code>update</code> function (which are triggered from the
     * <code>Model.update</code> function, each frame), the
     * <code>_updateImageries</code> function will be called, and
     * process the imagery tiles, until all of them are in a
     * state like <code>ImageryState.READY</code>, at which point
     * this flag is set to <code>true</code>.
     *
     * @type {boolean}
     * @private
     */
    this._allImageriesReady = false;
  }

  /**
   * Returns the <code>ImageryCoverage</code> array that has been
   * computed for the given imagery layer.
   *
   * This assumes that the given imagery layer is part of the
   * imagery layer collection of the model, and that this
   * model primitive imagery is "ready", meaning that the
   * coverages have already been computed.
   *
   * Clients may <b>not</b> modify the returned array or any
   * of its objects!
   *
   * @param {ImageryLayer} imageryLayer The imagery layer
   * @returns {ImageryCoverage[]} The coverage information
   */
  coveragesForImageryLayer(imageryLayer) {
    const model = this._model;
    const imageryLayers = model.imageryLayers;
    const index = imageryLayers.indexOf(imageryLayer);
    if (index === -1) {
      throw new DeveloperError("Imagery layer is not part of the model");
    }
    const coveragesPerLayer = this._coveragesPerLayer;
    if (!defined(coveragesPerLayer)) {
      throw new DeveloperError(
        `The coveragesPerLayer have not been computed yet`,
      );
    }
    return coveragesPerLayer[index];
  }

  /**
   * Update the state of this instance.
   *
   * This is called as part of <code>ModelImagery.update</code>, which in
   * turn is part of the <code>Model.update</code> that is called in each
   * frame.
   *
   * This will perform the computations that are required to establish
   * the mapping between the imagery and the primitive. It will...
   * <ul>
   *   <li>
   *     Compute the <code>MappedPositions</code> of the primitive,
   *     one instance for each ellipsoid
   *   </li>
   *   <li>
   *     Compute the "coverages per layer", containing the information
   *     about which parts of the respective imagery layer are covered
   *     by the mapped positions
   *   </li>
   *   <li>
   *     Compute the texture coordinate attributes for the imagery, one
   *     for each projection, and store them as the
   *     <code>_imageryTexCoordAttributesPerProjection</code>
   *   </li>
   *   <li>
   *     Update the imageries (i.e. processing their state machine by
   *     calling <code>Imagery.processStateMachine</code>) until they
   *     are in the <code>ImageryState.READY</code> state
   *   </li>
   * </ul>
   *
   * @param {FrameState} frameState The frame state
   */
  update(frameState) {
    //>>includeStart('debug', pragmas.debug);
    Check.defined("frameState", frameState);
    //>>includeEnd('debug');

    // If the imagery layers have been modified since the last call
    // to this function, then re-build everything
    const model = this._model;
    const content = model.content;
    const tileset = content.tileset;
    const modificationCounter = tileset.imageryLayersModificationCounter;
    if (this._lastImageryLayersModificationCounter !== modificationCounter) {
      // Assigning 'undefined' (instead of using 'delete') keeps the
      // object shape stable in this per-frame code path
      this._mappedPositionsPerEllipsoid = undefined;
      this._lastImageryLayersModificationCounter = modificationCounter;
    }
    if (this._mappedPositionsNeedUpdate) {
      model.resetDrawCommands();
      this._mappedPositionsPerEllipsoid =
        this._computeMappedPositionsPerEllipsoid();
      this._deleteCoveragesPerLayer();
      this._destroyImageryTexCoordAttributes();
    }
    if (!defined(this._imageryTexCoordAttributesPerProjection)) {
      this._imageryTexCoordAttributesPerProjection =
        this._computeImageryTexCoordsAttributesPerProjection();
      this._uploadImageryTexCoordAttributes(frameState.context);
    }
    if (!defined(this._coveragesPerLayer)) {
      this._computeCoveragesPerLayer();
      this._allImageriesReady = false;
    }
    if (!this._allImageriesReady) {
      this._updateImageries(frameState);
    }
  }

  /**
   * Delete the <code>_coveragesPerLayer</code> if they are defined.
   *
   * This will call <code>deleteCoverages</code> for each set of coverages,
   * and eventually delete the <code>_coveragesPerLayer</code>.
   *
   * This will cause the reference counters of the imageries to be
   * decreased.
   */
  _deleteCoveragesPerLayer() {
    const coveragesPerLayer = this._coveragesPerLayer;
    if (!defined(coveragesPerLayer)) {
      return;
    }
    const imageryLayers = this._currentImageryLayers;
    const length = coveragesPerLayer.length;
    for (let i = 0; i < length; i++) {
      const imageryLayer = imageryLayers[i];
      const coverages = coveragesPerLayer[i];
      this._deleteCoverages(imageryLayer, coverages);
    }
    // Assigning 'undefined' (instead of using 'delete') keeps the
    // object shape stable
    this._currentImageryLayers = undefined;
    this._coveragesPerLayer = undefined;
  }

  /**
   * Delete the given imagery coverage objects for the given imagery
   * layer, meaning that it will cause the reference counters of the
   * imageries to be decreased.
   *
   * If the imagery layer already has been destroyed, then nothing
   * will be done.
   *
   * @param {ImageryLayer} imageryLayer The imagery layer
   * @param {ImageryCoverage[]} coverages The coverages
   */
  _deleteCoverages(imageryLayer, coverages) {
    if (imageryLayer.isDestroyed()) {
      return;
    }
    const length = coverages.length;
    for (let i = 0; i < length; i++) {
      const coverage = coverages[i];
      const imagery = coverage.imagery;
      imagery.releaseReference();
    }
  }

  /**
   * Create the GPU buffers for the typed arrays that are contained
   * in the <code>_imageryTexCoordAttributesPerProjection</code>
   *
   * @param {Context} context The GL context
   */
  _uploadImageryTexCoordAttributes(context) {
    //>>includeStart('debug', pragmas.debug);
    Check.defined("context", context);
    //>>includeEnd('debug');
    const attributes = this._imageryTexCoordAttributesPerProjection;
    if (!defined(attributes)) {
      return;
    }
    const n = attributes.length;
    for (let i = 0; i < n; i++) {
      const attribute = attributes[i];
      // Allocate the GL resources for the new attribute
      const imageryTexCoordBuffer = Buffer.createVertexBuffer({
        context: context,
        typedArray: attribute.typedArray,
        usage: BufferUsage.STATIC_DRAW,
      });
      // TODO_DRAPING Review this. Probably, some cleanup
      // has to happen somewhere else after setting this.
      // Check that the call to "destroy" in
      // _destroyImageryTexCoordAttributes is the right
      // thing to do here.
      imageryTexCoordBuffer.vertexArrayDestroyable = false;
      attribute.buffer = imageryTexCoordBuffer;
    }
  }

  /**
   * Destroy the <code>_imageryTexCoordAttributesPerProjection</code>
   * array.
   *
   * This is called for cleaning up the allocated GPU resources, before
   * they are supposed to be re-computed with
   * <code>_computeImageryTexCoordsAttributesPerProjection</code>
   */
  _destroyImageryTexCoordAttributes() {
    const attributes = this._imageryTexCoordAttributesPerProjection;
    if (!defined(attributes)) {
      return;
    }
    const n = attributes.length;
    for (let i = 0; i < n; i++) {
      const attribute = attributes[i];
      if (defined(attribute)) {
        if (defined(attribute.buffer)) {
          if (!attribute.buffer.isDestroyed()) {
            attribute.buffer.destroy();
          }
        }
        attributes[i] = undefined;
      }
    }
    // Assigning 'undefined' (instead of using 'delete') keeps the
    // object shape stable
    this._imageryTexCoordAttributesPerProjection = undefined;
  }

  /**
   * Returns whether the <code>MappedPositions</code> have to be
   * re-computed with <code>_computeMappedPositionsPerEllipsoid</code>.
   *
   * This is <code>true</code> when the positions have not yet been
   * computed, or when the <code>modelMatrix</code> of the model
   * changed since the previous call.
   *
   * @returns {boolean} Whether the mapped positions need an update
   * @private
   */
  get _mappedPositionsNeedUpdate() {
    if (!defined(this._mappedPositionsPerEllipsoid)) {
      return true;
    }
    const model = this._model;
    const lastModelMatrix = this._mappedPositionsModelMatrix;
    if (!Matrix4.equals(model.modelMatrix, lastModelMatrix)) {
      return true;
    }
    return false;
  }

  /**
   * Computes the mapped positions of the primitive, one for each ellipsoid.
   *
   * This computes the <i>unique</i> ellipsoids that appear in the imagery
   * layers of the model, and creates one <code>MappedPositions</code>
   * object for each of them.
   *
   * The respective <code>MappedPositions</code> objects will contain
   * the cartographic positions that are computed from the positions
   * of the primitive. These will serve as the basis for computing the
   * part of the imagery that is covered by the primitive.
   *
   * These mapped positions depend on the current <code>modelMatrix</code>
   * of the model. So they have to be re-computed when the model matrix
   * changes.
   *
   * @returns {MappedPositions[]} The mapped positions
   * @private
   */
  _computeMappedPositionsPerEllipsoid() {
    const model = this._model;
    const runtimeNode = this._runtimeNode;
    const runtimePrimitive = this._runtimePrimitive;
    const primitivePositionAttribute =
      ModelPrimitiveImagery._obtainPrimitivePositionAttribute(
        runtimePrimitive.primitive,
      );
    const numPositions = primitivePositionAttribute.count;
    const primitivePositionTransform =
      ModelPrimitiveImagery._computePrimitivePositionTransform(
        model,
        runtimeNode,
        undefined,
      );
    const mappedPositionsPerEllipsoid = [];
    const ellipsoids = ModelPrimitiveImagery._computeUniqueEllipsoids(
      model.imageryLayers,
    );
    const length = ellipsoids.length;
    for (let i = 0; i < length; i++) {
      const ellipsoid = ellipsoids[i];
      const cartographicPositions =
        ModelImageryMapping.createCartographicPositions(
          primitivePositionAttribute,
          primitivePositionTransform,
          ellipsoid,
        );
      const cartographicBoundingRectangle =
        ModelImageryMapping.computeCartographicBoundingRectangle(
          cartographicPositions,
        );
      const mappedPositions = new MappedPositions(
        cartographicPositions,
        numPositions,
        cartographicBoundingRectangle,
        ellipsoid,
      );
      mappedPositionsPerEllipsoid.push(mappedPositions);
    }
    Matrix4.clone(model.modelMatrix, this._mappedPositionsModelMatrix);
    return mappedPositionsPerEllipsoid;
  }

  /**
   * Computes an array containing the <i>unique</i> ellipsoids that
   * appear in the imagery layers of the given collection.
   *
   * @param {ImageryLayerCollection} imageryLayers
   * @returns {Ellipsoid[]} The ellipsoids
   * @private
   */
  static _computeUniqueEllipsoids(imageryLayers) {
    //>>includeStart('debug', pragmas.debug);
    Check.defined("imageryLayers", imageryLayers);
    //>>includeEnd('debug');
    const ellipsoidsSet = new Set();
    const length = imageryLayers.length;
    for (let i = 0; i < length; i++) {
      const imageryLayer = imageryLayers.get(i);
      const ellipsoid = ModelPrimitiveImagery._getEllipsoid(imageryLayer);
      ellipsoidsSet.add(ellipsoid);
    }
    return [...ellipsoidsSet];
  }

  /**
   * Computes one coordinate attribute for each unique projection
   * that is used in the imagery layers.
   *
   * This is taking the mapped positions, projecting them with
   * the respective projection, and creating a texture coordinate
   * attribute that describes the texture coordinates of these
   * positions, relative to the cartographic bounding rectangle
   * of the mapped positions.
   *
   * @returns {ModelComponents.Attribute[]} The attributes
   */
  _computeImageryTexCoordsAttributesPerProjection() {
    const model = this._model;
    const imageryLayers = model.imageryLayers;

    // Compute the arrays containing ALL projections and the array
    // containing the UNIQUE projections from the imagery layers.
    // Texture coordinate attributes only have to be created once
    // for each projection.
    const allProjections =
      ModelPrimitiveImagery._extractProjections(imageryLayers);
    const uniqueProjections = [...new Set(allProjections)];

    // Create one texture coordinate attribute for each distinct
    // projection that is used in the imagery layers
    const attributes = this._createImageryTexCoordAttributes(uniqueProjections);
    return attributes;
  }

  /**
   * Computes an array containing the projections that are used in
   * the given imagery layers.
   *
   * (Note that this array may contain duplicates)
   *
   * @param {ImageryLayerCollection} imageryLayers The imagery layers
   * @returns {MapProjection[]} The projections
   * @private
   */
  static _extractProjections(imageryLayers) {
    //>>includeStart('debug', pragmas.debug);
    Check.defined("imageryLayers", imageryLayers);
    //>>includeEnd('debug');
    const projections = [];
    const length = imageryLayers.length;
    for (let i = 0; i < length; i++) {
      const imageryLayer = imageryLayers.get(i);
      const projection = ModelPrimitiveImagery._getProjection(imageryLayer);
      projections.push(projection);
    }
    return projections;
  }

  /**
   * Returns the projection of the given imagery layer.
   *
   * This only exists to hide a train wreck
   *
   * @param {ImageryLayer} imageryLayer The imagery layer
   * @returns {MapProjection} The projection
   * @private
   */
  static _getProjection(imageryLayer) {
    //>>includeStart('debug', pragmas.debug);
    Check.defined("imageryLayer", imageryLayer);
    //>>includeEnd('debug');
    const projection = imageryLayer.imageryProvider.tilingScheme.projection;
    return projection;
  }

  /**
   * Create texture coordinates, one for each projection.
   *
   * This will create a texture coordinate attribute for each of the given projections,
   * using <code>ModelImageryMapping.createTextureCoordinatesAttributeForMappedPositions</code>,
   *
   * (This means that the given projections should indeed be unique,
   * i.e. contain no duplicates)
   *
   * @param {MapProjection[]} uniqueProjections The projections
   * @returns {ModelComponents.Attribute[]} The attributes
   */
  _createImageryTexCoordAttributes(uniqueProjections) {
    //>>includeStart('debug', pragmas.debug);
    Check.defined("uniqueProjections", uniqueProjections);
    //>>includeEnd('debug');
    const imageryTexCoordAttributePerUniqueProjection = [];
    const length = uniqueProjections.length;
    for (let i = 0; i < length; i++) {
      // Obtain the mapped positions for the ellipsoid that is used
      // in the projection (i.e. the cartographic positions of the
      // primitive, for the respective ellipsoid)
      const projection = uniqueProjections[i];
      const ellipsoid = projection.ellipsoid;
      const mappedPositions = this.mappedPositionsForEllipsoid(ellipsoid);

      // Create the actual attribute
      const imageryTexCoordAttribute =
        ModelImageryMapping.createTextureCoordinatesAttributeForMappedPositions(
          mappedPositions,
          projection,
        );
      imageryTexCoordAttributePerUniqueProjection.push(
        imageryTexCoordAttribute,
      );
    }
    return imageryTexCoordAttributePerUniqueProjection;
  }

  /**
   * Compute the coverage information for the primitive, based on the
   * imagery layers that are associated with the model.
   *
   * This updates the <code>_coveragesPerLayer[layerIndex]</code>, which
   * is an array that contains the <code>ImageryCoverage</code> objects that
   * describe the imagery tiles that are covered by the primitive, including
   * their texture coordinate rectangle.
   *
   * This has to be called after the mapped positions for the primitive
   * have been computed with <code>_computeMappedPositionsPerEllipsoid</code>.
   *
   * @private
   */
  _computeCoveragesPerLayer() {
    const coveragesPerLayer = [];
    const currentImageryLayers = [];
    const model = this._model;
    const imageryLayers = model.imageryLayers;
    const length = imageryLayers.length;
    for (let i = 0; i < length; i++) {
      const imageryLayer = imageryLayers.get(i);
      const coverages = this._computeCoverage(imageryLayer);
      coveragesPerLayer.push(coverages);
      currentImageryLayers.push(imageryLayer);
    }
    this._coveragesPerLayer = coveragesPerLayer;
    this._currentImageryLayers = currentImageryLayers;
  }

  /**
   * Compute the coverage information for the primitive, based on the
   * given imagery layer.
   *
   * This returns an array that contains the <code>ImageryCoverage</code>
   * objects that describe the imagery tiles that are covered by the
   * primitive, including their texture coordinate rectangle.
   *
   * This has to be called after the mapped positions for the primitive
   * have been computed with <code>_computeMappedPositionsPerEllipsoid</code>.
   *
   * @param {ImageryLayer} imageryLayer The imagery layer
   * @returns {ImageryCoverage[]} The coverage information
   * @private
   */
  _computeCoverage(imageryLayer) {
    const mappedPositions = this.mappedPositionsForImageryLayer(imageryLayer);
    const cartographicBoundingRectangle =
      mappedPositions.cartographicBoundingRectangle;
    const imageryLevel = this._computeImageryLevel(
      imageryLayer,
      cartographicBoundingRectangle,
    );
    const coverages = ImageryCoverage.createImageryCoverages(
      cartographicBoundingRectangle,
      imageryLayer,
      imageryLevel,
    );
    return coverages;
  }

  /**
   * Compute a <code>level</code> for accessing the imagery from the given
   * imagery layer that is suitable for a primitive with the given bounding
   * rectangle.
   *
   * @param {ImageryLayer} imageryLayer The imagery layer
   * @param {Rectangle} cartographicBoundingRectangle The cartographic
   * bounding rectangle, as obtained from the MappedPositions for
   * the given imagery layer
   * @returns {number} The imagery level
   */
  _computeImageryLevel(imageryLayer, cartographicBoundingRectangle) {
    const imageryProvider = imageryLayer.imageryProvider;
    const tilingScheme = imageryProvider.tilingScheme;
    const rectangle = tilingScheme.rectangle;

    // The number of tiles covered by the boundingRectangle (b)
    // for a certain level, based on the tiling scheme rectangle (r) is
    // numberOfTilesCovered = b / (r / 2^level)
    // Solving for "level" yields
    // level = log2( numberOfTilesCovered * r / b)
    // The goal here is to drape approximately (!) one imagery
    // tile on each primitive. In practice, it may be more
    // (up to 9 in theory)
    const desiredNumberOfTilesCovered = 1;

    // Perform the computation of the desired level, based on the
    // number of tiles that should be covered (by whatever is
    // larger, the width or the height)
    let boundingRectangleSize = cartographicBoundingRectangle.width;
    let rectangleSize = rectangle.width;
    if (
      cartographicBoundingRectangle.height > cartographicBoundingRectangle.width
    ) {
      boundingRectangleSize = cartographicBoundingRectangle.height;
      rectangleSize = rectangle.height;
    }
    const desiredLevel = Math.log2(
      (desiredNumberOfTilesCovered * rectangleSize) / boundingRectangleSize,
    );

    // Clamp the level to a valid range, and an integer value
    const imageryLevel = ImageryCoverage._clampImageryLevel(
      imageryProvider,
      desiredLevel,
    );
    return imageryLevel;
  }

  /**
   * Update all <code>Imagery</code> objects.
   *
   * This is called as part of <code>update</code>, until all required
   * imagery tiles are "ready", as indicated by their <code>state</code>
   * being <code>ImageryState.READY</code>.
   *
   * This is called after it has been determined which imagery tiles are
   * covered by the primitive (i.e. after the <code>_coveragesPerLayer</code>
   * have been computed by calling <code>_computeCoveragesPerLayer</code>).
   *
   * For each covered imagery tile, this will call
   * <code>Imagery.processStateMachine</code> until they are either
   * READY, FAILED, or INVALID.
   *
   * Once they all are in one of these final states, it will set the
   * <code>_allImageriesReady</code> flag to <code>true</code>.
   *
   * @param {FrameState} frameState The frame state, to be passed to
   * <code>imagery.processStateMachine</code>
   * @private
   */
  _updateImageries(frameState) {
    const model = this._model;
    const coveragesPerLayer = this._coveragesPerLayer;
    const length = coveragesPerLayer.length;
    let allImageriesReady = true;
    for (let i = 0; i < length; i++) {
      const coverages = coveragesPerLayer[i];
      const n = coverages.length;
      for (let j = 0; j < n; j++) {
        const coverage = coverages[j];
        const imagery = coverage.imagery;

        // In the context of loading the imagery for draping
        // it over the primitive, the imagery counts as "ready"
        // when it is really ready, but also when it failed
        // or was invalid (otherwise, the primitive would
        // never turn "ready")
        const countsAsReady =
          imagery.state === ImageryState.READY ||
          imagery.state === ImageryState.FAILED ||
          imagery.state === ImageryState.INVALID;
        if (!countsAsReady) {
          allImageriesReady = false;
          imagery.processStateMachine(frameState, false, false);
        }
      }
    }

    // When the imageries turned ready, reset the draw commands
    // to trigger a rendering with the updated draw commands
    // that include the imagery now.
    if (allImageriesReady) {
      model.resetDrawCommands();
    }
    this._allImageriesReady = allImageriesReady;
  }

  /**
   * Returns the <code>MappedPositions</code> object that contains
   * information about the primitive positions that have been computed
   * for the given imagery layer.
   *
   * This assumes that <code>_computeMappedPositionsPerEllipsoid</code> has
   * already been called.
   *
   * @param {ImageryLayer} imageryLayer The imageryLayer
   * @returns {MappedPositions} The mapped positions
   * @throws {DeveloperError} If the mapped positions for the
   * ellipsoid could not be found.
   */
  mappedPositionsForImageryLayer(imageryLayer) {
    const ellipsoid = ModelPrimitiveImagery._getEllipsoid(imageryLayer);
    return this.mappedPositionsForEllipsoid(ellipsoid);
  }

  /**
   * Returns the <code>MappedPositions</code> object that contains
   * information about the primitive positions that have been computed
   * from the given ellipsoid.
   *
   * This assumes that <code>_computeMappedPositionsPerEllipsoid</code> has
   * already been called.
   *
   * @param {Ellipsoid} ellipsoid The ellipsoid
   * @returns {MappedPositions} The mapped positions
   * @throws {DeveloperError} If the mapped positions for the
   * given ellipsoid could not be found.
   */
  mappedPositionsForEllipsoid(ellipsoid) {
    const mappedPositionsPerEllipsoid = this._mappedPositionsPerEllipsoid;
    if (!defined(mappedPositionsPerEllipsoid)) {
      throw new DeveloperError(
        `The mappedPositions have not been computed yet`,
      );
    }
    const length = mappedPositionsPerEllipsoid.length;
    for (let i = 0; i < length; i++) {
      const mappedPositions = mappedPositionsPerEllipsoid[i];
      if (mappedPositions.ellipsoid === ellipsoid) {
        return mappedPositions;
      }
    }
    throw new DeveloperError(
      `Could not find mapped positions for ellipsoid ${ellipsoid}`,
    );
  }

  /**
   * Returns the texture coordinate attributes for the primitive that
   * are used for draping the imagery.
   *
   * This will be available when this object is <code>ready</code>, and
   * will contain one attribute for each unique projection that appears
   * in the imagery layers.
   *
   * @returns {ModelComponents.Attribute[]} The attributes
   */
  imageryTexCoordAttributesPerProjection() {
    const imageryTexCoordAttributesPerProjection =
      this._imageryTexCoordAttributesPerProjection;
    if (!defined(imageryTexCoordAttributesPerProjection)) {
      throw new DeveloperError(
        `The imagery texture coordinate attributes have not been computed yet`,
      );
    }
    return this._imageryTexCoordAttributesPerProjection;
  }

  /**
   * Returns whether the draping computations are "ready".
   *
   * This means that the <code>coveragesPerLayer</code> information
   * has been computed, which describes the set of imagery tiles
   * that are covered by the primitive, <b>and</b> that all the
   * covered imagery tiles are in a state that counts as "ready"
   * (i.e. <code>ImageryState.READY</code>, <code>FAILED</code>,
   * or <code>INVALID</code>).
   *
   * @returns {boolean} Whether the draping computations are ready
   */
  get ready() {
    const coveragesPerLayer = this._coveragesPerLayer;
    if (!defined(coveragesPerLayer)) {
      return false;
    }
    return this._allImageriesReady;
  }

  /**
   * Returns whether this object was destroyed.
   *
   * If this object was destroyed, calling any function other than
   * <code>isDestroyed</code> will result in a {@link DeveloperError}.
   *
   * @returns {boolean} Whether this object was destroyed
   */
  isDestroyed() {
    return false;
  }

  /**
   * Destroys this object and all its resources.
   */
  destroy() {
    if (this.isDestroyed()) {
      return;
    }
    this._deleteCoveragesPerLayer();
    this._destroyImageryTexCoordAttributes();
    return destroyObject(this);
  }

  /**
   * Compute the transform that apparently has to be applied to
   * the positions attribute of a primitive, to compute the
   * actual, final positions in ECEF coordinates.
   *
   * This is based on the computation of the bounding
   * sphere that is done at the end of buildDrawCommands
   *
   * @param {Model} model The model
   * @param {ModelComponents.Node} runtimeNode The runtime node
   * that the primitive is attached to
   * @param {Matrix4} [result] The result
   * @returns {Matrix4} The result
   * @private
   */
  static _computePrimitivePositionTransform(model, runtimeNode, result) {
    //>>includeStart('debug', pragmas.debug);
    Check.defined("model", model);
    Check.defined("runtimeNode", runtimeNode);
    //>>includeEnd('debug');
    if (!defined(result)) {
      result = new Matrix4();
    }
    const modelSceneGraph = model.sceneGraph;
    Matrix4.clone(Matrix4.IDENTITY, result);
    Matrix4.multiply(result, model.modelMatrix, result);
    Matrix4.multiply(result, modelSceneGraph.components.transform, result);
    Matrix4.multiply(result, modelSceneGraph.axisCorrectionMatrix, result);
    Matrix4.multiply(result, runtimeNode.computedTransform, result);
    return result;
  }

  /**
   * Returns the <code>"POSITION"</code> attribute from the given primitive.
   *
   * The <code>"POSITION"</code> attribute is required. If it is not
   * defined for the given primitive, then a <code>DeveloperError</code>
   * is thrown.
   *
   * @param {ModelComponents.Primitive} primitive The primitive
   * @returns {ModelComponents.Attribute} The position attribute
   * @throws {DeveloperError} If there is no position attribute
   * @private
   */
  static _obtainPrimitivePositionAttribute(primitive) {
    //>>includeStart('debug', pragmas.debug);
    Check.defined("primitive", primitive);
    //>>includeEnd('debug');
    const primitivePositionAttribute = ModelUtility.getAttributeBySemantic(
      primitive,
      "POSITION",
    );
    if (!defined(primitivePositionAttribute)) {
      throw new DeveloperError(
        "The primitive does not have a POSITION attribute",
      );
    }
    return primitivePositionAttribute;
  }

  /**
   * Returns the ellipsoid of the given imagery layer.
   *
   * This only exists to hide a train wreck
   *
   * @param {ImageryLayer} imageryLayer The imagery layer
   * @returns {Ellipsoid} The ellipsoid
   * @private
   */
  static _getEllipsoid(imageryLayer) {
    const ellipsoid =
      imageryLayer.imageryProvider.tilingScheme.projection.ellipsoid;
    return ellipsoid;
  }
}
export default ModelPrimitiveImagery;

View File

@ -0,0 +1,810 @@
import Check from "../../Core/Check.js";
import defined from "../../Core/defined.js";
import Cartesian2 from "../../Core/Cartesian2.js";
import Cartesian3 from "../../Core/Cartesian3.js";
import Cartesian4 from "../../Core/Cartesian4.js";
import DeveloperError from "../../Core/DeveloperError.js";
import ComponentDatatype from "../../Core/ComponentDatatype.js";
import AttributeCompression from "../../Core/AttributeCompression.js";
import IndexDatatype from "../../Core/IndexDatatype.js";
import PrimitiveType from "../../Core/PrimitiveType.js";
import Matrix4 from "../../Core/Matrix4.js";
import AttributeType from "../AttributeType.js";
/**
 * A class for reading the data from a <code>ModelComponents.Attribute</code>.
 *
 * NOTE: Much of the functionality here already exists, scattered in many places.
 * In most cases, the functionality is tailored for "one case" (like only handling
 * positions, or only normals, or not considering quantization, or not handling
 * interleaved buffers, ...). In many cases, the functionality is tailored for an
 * 'accessor' (and often, the functions also expect the 'gltf' to be given).
 * Most of what is done here (and in the existing functions) is pretty low-level
 * and generic, though: The functions could often be fed with some (count, type,
 * componentType), and there could be convenience functions that EITHER take these
 * values from an 'accessor' OR from an 'attribute'. The tl;dr: Large parts of
 * this could be "nicer", or "more generic", and "better" along all dimensions
 * of this term. Just give me time...
 *
 * NOTE: The fact that all this has to operate on TypedArray is unfortunate.
 * Most of the subsequent processing could operate on some abstraction of
 * that. The fact that that TypedArrays can be read/written as "bulk", and
 * then offer access that is "as efficient as it can be" could be a
 * justification, as part of the performance-genericity trade-off
 *
 * NOTE: All this does not properly handle MATn types. There should be SOME
 * abstraction for element- and component-wise access of the data. See
 * https://github.com/javagl/JglTF/blob/84ce6d019fec3b75b6af1649bbe834005b2c620f/jgltf-model/src/main/java/de/javagl/jgltf/model/AbstractAccessorData.java#L149
 *
 * @private
 */
class ModelReader {
  /**
   * Reads the data of the given attribute into a typed array.
   *
   * This will read the data into a compact, flat array with the data
   * type corresponding to the data type of the attribute.
   *
   * If the attribute is contained in an interleaved buffer, marked as
   * 'normalized', quantized, or oct-encoded, then it will be deinterleaved,
   * normalization will be applied, it will be dequantized and oct-decoded
   * as necessary.
   *
   * The result will be THE actual attribute data.
   *
   * @param {ModelComponents.Attribute} attribute The attribute
   * @returns {TypedArray} The attribute data
   */
  static readAttributeAsTypedArray(attribute) {
    //>>includeStart('debug', pragmas.debug);
    Check.defined("attribute", attribute);
    //>>includeEnd('debug');

    // Obtain a compact (non-interleaved) typed array that contains
    // the components.
    const compactTypedArray =
      ModelReader.readAttributeAsRawCompactTypedArray(attribute);

    // If the attribute is not normalized and the data is not quantized
    // and not normalized, then this can be returned directly
    const normalized = attribute.normalized;
    const quantization = attribute.quantization;
    if (!defined(quantization) && !normalized) {
      return compactTypedArray;
    }
    const elementType = attribute.type;
    const elementCount = attribute.count;

    // If the attribute is normalized, normalize the data from
    // the typed array
    let normalizedTypedArray = compactTypedArray;
    if (normalized) {
      // Note that although this is called "dequantize", it does
      // not really "dequantize" based on the quantization. It only
      // performs the conversion from the (normalized) integer
      // component types into floating point.
      normalizedTypedArray = AttributeCompression.dequantize(
        compactTypedArray,
        attribute.componentDatatype,
        elementType,
        elementCount,
      );
    }
    if (!defined(quantization)) {
      return normalizedTypedArray;
    }

    // Now, this one actually DOES dequantize...
    const dequantizedTypedArray = ModelReader.dequantize(
      normalizedTypedArray,
      elementCount,
      elementType,
      quantization,
    );
    return dequantizedTypedArray;
  }

  /**
   * Read the data of the given attribute into a compact typed array.
   *
   * If the attribute is stored as interleaved data, then the result
   * will be the deinterleaved data. If the data is quantized or
   * normalized, then the resulting data will be the "raw" data,
   * without applying normalization or dequantization.
   *
   * @param {ModelComponents.Attribute} attribute The attribute
   * @returns {TypedArray} The raw attribute data
   */
  static readAttributeAsRawCompactTypedArray(attribute) {
    //>>includeStart('debug', pragmas.debug);
    Check.defined("attribute", attribute);
    //>>includeEnd('debug');

    const elementType = attribute.type;
    const elementCount = attribute.count;
    const componentsPerElement =
      AttributeType.getNumberOfComponents(elementType);
    const totalComponentCount = elementCount * componentsPerElement;

    // If the data is quantized, use the quantized component type
    let componentType = attribute.componentDatatype;
    const quantization = attribute.quantization;
    if (defined(quantization)) {
      componentType = quantization.componentDatatype;
    }
    const buffer = attribute.buffer;

    // If the byte stride is the default (i.e. the total element size),
    // then just fetch the whole buffer data into a typed array of the
    // desired target type, and return it
    const byteOffset = attribute.byteOffset;
    const byteStride = attribute.byteStride;
    const bytesPerComponent = ComponentDatatype.getSizeInBytes(componentType);
    const defaultByteStride = componentsPerElement * bytesPerComponent;
    if (!defined(byteStride) || byteStride === defaultByteStride) {
      const typedArray = ComponentDatatype.createTypedArray(
        componentType,
        totalComponentCount,
      );
      buffer.getBufferData(typedArray, byteOffset);
      return typedArray;
    }

    // Fetch the whole buffer in its raw form, to pick out the
    // interleaved values.
    // Note: When ALL attributes have to be fetched from an
    // interleaved buffer, then this getBufferData call will
    // be performed multiple times. It would be preferable to
    // have ONE "TypedArray[] getThemFrom(buffer)" call that
    // returns all of the (interleaved) attributes at once,
    // but this requires abstractions that we don't have.
    const fullTypedArray = new Uint8Array(buffer.sizeInBytes);
    buffer.getBufferData(fullTypedArray);

    // Read the components of each element, and write them into
    // a typed array in a compact form
    const compactTypedArray = ComponentDatatype.createTypedArray(
      componentType,
      totalComponentCount,
    );
    const elementByteStride = byteStride ?? defaultByteStride;
    const dataView = new DataView(
      fullTypedArray.buffer,
      fullTypedArray.byteOffset,
      fullTypedArray.byteLength,
    );
    const components = new Array(componentsPerElement);
    const componentsReader = ModelReader.createComponentsReader(componentType);
    for (let i = 0; i < elementCount; ++i) {
      const elementByteOffset = byteOffset + i * elementByteStride;
      componentsReader(
        dataView,
        elementByteOffset,
        componentsPerElement,
        components,
      );
      for (let j = 0; j < componentsPerElement; ++j) {
        compactTypedArray[i * componentsPerElement + j] = components[j];
      }
    }
    return compactTypedArray;
  }

  /**
   * Dequantize the data from the given input array, based on the given
   * quantization information, and return the result.
   *
   * This assumes that normalization has already been applied. This means that
   * when the <code>quantization.normalized</code> flag is <code>true</code>,
   * then the input is assumed to contain floating point values in the range
   * [-1, 1].
   *
   * @param {TypedArray} quantizedTypedArray The quantized typed array
   * @param {number} elementCount The number of elements
   * @param {AttributeType} elementType The element type
   * @param {ModelComponents.Quantization} quantization The quantization
   * @returns {TypedArray} The result
   * @throws DeveloperError When the element type is not SCALAR, VEC2,
   * VEC3, or VEC4
   */
  static dequantize(
    quantizedTypedArray,
    elementCount,
    elementType,
    quantization,
  ) {
    //>>includeStart('debug', pragmas.debug);
    Check.defined("quantizedTypedArray", quantizedTypedArray);
    Check.typeOf.number.greaterThanOrEquals("elementCount", elementCount, 0);
    Check.defined("elementType", elementType);
    Check.defined("quantization", quantization);
    //>>includeEnd('debug');

    if (quantization.octEncoded) {
      const dequantizedTypedArray = ModelReader.octDecode(
        quantizedTypedArray,
        elementCount,
        quantization.normalizationRange,
        undefined,
      );
      if (quantization.octEncodedZXY) {
        // The signature is (input, elementCount, output). Previously the
        // element count was omitted here, which made the swizzle a silent
        // no-op (the loop never ran) and discarded its result.
        ModelReader.convertZxyToXyz(
          dequantizedTypedArray,
          elementCount,
          dequantizedTypedArray,
        );
      }
      return dequantizedTypedArray;
    }

    // These could be generalized, if the offset/stepSize were not
    // CartesianX objects, but arrays...
    const stepSize = quantization.quantizedVolumeStepSize;
    const offset = quantization.quantizedVolumeOffset;
    if (elementType === AttributeType.SCALAR) {
      return ModelReader.dequantize1D(
        quantizedTypedArray,
        elementCount,
        stepSize,
        offset,
        undefined,
      );
    }
    if (elementType === AttributeType.VEC2) {
      return ModelReader.dequantize2D(
        quantizedTypedArray,
        elementCount,
        stepSize,
        offset,
        undefined,
      );
    }
    if (elementType === AttributeType.VEC3) {
      return ModelReader.dequantize3D(
        quantizedTypedArray,
        elementCount,
        stepSize,
        offset,
        undefined,
      );
    }
    if (elementType === AttributeType.VEC4) {
      return ModelReader.dequantize4D(
        quantizedTypedArray,
        elementCount,
        stepSize,
        offset,
        undefined,
      );
    }
    throw new DeveloperError(
      `Element type for dequantization must be SCALAR, VEC2, VEC3, or VEC4, but is ${elementType}`,
    );
  }

  /**
   * Decode oct-encoded normals from the given input, and write the
   * result into the given output, allocating and returning a new
   * array if the result was undefined.
   *
   * This will apply the <code>AttributeCompression.octDecodeInRange</code>
   * function to each three components of the input.
   *
   * @param {TypedArray} quantizedTypedArray The input
   * @param {number} elementCount The number of elements
   * @param {number} normalizationRange The normalization range
   * @param {TypedArray} [dequantizedTypedArray] The result
   * @returns {TypedArray} The result
   */
  static octDecode(
    quantizedTypedArray,
    elementCount,
    normalizationRange,
    dequantizedTypedArray,
  ) {
    //>>includeStart('debug', pragmas.debug);
    Check.defined("quantizedTypedArray", quantizedTypedArray);
    Check.typeOf.number.greaterThanOrEquals("elementCount", elementCount, 0);
    Check.typeOf.number.greaterThan(
      "normalizationRange",
      normalizationRange,
      0,
    );
    //>>includeEnd('debug');

    if (!defined(dequantizedTypedArray)) {
      dequantizedTypedArray = new Float32Array(quantizedTypedArray.length);
    }
    // NOTE(review): this reads three components per element and passes a
    // Cartesian3 to AttributeCompression.octDecodeInRange — confirm that
    // this matches the expected (x, y, rangeMax, result) signature.
    const c = new Cartesian3();
    for (let i = 0; i < elementCount; i++) {
      Cartesian3.unpack(quantizedTypedArray, i * 3, c);
      AttributeCompression.octDecodeInRange(c, normalizationRange, c);
      Cartesian3.pack(dequantizedTypedArray, c, i * 3);
    }
    return dequantizedTypedArray;
  }

  /**
   * Swizzle all three consecutive elements in the given input array
   * from (z, x, y) to (x, y, z), and write the result into the
   * given output array, creating a new array if the given output
   * array was undefined.
   *
   * @param {TypedArray} input The input
   * @param {number} elementCount The number of elements
   * @param {TypedArray} [output] The result
   * @returns {TypedArray} The result
   */
  static convertZxyToXyz(input, elementCount, output) {
    //>>includeStart('debug', pragmas.debug);
    Check.defined("input", input);
    Check.typeOf.number.greaterThanOrEquals("elementCount", elementCount, 0);
    //>>includeEnd('debug');

    if (!defined(output)) {
      output = new Float32Array(input.length);
    }
    let offset = 0;
    for (let i = 0; i < elementCount; i++, offset += 3) {
      const z = input[offset + 0];
      const x = input[offset + 1];
      const y = input[offset + 2];
      output[offset + 0] = x;
      output[offset + 1] = y;
      output[offset + 2] = z;
    }
    return output;
  }

  /**
   * Dequantize the given quantized array, based on the given quantization
   * information, and write the result into the given output array, creating
   * the output array if it was undefined.
   *
   * This will simply fill the output array with
   * <code>output[i] = input[i] * stepSize + offset</code>
   *
   * @param {TypedArray} quantizedTypedArray The quantized array
   * @param {number} elementCount The number of elements
   * @param {number} stepSize The quantization step size
   * @param {number} offset The quantization offset
   * @param {TypedArray} [dequantizedTypedArray] The result
   * @returns {TypedArray} The result
   */
  static dequantize1D(
    quantizedTypedArray,
    elementCount,
    stepSize,
    offset,
    dequantizedTypedArray,
  ) {
    //>>includeStart('debug', pragmas.debug);
    Check.defined("quantizedTypedArray", quantizedTypedArray);
    Check.typeOf.number.greaterThanOrEquals("elementCount", elementCount, 0);
    Check.defined("stepSize", stepSize);
    Check.defined("offset", offset);
    //>>includeEnd('debug');

    if (!defined(dequantizedTypedArray)) {
      dequantizedTypedArray = new Float32Array(quantizedTypedArray.length);
    }
    for (let i = 0; i < elementCount; i++) {
      const q = quantizedTypedArray[i];
      const d = q * stepSize + offset;
      dequantizedTypedArray[i] = d;
    }
    return dequantizedTypedArray;
  }

  /**
   * Dequantize the given quantized array, based on the given quantization
   * information, and write the result into the given output array, creating
   * the output array if it was undefined.
   *
   * This will simply fill the output array with
   * <code>output[i] = input[i] * stepSize + offset</code>
   * when interpreting the input and output as arrays of Cartesian2.
   *
   * @param {TypedArray} quantizedTypedArray The quantized array
   * @param {number} elementCount The number of elements
   * @param {Cartesian2} stepSize The quantization step size
   * @param {Cartesian2} offset The quantization offset
   * @param {TypedArray} [dequantizedTypedArray] The result
   * @returns {TypedArray} The result
   */
  static dequantize2D(
    quantizedTypedArray,
    elementCount,
    stepSize,
    offset,
    dequantizedTypedArray,
  ) {
    //>>includeStart('debug', pragmas.debug);
    Check.defined("quantizedTypedArray", quantizedTypedArray);
    Check.typeOf.number.greaterThanOrEquals("elementCount", elementCount, 0);
    Check.defined("stepSize", stepSize);
    Check.defined("offset", offset);
    //>>includeEnd('debug');

    if (!defined(dequantizedTypedArray)) {
      dequantizedTypedArray = new Float32Array(quantizedTypedArray.length);
    }
    const c = new Cartesian2();
    for (let i = 0; i < elementCount; i++) {
      Cartesian2.unpack(quantizedTypedArray, i * 2, c);
      Cartesian2.multiplyComponents(c, stepSize, c);
      Cartesian2.add(c, offset, c);
      Cartesian2.pack(c, dequantizedTypedArray, i * 2);
    }
    return dequantizedTypedArray;
  }

  /**
   * Dequantize the given quantized array, based on the given quantization
   * information, and write the result into the given output array, creating
   * the output array if it was undefined.
   *
   * This will simply fill the output array with
   * <code>output[i] = input[i] * stepSize + offset</code>
   * when interpreting the input and output as arrays of Cartesian3.
   *
   * @param {TypedArray} quantizedTypedArray The quantized array
   * @param {number} elementCount The number of elements
   * @param {Cartesian3} stepSize The quantization step size
   * @param {Cartesian3} offset The quantization offset
   * @param {TypedArray} [dequantizedTypedArray] The result
   * @returns {TypedArray} The result
   */
  static dequantize3D(
    quantizedTypedArray,
    elementCount,
    stepSize,
    offset,
    dequantizedTypedArray,
  ) {
    //>>includeStart('debug', pragmas.debug);
    Check.defined("quantizedTypedArray", quantizedTypedArray);
    Check.typeOf.number.greaterThanOrEquals("elementCount", elementCount, 0);
    Check.defined("stepSize", stepSize);
    Check.defined("offset", offset);
    //>>includeEnd('debug');

    if (!defined(dequantizedTypedArray)) {
      dequantizedTypedArray = new Float32Array(quantizedTypedArray.length);
    }
    const c = new Cartesian3();
    for (let i = 0; i < elementCount; i++) {
      Cartesian3.unpack(quantizedTypedArray, i * 3, c);
      Cartesian3.multiplyComponents(c, stepSize, c);
      Cartesian3.add(c, offset, c);
      Cartesian3.pack(c, dequantizedTypedArray, i * 3);
    }
    return dequantizedTypedArray;
  }

  /**
   * Dequantize the given quantized array, based on the given quantization
   * information, and write the result into the given output array, creating
   * the output array if it was undefined.
   *
   * This will simply fill the output array with
   * <code>output[i] = input[i] * stepSize + offset</code>
   * when interpreting the input and output as arrays of Cartesian4.
   *
   * @param {TypedArray} quantizedTypedArray The quantized array
   * @param {number} elementCount The number of elements
   * @param {Cartesian4} stepSize The quantization step size
   * @param {Cartesian4} offset The quantization offset
   * @param {TypedArray} [dequantizedTypedArray] The result
   * @returns {TypedArray} The result
   */
  static dequantize4D(
    quantizedTypedArray,
    elementCount,
    stepSize,
    offset,
    dequantizedTypedArray,
  ) {
    //>>includeStart('debug', pragmas.debug);
    Check.defined("quantizedTypedArray", quantizedTypedArray);
    Check.typeOf.number.greaterThanOrEquals("elementCount", elementCount, 0);
    Check.defined("stepSize", stepSize);
    Check.defined("offset", offset);
    //>>includeEnd('debug');

    if (!defined(dequantizedTypedArray)) {
      dequantizedTypedArray = new Float32Array(quantizedTypedArray.length);
    }
    const c = new Cartesian4();
    for (let i = 0; i < elementCount; i++) {
      Cartesian4.unpack(quantizedTypedArray, i * 4, c);
      Cartesian4.multiplyComponents(c, stepSize, c);
      Cartesian4.add(c, offset, c);
      Cartesian4.pack(c, dequantizedTypedArray, i * 4);
    }
    return dequantizedTypedArray;
  }

  /**
   * Reads the specified number of components with the given type
   * at the given byte offset from the data view, in little-endian
   * order, into a result array
   * @callback ComponentsReaderCallback
   * @param {DataView} dataView Typed data view into a binary buffer
   * @param {number} byteOffset The offset, in bytes, from the start of the view to read the data from
   * @param {number} numberOfComponents The number of components to read
   * @param {number[]} result The array in which to read the result
   */

  /**
   * Creates a function that reads the specified number of components with
   * the given type from the given data view, in little-endian
   * order, and writes them into a given result array.
   *
   * @param {ComponentDatatype} componentType The component type
   * @returns {ComponentsReaderCallback} The reader
   */
  static createComponentsReader(componentType) {
    const componentReader = ModelReader.createComponentReader(componentType);
    const sizeInBytes = ComponentDatatype.getSizeInBytes(componentType);
    return function (dataView, byteOffset, numberOfComponents, result) {
      let offset = byteOffset;
      for (let i = 0; i < numberOfComponents; ++i) {
        result[i] = componentReader(dataView, offset);
        offset += sizeInBytes;
      }
    };
  }

  /**
   * Reads and returns a value with the given type
   * at the given byte offset from the data view, in little-endian
   * order
   * @callback ComponentReaderCallback
   * @param {DataView} dataView Typed data view into a binary buffer
   * @param {number} byteOffset The offset, in bytes, from the start of the view to read the data from
   * @returns {number|BigInt} The value read from the dataView
   */

  /**
   * Creates a function that reads and returns a value with the given type
   * at the given byte offset from the data view, in little-endian
   * order
   * @param {ComponentDatatype} componentType The component type
   * @returns {ComponentReaderCallback} The reader
   * @throws {DeveloperError} If the componentType is not a valid ComponentDatatype
   */
  static createComponentReader(componentType) {
    switch (componentType) {
      case ComponentDatatype.BYTE:
        return function (dataView, byteOffset) {
          return dataView.getInt8(byteOffset);
        };
      case ComponentDatatype.UNSIGNED_BYTE:
        return function (dataView, byteOffset) {
          return dataView.getUint8(byteOffset);
        };
      case ComponentDatatype.SHORT:
        return function (dataView, byteOffset) {
          return dataView.getInt16(byteOffset, true);
        };
      case ComponentDatatype.UNSIGNED_SHORT:
        return function (dataView, byteOffset) {
          return dataView.getUint16(byteOffset, true);
        };
      case ComponentDatatype.INT:
        return function (dataView, byteOffset) {
          return dataView.getInt32(byteOffset, true);
        };
      case ComponentDatatype.UNSIGNED_INT:
        return function (dataView, byteOffset) {
          return dataView.getUint32(byteOffset, true);
        };
      case ComponentDatatype.FLOAT:
        return function (dataView, byteOffset) {
          return dataView.getFloat32(byteOffset, true);
        };
      case ComponentDatatype.DOUBLE:
        return function (dataView, byteOffset) {
          return dataView.getFloat64(byteOffset, true);
        };
    }
    throw new DeveloperError(
      `The componentType must be a valid ComponentDatatype, but is ${componentType}`,
    );
  }

  /**
   * Transform the elements of the given array with the given 4x4 matrix,
   * interpreting each 3 consecutive elements as a 3D point, and write
   * the result into the given result array, creating the result array
   * if it was undefined.
   *
   * @param {TypedArray} input The input array
   * @param {Matrix4} matrix The matrix
   * @param {TypedArray} [result] The result
   * @returns {TypedArray} The result
   */
  static transform3D(input, matrix, result) {
    //>>includeStart('debug', pragmas.debug);
    Check.defined("input", input);
    Check.defined("matrix", matrix);
    //>>includeEnd('debug');

    if (!defined(result)) {
      result = new Float32Array(input.length);
    }
    const c = new Cartesian3();
    const elementCount = input.length / 3;
    for (let i = 0; i < elementCount; i++) {
      Cartesian3.unpack(input, i * 3, c);
      Matrix4.multiplyByPoint(matrix, c, c);
      Cartesian3.pack(c, result, i * 3);
    }
    return result;
  }

  /**
   * Read the indices values from the given primitive indices, and
   * return them as a typed array.
   *
   * If the given object already has a <code>typedArray</code> property, then it
   * is assumed that this contains the proper indices, and they are returned.
   *
   * Otherwise, this reads the data from the <code>buffer</code> of the given
   * primitive indices object, into a typed array with a type that matches the
   * <code>indexDataType</code>, and returns it.
   *
   * Clients may not modify the returned typed array.
   *
   * @param {ModelComponents.Indices} primitiveIndices The primitive indices
   * @returns {TypedArray} The indices values
   * @throws {DeveloperError} If the <code>indexDataType</code> of the given
   * object is neither <code>UNSIGNED_BYTE</code>, nor <code>UNSIGNED_SHORT</code>,
   * nor <code>UNSIGNED_INT</code>
   */
  static readIndicesAsTypedArray(primitiveIndices) {
    //>>includeStart('debug', pragmas.debug);
    Check.defined("primitiveIndices", primitiveIndices);
    //>>includeEnd('debug');

    const existingIndices = primitiveIndices.typedArray;
    if (defined(existingIndices)) {
      return existingIndices;
    }
    const indicesBuffer = primitiveIndices.buffer;
    const indicesCount = primitiveIndices.count;
    const indexDatatype = primitiveIndices.indexDatatype;
    const indices = ModelReader.createIndexTypedArray(
      indexDatatype,
      indicesCount,
    );
    indicesBuffer.getBufferData(indices);
    return indices;
  }

  /**
   * Read the indices values from the given primitive indices object, and return
   * them as a typed array of triangle vertex indices.
   *
   * If the given primitive type is <code>TRIANGLES</code>, then the indices
   * values will be read from the given object, and returned.
   *
   * If the primitive type is <code>TRIANGLE_STRIP</code> or <code>TRIANGLE_FAN</code>,
   * then the original indices values, will be read, converted into triangle indices
   * (i.e. their equivalent <code>TRIANGLES</code> representation), and the result
   * will be returned.
   *
   * The type of the returned array will match the <code>indexDataType</code>
   * of the given object.
   *
   * Clients may not modify the returned typed array.
   *
   * @param {ModelComponents.Indices} primitiveIndices The primitive indices
   * @param {PrimitiveType} primitiveType The primitive type
   * @returns {TypedArray} The indices, converted to triangle indices if necessary
   * @throws {DeveloperError} If the <code>indexDataType</code> of the given
   * object is neither <code>UNSIGNED_BYTE</code>, nor <code>UNSIGNED_SHORT</code>,
   * nor <code>UNSIGNED_INT</code>, or the given <code>primitiveType</code>
   * is neither <code>TRIANGLES</code>, nor <code>TRIANGLE_STRIP</code>,
   * nor <code>TRIANGLE_FAN</code>
   */
  static readIndicesAsTriangleIndicesTypedArray(
    primitiveIndices,
    primitiveType,
  ) {
    const originalIndices =
      ModelReader.readIndicesAsTypedArray(primitiveIndices);
    if (primitiveType === PrimitiveType.TRIANGLES) {
      return originalIndices;
    }
    if (primitiveType === PrimitiveType.TRIANGLE_STRIP) {
      const triangleIndices =
        ModelReader.convertTriangleStripToTriangleIndices(originalIndices);
      return triangleIndices;
    }
    if (primitiveType === PrimitiveType.TRIANGLE_FAN) {
      const triangleIndices =
        ModelReader.convertTriangleFanToTriangleIndices(originalIndices);
      return triangleIndices;
    }
    throw new DeveloperError(
      `The primitiveType must be TRIANGLES (${PrimitiveType.TRIANGLES}), ` +
        `TRIANGLE_STRIP (${PrimitiveType.TRIANGLE_STRIP}), or ` +
        `TRIANGLE_FAN (${PrimitiveType.TRIANGLE_FAN}), but is ${primitiveType}`,
    );
  }

  /**
   * Converts the given indices from a <code>TRIANGLE_STRIP</code> representation
   * into a <code>TRIANGLES</code> representation, and returns the result.
   *
   * The type of the result will be the same as the type of the input array.
   *
   * @param {TypedArray} indices The input indices
   * @returns {TypedArray} The resulting triangle indices
   */
  static convertTriangleStripToTriangleIndices(indices) {
    // TypedArray constructors must be invoked with `new` - calling them
    // as a plain function throws a TypeError
    const triangleIndices = new indices.constructor((indices.length - 2) * 3);
    for (let i = 0; i < indices.length - 2; i++) {
      // Every other triangle in a strip has reversed winding; swap the
      // last two vertices to keep a consistent orientation
      if (i % 2 === 1) {
        triangleIndices[i * 3 + 0] = indices[i + 0];
        triangleIndices[i * 3 + 1] = indices[i + 2];
        triangleIndices[i * 3 + 2] = indices[i + 1];
      } else {
        triangleIndices[i * 3 + 0] = indices[i + 0];
        triangleIndices[i * 3 + 1] = indices[i + 1];
        triangleIndices[i * 3 + 2] = indices[i + 2];
      }
    }
    return triangleIndices;
  }

  /**
   * Converts the given indices from a <code>TRIANGLE_FAN</code> representation
   * into a <code>TRIANGLES</code> representation, and returns the result.
   *
   * The type of the result will be the same as the type of the input array.
   *
   * @param {TypedArray} indices The input indices
   * @returns {TypedArray} The resulting triangle indices
   */
  static convertTriangleFanToTriangleIndices(indices) {
    // TypedArray constructors must be invoked with `new` - calling them
    // as a plain function throws a TypeError
    const triangleIndices = new indices.constructor((indices.length - 2) * 3);
    for (let i = 0; i < indices.length - 2; i++) {
      // All triangles of a fan share the first vertex
      triangleIndices[i * 3 + 0] = indices[0];
      triangleIndices[i * 3 + 1] = indices[i + 1];
      triangleIndices[i * 3 + 2] = indices[i + 2];
    }
    return triangleIndices;
  }

  /**
   * Create a typed array with a type that matches the given index data type,
   * and the given size.
   *
   * @param {number} indexDatatype The <code>IndexDataType</code>
   * @param {number} size The size of the array that will be created
   * @returns {TypedArray} The typed array
   * @throws {DeveloperError} If the <code>indexDataType</code> is neither
   * <code>UNSIGNED_BYTE</code>, nor <code>UNSIGNED_SHORT</code>,
   * nor <code>UNSIGNED_INT</code>, or the size is negative.
   */
  static createIndexTypedArray(indexDatatype, size) {
    //>>includeStart('debug', pragmas.debug);
    Check.typeOf.number.greaterThanOrEquals("size", size, 0);
    //>>includeEnd('debug');

    switch (indexDatatype) {
      case IndexDatatype.UNSIGNED_BYTE:
        return new Uint8Array(size);
      case IndexDatatype.UNSIGNED_SHORT:
        return new Uint16Array(size);
      case IndexDatatype.UNSIGNED_INT:
        return new Uint32Array(size);
    }
    throw new DeveloperError(
      `The indexDatatype must be UNSIGNED_BYTE (${IndexDatatype.UNSIGNED_BYTE}), ` +
        `UNSIGNED_SHORT (${IndexDatatype.UNSIGNED_SHORT}), or ` +
        `UNSIGNED_INT (${IndexDatatype.UNSIGNED_INT}), but is ${indexDatatype}`,
    );
  }
}
export default ModelReader;

View File

@ -101,6 +101,19 @@ function ModelRenderResources(model) {
* @private
*/
this.hasSkipLevelOfDetail = false;
/**
* The NodeRenderResources for the nodes of the scene graph.
*
* Note that elements of this array may be <code>undefined</code> when
* there was no runtime node for a certain node.
*
* @type {NodeRenderResources[]}
* @default []
*
* @private
*/
this.nodeRenderResources = [];
}
export default ModelRenderResources;

View File

@ -12,6 +12,7 @@ import CustomShaderPipelineStage from "./CustomShaderPipelineStage.js";
import DequantizationPipelineStage from "./DequantizationPipelineStage.js";
import FeatureIdPipelineStage from "./FeatureIdPipelineStage.js";
import GeometryPipelineStage from "./GeometryPipelineStage.js";
import ImageryPipelineStage from "./ImageryPipelineStage.js";
import LightingPipelineStage from "./LightingPipelineStage.js";
import MaterialPipelineStage from "./MaterialPipelineStage.js";
import MetadataPickingPipelineStage from "./MetadataPickingPipelineStage.js";
@ -27,6 +28,7 @@ import SelectedFeatureIdPipelineStage from "./SelectedFeatureIdPipelineStage.js"
import SkinningPipelineStage from "./SkinningPipelineStage.js";
import VerticalExaggerationPipelineStage from "./VerticalExaggerationPipelineStage.js";
import WireframePipelineStage from "./WireframePipelineStage.js";
import oneTimeWarning from "../../Core/oneTimeWarning.js";
/**
* In memory representation of a single primitive, that is, a primitive
@ -206,6 +208,12 @@ ModelRuntimePrimitive.prototype.configurePipeline = function (frameState) {
const hasMorphTargets =
defined(primitive.morphTargets) && primitive.morphTargets.length > 0;
const hasSkinning = defined(node.skin);
// Check whether the model is part of a `Model3DTileContent` that
// belongs to a tileset that has imagery layers. If this is the
// case, then the `ImageryPipelineStage` will be required.
const hasImageryLayers = defined(model.imageryLayers);
const hasCustomShader = defined(customShader);
const hasCustomFragmentShader =
hasCustomShader && defined(customShader.fragmentShaderText);
@ -270,6 +278,17 @@ ModelRuntimePrimitive.prototype.configurePipeline = function (frameState) {
pipelineStages.push(DequantizationPipelineStage);
}
if (hasImageryLayers) {
if (hasOutlines) {
oneTimeWarning(
"outlines-and-draping",
"Primitive outlines disable imagery draping",
);
} else {
pipelineStages.push(ImageryPipelineStage);
}
}
if (materialsEnabled) {
pipelineStages.push(MaterialPipelineStage);
}

View File

@ -445,6 +445,7 @@ const scratchModelPositionMin = new Cartesian3();
const scratchModelPositionMax = new Cartesian3();
const scratchPrimitivePositionMin = new Cartesian3();
const scratchPrimitivePositionMax = new Cartesian3();
/**
* Generates the {@link ModelDrawCommand} for each primitive in the model.
* If the model is used for classification, a {@link ClassificationModelDrawCommand}
@ -456,6 +457,30 @@ const scratchPrimitivePositionMax = new Cartesian3();
* @private
*/
ModelSceneGraph.prototype.buildDrawCommands = function (frameState) {
  // Build the render resources, then derive bounding volumes and
  // draw commands from them
  const renderResources = this.buildRenderResources(frameState);
  this.computeBoundingVolumes(renderResources);
  this.createDrawCommands(renderResources, frameState);
};
/**
* Generates the {@link ModelRenderResources} for the model.
*
* This will traverse the model, nodes and primitives of the scene graph,
* and perform the following tasks:
*
* - configure the pipeline stages by calling `configurePipeline`,
* `runtimeNode.configurePipeline`, and `runtimePrimitive.configurePipeline`
* - create the `ModelRenderResources`, `NodeRenderResources`, and
* `PrimitiveRenderResources`
* - Process the render resources with the respective pipelines
*
* @param {FrameState} frameState The current frame state. This is needed to
* allocate GPU resources as needed.
* @returns {ModelRenderResources} The model render resources
*
* @private
*/
ModelSceneGraph.prototype.buildRenderResources = function (frameState) {
const model = this._model;
const modelRenderResources = new ModelRenderResources(model);
@ -465,26 +490,12 @@ ModelSceneGraph.prototype.buildDrawCommands = function (frameState) {
this.configurePipeline(frameState);
const modelPipelineStages = this.modelPipelineStages;
let i, j, k;
for (i = 0; i < modelPipelineStages.length; i++) {
for (let i = 0; i < modelPipelineStages.length; i++) {
const modelPipelineStage = modelPipelineStages[i];
modelPipelineStage.process(modelRenderResources, model, frameState);
}
const modelPositionMin = Cartesian3.fromElements(
Number.MAX_VALUE,
Number.MAX_VALUE,
Number.MAX_VALUE,
scratchModelPositionMin,
);
const modelPositionMax = Cartesian3.fromElements(
-Number.MAX_VALUE,
-Number.MAX_VALUE,
-Number.MAX_VALUE,
scratchModelPositionMax,
);
for (i = 0; i < this._runtimeNodes.length; i++) {
for (let i = 0; i < this._runtimeNodes.length; i++) {
const runtimeNode = this._runtimeNodes[i];
// If a node in the model was unreachable from the scene graph, there will
@ -500,8 +511,9 @@ ModelSceneGraph.prototype.buildDrawCommands = function (frameState) {
modelRenderResources,
runtimeNode,
);
modelRenderResources.nodeRenderResources[i] = nodeRenderResources;
for (j = 0; j < nodePipelineStages.length; j++) {
for (let j = 0; j < nodePipelineStages.length; j++) {
const nodePipelineStage = nodePipelineStages[j];
nodePipelineStage.process(
@ -511,8 +523,7 @@ ModelSceneGraph.prototype.buildDrawCommands = function (frameState) {
);
}
const nodeTransform = runtimeNode.computedTransform;
for (j = 0; j < runtimeNode.runtimePrimitives.length; j++) {
for (let j = 0; j < runtimeNode.runtimePrimitives.length; j++) {
const runtimePrimitive = runtimeNode.runtimePrimitives[j];
runtimePrimitive.configurePipeline(frameState);
@ -522,16 +533,76 @@ ModelSceneGraph.prototype.buildDrawCommands = function (frameState) {
nodeRenderResources,
runtimePrimitive,
);
nodeRenderResources.primitiveRenderResources[j] =
primitiveRenderResources;
for (k = 0; k < primitivePipelineStages.length; k++) {
for (let k = 0; k < primitivePipelineStages.length; k++) {
const primitivePipelineStage = primitivePipelineStages[k];
primitivePipelineStage.process(
primitiveRenderResources,
runtimePrimitive.primitive,
frameState,
);
}
}
}
return modelRenderResources;
};
/**
* Computes the bounding volumes for the scene graph and the model.
*
* This will traverse the model, nodes and primitives of the scene graph,
* and compute the bounding volumes. Specifically, it will compute
*
* - this._boundingSphere
* - model._boundingSphere
*
* With the latter being modified as of
*
* - model._initialRadius = model._boundingSphere.radius;
* - model._boundingSphere.radius *= model._clampedScale;
*
* NOTE: This contains some bugs. See https://github.com/CesiumGS/cesium/issues/12108
*
* @param {ModelRenderResources} modelRenderResources The model render resources
*
* @private
*/
ModelSceneGraph.prototype.computeBoundingVolumes = function (
modelRenderResources,
) {
const model = this._model;
const modelPositionMin = Cartesian3.fromElements(
Number.MAX_VALUE,
Number.MAX_VALUE,
Number.MAX_VALUE,
scratchModelPositionMin,
);
const modelPositionMax = Cartesian3.fromElements(
-Number.MAX_VALUE,
-Number.MAX_VALUE,
-Number.MAX_VALUE,
scratchModelPositionMax,
);
for (let i = 0; i < this._runtimeNodes.length; i++) {
const runtimeNode = this._runtimeNodes[i];
// If a node in the model was unreachable from the scene graph, there will
// be no corresponding runtime node and therefore should be skipped.
if (!defined(runtimeNode)) {
continue;
}
const nodeRenderResources = modelRenderResources.nodeRenderResources[i];
const nodeTransform = runtimeNode.computedTransform;
for (let j = 0; j < runtimeNode.runtimePrimitives.length; j++) {
const runtimePrimitive = runtimeNode.runtimePrimitives[j];
const primitiveRenderResources =
nodeRenderResources.primitiveRenderResources[j];
runtimePrimitive.boundingSphere = BoundingSphere.clone(
primitiveRenderResources.boundingSphere,
@ -559,12 +630,6 @@ ModelSceneGraph.prototype.buildDrawCommands = function (frameState) {
primitivePositionMax,
modelPositionMax,
);
const drawCommand = ModelDrawCommands.buildModelDrawCommand(
primitiveRenderResources,
frameState,
);
runtimePrimitive.drawCommand = drawCommand;
}
}
@ -596,6 +661,47 @@ ModelSceneGraph.prototype.buildDrawCommands = function (frameState) {
model._boundingSphere.radius *= model._clampedScale;
};
/**
* Creates the draw commands for the primitives in the scene graph.
*
* This will traverse the model, nodes and primitives of the scene graph,
* and create the respective draw commands for the primitives, storing
* them as the `runtimePrimitive.drawCommand`, respectively.
*
* @param {ModelRenderResources} modelRenderResources The model render resources
*
* @private
*/
ModelSceneGraph.prototype.createDrawCommands = function (
modelRenderResources,
frameState,
) {
for (let i = 0; i < this._runtimeNodes.length; i++) {
const runtimeNode = this._runtimeNodes[i];
// If a node in the model was unreachable from the scene graph, there will
// be no corresponding runtime node and therefore should be skipped.
if (!defined(runtimeNode)) {
continue;
}
const nodeRenderResources = modelRenderResources.nodeRenderResources[i];
for (let j = 0; j < runtimeNode.runtimePrimitives.length; j++) {
const runtimePrimitive = runtimeNode.runtimePrimitives[j];
const primitiveRenderResources =
nodeRenderResources.primitiveRenderResources[j];
const drawCommand = ModelDrawCommands.buildModelDrawCommand(
primitiveRenderResources,
frameState,
);
runtimePrimitive.drawCommand = drawCommand;
}
}
};
/**
* Configure the model pipeline stages. If the pipeline needs to be re-run, call
* this method again to ensure the correct sequence of pipeline stages are

View File

@ -155,6 +155,16 @@ function NodeRenderResources(modelRenderResources, runtimeNode) {
* @private
*/
this.instanceCount = 0;
/**
* The PrimitiveRenderResources for the primitives of the node.
*
* @type {PrimitiveRenderResources[]}
* @default []
*
* @private
*/
this.primitiveRenderResources = [];
}
export default NodeRenderResources;

View File

@ -620,6 +620,16 @@ function Primitive() {
* @private
*/
this.outlineCoordinates = undefined;
/**
* If the model is part of a Model3DTileContent of a Cesium3DTileset that
* has 'imageryLayers', then this will represent the information that is
* required for draping the imagery over this primitive.
*
* @type {ModelPrimitiveImagery|undefined}
* @private
*/
this.modelPrimitiveImagery = undefined;
}
/**

View File

@ -45,6 +45,7 @@ import VoxelMetadataOrder from "./VoxelMetadataOrder.js";
* @see VoxelProvider
* @see Cesium3DTilesVoxelProvider
* @see VoxelShapeType
* @see {@link https://github.com/CesiumGS/cesium/tree/main/Documentation/CustomShaderGuide|Custom Shader Guide}
*
* @experimental This feature is not final and is subject to change without Cesium's standard deprecation policy.
*/
@ -338,6 +339,12 @@ function VoxelPrimitive(options) {
*/
this._transformPositionWorldToUv = new Matrix4();
/**
* @type {Matrix3}
* @private
*/
this._transformDirectionWorldToUv = new Matrix3();
/**
* @type {Matrix4}
* @private
@ -350,12 +357,6 @@ function VoxelPrimitive(options) {
*/
this._transformDirectionWorldToLocal = new Matrix3();
/**
* @type {Matrix3}
* @private
*/
this._transformNormalLocalToWorld = new Matrix3();
// Rendering
/**
* @type {boolean}
@ -442,8 +443,8 @@ function VoxelPrimitive(options) {
transformPositionViewToUv: new Matrix4(),
transformPositionUvToView: new Matrix4(),
transformDirectionViewToLocal: new Matrix3(),
transformNormalLocalToWorld: new Matrix3(),
cameraPositionUv: new Cartesian3(),
cameraDirectionUv: new Cartesian3(),
ndcSpaceAxisAlignedBoundingBox: new Cartesian4(),
clippingPlanesTexture: undefined,
clippingPlanesMatrix: new Matrix4(),
@ -1075,6 +1076,7 @@ Object.defineProperties(VoxelPrimitive.prototype, {
*
* @memberof VoxelPrimitive.prototype
* @type {CustomShader}
* @see {@link https://github.com/CesiumGS/cesium/tree/main/Documentation/CustomShaderGuide|Custom Shader Guide}
*/
customShader: {
get: function () {
@ -1135,10 +1137,6 @@ Object.defineProperties(VoxelPrimitive.prototype, {
const scratchIntersect = new Cartesian4();
const scratchNdcAabb = new Cartesian4();
const scratchScale = new Cartesian3();
const scratchLocalScale = new Cartesian3();
const scratchRotation = new Matrix3();
const scratchRotationAndLocalScale = new Matrix3();
const scratchTransformPositionWorldToLocal = new Matrix4();
const scratchTransformPositionLocalToWorld = new Matrix4();
const scratchTransformPositionLocalToProjection = new Matrix4();
@ -1306,16 +1304,20 @@ VoxelPrimitive.prototype.update = function (frameState) {
transformDirectionViewToWorld,
uniforms.transformDirectionViewToLocal,
);
uniforms.transformNormalLocalToWorld = Matrix3.clone(
this._transformNormalLocalToWorld,
uniforms.transformNormalLocalToWorld,
);
const cameraPositionWorld = frameState.camera.positionWC;
uniforms.cameraPositionUv = Matrix4.multiplyByPoint(
this._transformPositionWorldToUv,
cameraPositionWorld,
frameState.camera.positionWC,
uniforms.cameraPositionUv,
);
uniforms.cameraDirectionUv = Matrix3.multiplyByVector(
this._transformDirectionWorldToUv,
frameState.camera.directionWC,
uniforms.cameraDirectionUv,
);
uniforms.cameraDirectionUv = Cartesian3.normalize(
uniforms.cameraDirectionUv,
uniforms.cameraDirectionUv,
);
uniforms.stepSize = this._stepSizeMultiplier;
// Render the primitive
@ -1606,23 +1608,6 @@ function updateShapeAndTransforms(primitive, shape, provider) {
transformPositionLocalToWorld,
scratchTransformPositionWorldToLocal,
);
const rotation = Matrix4.getRotation(
transformPositionLocalToWorld,
scratchRotation,
);
// Note that inverse(rotation) is the same as transpose(rotation)
const scale = Matrix4.getScale(transformPositionLocalToWorld, scratchScale);
const maximumScaleComponent = Cartesian3.maximumComponent(scale);
const localScale = Cartesian3.divideByScalar(
scale,
maximumScaleComponent,
scratchLocalScale,
);
const rotationAndLocalScale = Matrix3.multiplyByScale(
rotation,
localScale,
scratchRotationAndLocalScale,
);
// Set member variables when the shape is dirty
primitive._transformPositionWorldToUv = Matrix4.multiplyTransformation(
@ -1630,6 +1615,10 @@ function updateShapeAndTransforms(primitive, shape, provider) {
transformPositionWorldToLocal,
primitive._transformPositionWorldToUv,
);
primitive._transformDirectionWorldToUv = Matrix4.getMatrix3(
primitive._transformPositionWorldToUv,
primitive._transformDirectionWorldToUv,
);
primitive._transformPositionUvToWorld = Matrix4.multiplyTransformation(
transformPositionLocalToWorld,
transformPositionUvToLocal,
@ -1639,10 +1628,6 @@ function updateShapeAndTransforms(primitive, shape, provider) {
transformPositionWorldToLocal,
primitive._transformDirectionWorldToLocal,
);
primitive._transformNormalLocalToWorld = Matrix3.inverseTranspose(
rotationAndLocalScale,
primitive._transformNormalLocalToWorld,
);
return true;
}

View File

@ -140,13 +140,13 @@ function VoxelRenderResources(primitive) {
}
shaderBuilder.addFragmentLines([IntersectClippingPlanes]);
}
shaderBuilder.addFragmentLines([IntersectDepth]);
if (primitive._depthTest) {
shaderBuilder.addDefine(
"DEPTH_TEST",
undefined,
ShaderDestination.FRAGMENT,
);
shaderBuilder.addFragmentLines([IntersectDepth]);
}
if (shapeType === "BOX") {

View File

@ -38,7 +38,7 @@ function processVoxelProperties(renderResources, primitive) {
// PropertyStatistics structs
for (let i = 0; i < attributeLength; i++) {
const name = names[i];
const type = types[i];
const glslType = getGlslType(types[i]);
const propertyStatisticsStructId = `PropertyStatistics_${name}`;
const propertyStatisticsStructName = `PropertyStatistics_${name}`;
shaderBuilder.addStruct(
@ -46,18 +46,17 @@ function processVoxelProperties(renderResources, primitive) {
propertyStatisticsStructName,
ShaderDestination.FRAGMENT,
);
const glslType = getGlslType(type);
shaderBuilder.addStructField(propertyStatisticsStructId, glslType, "min");
shaderBuilder.addStructField(propertyStatisticsStructId, glslType, "max");
}
// Statistics struct
const statisticsStructId = "Statistics";
const statisticsStructName = "Statistics";
const statisticsFieldName = "statistics";
// MetadataStatistics struct
const metadataStatisticsStructId = "MetadataStatistics";
const metadataStatisticsStructName = "MetadataStatistics";
const metadataStatisticsFieldName = "metadataStatistics";
shaderBuilder.addStruct(
statisticsStructId,
statisticsStructName,
metadataStatisticsStructId,
metadataStatisticsStructName,
ShaderDestination.FRAGMENT,
);
for (let i = 0; i < attributeLength; i++) {
@ -65,7 +64,7 @@ function processVoxelProperties(renderResources, primitive) {
const propertyStructName = `PropertyStatistics_${name}`;
const propertyFieldName = name;
shaderBuilder.addStructField(
statisticsStructId,
metadataStatisticsStructId,
propertyStructName,
propertyFieldName,
);
@ -80,51 +79,22 @@ function processVoxelProperties(renderResources, primitive) {
metadataStructName,
ShaderDestination.FRAGMENT,
);
shaderBuilder.addStructField(
metadataStructId,
statisticsStructName,
statisticsFieldName,
);
for (let i = 0; i < attributeLength; i++) {
const name = names[i];
const type = types[i];
const glslType = getGlslType(type);
shaderBuilder.addStructField(metadataStructId, glslType, name);
const glslType = getGlslType(types[i]);
shaderBuilder.addStructField(metadataStructId, glslType, names[i]);
}
// VoxelProperty structs
for (let i = 0; i < attributeLength; i++) {
const name = names[i];
const type = types[i];
const glslType = getGlslPartialDerivativeType(type);
const voxelPropertyStructId = `VoxelProperty_${name}`;
const voxelPropertyStructName = `VoxelProperty_${name}`;
shaderBuilder.addStruct(
voxelPropertyStructId,
voxelPropertyStructName,
ShaderDestination.FRAGMENT,
);
shaderBuilder.addStructField(
voxelPropertyStructId,
glslType,
"partialDerivativeLocal",
);
shaderBuilder.addStructField(
voxelPropertyStructId,
glslType,
"partialDerivativeWorld",
);
shaderBuilder.addStructField(
voxelPropertyStructId,
glslType,
"partialDerivativeView",
);
shaderBuilder.addStructField(
voxelPropertyStructId,
glslType,
"partialDerivativeValid",
);
}
// Attributes struct
const attributesStructId = "Attributes";
const attributesStructName = "Attributes";
const attributesFieldName = "attributes";
shaderBuilder.addStruct(
attributesStructId,
attributesStructName,
ShaderDestination.FRAGMENT,
);
shaderBuilder.addStructField(attributesStructId, "vec3", "positionEC");
shaderBuilder.addStructField(attributesStructId, "vec3", "normalEC");
// Voxel struct
const voxelStructId = "Voxel";
@ -135,22 +105,12 @@ function processVoxelProperties(renderResources, primitive) {
voxelStructName,
ShaderDestination.FRAGMENT,
);
for (let i = 0; i < attributeLength; i++) {
const name = names[i];
const voxelPropertyStructName = `VoxelProperty_${name}`;
shaderBuilder.addStructField(voxelStructId, voxelPropertyStructName, name);
}
shaderBuilder.addStructField(voxelStructId, "vec3", "positionEC");
shaderBuilder.addStructField(voxelStructId, "vec3", "positionUv");
shaderBuilder.addStructField(voxelStructId, "vec3", "positionShapeUv");
shaderBuilder.addStructField(voxelStructId, "vec3", "positionUvLocal");
shaderBuilder.addStructField(voxelStructId, "vec3", "viewDirUv");
shaderBuilder.addStructField(voxelStructId, "vec3", "viewDirWorld");
shaderBuilder.addStructField(voxelStructId, "vec3", "surfaceNormal");
shaderBuilder.addStructField(voxelStructId, "float", "travelDistance");
shaderBuilder.addStructField(voxelStructId, "int", "stepCount");
shaderBuilder.addStructField(voxelStructId, "int", "tileIndex");
shaderBuilder.addStructField(voxelStructId, "int", "sampleIndex");
shaderBuilder.addStructField(voxelStructId, "float", "distanceToDepthBuffer");
// FragmentInput struct
const fragmentInputStructId = "FragmentInput";
@ -160,11 +120,21 @@ function processVoxelProperties(renderResources, primitive) {
fragmentInputStructName,
ShaderDestination.FRAGMENT,
);
shaderBuilder.addStructField(
fragmentInputStructId,
metadataStatisticsStructName,
metadataStatisticsFieldName,
);
shaderBuilder.addStructField(
fragmentInputStructId,
metadataStructName,
metadataFieldName,
);
shaderBuilder.addStructField(
fragmentInputStructId,
attributesStructName,
attributesFieldName,
);
shaderBuilder.addStructField(
fragmentInputStructId,
voxelStructName,
@ -181,10 +151,8 @@ function processVoxelProperties(renderResources, primitive) {
ShaderDestination.FRAGMENT,
);
for (let i = 0; i < attributeLength; i++) {
const name = names[i];
const type = types[i];
const glslType = getGlslType(type);
shaderBuilder.addStructField(propertiesStructId, glslType, name);
const glslType = getGlslType(types[i]);
shaderBuilder.addStructField(propertiesStructId, glslType, names[i]);
}
// Fragment shader functions
@ -201,12 +169,9 @@ function processVoxelProperties(renderResources, primitive) {
`${propertiesStructName} ${propertiesFieldName};`,
]);
for (let i = 0; i < attributeLength; i++) {
const name = names[i];
const type = types[i];
const componentType = componentTypes[i];
const glslType = getGlslType(type, componentType);
const glslType = getGlslType(types[i], componentTypes[i]);
shaderBuilder.addFunctionLines(functionId, [
`${propertiesFieldName}.${name} = ${glslType}(0.0);`,
`${propertiesFieldName}.${names[i]} = ${glslType}(0.0);`,
]);
}
shaderBuilder.addFunctionLines(functionId, [
@ -248,9 +213,8 @@ function processVoxelProperties(renderResources, primitive) {
`${propertiesStructName} scaledProperties = ${propertiesFieldName};`,
]);
for (let i = 0; i < attributeLength; i++) {
const name = names[i];
shaderBuilder.addFunctionLines(functionId, [
`scaledProperties.${name} *= scale;`,
`scaledProperties.${names[i]} *= scale;`,
]);
}
shaderBuilder.addFunctionLines(functionId, [`return scaledProperties;`]);
@ -299,7 +263,7 @@ function processVoxelProperties(renderResources, primitive) {
const functionId = "setStatistics";
shaderBuilder.addFunction(
functionId,
`void setStatistics(inout ${statisticsStructName} ${statisticsFieldName})`,
`void setStatistics(inout ${metadataStatisticsStructName} ${metadataStatisticsFieldName})`,
ShaderDestination.FRAGMENT,
);
for (let i = 0; i < attributeLength; i++) {
@ -314,10 +278,10 @@ function processVoxelProperties(renderResources, primitive) {
continue;
}
shaderBuilder.addFunctionLines(functionId, [
`${statisticsFieldName}.${name}.min${glslField} = ${getGlslNumberAsFloat(
`${metadataStatisticsFieldName}.${name}.min${glslField} = ${getGlslNumberAsFloat(
minimumValue,
)};`,
`${statisticsFieldName}.${name}.max${glslField} = ${getGlslNumberAsFloat(
`${metadataStatisticsFieldName}.${name}.max${glslField} = ${getGlslNumberAsFloat(
maximumValue,
)};`,
]);
@ -337,12 +301,11 @@ function processVoxelProperties(renderResources, primitive) {
`${propertiesStructName} ${propertiesFieldName};`,
]);
for (let i = 0; i < attributeLength; i++) {
const name = names[i];
const type = types[i];
const componentType = componentTypes[i];
const glslTextureSwizzle = getGlslTextureSwizzle(type, componentType);
shaderBuilder.addFunctionLines(functionId, [
`properties.${name} = texture(u_megatextureTextures[${i}], texcoord)${glslTextureSwizzle};`,
`properties.${names[i]} = texture(u_megatextureTextures[${i}], texcoord)${glslTextureSwizzle};`,
]);
}
shaderBuilder.addFunctionLines(functionId, [
@ -395,28 +358,6 @@ function getGlslTextureSwizzle(type) {
}
}
/**
* Gets the GLSL type of the partial derivative of {@link MetadataType}.
*
* @function
*
* @param {MetadataType} type The {@link MetadataType}.
* @returns {string} The GLSL type.
*
* @private
*/
function getGlslPartialDerivativeType(type) {
if (type === MetadataType.SCALAR) {
return "vec3";
} else if (type === MetadataType.VEC2) {
return "mat2";
} else if (type === MetadataType.VEC3) {
return "mat3";
} else if (type === MetadataType.VEC4) {
return "mat4";
}
}
/**
* GLSL needs to have `.0` at the end of whole number floats or else it's
* treated like an integer.

View File

@ -76,7 +76,6 @@ vec4 czm_screenToEyeCoordinates(vec2 screenCoordinateXY, float depthOrLogDepth)
vec4 screenCoord = vec4(screenCoordinateXY, far * (1.0 - near / depthFromCamera) / (far - near), 1.0);
vec4 eyeCoordinate = czm_screenToEyeCoordinates(screenCoord);
eyeCoordinate.w = 1.0 / depthFromCamera; // Better precision
return eyeCoordinate;
#else
vec4 screenCoord = vec4(screenCoordinateXY, depthOrLogDepth, 1.0);
vec4 eyeCoordinate = czm_screenToEyeCoordinates(screenCoord);

View File

@ -87,25 +87,47 @@ flat in int v_regionIndex;
uniform float u_minimumBrightness;
#endif
// Based on colorCorrect
// The colorCorrect flag can only be true when tileProvider.hue/saturation/brightnessShift
// are nonzero AND when (applyFog || showGroundAtmosphere) in the tile provider
// - The tileProvider.hue/saturation/brightnessShift are just passed through
// from the Globe hue/saturation/brightness, like atmosphereBrightnessShift
// - The applyFog depends on enableFog, and some tile distance from the viewer
// - The showGroundAtmosphere is a flag that is passed through from the Globe,
// and is true by default when the ellipsoid is WGS84
#ifdef COLOR_CORRECT
uniform vec3 u_hsbShift; // Hue, saturation, brightness
#endif
// Based on highlightFillTile
// This is set for terrain tiles when they are "fill" tiles, and
// the terrainProvider.fillHighlightColor was set to a value with
// nonzero alpha
#ifdef HIGHLIGHT_FILL_TILE
uniform vec4 u_fillHighlightColor;
#endif
// Based on translucent
// This is set depending on the GlobeTranslucencyState
#ifdef TRANSLUCENT
uniform vec4 u_frontFaceAlphaByDistance;
uniform vec4 u_backFaceAlphaByDistance;
uniform vec4 u_translucencyRectangle;
#endif
// Based on showUndergroundColor
// This is set when GlobeSurfaceTileProvider.isUndergroundVisible
// returns true, AND the tileProvider.undergroundColor had a value with
// nonzero alpha, and the tileProvider.undergroundColorAlphaByDistance
// was in the right range
#ifdef UNDERGROUND_COLOR
uniform vec4 u_undergroundColor;
uniform vec4 u_undergroundColorAlphaByDistance;
#endif
// Based on enableLighting && hasVertexNormals
// The enableLighting flag is passed in directly from the Globe.
// The hasVertexNormals flag is from the tileProvider
#ifdef ENABLE_VERTEX_LIGHTING
uniform float u_lambertDiffuseMultiplier;
uniform float u_vertexShadowDarkness;

View File

@ -444,6 +444,10 @@ void materialStage(inout czm_modelMaterial material, ProcessedAttributes attribu
baseColorWithAlpha = u_baseColorFactor;
#endif
#ifdef HAS_IMAGERY
baseColorWithAlpha = blendBaseColorWithImagery(baseColorWithAlpha);
#endif // HAS_IMAGERY
#ifdef HAS_POINT_CLOUD_COLOR_STYLE
baseColorWithAlpha = v_pointCloudColor;
#elif defined(HAS_COLOR_0)

View File

@ -14,6 +14,10 @@ void main()
ProcessedAttributes attributes;
initializeAttributes(attributes);
#ifdef HAS_IMAGERY
initializeImageryAttributes();
#endif
// Dequantize the quantized ones and add them to the
// attributes struct.
#ifdef USE_DEQUANTIZATION

View File

@ -9,15 +9,22 @@ uniform mat4 u_transformPositionViewToUv;
void intersectDepth(in vec2 screenCoord, in Ray ray, inout Intersections ix) {
float logDepthOrDepth = czm_unpackDepth(texture(czm_globeDepthTexture, screenCoord));
float entry;
float exit;
if (logDepthOrDepth != 0.0) {
// Calculate how far the ray must travel before it hits the depth buffer.
vec4 eyeCoordinateDepth = czm_screenToEyeCoordinates(screenCoord, logDepthOrDepth);
eyeCoordinateDepth /= eyeCoordinateDepth.w;
vec3 depthPositionUv = vec3(u_transformPositionViewToUv * eyeCoordinateDepth);
float t = dot(depthPositionUv - ray.pos, ray.dir);
setIntersectionPair(ix, DEPTH_INTERSECTION_INDEX, vec2(t, +INF_HIT));
entry = dot(depthPositionUv - ray.pos, ray.dir);
exit = +INF_HIT;
} else {
// There's no depth at this location.
setIntersectionPair(ix, DEPTH_INTERSECTION_INDEX, vec2(NO_HIT));
entry = NO_HIT;
exit = NO_HIT;
}
ix.distanceToDepthBuffer = entry;
#if defined(DEPTH_TEST)
setIntersectionPair(ix, DEPTH_INTERSECTION_INDEX, vec2(entry, exit));
#endif
}

View File

@ -28,9 +28,7 @@ RayShapeIntersection intersectScene(in vec2 screenCoord, in Ray ray, out Interse
#endif
// Depth
#if defined(DEPTH_TEST)
intersectDepth(screenCoord, ray, ix);
#endif
intersectDepth(screenCoord, ray, ix);
// Find the first intersection that's in front of the ray
#if (INTERSECTION_COUNT > 1)

View File

@ -56,6 +56,7 @@ struct Intersections {
// INTERSECTION_COUNT is the number of ray-*shape* (volume) intersections,
// so we need twice as many to track ray-*surface* intersections
vec4 intersections[INTERSECTION_COUNT * 2];
float distanceToDepthBuffer;
#if (INTERSECTION_COUNT > 1)
// Maintain state for future nextIntersection calls

View File

@ -15,8 +15,10 @@
#define ALPHA_ACCUM_MAX 0.98 // Must be > 0.0 and <= 1.0
#endif
uniform mat4 u_transformPositionUvToView;
uniform mat3 u_transformDirectionViewToLocal;
uniform vec3 u_cameraPositionUv;
uniform vec3 u_cameraDirectionUv;
uniform float u_stepSize;
#if defined(PICKING)
@ -111,26 +113,38 @@ int getSampleIndex(in SampleData sampleData) {
return sampleIndex.x + u_inputDimensions.x * (sampleIndex.y + u_inputDimensions.y * sampleIndex.z);
}
void main()
{
vec4 fragCoord = gl_FragCoord;
vec2 screenCoord = (fragCoord.xy - czm_viewport.xy) / czm_viewport.zw; // [0,1]
vec3 eyeDirection = normalize(czm_windowToEyeCoordinates(fragCoord).xyz);
vec3 viewDirWorld = normalize(czm_inverseViewRotation * eyeDirection); // normalize again just in case
vec3 viewDirUv = normalize(u_transformDirectionViewToLocal * eyeDirection); // normalize again just in case
vec3 viewPosUv = u_cameraPositionUv;
/**
* Compute the view ray at the current fragment, in the local UV coordinates of the shape.
*/
Ray getViewRayUv() {
vec4 eyeCoordinates = czm_windowToEyeCoordinates(gl_FragCoord);
vec3 viewDirUv;
vec3 viewPosUv;
if (czm_orthographicIn3D == 1.0) {
eyeCoordinates.z = 0.0;
viewPosUv = (u_transformPositionViewToUv * eyeCoordinates).xyz;
viewDirUv = normalize(u_cameraDirectionUv);
} else {
viewPosUv = u_cameraPositionUv;
viewDirUv = normalize(u_transformDirectionViewToLocal * eyeCoordinates.xyz);
}
#if defined(SHAPE_ELLIPSOID)
// viewDirUv has been scaled to a space where the ellipsoid is a sphere.
// Undo this scaling to get the raw direction.
vec3 rawDir = viewDirUv * u_ellipsoidRadiiUv;
Ray viewRayUv = Ray(viewPosUv, viewDirUv, rawDir);
return Ray(viewPosUv, viewDirUv, rawDir);
#else
Ray viewRayUv = Ray(viewPosUv, viewDirUv, viewDirUv);
return Ray(viewPosUv, viewDirUv, viewDirUv);
#endif
}
void main()
{
Ray viewRayUv = getViewRayUv();
Intersections ix;
vec2 screenCoord = (gl_FragCoord.xy - czm_viewport.xy) / czm_viewport.zw; // [0,1]
RayShapeIntersection shapeIntersection = intersectScene(screenCoord, viewRayUv, ix);
// Exit early if the scene was completely missed.
if (shapeIntersection.entry.w == NO_HIT) {
discard;
@ -138,7 +152,7 @@ void main()
float currentT = shapeIntersection.entry.w;
float endT = shapeIntersection.exit.w;
vec3 positionUv = viewPosUv + currentT * viewDirUv;
vec3 positionUv = viewRayUv.pos + currentT * viewRayUv.dir;
PointJacobianT pointJacobian = convertUvToShapeUvSpaceDerivative(positionUv);
// Traverse the tree from the start position
@ -150,14 +164,15 @@ void main()
#if defined(JITTER)
float noise = hash(screenCoord); // [0,1]
currentT += noise * step.w;
positionUv += noise * step.w * viewDirUv;
positionUv += noise * step.w * viewRayUv.dir;
#endif
FragmentInput fragmentInput;
#if defined(STATISTICS)
setStatistics(fragmentInput.metadata.statistics);
setStatistics(fragmentInput.metadataStatistics);
#endif
czm_modelMaterial materialOutput;
vec4 colorAccum = vec4(0.0);
for (int stepCount = 0; stepCount < STEP_COUNT_MAX; ++stepCount) {
@ -166,19 +181,19 @@ void main()
// Prepare the custom shader inputs
copyPropertiesToMetadata(properties, fragmentInput.metadata);
fragmentInput.voxel.positionUv = positionUv;
fragmentInput.voxel.positionShapeUv = pointJacobian.point;
fragmentInput.voxel.positionUvLocal = sampleDatas[0].tileUv;
fragmentInput.voxel.viewDirUv = viewDirUv;
fragmentInput.voxel.viewDirWorld = viewDirWorld;
fragmentInput.voxel.surfaceNormal = step.xyz;
fragmentInput.attributes.positionEC = vec3(u_transformPositionUvToView * vec4(positionUv, 1.0));
fragmentInput.attributes.normalEC = normalize(czm_normal * step.xyz);
fragmentInput.voxel.viewDirUv = viewRayUv.dir;
fragmentInput.voxel.travelDistance = step.w;
fragmentInput.voxel.stepCount = stepCount;
fragmentInput.voxel.tileIndex = sampleDatas[0].megatextureIndex;
fragmentInput.voxel.sampleIndex = getSampleIndex(sampleDatas[0]);
fragmentInput.voxel.distanceToDepthBuffer = ix.distanceToDepthBuffer - currentT;
// Run the custom shader
czm_modelMaterial materialOutput;
fragmentMain(fragmentInput, materialOutput);
// Sanitize the custom shader output
@ -203,8 +218,6 @@ void main()
// Keep raymarching
currentT += step.w;
positionUv = viewPosUv + currentT * viewDirUv;
// Check if there's more intersections.
if (currentT > endT) {
#if (INTERSECTION_COUNT == 1)
@ -217,10 +230,10 @@ void main()
// Found another intersection. Resume raymarching there
currentT = shapeIntersection.entry.w;
endT = shapeIntersection.exit.w;
positionUv = viewPosUv + currentT * viewDirUv;
}
#endif
}
positionUv = viewRayUv.pos + currentT * viewRayUv.dir;
// Traverse the tree from the current ray position.
// This is similar to traverseOctreeFromBeginning but is faster when the ray is in the same tile as the previous step.

View File

@ -1,6 +1,18 @@
import { PixelDatatype, PixelFormat } from "../../index.js";
import { PixelDatatype, PixelFormat, WebGLConstants } from "../../index.js";
import createContext from "../../../../Specs/createContext.js";
describe("Core/PixelFormat", function () {
let context;
beforeAll(function () {
context = createContext();
});
afterAll(function () {
context.destroyForSpecs();
});
it("flipY works", function () {
const width = 1;
const height = 2;
@ -34,4 +46,103 @@ describe("Core/PixelFormat", function () {
);
expect(flipped).toBe(dataBuffer);
});
it("returns the correct internal formats for PixelDatatype.FLOAT", function () {
if (!context.webgl2) {
return;
}
const internalFormatR32F = PixelFormat.toInternalFormat(
PixelFormat.RED,
PixelDatatype.FLOAT,
context,
);
expect(internalFormatR32F).toBe(WebGLConstants.R32F);
const internalFormatRG32F = PixelFormat.toInternalFormat(
PixelFormat.RG,
PixelDatatype.FLOAT,
context,
);
expect(internalFormatRG32F).toBe(WebGLConstants.RG32F);
const internalFormatRGB32F = PixelFormat.toInternalFormat(
PixelFormat.RGB,
PixelDatatype.FLOAT,
context,
);
expect(internalFormatRGB32F).toBe(WebGLConstants.RGB32F);
const internalFormatRGBA32F = PixelFormat.toInternalFormat(
PixelFormat.RGBA,
PixelDatatype.FLOAT,
context,
);
expect(internalFormatRGBA32F).toBe(WebGLConstants.RGBA32F);
});
it("returns the correct internal formats for PixelDatatype.HALF_FLOAT", function () {
if (!context.webgl2) {
return;
}
const internalFormatR16F = PixelFormat.toInternalFormat(
PixelFormat.RED,
PixelDatatype.HALF_FLOAT,
context,
);
expect(internalFormatR16F).toBe(WebGLConstants.R16F);
const internalFormatRG16F = PixelFormat.toInternalFormat(
PixelFormat.RG,
PixelDatatype.HALF_FLOAT,
context,
);
expect(internalFormatRG16F).toBe(WebGLConstants.RG16F);
const internalFormatRGB16F = PixelFormat.toInternalFormat(
PixelFormat.RGB,
PixelDatatype.HALF_FLOAT,
context,
);
expect(internalFormatRGB16F).toBe(WebGLConstants.RGB16F);
const internalFormatRGBA16F = PixelFormat.toInternalFormat(
PixelFormat.RGBA,
PixelDatatype.HALF_FLOAT,
context,
);
expect(internalFormatRGBA16F).toBe(WebGLConstants.RGBA16F);
});
it("returns the correct internal formats for PixelDatatype.UNSIGNED_BYTE", function () {
if (!context.webgl2) {
return;
}
const internalFormatR8 = PixelFormat.toInternalFormat(
PixelFormat.RED,
PixelDatatype.UNSIGNED_BYTE,
context,
);
expect(internalFormatR8).toBe(WebGLConstants.R8);
const internalFormatRG8 = PixelFormat.toInternalFormat(
PixelFormat.RG,
PixelDatatype.UNSIGNED_BYTE,
context,
);
expect(internalFormatRG8).toBe(WebGLConstants.RG8);
const internalFormatRGB8 = PixelFormat.toInternalFormat(
PixelFormat.RGB,
PixelDatatype.UNSIGNED_BYTE,
context,
);
expect(internalFormatRGB8).toBe(WebGLConstants.RGB8);
const internalFormatRGBA8 = PixelFormat.toInternalFormat(
PixelFormat.RGBA,
PixelDatatype.UNSIGNED_BYTE,
context,
);
expect(internalFormatRGBA8).toBe(WebGLConstants.RGBA8);
});
});

View File

@ -0,0 +1,228 @@
import {
Color,
PixelFormat,
ClearCommand,
PixelDatatype,
Sampler,
Texture3D,
TextureMagnificationFilter,
TextureMinificationFilter,
} from "../../index.js";
import createContext from "../../../../Specs/createContext.js";
describe("Renderer/Texture3D", function () {
{
let context;
let source;
const size = 2;
const data = new Uint8Array(size * size * size * 4);
data.fill(255);
const sampler = new Sampler({
minificationFilter: TextureMinificationFilter.LINEAR,
magnificationFilter: TextureMagnificationFilter.LINEAR,
});
const fs = `
precision highp sampler3D;
uniform sampler3D u_texture;
void main() { out_FragColor = texture(u_texture, vec3(0.0)); }
`;
let texture;
const uniformMap = {
u_texture: function () {
return texture;
},
};
beforeAll(function () {
context = createContext();
});
afterAll(function () {
if (context) {
context.destroyForSpecs();
}
});
beforeEach(function () {
source = {
arrayBufferView: data,
width: size,
height: size,
depth: size,
};
});
afterEach(function () {
texture = texture && texture.destroy();
});
it("has expected default values for pixel format and datatype", function () {
if (!context.webgl2) {
return;
}
texture = new Texture3D({
context: context,
source: source,
sampler: sampler,
});
expect(texture.id).toBeDefined();
expect(texture.pixelFormat).toEqual(PixelFormat.RGBA);
expect(texture.pixelDatatype).toEqual(PixelDatatype.UNSIGNED_BYTE);
});
it("can create a texture from the arrayBuffer", function () {
if (!context.webgl2) {
return;
}
const command = new ClearCommand({
color: Color.RED,
});
command.execute(context);
texture = new Texture3D({
context: context,
source: source,
sampler: sampler,
});
expect(texture.width).toEqual(size);
expect(texture.height).toEqual(size);
expect(texture.depth).toEqual(size);
expect(texture.sizeInBytes).toEqual(
size * size * size * PixelFormat.componentsLength(texture.pixelFormat),
);
command.color = Color.WHITE;
command.execute(context);
expect(context).toReadPixels([255, 255, 255, 255]);
expect({
context: context,
fragmentShader: fs,
uniformMap: uniformMap,
}).contextToRender([255, 255, 255, 255]);
});
function expectTextureByteSize(
width,
height,
depth,
pixelFormat,
pixelDatatype,
expectedSize,
) {
texture = new Texture3D({
context: context,
width: width,
height: height,
depth: depth,
pixelFormat: pixelFormat,
pixelDatatype: pixelDatatype,
});
expect(texture.sizeInBytes).toBe(expectedSize);
texture = texture && texture.destroy();
}
it("can get the size in bytes of a texture", function () {
if (!context.webgl2) {
return;
}
// Depth textures
if (context.depthTexture) {
expectTextureByteSize(
16,
16,
16,
PixelFormat.DEPTH_COMPONENT,
PixelDatatype.UNSIGNED_SHORT,
16 * 16 * 16 * 2,
);
expectTextureByteSize(
16,
16,
16,
PixelFormat.DEPTH_COMPONENT,
PixelDatatype.UNSIGNED_INT,
16 * 16 * 16 * 4,
);
expectTextureByteSize(
16,
16,
16,
PixelFormat.DEPTH_STENCIL,
PixelDatatype.UNSIGNED_INT_24_8,
16 * 16 * 16 * 4,
);
}
// Uncompressed formats
expectTextureByteSize(
16,
16,
16,
PixelFormat.ALPHA,
PixelDatatype.UNSIGNED_BYTE,
16 * 16 * 16,
);
expectTextureByteSize(
16,
16,
16,
PixelFormat.RGB,
PixelDatatype.UNSIGNED_BYTE,
16 * 16 * 16 * 3,
);
expectTextureByteSize(
16,
16,
16,
PixelFormat.RGBA,
PixelDatatype.UNSIGNED_BYTE,
16 * 16 * 16 * 4,
);
expectTextureByteSize(
16,
16,
16,
PixelFormat.LUMINANCE,
PixelDatatype.UNSIGNED_BYTE,
16 * 16 * 16,
);
expectTextureByteSize(
16,
16,
16,
PixelFormat.LUMINANCE_ALPHA,
PixelDatatype.UNSIGNED_BYTE,
16 * 16 * 16 * 2,
);
});
it("can be destroyed", function () {
if (!context.webgl2) {
return;
}
const t = new Texture3D({
context: context,
source: source,
pixelFormat: PixelFormat.RGBA,
});
expect(t.isDestroyed()).toEqual(false);
t.destroy();
expect(t.isDestroyed()).toEqual(true);
});
it("throws when creating a texture without a options", function () {
if (!context.webgl2) {
return;
}
expect(function () {
texture = new Texture3D();
}).toThrowDeveloperError();
});
}
});

View File

@ -0,0 +1,86 @@
import {
Rectangle,
MappedPositions,
Ellipsoid,
Cartographic,
} from "../../../index.js";
describe("Scene/Model/MappedPositions", function () {
it("constructor throws without cartographicPositions", function () {
const cartographicPositions = undefined;
const numPositions = 4;
const cartographicBoundingRectangle = new Rectangle(0.0, 0.0, 0.1, 0.1);
const ellipsoid = Ellipsoid.WGS84;
expect(function () {
// eslint-disable-next-line no-new
new MappedPositions(
cartographicPositions,
numPositions,
cartographicBoundingRectangle,
ellipsoid,
);
}).toThrowDeveloperError();
});
it("constructor throws with invalid numPositions", function () {
const cartographicPositions = [
new Cartographic(0.0, 0.0),
new Cartographic(0.1, 0.0),
new Cartographic(0.0, 0.1),
new Cartographic(0.1, 0.1),
];
const numPositions = -1;
const cartographicBoundingRectangle = new Rectangle(0.0, 0.0, 0.1, 0.1);
const ellipsoid = Ellipsoid.WGS84;
expect(function () {
// eslint-disable-next-line no-new
new MappedPositions(
cartographicPositions,
numPositions,
cartographicBoundingRectangle,
ellipsoid,
);
}).toThrowDeveloperError();
});
it("constructor throws without cartographicBoundingRectangle", function () {
const cartographicPositions = [
new Cartographic(0.0, 0.0),
new Cartographic(0.1, 0.0),
new Cartographic(0.0, 0.1),
new Cartographic(0.1, 0.1),
];
const numPositions = 4;
const cartographicBoundingRectangle = undefined;
const ellipsoid = Ellipsoid.WGS84;
expect(function () {
// eslint-disable-next-line no-new
new MappedPositions(
cartographicPositions,
numPositions,
cartographicBoundingRectangle,
ellipsoid,
);
}).toThrowDeveloperError();
});
it("constructor throws without ellipsoid", function () {
const cartographicPositions = [
new Cartographic(0.0, 0.0),
new Cartographic(0.1, 0.0),
new Cartographic(0.0, 0.1),
new Cartographic(0.1, 0.1),
];
const numPositions = 4;
const cartographicBoundingRectangle = new Rectangle(0.0, 0.0, 0.1, 0.1);
const ellipsoid = undefined;
expect(function () {
// eslint-disable-next-line no-new
new MappedPositions(
cartographicPositions,
numPositions,
cartographicBoundingRectangle,
ellipsoid,
);
}).toThrowDeveloperError();
});
});

View File

@ -0,0 +1,667 @@
import {
Rectangle,
MappedPositions,
Ellipsoid,
Matrix4,
Cartographic,
ModelImageryMapping,
Cartesian3,
BoundingRectangle,
Math as CesiumMath,
Cartesian2,
WebMercatorProjection,
AttributeType,
VertexAttributeSemantic,
} from "../../../index.js";
describe("Scene/Model/ModelImageryMapping", function () {
it("createTextureCoordinatesForMappedPositions throws without mappedPositions", function () {
const mappedPositions = undefined;
const projection = new WebMercatorProjection();
expect(function () {
ModelImageryMapping.createTextureCoordinatesForMappedPositions(
mappedPositions,
projection,
);
}).toThrowDeveloperError();
});
it("createTextureCoordinatesForMappedPositions throws without projection", function () {
const cartographicPositions = [
new Cartographic(0.0, 0.0),
new Cartographic(0.1, 0.0),
new Cartographic(0.0, 0.1),
new Cartographic(0.1, 0.1),
];
const numPositions = 4;
const cartographicBoundingRectangle = new Rectangle(0.0, 0.0, 0.1, 0.1);
const ellipsoid = Ellipsoid.WGS84;
const mappedPositions = new MappedPositions(
cartographicPositions,
numPositions,
cartographicBoundingRectangle,
ellipsoid,
);
const projection = undefined;
expect(function () {
ModelImageryMapping.createTextureCoordinatesForMappedPositions(
mappedPositions,
projection,
);
}).toThrowDeveloperError();
});
it("createTextureCoordinatesAttributeForMappedPositions throws without mappedPositions", function () {
const mappedPositions = undefined;
const projection = new WebMercatorProjection();
expect(function () {
ModelImageryMapping.createTextureCoordinatesAttributeForMappedPositions(
mappedPositions,
projection,
);
}).toThrowDeveloperError();
});
it("createTextureCoordinatesAttributeForMappedPositions throws without projection", function () {
const cartographicPositions = [
new Cartographic(0.0, 0.0),
new Cartographic(0.1, 0.0),
new Cartographic(0.0, 0.1),
new Cartographic(0.1, 0.1),
];
const numPositions = 4;
const cartographicBoundingRectangle = new Rectangle(0.0, 0.0, 0.1, 0.1);
const ellipsoid = Ellipsoid.WGS84;
const mappedPositions = new MappedPositions(
cartographicPositions,
numPositions,
cartographicBoundingRectangle,
ellipsoid,
);
const projection = undefined;
expect(function () {
ModelImageryMapping.createTextureCoordinatesAttributeForMappedPositions(
mappedPositions,
projection,
);
}).toThrowDeveloperError();
});
it("createTextureCoordinatesAttributeForMappedPositions throws without projection", function () {
const cartographicPositions = [
new Cartographic(0.0, 0.0),
new Cartographic(0.1, 0.0),
new Cartographic(0.0, 0.1),
new Cartographic(0.1, 0.1),
];
const numPositions = 4;
const cartographicBoundingRectangle = new Rectangle(0.0, 0.0, 0.1, 0.1);
const ellipsoid = Ellipsoid.WGS84;
const mappedPositions = new MappedPositions(
cartographicPositions,
numPositions,
cartographicBoundingRectangle,
ellipsoid,
);
const projection = new WebMercatorProjection();
const attribute =
ModelImageryMapping.createTextureCoordinatesAttributeForMappedPositions(
mappedPositions,
projection,
);
expect(attribute.semantic).toBe(VertexAttributeSemantic.TEXCOORD);
expect(attribute.type).toBe(AttributeType.VEC2);
expect(attribute.count).toBe(4);
});
it("_createTextureCoordinates throws without cartographicPositions", function () {
const cartographicPositions = undefined;
const numPositions = 4;
const cartographicBoundingRectangle = new Rectangle(0.0, 0.0, 0.1, 0.1);
const projection = new WebMercatorProjection();
expect(function () {
ModelImageryMapping._createTextureCoordinates(
cartographicPositions,
numPositions,
cartographicBoundingRectangle,
projection,
);
}).toThrowDeveloperError();
});
it("_createTextureCoordinates throws with invalid numPositions", function () {
const cartographicPositions = [
new Cartographic(0.0, 0.0),
new Cartographic(0.1, 0.0),
new Cartographic(0.0, 0.1),
new Cartographic(0.1, 0.1),
];
const numPositions = -1;
const cartographicBoundingRectangle = new Rectangle(0.0, 0.0, 0.1, 0.1);
const projection = new WebMercatorProjection();
expect(function () {
ModelImageryMapping._createTextureCoordinates(
cartographicPositions,
numPositions,
cartographicBoundingRectangle,
projection,
);
}).toThrowDeveloperError();
});
it("_createTextureCoordinates throws without cartographicBoundingRectangle", function () {
const cartographicPositions = [
new Cartographic(0.0, 0.0),
new Cartographic(0.1, 0.0),
new Cartographic(0.0, 0.1),
new Cartographic(0.1, 0.1),
];
const numPositions = 4;
const cartographicBoundingRectangle = undefined;
const projection = new WebMercatorProjection();
expect(function () {
ModelImageryMapping._createTextureCoordinates(
cartographicPositions,
numPositions,
cartographicBoundingRectangle,
projection,
);
}).toThrowDeveloperError();
});
it("_createTextureCoordinates throws without projection", function () {
const cartographicPositions = [
new Cartographic(0.0, 0.0),
new Cartographic(0.1, 0.0),
new Cartographic(0.0, 0.1),
new Cartographic(0.1, 0.1),
];
const numPositions = 4;
const cartographicBoundingRectangle = new Rectangle(0.0, 0.0, 0.1, 0.1);
const projection = undefined;
expect(function () {
ModelImageryMapping._createTextureCoordinates(
cartographicPositions,
numPositions,
cartographicBoundingRectangle,
projection,
);
}).toThrowDeveloperError();
});
it("_createTextureCoordinates creates texture coordinates", function () {
const cartographicPositions = [
new Cartographic(0.0, 0.0),
new Cartographic(0.1, 0.0),
new Cartographic(0.0, 0.1),
new Cartographic(0.1, 0.1),
];
const numPositions = 4;
const cartographicBoundingRectangle = new Rectangle(0.0, 0.0, 0.1, 0.1);
const projection = new WebMercatorProjection();
const actualTextureCoordinates =
ModelImageryMapping._createTextureCoordinates(
cartographicPositions,
numPositions,
cartographicBoundingRectangle,
projection,
);
const expectedTextureCoordinates = new Float32Array([
0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 1.0, 1.0,
]);
expect(actualTextureCoordinates).toEqual(expectedTextureCoordinates);
});
it("createTexCoordAttribute throws without texCoordsTypedArray", function () {
const texCoordsTypedArray = undefined;
expect(function () {
ModelImageryMapping.createTexCoordAttribute(texCoordsTypedArray);
}).toThrowDeveloperError();
});
it("createTexCoordAttribute creates a texture coordinate attribute", function () {
const texCoordsTypedArray = new Float32Array([
0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 1.0, 1.0,
]);
const texCoordAttribute =
ModelImageryMapping.createTexCoordAttribute(texCoordsTypedArray);
expect(texCoordAttribute.semantic).toBe(VertexAttributeSemantic.TEXCOORD);
expect(texCoordAttribute.type).toBe(AttributeType.VEC2);
expect(texCoordAttribute.count).toBe(4);
expect(texCoordAttribute.typedArray).toBeDefined();
});
it("createIterableCartesian3FromTypedArray throws with undefined typedArray", function () {
const typedArray = undefined;
const stride = 3;
expect(function () {
ModelImageryMapping.createIterableCartesian3FromTypedArray(
typedArray,
stride,
);
}).toThrowDeveloperError();
});
it("createIterableCartesian3FromTypedArray throws with invalid stride", function () {
const typedArray = new Float32Array([
0.0, 0.1, 0.2, 1.0, 1.1, 1.2, 2.0, 2.1, 2.2,
]);
const stride = 2;
expect(function () {
ModelImageryMapping.createIterableCartesian3FromTypedArray(
typedArray,
stride,
);
}).toThrowDeveloperError();
});
it("createIterableCartesian3FromTypedArray creates proper cartesians with stride 3", function () {
const typedArray = new Float32Array([
0.0, 0.1, 0.2, 1.0, 1.1, 1.2, 2.0, 2.1, 2.2,
]);
const stride = 3;
const iterable = ModelImageryMapping.createIterableCartesian3FromTypedArray(
typedArray,
stride,
);
const actualCartesians = [
...ModelImageryMapping.map(iterable, (c) => Cartesian3.clone(c)),
];
const expectedCartesians = [
new Cartesian3(0.0, 0.1, 0.2),
new Cartesian3(1.0, 1.1, 1.2),
new Cartesian3(2.0, 2.1, 2.2),
];
for (let i = 0; i < actualCartesians.length; i++) {
expect(
Cartesian3.equalsEpsilon(
actualCartesians[i],
expectedCartesians[i],
CesiumMath.EPSILON6,
),
).toBeTrue();
}
});
it("createIterableCartesian3FromTypedArray creates proper cartesians with stride 4", function () {
const typedArray = new Float32Array([
0.0, 0.1, 0.2, 9.9, 1.0, 1.1, 1.2, 9.9, 2.0, 2.1, 2.2, 9.9,
]);
const stride = 4;
const iterable = ModelImageryMapping.createIterableCartesian3FromTypedArray(
typedArray,
stride,
);
const actualCartesians = [
...ModelImageryMapping.map(iterable, (c) => Cartesian3.clone(c)),
];
const expectedCartesians = [
new Cartesian3(0.0, 0.1, 0.2),
new Cartesian3(1.0, 1.1, 1.2),
new Cartesian3(2.0, 2.1, 2.2),
];
for (let i = 0; i < actualCartesians.length; i++) {
expect(
Cartesian3.equalsEpsilon(
actualCartesians[i],
expectedCartesians[i],
CesiumMath.EPSILON6,
),
).toBeTrue();
}
});
it("map throws with undefined iterable", function () {
const iterable = undefined;
const mapper = (c) => Cartesian3.length(c);
expect(function () {
ModelImageryMapping.map(iterable, mapper);
}).toThrowDeveloperError();
});
it("map throws with undefined mapper", function () {
const iterable = [
new Cartesian3(1.0, 0.0, 0.0),
new Cartesian3(0.0, 2.0, 0.0),
new Cartesian3(0.0, 0.0, 3.0),
];
const mapper = undefined;
expect(function () {
ModelImageryMapping.map(iterable, mapper);
}).toThrowDeveloperError();
});
it("map maps", function () {
const iterable = [
new Cartesian3(1.0, 0.0, 0.0),
new Cartesian3(0.0, 2.0, 0.0),
new Cartesian3(0.0, 0.0, 3.0),
];
const mapper = (c) => Cartesian3.magnitude(c);
const resultIterable = ModelImageryMapping.map(iterable, mapper);
const actualValues = [...resultIterable];
const expectedValues = [1.0, 2.0, 3.0];
expect(actualValues).toEqualEpsilon(expectedValues, CesiumMath.EPSILON6);
});
it("computeCartographicBoundingRectangle throws with undefined cartographicPositions", function () {
const cartographicPositions = undefined;
const result = new Rectangle();
expect(function () {
ModelImageryMapping.computeCartographicBoundingRectangle(
cartographicPositions,
result,
);
}).toThrowDeveloperError();
});
it("computeCartographicBoundingRectangle computes the bounding rectangle", function () {
const cartographicPositions = [
new Cartographic(0.0, 0.0),
new Cartographic(0.1, 0.0),
new Cartographic(0.0, 0.1),
new Cartographic(0.1, 0.1),
];
const result = new Rectangle();
const expectedRectangle = new Rectangle(0.0, 0.0, 0.1, 0.1);
const actualRectangle =
ModelImageryMapping.computeCartographicBoundingRectangle(
cartographicPositions,
result,
);
expect(
Rectangle.equalsEpsilon(
actualRectangle,
expectedRectangle,
CesiumMath.EPSILON6,
),
).toBeTrue();
});
it("computeCartographicBoundingRectangle computes the bounding rectangle with undefined result", function () {
const cartographicPositions = [
new Cartographic(0.0, 0.0),
new Cartographic(0.1, 0.0),
new Cartographic(0.0, 0.1),
new Cartographic(0.1, 0.1),
];
const result = undefined;
const expectedRectangle = new Rectangle(0.0, 0.0, 0.1, 0.1);
const actualRectangle =
ModelImageryMapping.computeCartographicBoundingRectangle(
cartographicPositions,
result,
);
expect(
Rectangle.equalsEpsilon(
actualRectangle,
expectedRectangle,
CesiumMath.EPSILON6,
),
).toBeTrue();
});
it("transformCartesians3 throws with undefined positions", function () {
const positions = undefined;
const matrix = Matrix4.IDENTITY;
expect(function () {
ModelImageryMapping.transformCartesians3(positions, matrix);
}).toThrowDeveloperError();
});
it("transformCartesians3 throws with undefined matrix", function () {
const positions = [
new Cartesian3(1.0, 0.0, 0.0),
new Cartesian3(0.0, 2.0, 0.0),
new Cartesian3(0.0, 0.0, 3.0),
];
const matrix = undefined;
expect(function () {
ModelImageryMapping.transformCartesians3(positions, matrix);
}).toThrowDeveloperError();
});
it("transformCartesians3 transforms the cartesians", function () {
const positions = [
new Cartesian3(1.0, 0.0, 0.0),
new Cartesian3(0.0, 2.0, 0.0),
new Cartesian3(0.0, 0.0, 3.0),
];
const matrix = Matrix4.fromTranslation(new Cartesian3(1.0, 2.0, 3.0));
const resultIterable = ModelImageryMapping.transformCartesians3(
positions,
matrix,
);
const actualCartesians = [
...ModelImageryMapping.map(resultIterable, (c) => Cartesian3.clone(c)),
];
const expectedCartesians = [
new Cartesian3(2.0, 2.0, 3.0),
new Cartesian3(1.0, 4.0, 3.0),
new Cartesian3(1.0, 2.0, 6.0),
];
for (let i = 0; i < actualCartesians.length; i++) {
expect(
Cartesian3.equalsEpsilon(
actualCartesians[i],
expectedCartesians[i],
CesiumMath.EPSILON6,
),
).toBeTrue();
}
});
it("transformToCartographic throws with undefined positions", function () {
const positions = undefined;
const ellipsoid = Ellipsoid.WGS84;
expect(function () {
ModelImageryMapping.transformToCartographic(positions, ellipsoid);
}).toThrowDeveloperError();
});
it("transformToCartographic throws with undefined ellipsoid", function () {
const positions = [
new Cartesian3(1.0, 0.0, 0.0),
new Cartesian3(0.0, 2.0, 0.0),
new Cartesian3(0.0, 0.0, 3.0),
];
const ellipsoid = undefined;
expect(function () {
ModelImageryMapping.transformToCartographic(positions, ellipsoid);
}).toThrowDeveloperError();
});
it("transformToCartographic transforms to cartographic", function () {
const positions = [
new Cartesian3(1.0, 0.0, 0.0),
new Cartesian3(0.0, 2.0, 0.0),
new Cartesian3(0.0, 0.0, 3.0),
];
const ellipsoid = Ellipsoid.WGS84;
const resultIterable = ModelImageryMapping.transformToCartographic(
positions,
ellipsoid,
);
const actualCartographics = [
...ModelImageryMapping.map(resultIterable, (c) => Cartographic.clone(c)),
];
// Let's hope these values never appear in reality.
const expectedCartographics = [
new Cartographic(0.0, 0.0, -6378137.0 + 1.0),
new Cartographic(CesiumMath.PI / 2.0, 0.0, -6378137.0 + 2.0),
new Cartographic(0.0, CesiumMath.PI / 2.0, -6356752.314245179 + 3.0),
];
for (let i = 0; i < actualCartographics.length; i++) {
expect(
Cartographic.equalsEpsilon(
actualCartographics[i],
expectedCartographics[i],
CesiumMath.EPSILON6,
),
).toBeTrue();
}
});
it("createProjectedPositions throws with undefined cartographicPositions", function () {
const cartographicPositions = undefined;
const projection = new WebMercatorProjection();
expect(function () {
ModelImageryMapping.createProjectedPositions(
cartographicPositions,
projection,
);
}).toThrowDeveloperError();
});
it("createProjectedPositions throws with undefined projection", function () {
const cartographicPositions = [
new Cartographic(0.0, 0.0),
new Cartographic(0.1, 0.0),
new Cartographic(0.0, 0.1),
new Cartographic(0.1, 0.1),
];
const projection = undefined;
expect(function () {
ModelImageryMapping.createProjectedPositions(
cartographicPositions,
projection,
);
}).toThrowDeveloperError();
});
it("createProjectedPositions creates projected positions", function () {
const cartographicPositions = [
new Cartographic(0.0, 0.0),
new Cartographic(0.1, 0.0),
new Cartographic(0.0, 0.1),
new Cartographic(0.1, 0.1),
];
const projection = new WebMercatorProjection();
const resultIterable = ModelImageryMapping.createProjectedPositions(
cartographicPositions,
projection,
);
const actualCartesians = [
...ModelImageryMapping.map(resultIterable, (c) => Cartesian3.clone(c)),
];
const expectedCartesians = [
new Cartesian3(0.0, 0.0, 0.0),
new Cartesian3(637813.7, 0.0, 0.0),
new Cartesian3(0.0, 638879.3881344117, 0.0),
new Cartesian3(637813.7, 638879.3881344117, 0.0),
];
console.log(actualCartesians);
console.log(expectedCartesians);
for (let i = 0; i < actualCartesians.length; i++) {
expect(
Cartesian3.equalsEpsilon(
actualCartesians[i],
expectedCartesians[i],
CesiumMath.EPSILON6,
),
).toBeTrue();
}
});
it("computeTexCoords throws with undefined positions", function () {
const positions = undefined;
const boundingRectangle = new BoundingRectangle(0.0, 0.0, 2.0, 2.0);
expect(function () {
ModelImageryMapping.computeTexCoords(positions, boundingRectangle);
}).toThrowDeveloperError();
});
it("computeTexCoords throws with undefined boundingRectangle", function () {
const positions = [
new Cartesian3(0.0, 0.0, 0.0),
new Cartesian3(1.0, 0.0, 0.0),
new Cartesian3(0.0, 1.0, 0.0),
new Cartesian3(4.0, 4.0, 0.0),
];
const boundingRectangle = undefined;
expect(function () {
ModelImageryMapping.computeTexCoords(positions, boundingRectangle);
}).toThrowDeveloperError();
});
it("computeTexCoords computes the texture coordinates", function () {
const positions = [
new Cartesian3(0.0, 0.0, 0.0),
new Cartesian3(1.0, 0.0, 0.0),
new Cartesian3(0.0, 1.0, 0.0),
new Cartesian3(-4.0, 4.0, 0.0),
];
const boundingRectangle = new BoundingRectangle(0.0, 0.0, 2.0, 2.0);
const resultIterable = ModelImageryMapping.computeTexCoords(
positions,
boundingRectangle,
);
const actualCartesians = [
...ModelImageryMapping.map(resultIterable, (c) => Cartesian2.clone(c)),
];
const expectedCartesians = [
new Cartesian2(0.0, 0.0),
new Cartesian2(0.5, 0.0),
new Cartesian2(0.0, 0.5),
new Cartesian2(0.0, 1.0), // Clamped!
];
console.log(actualCartesians);
for (let i = 0; i < actualCartesians.length; i++) {
expect(
Cartesian2.equalsEpsilon(
actualCartesians[i],
expectedCartesians[i],
CesiumMath.EPSILON6,
),
).toBeTrue();
}
});
it("createTypedArrayFromCartesians2 throws with invalid numElements", function () {
const numElements = -1;
const elements = [
new Cartesian2(0.0, 0.0),
new Cartesian2(1.0, 0.0),
new Cartesian2(0.0, 1.0),
new Cartesian2(1.0, 1.0),
];
expect(function () {
ModelImageryMapping.createTypedArrayFromCartesians2(
numElements,
elements,
);
}).toThrowDeveloperError();
});
it("createTypedArrayFromCartesians2 throws with undefined elements", function () {
const numElements = 4;
const elements = undefined;
expect(function () {
ModelImageryMapping.createTypedArrayFromCartesians2(
numElements,
elements,
);
}).toThrowDeveloperError();
});
it("createTypedArrayFromCartesians2 creates a typed array", function () {
const numElements = 4;
const elements = [
new Cartesian2(0.0, 0.0),
new Cartesian2(1.0, 0.0),
new Cartesian2(0.0, 1.0),
new Cartesian2(1.0, 1.0),
];
const actualTypedArray =
ModelImageryMapping.createTypedArrayFromCartesians2(
numElements,
elements,
);
const expectedTypedArray = new Float32Array([
0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 1.0, 1.0,
]);
expect(actualTypedArray).toEqual(expectedTypedArray);
});
});

View File

@ -0,0 +1,176 @@
import {
Cartesian3,
ResourceCache,
Transforms,
ModelImagery,
ImageryLayer,
TileCoordinatesImageryProvider,
HeadingPitchRoll,
WebMercatorTilingScheme,
} from "../../../index.js";
import createScene from "../../../../../Specs/createScene.js";
import pollToPromise from "../../../../../Specs/pollToPromise.js";
import Cesium3DTilesTester from "../../../../../Specs/Cesium3DTilesTester.js";
const tileset_unitSquare_fourPrimitives_plain_url =
"./Data/Models/glTF-2.0/unitSquare/tileset_unitSquare_fourPrimitives_plain.json";
/**
* Wait until the root tile of the given tileset is loaded
*
* @param {Cesium3DTileset} tileset The tileset
* @param {Scene} scene The scene
*/
async function waitForRootLoaded(tileset, scene) {
  // Kick off content loading with an initial render, then keep rendering
  // until the root tile's content has either loaded or failed.
  scene.renderForSpecs();
  const { root } = tileset;
  const rootHasSettled = () => {
    scene.renderForSpecs();
    return root.contentFailed || root.contentReady;
  };
  await pollToPromise(rootHasSettled);
}
/**
* Load and return a test tileset that defines an imagery layer,
* waiting until the root of that tileset is loaded.
*
* This means that the resulting <code>tileset.root.content._model._modelImagery</code>
* (including the <code>ModelPrimitiveImagery</code> instances) will be defined and ready.
*
* @param {Scene} scene The scene
* @returns {Cesium3DTileset} The tileset
*/
async function loadTestTilesetWithImagery(scene) {
  const url = tileset_unitSquare_fourPrimitives_plain_url;
  const tileset = await Cesium3DTilesTester.loadTileset(scene, url);
  // Create a non-trivial transform for the tileset
  const transform = Transforms.eastNorthUpToFixedFrame(
    Cartesian3.fromDegrees(-120.0, 40.0, 1.0),
  );
  tileset.modelMatrix = transform;
  // Set a view that fully shows the tile content
  // (a unit square at the position given above)
  // NOTE(review): the destination is a hard-coded ECEF position that
  // corresponds to the tileset position above — it must be updated if
  // that position changes.
  scene.camera.setView({
    destination: new Cartesian3(
      -2446354.452726738,
      -4237211.248955036,
      4077988.0921552004,
    ),
    orientation: new HeadingPitchRoll(Math.PI * 2, -Math.PI / 2, 0),
  });
  // Attach a tile-coordinates imagery layer (web mercator tiling) so the
  // model's imagery machinery is exercised.
  const imageryProvider = new TileCoordinatesImageryProvider({
    tilingScheme: new WebMercatorTilingScheme(),
  });
  const imageryLayer = new ImageryLayer(imageryProvider);
  tileset.imageryLayers.add(imageryLayer);
  // Keep rendering until the root tile content is ready (or failed).
  await waitForRootLoaded(tileset, scene);
  return tileset;
}
describe("Scene/Model/ModelImagery", function () {
let scene;
beforeAll(function () {
scene = createScene();
});
afterAll(function () {
scene.destroyForSpecs();
});
afterEach(function () {
scene.primitives.removeAll();
ResourceCache.clearForSpecs();
});
it("constructor throws without model", function () {
expect(function () {
// eslint-disable-next-line no-new
new ModelImagery(undefined);
}).toThrowDeveloperError();
});
it("properly reports _hasImagery", async function () {
const tileset = await loadTestTilesetWithImagery(scene);
const root = tileset.root;
const content = root.content;
const model = content._model;
const modelImagery = model._modelImagery;
// Expect imagery to be present
expect(modelImagery._hasImagery).toBeTrue();
// Clear the set of imagery layers
tileset.imageryLayers.removeAll();
// Now there is no imagery again
expect(modelImagery._hasImagery).toBeFalse();
});
it("properly reports _allImageryLayersReady", async function () {
const tileset = await loadTestTilesetWithImagery(scene);
const root = tileset.root;
const content = root.content;
const model = content._model;
const modelImagery = model._modelImagery;
const imageryLayer = tileset.imageryLayers.get(0);
// All imagery layers should be ready now (we just waited for them)
expect(modelImagery._allImageryLayersReady).toBeTrue();
// For spec: This causes the imagery layer to not count as "ready"
imageryLayer._imageryProvider = undefined;
// Now, it should report the imagery layers to not be ready
expect(modelImagery._allImageryLayersReady).toBeFalse();
});
it("properly handles modifications of the imageryConfigurations", async function () {
if (!scene.context.webgl2) {
return;
}
const tileset = await loadTestTilesetWithImagery(scene);
const root = tileset.root;
const content = root.content;
const model = content._model;
const modelImagery = model._modelImagery;
const imageryLayer = tileset.imageryLayers.get(0);
// Initially, _imageryConfigurationsModified is false (it was just updated)
expect(modelImagery._imageryConfigurationsModified()).toBeFalse();
// For spec: Modify imagery configuration
imageryLayer.alpha = 0.5;
// Now, _imageryConfigurationsModified is true
expect(modelImagery._imageryConfigurationsModified()).toBeTrue();
// Trigger an update
modelImagery._checkForModifiedImageryConfigurations();
// Now, _imageryConfigurationsModified is false again
expect(modelImagery._imageryConfigurationsModified()).toBeFalse();
});
it("creates one ModelPrimitiveImagery for each primitive", async function () {
const tileset = await loadTestTilesetWithImagery(scene);
const root = tileset.root;
const content = root.content;
const model = content._model;
const modelImagery = model._modelImagery;
// The model has four primitives
const modelPrimitiveImageries = modelImagery._modelPrimitiveImageries;
expect(modelPrimitiveImageries.length).toBe(4);
});
});

View File

@ -0,0 +1,678 @@
import {
Ellipsoid,
Matrix4,
Model,
ResourceCache,
GeographicTilingScheme,
ImageryLayer,
ImageryLayerCollection,
TileCoordinatesImageryProvider,
WebMercatorTilingScheme,
ModelPrimitiveImagery,
Cartesian3,
Transforms,
HeadingPitchRoll,
WebMercatorProjection,
} from "../../../index.js";
import createScene from "../../../../../Specs/createScene.js";
import loadAndZoomToModelAsync from "./loadAndZoomToModelAsync.js";
import pollToPromise from "../../../../../Specs/pollToPromise.js";
import Cesium3DTilesTester from "../../../../../Specs/Cesium3DTilesTester.js";
import ModelImageryMapping from "../../../Source/Scene/Model/ModelImageryMapping.js";
import Cartographic from "../../../Source/Core/Cartographic.js";
const unitSquare_fourPrimitives_plain_url =
"./Data/Models/glTF-2.0/unitSquare/unitSquare_fourPrimitives_plain.glb";
const tileset_unitSquare_fourPrimitives_plain_url =
"./Data/Models/glTF-2.0/unitSquare/tileset_unitSquare_fourPrimitives_plain.json";
/**
* Wait until the root tile of the given tileset is loaded
*
* @param {Cesium3DTileset} tileset The tileset
* @param {Scene} scene The scene
*/
async function waitForRootLoaded(tileset, scene) {
  // Initial render to kick off content loading.
  scene.renderForSpecs();
  const root = tileset.root;
  // Keep rendering until the root tile's content has loaded or failed.
  // NOTE(review): this helper is duplicated in ModelImagerySpec — consider
  // moving it to a shared Specs utility.
  await pollToPromise(() => {
    scene.renderForSpecs();
    return root.contentFailed || root.contentReady;
  });
}
/**
* Load and return a test tileset that defines an imagery layer,
* waiting until the root of that tileset is loaded.
*
* This means that the resulting <code>tileset.root.content._model._modelImagery</code>
* (including the <code>ModelPrimitiveImagery</code> instances) will be defined and ready.
*
* @param {Scene} scene The scene
* @returns {Cesium3DTileset} The tileset
*/
async function loadTestTilesetWithImagery(scene) {
  const tileset = await Cesium3DTilesTester.loadTileset(
    scene,
    tileset_unitSquare_fourPrimitives_plain_url,
  );
  // Place the tileset at a non-trivial position on the globe.
  tileset.modelMatrix = Transforms.eastNorthUpToFixedFrame(
    Cartesian3.fromDegrees(-120.0, 40.0, 1.0),
  );
  // Camera pose that fully shows the tile content (a unit square at the
  // position given above).
  const destination = new Cartesian3(
    -2446354.452726738,
    -4237211.248955036,
    4077988.0921552004,
  );
  const orientation = new HeadingPitchRoll(Math.PI * 2, -Math.PI / 2, 0);
  scene.camera.setView({ destination, orientation });
  // Attach a tile-coordinates imagery layer with web-mercator tiling.
  const imageryLayer = new ImageryLayer(
    new TileCoordinatesImageryProvider({
      tilingScheme: new WebMercatorTilingScheme(),
    }),
  );
  tileset.imageryLayers.add(imageryLayer);
  // Render until the root tile content is ready (or failed).
  await waitForRootLoaded(tileset, scene);
  return tileset;
}
describe("Scene/Model/ModelPrimitiveImagery", function () {
  let scene;
  beforeAll(function () {
    scene = createScene();
  });
  afterAll(function () {
    scene.destroyForSpecs();
  });
  afterEach(function () {
    scene.primitives.removeAll();
    ResourceCache.clearForSpecs();
  });

  /**
   * Creates an ImageryLayer whose TileCoordinatesImageryProvider uses
   * a GeographicTilingScheme with the given ellipsoid.
   *
   * Note: The tiling scheme has to be passed inside the provider
   * options object (as `{ tilingScheme: ... }`). Passing the tiling
   * scheme directly as the constructor argument would silently be
   * ignored, and the provider would fall back to a default tiling
   * scheme with the default (WGS84) ellipsoid.
   *
   * @param {Ellipsoid} ellipsoid The ellipsoid
   * @returns {ImageryLayer} The imagery layer
   */
  function createGeographicImageryLayer(ellipsoid) {
    return new ImageryLayer(
      new TileCoordinatesImageryProvider({
        tilingScheme: new GeographicTilingScheme({ ellipsoid: ellipsoid }),
      }),
    );
  }

  /**
   * Loads the shared test tileset (including its single imagery layer)
   * via loadTestTilesetWithImagery, and returns the objects that most
   * of the specs below operate on.
   *
   * @returns {Promise<object>} An object containing the `tileset`,
   * the root tile content `model`, and the first
   * `modelPrimitiveImagery` of that model
   */
  async function loadModelPrimitiveImagery() {
    const tileset = await loadTestTilesetWithImagery(scene);
    const model = tileset.root.content._model;
    const modelPrimitiveImagery =
      model._modelImagery._modelPrimitiveImageries[0];
    return { tileset, model, modelPrimitiveImagery };
  }

  it("_computeUniqueEllipsoids throws with undefined argument", function () {
    expect(function () {
      ModelPrimitiveImagery._computeUniqueEllipsoids(undefined);
    }).toThrowDeveloperError();
  });

  it("_computeUniqueEllipsoids computes unique ellipsoids", function () {
    const imageryLayers = new ImageryLayerCollection();
    imageryLayers.add(createGeographicImageryLayer(Ellipsoid.WGS84));
    imageryLayers.add(createGeographicImageryLayer(Ellipsoid.UNIT_SPHERE));
    imageryLayers.add(createGeographicImageryLayer(Ellipsoid.UNIT_SPHERE));
    imageryLayers.add(createGeographicImageryLayer(Ellipsoid.MOON));
    // The duplicate UNIT_SPHERE must appear only once in the result
    const expectedUniqueEllipsoids = [
      Ellipsoid.WGS84,
      Ellipsoid.UNIT_SPHERE,
      Ellipsoid.MOON,
    ];
    const actualUniqueEllipsoids =
      ModelPrimitiveImagery._computeUniqueEllipsoids(imageryLayers);
    expect(actualUniqueEllipsoids).toEqual(expectedUniqueEllipsoids);
  });

  it("_extractProjections throws with undefined argument", function () {
    expect(function () {
      ModelPrimitiveImagery._extractProjections(undefined);
    }).toThrowDeveloperError();
  });

  it("_extractProjections extracts the projections", function () {
    const tilingSchemes = [
      new GeographicTilingScheme(),
      new WebMercatorTilingScheme(),
      new GeographicTilingScheme(),
      new WebMercatorTilingScheme(),
    ];
    const imageryLayers = new ImageryLayerCollection();
    for (const tilingScheme of tilingSchemes) {
      imageryLayers.add(
        new ImageryLayer(
          new TileCoordinatesImageryProvider({ tilingScheme: tilingScheme }),
        ),
      );
    }
    // One projection per layer, in layer order (no de-duplication here)
    const expectedProjections = tilingSchemes.map(
      (tilingScheme) => tilingScheme.projection,
    );
    const actualProjections =
      ModelPrimitiveImagery._extractProjections(imageryLayers);
    expect(actualProjections).toEqual(expectedProjections);
  });

  it("_computePrimitivePositionTransform throws without model", async function () {
    const model = await loadAndZoomToModelAsync(
      {
        gltf: unitSquare_fourPrimitives_plain_url,
      },
      scene,
    );
    const runtimeNode = model.sceneGraph._runtimeNodes[0];
    expect(function () {
      ModelPrimitiveImagery._computePrimitivePositionTransform(
        undefined,
        runtimeNode,
      );
    }).toThrowDeveloperError();
  });

  it("_computePrimitivePositionTransform throws without runtimeNode", async function () {
    const model = await Model.fromGltfAsync({
      url: unitSquare_fourPrimitives_plain_url,
    });
    expect(function () {
      ModelPrimitiveImagery._computePrimitivePositionTransform(
        model,
        undefined,
      );
    }).toThrowDeveloperError();
  });

  it("_computePrimitivePositionTransform computes the transform", async function () {
    const model = await loadAndZoomToModelAsync(
      {
        gltf: unitSquare_fourPrimitives_plain_url,
      },
      scene,
    );
    const runtimeNode = model.sceneGraph._runtimeNodes[0];
    // Note: This test does not make sense. The function just computes the
    // product of some matrices, and the computation itself has only been
    // reverse engineered from buildDrawCommands. One of the matrices is
    // the model matrix that can be set by the user. The values of the
    // other matrices depend on dozens of factors. One of them is the
    // "axis correction matrix". The other ones are based on the glTF
    // node hierarchy, and they are all the identity matrix here.
    // It should not be necessary to manually compute that product to begin with.
    // It should be possible to access the primitive.getTransform()
    // directly, where this "getTransform" should be covered with dozens
    // of unit tests for all the configurations of modelMatrix, up-axis
    // conventions, and glTF node hierarchy matrices.
    // What is tested here is essentially: "It does not crash", but not more.
    const actualTransform =
      ModelPrimitiveImagery._computePrimitivePositionTransform(
        model,
        runtimeNode,
      );
    // This is Y_UP_TO_Z_U * Z_UP_TO_X_UP, i.e. the axisCorrectionMatrix
    // prettier-ignore
    const expectedTransform = new Matrix4(
      0, 0, 1, 0,
      1, 0, 0, 0,
      0, 1, 0, 0,
      0, 0, 0, 1,
    );
    expect(actualTransform).toEqual(expectedTransform);
  });

  it("_obtainPrimitivePositionAttribute throws without primitive", async function () {
    expect(function () {
      ModelPrimitiveImagery._obtainPrimitivePositionAttribute(undefined);
    }).toThrowDeveloperError();
  });

  it("_obtainPrimitivePositionAttribute throws for primitive without POSITION", async function () {
    const model = await loadAndZoomToModelAsync(
      {
        gltf: unitSquare_fourPrimitives_plain_url,
      },
      scene,
    );
    const primitive = model.sceneGraph.components.nodes[0].primitives[0];
    // For specs: Remove the POSITION attribute from the primitive
    primitive.attributes.shift();
    expect(function () {
      ModelPrimitiveImagery._obtainPrimitivePositionAttribute(primitive);
    }).toThrowDeveloperError();
  });

  it("_obtainPrimitivePositionAttribute obtains the primitive POSITION attribute", async function () {
    const model = await loadAndZoomToModelAsync(
      {
        gltf: unitSquare_fourPrimitives_plain_url,
      },
      scene,
    );
    const primitive = model.sceneGraph.components.nodes[0].primitives[0];
    const expectedAttribute = primitive.attributes[0];
    const actualAttribute =
      ModelPrimitiveImagery._obtainPrimitivePositionAttribute(primitive);
    expect(actualAttribute.semantic).toEqual("POSITION");
    expect(actualAttribute).toEqual(expectedAttribute);
  });

  it("properly reports _mappedPositionsNeedUpdate", async function () {
    if (!scene.context.webgl2) {
      return;
    }
    const { model, modelPrimitiveImagery } = await loadModelPrimitiveImagery();
    // Initially, the mapped positions don't need an update
    expect(modelPrimitiveImagery._mappedPositionsNeedUpdate).toBeFalse();
    // For spec: Brutally set the model matrix to a new value
    model.modelMatrix = Matrix4.clone(Matrix4.IDENTITY);
    // Now, the mapped positions need an update
    expect(modelPrimitiveImagery._mappedPositionsNeedUpdate).toBeTrue();
  });

  it("_computeMappedPositionsPerEllipsoid computes the mapped positions", async function () {
    if (!scene.context.webgl2) {
      return;
    }
    const { modelPrimitiveImagery } = await loadModelPrimitiveImagery();
    const actualMappedPositions =
      modelPrimitiveImagery._computeMappedPositionsPerEllipsoid();
    // Not checking the exact values here. The correctness
    // of these should be covered with other tests. This test
    // only checks that the MappedPositions are created.
    // One ellipsoid, therefore, one MappedPositions object
    expect(actualMappedPositions.length).toBe(1);
    // The primitives consist of 3x3 vertices
    expect(actualMappedPositions[0].numPositions).toBe(9);
  });

  it("_computeImageryTexCoordsAttributesPerProjection computes the attributes", async function () {
    if (!scene.context.webgl2) {
      return;
    }
    const { modelPrimitiveImagery } = await loadModelPrimitiveImagery();
    const actualTexCoordAttributes =
      modelPrimitiveImagery._computeImageryTexCoordsAttributesPerProjection();
    // Not checking the exact values here. The correctness
    // of these should be covered with other tests. This test
    // only checks that the Attributes are created.
    // One projection, therefore, one attribute
    expect(actualTexCoordAttributes.length).toBe(1);
    // The primitives consist of 3x3 vertices
    expect(actualTexCoordAttributes[0].count).toBe(9);
  });

  it("_createImageryTexCoordAttributes computes the attributes", async function () {
    if (!scene.context.webgl2) {
      return;
    }
    const { modelPrimitiveImagery } = await loadModelPrimitiveImagery();
    const uniqueProjections = [new WebMercatorProjection()];
    const actualTexCoordAttributes =
      modelPrimitiveImagery._createImageryTexCoordAttributes(uniqueProjections);
    // Not checking the exact values here. The correctness
    // of these should be covered with other tests. This test
    // only checks that the Attributes are created.
    // One projection, therefore, one attribute
    expect(actualTexCoordAttributes.length).toBe(1);
    // The primitives consist of 3x3 vertices
    expect(actualTexCoordAttributes[0].count).toBe(9);
  });

  it("coveragesForImageryLayer throws for unknown imagery layer", async function () {
    if (!scene.context.webgl2) {
      return;
    }
    const { modelPrimitiveImagery } = await loadModelPrimitiveImagery();
    // Create a new imageryLayer that does not appear in the tileset
    const imageryLayer = new ImageryLayer(
      new TileCoordinatesImageryProvider({
        tilingScheme: new WebMercatorTilingScheme(),
      }),
    );
    expect(function () {
      modelPrimitiveImagery.coveragesForImageryLayer(imageryLayer);
    }).toThrowDeveloperError();
  });

  it("coveragesForImageryLayer provides the proper coverages", async function () {
    if (!scene.context.webgl2) {
      return;
    }
    const { tileset, modelPrimitiveImagery } =
      await loadModelPrimitiveImagery();
    const imageryLayer = tileset.imageryLayers.get(0);
    const actualImageryCoverages =
      modelPrimitiveImagery.coveragesForImageryLayer(imageryLayer);
    // Note: The "correctness" has been verified visually for this
    // configuration, and the proper numbers have been extracted
    // from a debugger run. This may be overly specific, and may
    // have to be adjusted in the future. Right now, it may only
    // prevent certain regressions.
    expect(actualImageryCoverages.length).toBe(1);
    const actualImageryCoverage = actualImageryCoverages[0];
    expect(actualImageryCoverage.x).toBe(5592405);
    expect(actualImageryCoverage.y).toBe(12703008);
    expect(actualImageryCoverage.level).toBe(25);
  });

  it("reference counting for imagery works", async function () {
    if (!scene.context.webgl2) {
      return;
    }
    const { tileset, model } = await loadModelPrimitiveImagery();
    const imageryLayer = tileset.imageryLayers.get(0);
    // Obtain the imageries that should be covered.
    // Note that this will increase their reference
    // count by default...
    const imageries = [
      imageryLayer.getImageryFromCache(5592405, 12703007, 25),
      imageryLayer.getImageryFromCache(5592405, 12703008, 25),
      imageryLayer.getImageryFromCache(5592406, 12703007, 25),
      imageryLayer.getImageryFromCache(5592406, 12703008, 25),
    ];
    // ... so decrease it here immediately
    for (const imagery of imageries) {
      imagery.releaseReference();
    }
    // Check the reference counts that have been found via reverse engineering
    expect(imageries[0].referenceCount).toBe(2);
    expect(imageries[1].referenceCount).toBe(4);
    expect(imageries[2].referenceCount).toBe(1);
    expect(imageries[3].referenceCount).toBe(2);
    // Set a model matrix that causes the previous imageries to no longer
    // be covered, and trigger an update
    model.modelMatrix = Matrix4.clone(Matrix4.IDENTITY);
    model.update(scene.frameState);
    // The new reference counters should be all 0 now
    for (const imagery of imageries) {
      expect(imagery.referenceCount).toBe(0);
    }
  });

  it("_uploadImageryTexCoordAttributes throws without context", async function () {
    if (!scene.context.webgl2) {
      return;
    }
    const { modelPrimitiveImagery } = await loadModelPrimitiveImagery();
    expect(function () {
      modelPrimitiveImagery._uploadImageryTexCoordAttributes(undefined);
    }).toThrowDeveloperError();
  });

  it("_uploadImageryTexCoordAttributes uploads attribute data into buffers", async function () {
    if (!scene.context.webgl2) {
      return;
    }
    const { modelPrimitiveImagery } = await loadModelPrimitiveImagery();
    // For specs: Delete the buffers that already exist
    const attributes =
      modelPrimitiveImagery._imageryTexCoordAttributesPerProjection;
    for (const attribute of attributes) {
      delete attribute.buffer;
    }
    modelPrimitiveImagery._uploadImageryTexCoordAttributes(scene.context);
    // Expect the new buffers to be present now
    for (const attribute of attributes) {
      expect(attribute.buffer).toBeDefined();
    }
  });

  it("_destroyImageryTexCoordAttributes destroys the attributes and their buffers", async function () {
    if (!scene.context.webgl2) {
      return;
    }
    const { modelPrimitiveImagery } = await loadModelPrimitiveImagery();
    // Prepare the "buffer.destroy" call expectations for all attributes
    const attributes =
      modelPrimitiveImagery._imageryTexCoordAttributesPerProjection;
    const bufferDestroyCalls = attributes.map((attribute) =>
      spyOn(attribute.buffer, "destroy").and.callThrough(),
    );
    modelPrimitiveImagery._destroyImageryTexCoordAttributes();
    // Expect the destroy function of all buffers to have been called
    for (const bufferDestroyCall of bufferDestroyCalls) {
      expect(bufferDestroyCall).toHaveBeenCalled();
    }
    expect(
      modelPrimitiveImagery._imageryTexCoordAttributesPerProjection,
    ).toBeUndefined();
  });

  // Note: The following tests would rather belong into ModelImageryMappingSpec,
  // but require primitive attributes that are only available after loading the
  // tileset in a scene, so they are added here

  it("ModelImageryMapping createCartographicPositions throws without primitivePositionAttribute", async function () {
    if (!scene.context.webgl2) {
      return;
    }
    const primitivePositionAttribute = undefined;
    const primitivePositionTransform = Matrix4.IDENTITY;
    const ellipsoid = Ellipsoid.WGS84;
    expect(function () {
      ModelImageryMapping.createCartographicPositions(
        primitivePositionAttribute,
        primitivePositionTransform,
        ellipsoid,
      );
    }).toThrowDeveloperError();
  });

  it("ModelImageryMapping createCartographicPositions throws without primitivePositionTransform", async function () {
    if (!scene.context.webgl2) {
      return;
    }
    const { model } = await loadModelPrimitiveImagery();
    const primitivePositionAttribute =
      model.sceneGraph.components.nodes[0].primitives[0].attributes[0];
    const primitivePositionTransform = undefined;
    const ellipsoid = Ellipsoid.WGS84;
    expect(function () {
      ModelImageryMapping.createCartographicPositions(
        primitivePositionAttribute,
        primitivePositionTransform,
        ellipsoid,
      );
    }).toThrowDeveloperError();
  });

  it("ModelImageryMapping createCartographicPositions throws without ellipsoid", async function () {
    if (!scene.context.webgl2) {
      return;
    }
    const { model } = await loadModelPrimitiveImagery();
    const primitivePositionAttribute =
      model.sceneGraph.components.nodes[0].primitives[0].attributes[0];
    const primitivePositionTransform = Matrix4.IDENTITY;
    const ellipsoid = undefined;
    expect(function () {
      ModelImageryMapping.createCartographicPositions(
        primitivePositionAttribute,
        primitivePositionTransform,
        ellipsoid,
      );
    }).toThrowDeveloperError();
  });

  it("ModelImageryMapping createCartographicPositions creates cartographic positions", async function () {
    if (!scene.context.webgl2) {
      return;
    }
    const { model } = await loadModelPrimitiveImagery();
    const primitivePositionAttribute =
      model.sceneGraph.components.nodes[0].primitives[0].attributes[0];
    const primitivePositionTransform = Matrix4.IDENTITY;
    const ellipsoid = Ellipsoid.WGS84;
    const cartographicPositions =
      ModelImageryMapping.createCartographicPositions(
        primitivePositionAttribute,
        primitivePositionTransform,
        ellipsoid,
      );
    const actualCartographicPositions = [
      ...ModelImageryMapping.map(cartographicPositions, (c) =>
        Cartographic.clone(c),
      ),
    ];
    // The primitives consist of 3x3 vertices
    expect(actualCartographicPositions.length).toBe(9);
  });
});

View File

@ -0,0 +1,442 @@
import {
Model,
ModelUtility,
ResourceCache,
Math as CesiumMath,
Cartesian2,
Cartesian3,
Matrix4,
ModelReader,
TranslationRotationScale,
} from "../../../index.js";
import createScene from "../../../../../Specs/createScene.js";
import pollToPromise from "../../../../../Specs/pollToPromise.js";
const baseUrl = "./Data/Models/glTF-2.0/unitSquare/";
/**
 * Create a model from the given glTF, add it as a primitive
 * to the given scene, and wait until it is fully loaded.
 *
 * @param {Scene} scene The scene
 * @param {object} gltf The gltf
 * @returns {Model} The model
 */
async function loadAsModel(scene, gltf) {
  const model = await Model.fromGltfAsync({
    gltf: gltf,
    basePath: "SPEC_BASE_PATH",
    incrementallyLoadTextures: false,
  });
  scene.primitives.add(model);
  // Keep rendering frames until the model reports readiness
  // (or the poll times out)
  await pollToPromise(
    function () {
      scene.renderForSpecs();
      return model.ready;
    },
    { timeout: 10000 },
  );
  return model;
}
// A simple representation of a 'Vertex' in an indexed triangle
// set, only consisting of a Cartesian3 position and a
// Cartesian2 texture coordinate
class SpecVertex {
  constructor(p, t) {
    this.p = p;
    this.t = t;
  }
  // Returns whether both the position and the texture coordinate
  // are epsilon-equal to those of the other vertex
  equalsEpsilon(other, epsilon) {
    return (
      this.p.equalsEpsilon(other.p, epsilon) &&
      this.t.equalsEpsilon(other.t, epsilon)
    );
  }
}
// A simple representation of a 'Triangle' in an indexed triangle
// set, simply storing 3 vertices
class SpecTriangle {
  constructor(v0, v1, v2) {
    this.v0 = v0;
    this.v1 = v1;
    this.v2 = v2;
  }
  // Returns whether this triangle is epsilon-equal to the other one,
  // comparing against all three cyclic rotations of the other
  // triangle: triangles that only differ in their start vertex (but
  // have the same winding order) are considered equal
  equalsEpsilon(other, epsilon) {
    const a = [this.v0, this.v1, this.v2];
    const b = [other.v0, other.v1, other.v2];
    for (let offset = 0; offset < 3; offset++) {
      let matches = true;
      for (let i = 0; i < 3; i++) {
        if (!a[i].equalsEpsilon(b[(i + offset) % 3], epsilon)) {
          matches = false;
          break;
        }
      }
      if (matches) {
        return true;
      }
    }
    return false;
  }
}
// A simple representation of an indexed triangle set,
// consisting of SpecTriangle and SpecVertex instances,
// created from flat arrays of triangle indices,
// positions, and texture coordinates
class SpecIndexedTriangleSet {
  constructor(indices, positions, texCoords) {
    // Build one SpecVertex per (3 position components, 2 texCoord
    // components) pair
    const specVertices = [];
    const numVertices = positions.length / 3;
    for (let v = 0; v < numVertices; v++) {
      const position = new Cartesian3(
        positions[v * 3 + 0],
        positions[v * 3 + 1],
        positions[v * 3 + 2],
      );
      const texCoord = new Cartesian2(
        texCoords[v * 2 + 0],
        texCoords[v * 2 + 1],
      );
      specVertices.push(new SpecVertex(position, texCoord));
    }
    // Build one SpecTriangle per index triple
    const specTriangles = [];
    const numTriangles = indices.length / 3;
    for (let t = 0; t < numTriangles; t++) {
      specTriangles.push(
        new SpecTriangle(
          specVertices[indices[t * 3 + 0]],
          specVertices[indices[t * 3 + 1]],
          specVertices[indices[t * 3 + 2]],
        ),
      );
    }
    this.specTriangles = specTriangles;
  }
  // Returns whether any triangle of this set is epsilon-equal
  // to the given triangle
  containsEpsilon(specTriangle, epsilon) {
    return this.specTriangles.some((t) =>
      t.equalsEpsilon(specTriangle, epsilon),
    );
  }
  // Returns whether both sets have the same number of triangles,
  // and each triangle of this set is epsilon-contained in the other
  equalsEpsilon(other, epsilon) {
    if (this.specTriangles.length !== other.specTriangles.length) {
      return false;
    }
    return this.specTriangles.every((specTriangle) =>
      other.containsEpsilon(specTriangle, epsilon),
    );
  }
}
// Returns a Matrix4 that describes the transform of the given
// glTF node, either obtained from the node 'matrix' or from
// the node 'translation', 'rotation', 'scale', defaulting
// to the identity matrix if no information was given.
function getNodeMatrix(node) {
  const matrixArray = node.matrix;
  if (matrixArray) {
    return Matrix4.fromArray(matrixArray, 0, new Matrix4());
  }
  // TranslationRotationScale falls back to identity components
  // for any of these that is undefined
  return Matrix4.fromTranslationRotationScale(
    new TranslationRotationScale(node.translation, node.rotation, node.scale),
    new Matrix4(),
  );
}
// Loads the glTF from the given URL as a 'Model' and adds it to
// the given scene, then obtains the indices, positions, and
// texture coordinates from this model using the 'ModelReader',
// and creates a SpecIndexedTriangleSet from the result.
async function loadPrimitiveAsIndexedTriangleSet(scene, url) {
  const model = await loadAsModel(scene, url);
  const node = model.sceneGraph.components.nodes[0];
  const primitive = node.primitives[0];
  // Resolve the primitive indices into a flat triangle index array
  const indices = ModelReader.readIndicesAsTriangleIndicesTypedArray(
    primitive.indices,
    primitive.primitiveType,
  );
  // Transform the positions with the node matrix, so that different
  // flavors of the same geometry become directly comparable
  const positionAttribute = ModelUtility.getAttributeBySemantic(
    primitive,
    "POSITION",
  );
  const rawPositions = ModelReader.readAttributeAsTypedArray(positionAttribute);
  const positions = ModelReader.transform3D(
    rawPositions,
    getNodeMatrix(node),
    undefined,
  );
  const texCoordAttribute = ModelUtility.getAttributeBySemantic(
    primitive,
    "TEXCOORD",
    0,
  );
  const texCoords = ModelReader.readAttributeAsTypedArray(texCoordAttribute);
  return new SpecIndexedTriangleSet(indices, positions, texCoords);
}
// A spec for the 'ModelReader' class. It reads the same geometry from
// different flavors of glTF assets (e.g. interleaved or compressed),
// and checks whether the resulting geometry is epsilon-equal to the
// geometry that was read from the "plain" glTF asset
describe(
  "Scene/Model/ModelReader",
  function () {
    let scene;
    beforeAll(function () {
      scene = createScene();
    });
    afterAll(function () {
      scene.destroyForSpecs();
    });
    afterEach(function () {
      scene.primitives.removeAll();
      ResourceCache.clearForSpecs();
    });

    // The glTF asset that serves as the geometry "ground truth"
    const expectedName = "unitSquare11x11_plain.glb";

    /**
     * Loads the given glTF flavor and the plain reference asset as
     * indexed triangle sets, and expects both to contain epsilon-equal
     * geometry.
     *
     * @param {string} actualName The file name of the flavor to check
     */
    async function expectSameGeometryAsPlain(actualName) {
      const expectedIts = await loadPrimitiveAsIndexedTriangleSet(
        scene,
        `${baseUrl}${expectedName}`,
      );
      const actualIts = await loadPrimitiveAsIndexedTriangleSet(
        scene,
        `${baseUrl}${actualName}`,
      );
      const equal = actualIts.equalsEpsilon(expectedIts, CesiumMath.EPSILON3);
      expect(equal)
        .withContext(
          `Expected ${actualName} to contain the same geometry as ${expectedName}`,
        )
        .toBeTrue();
    }

    // Each entry maps the spec name to the glTF flavor file to check.
    // All specs are identical except for the asset that is compared
    // against the plain reference asset, so they are generated here.
    const flavors = [
      ["interleaved", "unitSquare11x11_plain_interleaved.glb"],
      ["draco", "unitSquare11x11_draco.glb"],
      ["meshopt", "unitSquare11x11_meshopt.glb"],
      ["quantized_interleaved", "unitSquare11x11_quantized_interleaved.glb"],
      [
        "unsignedShortTexCoords",
        "unitSquare11x11_unsignedShortTexCoords.glb",
      ],
      [
        "unsignedShortTexCoords_interleaved",
        "unitSquare11x11_unsignedShortTexCoords_interleaved.glb",
      ],
      [
        "unsignedShortTexCoords_quantized",
        "unitSquare11x11_unsignedShortTexCoords_quantized.glb",
      ],
      [
        "unsignedShortTexCoords_quantized_interleaved",
        "unitSquare11x11_unsignedShortTexCoords_quantized_interleaved.glb",
      ],
    ];
    for (const [flavorName, fileName] of flavors) {
      it(`reads ${flavorName} data`, async function () {
        if (!scene.context.webgl2) {
          return;
        }
        await expectSameGeometryAsPlain(fileName);
      });
    }
  },
  "WebGL",
);

View File

@ -49,17 +49,17 @@ describe("Scene/processVoxelProperties", function () {
propertyStatisticsFields,
);
// Check for Statistics struct
const statisticsFields = [" PropertyStatistics_a a;"];
// Check for MetadataStatistics struct
const metadataStatisticsFields = [" PropertyStatistics_a a;"];
ShaderBuilderTester.expectHasFragmentStruct(
shaderBuilder,
"Statistics",
"Statistics",
statisticsFields,
"MetadataStatistics",
"MetadataStatistics",
metadataStatisticsFields,
);
// Check for Metadata struct
const metadataFields = [" Statistics statistics;", " float a;"];
const metadataFields = [" float a;"];
ShaderBuilderTester.expectHasFragmentStruct(
shaderBuilder,
"Metadata",
@ -67,34 +67,23 @@ describe("Scene/processVoxelProperties", function () {
metadataFields,
);
// Check for VoxelProperty structs
const voxelPropertyFields = [
" vec3 partialDerivativeLocal;",
" vec3 partialDerivativeWorld;",
" vec3 partialDerivativeView;",
" vec3 partialDerivativeValid;",
];
// Check for Attributes struct
const attributesFields = [" vec3 positionEC;", " vec3 normalEC;"];
ShaderBuilderTester.expectHasFragmentStruct(
shaderBuilder,
"VoxelProperty_a",
"VoxelProperty_a",
voxelPropertyFields,
"Attributes",
"Attributes",
attributesFields,
);
// Check for Voxel struct
const voxelFields = [
" VoxelProperty_a a;",
" vec3 positionEC;",
" vec3 positionUv;",
" vec3 positionShapeUv;",
" vec3 positionUvLocal;",
" vec3 surfaceNormal;",
" vec3 viewDirUv;",
" vec3 viewDirWorld;",
" float travelDistance;",
" int stepCount;",
" int sampleIndex;",
" int tileIndex;",
" float distanceToDepthBuffer;",
];
ShaderBuilderTester.expectHasFragmentStruct(
shaderBuilder,
@ -104,7 +93,12 @@ describe("Scene/processVoxelProperties", function () {
);
// Check for FragmentInput struct
const fragmentInputFields = [" Metadata metadata;", " Voxel voxel;"];
const fragmentInputFields = [
" MetadataStatistics metadataStatistics;",
" Metadata metadata;",
" Attributes attributes;",
" Voxel voxel;",
];
ShaderBuilderTester.expectHasFragmentStruct(
shaderBuilder,
"FragmentInput",
@ -122,11 +116,82 @@ describe("Scene/processVoxelProperties", function () {
);
// Check clearProperties function
ShaderBuilderTester.expectHasFragmentFunctionUnordered(
shaderBuilder,
"clearProperties",
"Properties clearProperties()",
[
" Properties properties;",
" properties.a = float(0.0);",
" return properties;",
],
);
// Check sumProperties function
ShaderBuilderTester.expectHasFragmentFunctionUnordered(
shaderBuilder,
"sumProperties",
"Properties sumProperties(Properties propertiesA, Properties propertiesB)",
[
" Properties properties;",
" properties.a = propertiesA.a + propertiesB.a;",
" return properties;",
],
);
// Check scaleProperties function
ShaderBuilderTester.expectHasFragmentFunctionUnordered(
shaderBuilder,
"scaleProperties",
"Properties scaleProperties(Properties properties, float scale)",
[
" Properties scaledProperties = properties;",
" scaledProperties.a *= scale;",
" return scaledProperties;",
],
);
// Check mixProperties function
ShaderBuilderTester.expectHasFragmentFunctionUnordered(
shaderBuilder,
"mixProperties",
"Properties mixProperties(Properties propertiesA, Properties propertiesB, float mixFactor)",
[
" Properties properties;",
" properties.a = mix(propertiesA.a, propertiesB.a, mixFactor);",
" return properties;",
],
);
// Check copyPropertiesToMetadata function
ShaderBuilderTester.expectHasFragmentFunctionUnordered(
shaderBuilder,
"copyPropertiesToMetadata",
"void copyPropertiesToMetadata(in Properties properties, inout Metadata metadata)",
[" metadata.a = properties.a;"],
);
// Check setStatistics function
ShaderBuilderTester.expectHasFragmentFunctionUnordered(
shaderBuilder,
"setStatistics",
"void setStatistics(inout MetadataStatistics metadataStatistics)",
[
" metadataStatistics.a.min = 0.0;",
" metadataStatistics.a.max = 1.0;",
],
);
// Check getPropertiesFromMegatextureAtUv function
ShaderBuilderTester.expectHasFragmentFunctionUnordered(
shaderBuilder,
"getPropertiesFromMegatextureAtUv",
"Properties getPropertiesFromMegatextureAtUv(vec2 texcoord)",
[
" Properties properties;",
" properties.a = texture(u_megatextureTextures[0], texcoord).r;",
" return properties;",
],
);
});
});

View File

@ -1,6 +1,6 @@
{
"name": "@cesium/engine",
"version": "17.0.0",
"version": "18.0.0",
"description": "CesiumJS is a JavaScript library for creating 3D globes and 2D maps in a web browser without a plugin.",
"keywords": [
"3D",
@ -23,7 +23,7 @@
"LICENSE.md"
],
"engines": {
"node": ">=14.0.0"
"node": ">=20.19.0"
},
"sideEffects": [
"./Source/ThirdParty/**/*",
@ -48,7 +48,7 @@
"meshoptimizer": "^0.23.0",
"pako": "^2.0.4",
"protobufjs": "^7.1.0",
"rbush": "3.0.1",
"rbush": "^4.0.1",
"topojson-client": "^3.1.0",
"urijs": "^1.19.7"
},

View File

@ -1,6 +1,6 @@
{
"name": "@cesium/widgets",
"version": "12.0.0",
"version": "12.1.0",
"description": "A widgets library for use with CesiumJS. CesiumJS is a JavaScript library for creating 3D globes and 2D maps in a web browser without a plugin.",
"keywords": [
"3D",
@ -25,10 +25,10 @@
"./Specs/**/*"
],
"engines": {
"node": ">=14.0.0"
"node": ">=20.19.0"
},
"dependencies": {
"@cesium/engine": "^17.0.0",
"@cesium/engine": "^18.0.0",
"nosleep.js": "^0.12.0"
},
"type": "module",