1 /*
2  * vp_rle.c
3  *
4  * Routines for run-length encoding classified volume data.
5  *
6  * Copyright (c) 1994 The Board of Trustees of The Leland Stanford
7  * Junior University.  All rights reserved.
8  *
9  * Permission to use, copy, modify and distribute this software and its
10  * documentation for any purpose is hereby granted without fee, provided
11  * that the above copyright notice and this permission notice appear in
12  * all copies of this software and that you do not sell the software.
13  * Commercial licensing is available by contacting the author.
14  *
15  * THE SOFTWARE IS PROVIDED "AS IS" AND WITHOUT WARRANTY OF ANY KIND,
16  * EXPRESS, IMPLIED OR OTHERWISE, INCLUDING WITHOUT LIMITATION, ANY
17  * WARRANTY OF MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE.
18  *
19  * Author:
20  *    Phil Lacroute
21  *    Computer Systems Laboratory
22  *    Electrical Engineering Dept.
23  *    Stanford University
24  */
25 
26 /*
27  * $Date: 1994/12/30 23:52:38 $
28  * $Revision: 1.28 $
29  */
30 
31 #include "vp_global.h"
32 
33 static void EncodeScanline ANSI_ARGS((vpContext *vpc, void *voxels,
34     MinMaxOctree *octree, MMOctreeLevel level_stack[VP_MAX_OCTREE_LEVELS]));
35 static void InitRLE ANSI_ARGS((vpContext *vpc));
36 static void CacheVoxel ANSI_ARGS((vpContext *vpc, double opacity,
37     void *rawvoxel));
38 static void CacheLength ANSI_ARGS((vpContext *vpc, int length));
39 static void CountNonZeroVoxel ANSI_ARGS((RunData *rundata, int index,
40     int end_of_scan, vpContext *vpc));
41 static void RepackClassifiedVolume ANSI_ARGS((vpContext *vpc));
42 static RLEVoxels *CreateEmptyRLEVoxels ANSI_ARGS((vpContext *vpc,
43     int ilen, int jlen, int klen));
44 static RLEVoxels *CreateRLEVoxelsFromRunData ANSI_ARGS((vpContext *vpc,
45     int ilen, int jlen, int klen, int non_zero_count, RunData *run_data,
46     int rle_bytes_per_voxel));
47 static void StoreNonZeroVoxel ANSI_ARGS((void *voxel, int rle_bytes_per_voxel,
48     void *data, unsigned char *lengths, RunData *rundata, int index));
49 static void PadScanlines ANSI_ARGS((int ilen, int jlen, int klen,
50     RunData *run_data, unsigned char *lengths));
51 static ConstructionBuffer *CreateConstructionBuffer ANSI_ARGS((
52     vpContext *vpc));
53 static void DestroyConstructionBuffer ANSI_ARGS((vpContext *vpc,
54     ConstructionBuffer *cbuf));
55 static GBuffer *CreateGBuffer ANSI_ARGS((vpContext *vpc));
56 static void DestroyGBuffer ANSI_ARGS((vpContext *vpc, GBuffer *gbuf));
57 #ifdef INDEX_VOLUME
58 static vpResult ComputeIndex ANSI_ARGS((vpContext *vpc,
59     RLEVoxels *rle_voxels));
60 #endif
61 static vpResult ComputeScanOffsets ANSI_ARGS((vpContext *vpc,
62     RLEVoxels *rle_voxels));
63 #ifdef DEBUG
64 static void ValidateRLEVoxels ANSI_ARGS((vpContext *vpc, RLEVoxels *rle,
65     int istride, int jstride, int kstride, int axis));
66 #endif
67 
68 /*
69  * vpClassifyScalars
70  *
71  * Classify an array of scalars and store the result in the classified volume.
72  */
73 
74 vpResult
vpClassifyScalars(vpc,scalar_data,length,scalar_field,grad_field,norm_field)75 vpClassifyScalars(vpc, scalar_data, length, scalar_field, grad_field,
76 		  norm_field)
77 vpContext *vpc;		/* context */
78 unsigned char *scalar_data; /* 3D array of scalar data */
79 int length;		/* number of scalars in scalar_data */
80 int scalar_field;	/* voxel field for scalar, or VP_SKIP_FIELD */
81 int grad_field;		/* voxel field for gradient, or VP_SKIP_FIELD */
82 int norm_field;		/* voxel field for normal, or VP_SKIP_FIELD */
83 {
84     int xlen, ylen, zlen;	/* volume dimensions */
85     int y, z;			/* loop indices */
86     unsigned char *scalar;	/* pointer to current scalar */
87     int scalar_ystride;		/* stride to next scalar scanline */
88     int scalar_zstride;		/* stride to next scalar slice */
89     char *voxel;		/* pointer to current voxel */
90     unsigned char *s_py, *s_my, *s_pz, *s_mz; /* ptrs to adjacent scans */
91     int retcode;		/* return code from vpScanlineNormals */
92     void *voxel_scan;		/* buffer for storing one scan of raw voxels */
93 
94     /* check for errors */
95     xlen = vpc->xlen;
96     ylen = vpc->ylen;
97     zlen = vpc->zlen;
98     if (xlen == 0 || ylen == 0 || zlen == 0 || vpc->raw_bytes_per_voxel == 0)
99 	return(VPSetError(vpc, VPERROR_BAD_VOLUME));
100     if (xlen * ylen * zlen != length)
101 	return(VPSetError(vpc, VPERROR_BAD_SIZE));
102 
103     /* initialize */
104     scalar = scalar_data;
105     scalar_ystride = xlen;
106     scalar_zstride = xlen*ylen;
107     Alloc(vpc, voxel_scan, void *, xlen*vpc->raw_bytes_per_voxel,"voxel_scan");
108 
109     /* compute volume data */
110     for (z = 0; z < zlen; z++) {
111 	ReportStatus(vpc, (double)z / (double)zlen);
112 	for (y = 0; y < ylen; y++) {
113 	    s_my = (y == 0)      ? NULL : scalar - scalar_ystride;
114 	    s_py = (y == ylen-1) ? NULL : scalar + scalar_ystride;
115 	    s_mz = (z == 0)      ? NULL : scalar - scalar_zstride;
116 	    s_pz = (z == zlen-1) ? NULL : scalar + scalar_zstride;
117 	    voxel = voxel_scan;
118 	    retcode = vpScanlineNormals(vpc, xlen, scalar, s_my, s_py,
119 					s_mz, s_pz, voxel, scalar_field,
120 					grad_field, norm_field);
121 	    if (retcode != VP_OK) {
122 		Dealloc(vpc, voxel_scan);
123 		return(retcode);
124 	    }
125 	    retcode = vpClassifyScanline(vpc, voxel_scan);
126 	    if (retcode != VP_OK) {
127 		Dealloc(vpc, voxel_scan);
128 		return(retcode);
129 	    }
130 	    scalar += scalar_ystride;
131 	}
132 	scalar += scalar_zstride - ylen*scalar_ystride;
133     }
134     ReportStatus(vpc, 1.0);
135     Dealloc(vpc, voxel_scan);
136     return(VP_OK);
137 }
138 
139 /*
140  * vpClassifyVolume
141  *
142  * Classify the current raw volume and store the result in the
143  * classified volume.
144  */
145 
146 vpResult
vpClassifyVolume(vpc)147 vpClassifyVolume(vpc)
148 vpContext *vpc;		/* context */
149 {
150     int xlen, ylen, zlen;	/* volume dimensions */
151     int y, z;			/* loop indices */
152     char *voxel;		/* pointer to current voxel */
153     int voxel_ystride;		/* stride to next voxel scanline */
154     int voxel_zstride;		/* stride to next voxel slice */
155     int retcode;		/* return code */
156     MinMaxOctree *octree;	/* octree for fast classification */
157     MMOctreeLevel level_stack[VP_MAX_OCTREE_LEVELS]; /* stack for octree */
158 
159     /* check for errors */
160     if ((retcode = VPCheckRawVolume(vpc)) != VP_OK)
161 	return(retcode);
162     if ((retcode = VPCheckClassifier(vpc)) != VP_OK)
163 	return(retcode);
164 
165     /* initialize */
166     vpDestroyClassifiedVolume(vpc);
167     InitRLE(vpc);
168     xlen = vpc->xlen;
169     ylen = vpc->ylen;
170     zlen = vpc->zlen;
171     voxel = vpc->raw_voxels;
172     voxel_ystride = vpc->ystride;
173     voxel_zstride = vpc->zstride;
174     octree = vpc->mm_octree;
175     if (octree != NULL) {
176 	VPComputeSummedAreaTable(vpc);
177 	VPClassifyOctree(vpc);
178     }
179 
180     /* compute volume data */
181     for (z = 0; z < zlen; z++) {
182 	ReportStatus(vpc, (double)z / (double)zlen);
183 	if (octree != NULL)
184 	    VPInitOctreeLevelStack(vpc, level_stack, VP_Z_AXIS, z);
185 	for (y = 0; y < ylen; y++) {
186 	    EncodeScanline(vpc, voxel, octree, level_stack);
187 	    voxel += voxel_ystride;
188 	}
189 	voxel += voxel_zstride - ylen*voxel_ystride;
190     }
191     ReportStatus(vpc, 1.0);
192 
193     return(VP_OK);
194 }
195 
196 /*
197  * vpClassifyScanline
198  *
199  * Apply the classification function to a scanline of raw voxels and append
200  * it to the classified volume.
201  */
202 
203 vpResult
vpClassifyScanline(vpc,voxels)204 vpClassifyScanline(vpc, voxels)
205 vpContext *vpc;		/* context */
206 void *voxels;		/* voxel scanline */
207 {
208     int retcode;
209 
210     /* initialize if this is the first scanline */
211     if (vpc->cbuf == NULL) {
212 	if ((retcode = VPCheckClassifier(vpc)) != VP_OK)
213 	    return(retcode);
214 	vpDestroyClassifiedVolume(vpc);
215 	InitRLE(vpc);
216     }
217 
218     /* encode scanline */
219     EncodeScanline(vpc, voxels, NULL, NULL);
220 
221     return(VP_OK);
222 }
223 
224 /*
225  * EncodeScanline
226  *
227  * Classify and run-length encode one scanline of voxels.
228  */
229 
230 static void
EncodeScanline(vpc,voxels,octree,level_stack)231 EncodeScanline(vpc, voxels, octree, level_stack)
232 vpContext *vpc;		/* context */
233 void *voxels;		/* voxel scanline */
234 MinMaxOctree *octree;	/* octree for fast classification */
235 MMOctreeLevel level_stack[VP_MAX_OCTREE_LEVELS]; /* stack for octree */
236 {
237     ConstructionBuffer *cbuf;	/* state preserved between calls */
238     RunData rundata_x;		/* statistics for current x-axis run */
239     RunData *rundata_y;		/* statistics for all y-axis runs */
240     RunData *rundata_z;		/* statistics for all z-axis runs */
241     int skip_rle_x;		/* if true, do not compute rle_x */
242     int skip_rle_y;		/* if true, do not compute rle_y */
243     int skip_rle_z;		/* if true, do not compute rle_z */
244     int y, z;			/* y and z coordinates of scanline */
245     int x;			/* index of current voxel in scanline */
246     int xlen, ylen, zlen;	/* volume dimensions */
247     float opacity;		/* current value of the opacity (0.0-1.0) */
248     float min_opacity;		/* low opacity threshold */
249     int raw_bytes_per_voxel;	/* bytes in unclassified voxel */
250     int run_length;		/* length of last run */
251     char *rawvoxel;		/* current unclassified voxel */
252     unsigned char *octree_run_ptr;	/* pointer to current run */
253     int voxel_count;		/* voxels remaining in current run */
254     int retcode;
255 
256     /* initialize */
257     cbuf = vpc->cbuf;
258     xlen = vpc->xlen;
259     ylen = vpc->ylen;
260     zlen = vpc->zlen;
261     bzero(&rundata_x, sizeof(RunData));
262     rundata_y = &cbuf->rundata_y[cbuf->next_z];
263     rundata_z = &cbuf->rundata_z[cbuf->next_y * xlen];
264     skip_rle_x = vpc->skip_rle_x;
265     skip_rle_y = vpc->skip_rle_y;
266     skip_rle_z = vpc->skip_rle_z;
267     min_opacity = vpc->min_opacity;
268     raw_bytes_per_voxel = vpc->raw_bytes_per_voxel;
269     rawvoxel = voxels;
270     y = cbuf->next_y;
271     z = cbuf->next_z;
272     if (octree != NULL) {
273 	if (cbuf->octree_scans_left == 0) {
274 	    cbuf->octree_scans_left = VPComputeScanRuns(vpc, level_stack,
275 		cbuf->octree_runs, VP_Z_AXIS, y, xlen);
276 	}
277 	cbuf->octree_scans_left--;
278 	octree_run_ptr = cbuf->octree_runs;
279     }
280 
281     /* loop over voxels in the scanline */
282     x = 0;
283     while (x < xlen) {
284 	if (octree == NULL) {
285 	    /* no octree available, so process all of the voxels in the scan */
286 	    voxel_count = xlen;
287 	} else do {
288 	    /* skip over a run of zero voxels */
289 	    voxel_count = *octree_run_ptr++;
290 	    rundata_y += zlen * voxel_count;
291 	    rundata_z += voxel_count;
292 	    rawvoxel += raw_bytes_per_voxel * voxel_count;
293 	    x += voxel_count;
294 
295 	    /* get length of nonzero voxel run */
296 	    voxel_count = *octree_run_ptr++;
297 	} while (voxel_count == 0 && x < xlen);
298 
299 	/* process the voxels in the nonzero run */
300 	while (voxel_count-- > 0) {
301 	    /* compute opacity */
302 	    opacity = VPClassifyVoxel(vpc, rawvoxel);
303 
304 	    /* compare opacity to threshold */
305 	    if (opacity > min_opacity) {
306 		/* voxel is non-transparent, so save it */
307 		CacheVoxel(vpc, opacity, rawvoxel);
308 		if (!skip_rle_x) {
309 		    rundata_y->p.p1.non_zero_count++;
310 		    CountNonZeroVoxel(rundata_y, y, 0, NULL);
311 		}
312 		if (!skip_rle_y) {
313 		    rundata_z->p.p1.non_zero_count++;
314 		    CountNonZeroVoxel(rundata_z, z, 0, NULL);
315 		}
316 		rundata_x.p.p1.non_zero_count++;
317 		CountNonZeroVoxel(&rundata_x, x, 0, vpc);
318 	    }
319 
320 	    rundata_y += zlen;
321 	    rundata_z++;
322 	    rawvoxel += raw_bytes_per_voxel;
323 	    x++;
324 	} /* while (voxel_count) */
325     } /* for x */
326 
327     /* finish off the statistics for the scanline */
328     CountNonZeroVoxel(&rundata_x, x, 1, vpc);
329 
330     /* update saved state */
331     cbuf->non_zero_count += rundata_x.p.p1.non_zero_count;
332     cbuf->x_run_count += rundata_x.p.p1.run_count;
333 
334     /* check if this is the last scanline in the volume */
335     if (++cbuf->next_y == ylen) {
336 	cbuf->next_y = 0;
337 	cbuf->octree_scans_left = 0;
338 	if (++cbuf->next_z == zlen) {
339 	    RepackClassifiedVolume(vpc);
340 	    DestroyConstructionBuffer(vpc, vpc->cbuf);
341 	    vpc->cbuf = NULL;
342 #ifdef DEBUG
343 	    printf("\r");
344 	    if (!skip_rle_x) {
345 		printf("Checking X scanline offsets....\n");
346 		VPCheckScanOffsets(vpc->rle_x, vpc->rle_bytes_per_voxel);
347 	    }
348 	    if (!skip_rle_y) {
349 		printf("Checking Y scanline offsets....\n");
350 		VPCheckScanOffsets(vpc->rle_y, vpc->rle_bytes_per_voxel);
351 	    }
352 	    if (!skip_rle_z) {
353 		printf("Checking Z scanline offsets....\n");
354 		VPCheckScanOffsets(vpc->rle_z, vpc->rle_bytes_per_voxel);
355 	    }
356 	    VPValidateClassifiedVolume(vpc);
357 #endif
358 	}
359     }
360 }
361 
362 /*
363  * InitRLE
364  *
365  * Initialize in preparation for creating a new run-length encoded volume.
366  */
367 
368 static void
InitRLE(vpc)369 InitRLE(vpc)
370 vpContext *vpc;
371 {
372     int f;
373     int rle_bytes_per_voxel, size, offset;
374     int maxsize = 0;
375 
376     /* find out how many bytes of the raw voxel are used for shading */
377     rle_bytes_per_voxel = 0;
378     for (f = 0; f < vpc->num_shade_fields; f++) {
379 	size = vpc->field_size[f];
380 	offset = vpc->field_offset[f] + size;
381 	if (offset > rle_bytes_per_voxel)
382 	    rle_bytes_per_voxel = offset;
383 	if (size > maxsize)
384 	    maxsize = size;
385     }
386 
387     /* add one byte for opacity and then pad to the byte boundary of
388        the largest field in the voxel; this ensures alignment; the
389        opacity is always stored in the last byte (so the padding
390        is in between the shading fields and the opacity field) */
391     rle_bytes_per_voxel++;
392     rle_bytes_per_voxel = (rle_bytes_per_voxel + maxsize-1) & ~(maxsize-1);
393     vpc->rle_bytes_per_voxel = rle_bytes_per_voxel;
394 
395     /* initialize construction buffer */
396     vpc->cbuf = CreateConstructionBuffer(vpc);
397 }
398 
399 /*
400  * CacheVoxel
401  *
402  * Cache one voxel's data in the ConstructionBuffer.
403  */
404 
405 static void
CacheVoxel(vpc,opacity,rawvoxel)406 CacheVoxel(vpc, opacity, rawvoxel)
407 vpContext *vpc;			/* context */
408 double opacity;			/* voxel's opacity */
409 void *rawvoxel;			/* raw voxel data */
410 {
411     ConstructionBuffer *cbuf;	/* state during construction of volume */
412     GBuffer *data_buf;		/* storage for cached voxels */
413     void *data_ptr;		/* pointer to current voxel's storage */
414     int rle_bytes_per_voxel;	/* bytes per voxel after classification */
415     int opc_int;		/* quantized opacity */
416 
417     /* initialize */
418     cbuf = vpc->cbuf;
419     data_buf = cbuf->data_buf_tail;
420     rle_bytes_per_voxel = vpc->rle_bytes_per_voxel;
421 
422     /* allocate more memory if necessary */
423     if (data_buf->bytes_left < rle_bytes_per_voxel) {
424 	/* allocate more memory */
425 	data_buf->next = CreateGBuffer(vpc);
426 	data_buf = data_buf->next;
427 	cbuf->data_buf_tail = data_buf;
428     }
429     data_ptr = data_buf->data_ptr;
430 
431     /* copy the voxel fields required for shading */
432     bcopy(rawvoxel, data_ptr, rle_bytes_per_voxel-1);
433 
434     /* quantize and store the opacity */
435     opc_int = opacity*255.;
436     if (opc_int > 255)
437 	opc_int = 255;
438     else if (opc_int < 0)
439 	opc_int = 0;
440     ByteField(data_ptr, rle_bytes_per_voxel-1) = opc_int;
441 
442     data_buf->data_ptr += rle_bytes_per_voxel;
443     data_buf->bytes_left -= rle_bytes_per_voxel;
444 }
445 
446 /*
447  * CacheLength
448  *
449  * Cache one run length in the ConstructionBuffer.
450  */
451 
452 static void
CacheLength(vpc,length)453 CacheLength(vpc, length)
454 vpContext *vpc;
455 int length;
456 {
457     GBuffer *lengths_buf;
458 
459     lengths_buf = vpc->cbuf->lengths_buf_tail;
460     if (lengths_buf->bytes_left == 0) {
461 	/* allocate more memory */
462 	lengths_buf->next = CreateGBuffer(vpc);
463 	lengths_buf = lengths_buf->next;
464 	vpc->cbuf->lengths_buf_tail = lengths_buf;
465     }
466     *(lengths_buf->data_ptr)++ = length;
467     lengths_buf->bytes_left--;
468 }
469 
470 /*
471  * CountNonZeroVoxel
472  *
473  * Update the run count and nonzero voxel count for a voxel scanline.
474  * This routine adds one non-zero voxel to the scanline.  Index
475  * indicates the position of the voxel in the scanline.  If that
476  * position is not immediately adjacent to the last non-zero voxel then
477  * a run of zero voxels is added as well.
478  *
479  * If the vpc argument is non-NULL then the lengths of any completed
480  * runs are written out to the run length buffer.
481  */
482 
static void
CountNonZeroVoxel(rundata, index, end_of_scan, vpc)
RunData *rundata;	/* statistics for the scanline */
int index;		/* index of voxel in scanline */
int end_of_scan;	/* if true then finish the scanline instead of
			   adding a voxel */
vpContext *vpc;		/* context in which run lengths should be stored */
{
    int run_length;

    if (rundata->next_index != index) {
	/* a run of zero voxels intervenes between the current index
	   and the last nonzero voxel that was processed */

	if (rundata->next_index != 0) {
	    /* add the last nonzero run to the statistics */
	    run_length = rundata->run_length;
	    while (run_length > 255) {
		/* run is too long to fit in one length byte, so split it;
		   a zero-length run of the opposite type is inserted so
		   that zero and nonzero runs keep strictly alternating */
		run_length -= 255;
		rundata->p.p1.run_count += 2;
		if (vpc != NULL) {
		    CacheLength(vpc, 255);
		    CacheLength(vpc, 0);
		}
	    }
	    rundata->p.p1.run_count++;
	    if (vpc != NULL)
		CacheLength(vpc, run_length);
	}

	/* add the last zero run to the statistics */
	run_length = index - rundata->next_index;
	while (run_length > 255) {
	    /* run is too long, so split it */
	    run_length -= 255;
	    rundata->p.p1.run_count += 2;
	    if (vpc != NULL) {
		CacheLength(vpc, 255);
		CacheLength(vpc, 0);
	    }
	}
	rundata->p.p1.run_count++;
	if (vpc != NULL)
	    CacheLength(vpc, run_length);

	if (end_of_scan) {
	    /* add a zero-length nonzero run to finish the scanline
	       (every scanline ends with a nonzero run) */
	    rundata->p.p1.run_count++;
	    if (vpc != NULL)
		CacheLength(vpc, 0);
	} else {
	    /* start the new run */
	    rundata->run_length = 1;
	    rundata->next_index = index + 1;
	}
    } else if (!end_of_scan) {
	/* add a nonzero voxel to the current run */
	if (rundata->next_index == 0) {
	    /* first voxel of the scanline is nonzero: emit an empty
	       initial zero run so the scanline still starts with one */
	    rundata->p.p1.run_count++;	/* count initial zero run */
	    if (vpc != NULL)
		CacheLength(vpc, 0);
	}
	rundata->run_length++;
	rundata->next_index = index + 1;
    } else {
	/* scanline ends with a nonzero voxel run */
	run_length = rundata->run_length;
	while (run_length > 255) {
	    /* run is too long, so split it */
	    run_length -= 255;
	    rundata->p.p1.run_count += 2;
	    if (vpc != NULL) {
		CacheLength(vpc, 255);
		CacheLength(vpc, 0);
	    }
	}
	rundata->p.p1.run_count++;
	if (vpc != NULL)
	    CacheLength(vpc, run_length);
    }
}
565 
566 /*
567  * RepackClassifiedVolume
568  *
569  * Repack the data in the ConstructionBuffer after the last call to
570  * vpClassifyScanline.  This procedure creates the three run-length
571  * encoded copies of the classified voxels.
572  */
573 
static void
RepackClassifiedVolume(vpc)
vpContext *vpc;
{
    int xlen, ylen, zlen;	/* volume dimensions */
    int x, y, z;		/* voxel coordinates */
    int non_zero_count;		/* number of nonzero voxels in volume */
    int rle_bytes_per_voxel;	/* bytes per classified voxel */
    int skip_rle_x;		/* if true, do not compute rle_x */
    int skip_rle_y;		/* if true, do not compute rle_y */
    int skip_rle_z;		/* if true, do not compute rle_z */
    char *x_data;		/* voxel data for x viewpoint */
    char *y_data;		/* voxel data for y viewpoint */
    char *z_data;		/* voxel data for z viewpoint */
    unsigned char *x_lengths;	/* run length for x viewpoint */
    unsigned char *y_lengths;	/* run length for y viewpoint */
    unsigned char *z_lengths;	/* run length for z viewpoint */
    int z_data_offset;		/* offset to current data value in z volume */
    int z_length_offset;        /* offset to current length value in z volume*/
    GBuffer *data_buf;		/* next GBuffer containing voxel data */
    char *data;			/* pointer to next voxel */
    int data_bytes_left;	/* bytes of data left in data buffer */
    GBuffer *lengths_buf;	/* next GBuffer containing length data */
    unsigned char *lengths;	/* pointer to next length */
    int lengths_bytes_left;	/* bytes of data left in lengths buffer */
    int x_run_length;		/* length of current x-scanline run */
    int is_non_zero;		/* true if current x-scanline run is nonzero */
    RunData *rundata_y;		/* statistics for y-axis runs */
    RunData *rundata_z;		/* statistics for z-axis runs */

    /* initialize */
    xlen = vpc->xlen;
    ylen = vpc->ylen;
    zlen = vpc->zlen;
    non_zero_count = vpc->cbuf->non_zero_count;
    rle_bytes_per_voxel = vpc->rle_bytes_per_voxel;
    skip_rle_x = vpc->skip_rle_x;
    skip_rle_y = vpc->skip_rle_y;
    skip_rle_z = vpc->skip_rle_z;

    /* check for empty volume */
    if (non_zero_count == 0) {
	if (!skip_rle_x)
	    vpc->rle_x = CreateEmptyRLEVoxels(vpc, ylen, zlen, xlen);
	if (!skip_rle_y)
	    vpc->rle_y = CreateEmptyRLEVoxels(vpc, zlen, xlen, ylen);
	if (!skip_rle_z)
	    vpc->rle_z = CreateEmptyRLEVoxels(vpc, xlen, ylen, zlen);
	return;
    }

    /* allocate space for y-axis runs (used for the x viewing axis);
       x_data/x_lengths are referenced below only when !skip_rle_x */
    if (!skip_rle_x) {
	vpc->rle_x = CreateRLEVoxelsFromRunData(vpc, ylen, zlen, xlen,
	    non_zero_count, vpc->cbuf->rundata_y, rle_bytes_per_voxel);
	x_data = vpc->rle_x->data;
	x_lengths = vpc->rle_x->run_lengths;
    }

    /* allocate space for z-axis runs (used for the y viewing axis);
       y_data/y_lengths are referenced below only when !skip_rle_y */
    if (!skip_rle_y) {
	vpc->rle_y = CreateRLEVoxelsFromRunData(vpc, zlen, xlen, ylen,
	    non_zero_count, vpc->cbuf->rundata_z, rle_bytes_per_voxel);
	y_data = vpc->rle_y->data;
	y_lengths = vpc->rle_y->run_lengths;
    }

    /* allocate space for x-axis runs (used for the z viewing axis);
       the cached data is already in x-scanline order, so the z volume
       is filled by bulk copies as the caches are drained below */
    if (!skip_rle_z) {
	vpc->rle_z = VPCreateRLEVoxels(vpc, xlen, ylen, zlen, non_zero_count,
	    vpc->cbuf->x_run_count, rle_bytes_per_voxel);
	Alloc(vpc, vpc->rle_z->scan_offsets, ScanOffset *,
	      zlen*sizeof(ScanOffset), "scan_offsets");
	vpc->rle_z->scan_offsets_per_slice = 1;
	z_data = vpc->rle_z->data;
	z_lengths = vpc->rle_z->run_lengths;
	z_data_offset = 0;
	z_length_offset = 0;
    }

    /* copy data into the three RLEVoxels structures in a single pass
       over the cached voxels and run lengths */
    data_buf = vpc->cbuf->data_buf_head;
    data = NULL;
    data_bytes_left = 0;
    lengths_buf = vpc->cbuf->lengths_buf_head;
    lengths = NULL;
    lengths_bytes_left = 0;
    x_run_length = 0;
    is_non_zero = 1;	/* first length consumed flips this to zero-run */
    for (z = 0; z < zlen; z++) {
	ReportStatus(vpc, (double)z / (double)zlen);
	if (!skip_rle_z) {
	    vpc->rle_z->scan_offsets[z].first_data = z_data_offset;
	    /* round the length offset up to even: scanlines always
	       contain an even number of runs */
	    vpc->rle_z->scan_offsets[z].first_len =
		(z_length_offset & 0x1) ? z_length_offset + 1 :
		z_length_offset;
	}
	rundata_z = vpc->cbuf->rundata_z;
	for (y = 0; y < ylen; y++) {
	    rundata_y = &vpc->cbuf->rundata_y[z];
	    for (x = 0; x < xlen; x++) {
		while (x_run_length == 0) {
		    /* find length of next run */
		    if (lengths_bytes_left <= 0) {
			/* go to next lengths buffer; its contents are
			   bulk-copied into the z-major volume here since
			   they are already in the right order */
			lengths = (unsigned char *)lengths_buf->data;
			lengths_bytes_left = GBUFFER_SIZE -
			    		     lengths_buf->bytes_left;
			lengths_buf = lengths_buf->next;
			if (!skip_rle_z) {
			    bcopy(lengths, z_lengths, lengths_bytes_left);
			    z_lengths += lengths_bytes_left;
			}
		    }
		    x_run_length = *lengths++;
		    lengths_bytes_left--;
		    is_non_zero = !is_non_zero;	/* runs alternate types */
		    z_length_offset++;
		}
		x_run_length--;	/* consume one voxel */
		if (is_non_zero) {
		    /* find the data for this voxel */
		    if (data_bytes_left <= 0) {
			/* go to next data buffer; bulk-copy it into the
			   z-major volume for the same reason as above */
			data = data_buf->data;
			data_bytes_left = GBUFFER_SIZE - data_buf->bytes_left;
			data_buf = data_buf->next;
			if (!skip_rle_z) {
			    bcopy(data, z_data, data_bytes_left);
			    z_data = (char *)z_data + data_bytes_left;
			}
		    }

		    /* store voxel into the x- and y-axis encodings,
		       which need per-voxel scattering */
		    if (!skip_rle_x) {
			StoreNonZeroVoxel(data, rle_bytes_per_voxel, x_data,
					  x_lengths, rundata_y, y);
		    }
		    if (!skip_rle_y) {
			StoreNonZeroVoxel(data, rle_bytes_per_voxel, y_data,
					  y_lengths, rundata_z, z);
		    }
		    data += rle_bytes_per_voxel;
		    data_bytes_left -= rle_bytes_per_voxel;
		    z_data_offset += rle_bytes_per_voxel;
		}
		rundata_y += zlen;
		rundata_z++;
	    } /* for x */
	} /* for y */
    } /* for z */
    ReportStatus(vpc, 1.0);

    /* terminate every scanline of the x- and y-axis encodings so each
       one ends with an even number of runs */
    if (!skip_rle_x)
	PadScanlines(ylen, zlen, xlen, vpc->cbuf->rundata_y, x_lengths);
    if (!skip_rle_y)
	PadScanlines(zlen, xlen, ylen, vpc->cbuf->rundata_z, y_lengths);
}
731 
732 /*
733  * CreateEmptyRLEVoxels
734  *
735  * Create an empty RLEVoxels object (all voxels transparent).
736  */
737 
738 static RLEVoxels *
CreateEmptyRLEVoxels(vpc,ilen,jlen,klen)739 CreateEmptyRLEVoxels(vpc, ilen, jlen, klen)
740 vpContext *vpc;
741 int ilen, jlen, klen;
742 {
743     RLEVoxels *rle_voxels;
744     int j, k;
745     unsigned char *run_lengths;
746     ScanOffset *scan_offsets;
747 
748     rle_voxels = VPCreateRLEVoxels(vpc, ilen, jlen, klen, 1, 2*jlen*klen, 1);
749     Alloc(vpc, rle_voxels->scan_offsets, ScanOffset *, klen*sizeof(ScanOffset),
750 	  "scan_offsets");
751     rle_voxels->scan_offsets_per_slice = 1;
752     run_lengths = rle_voxels->run_lengths;
753     scan_offsets = rle_voxels->scan_offsets;
754     for (k = 0; k < klen; k++) {
755 	scan_offsets->first_len = k*jlen*2;
756 	scan_offsets->first_data = 0;
757 	scan_offsets++;
758 	for (j = 0; j < jlen; j++) {
759 	    *run_lengths++ = ilen;
760 	    *run_lengths++ = 0;
761 	}
762     }
763     return(rle_voxels);
764 }
765 
766 /*
767  * CreateRLEVoxelsFromRunData
768  *
769  * Allocate an RLEVoxels structure using the data in a RunData array
770  * in order to determine the required size.  Also reinitialize the RunData
771  * array with pointers to the RLEVoxels data for the beginning of
772  * each scanline.
773  */
774 
static RLEVoxels *
CreateRLEVoxelsFromRunData(vpc, ilen, jlen, klen, non_zero_count, run_data,
			   rle_bytes_per_voxel)
vpContext *vpc;		/* context */
int ilen, jlen, klen;	/* size of volume in rotated object space */
int non_zero_count;	/* number of nonzero voxels in volume */
RunData *run_data;	/* array of run statistics (jlen*klen entries) */
int rle_bytes_per_voxel;/* number of bytes to allocate for each voxel */
{
    int j, k;			/* scanline and slice number */
    int scan_run_count;		/* runs in current scanline */
    int run_count;		/* runs in entire volume */
    int scan_non_zero_count;	/* nonzero voxels in scanline */
    int data_offset;		/* scanline's offset in RLEVoxels->data */
    int length_offset;		/* scanline's offset in
				   RLEVoxels->run_lengths */
    ScanOffset *slice_offset;	/* offsets for each slice */
    RLEVoxels *rle_voxels;	/* return value */

    Alloc(vpc, slice_offset, ScanOffset *, klen*sizeof(ScanOffset),
	  "scan_offsets");

    /* accumulate the statistics for the last run in each scanline,
       count the total number of runs, and store the data and length
       offsets for the beginning of the scanline */
    data_offset = 0;
    length_offset = 0;
    run_count = 0;
    for (k = 0; k < klen; k++) {
	slice_offset[k].first_data = data_offset;
	slice_offset[k].first_len = length_offset;
	for (j = 0; j < jlen; j++) {
	    /* finalize the scanline's counts (end_of_scan, no caching) */
	    CountNonZeroVoxel(run_data, ilen, 1, NULL);
	    scan_non_zero_count = run_data->p.p1.non_zero_count;
	    scan_run_count = run_data->p.p1.run_count;
	    /* reset the entry and repurpose the union from p1 (counts)
	       to p2 (offsets), which StoreNonZeroVoxel uses later to
	       scatter voxels and lengths into this scanline's slots */
	    run_data->run_length = 0;
	    run_data->next_index = 0;
	    run_data->p.p2.data_offset = data_offset;
	    run_data->p.p2.length_offset = length_offset;
	    data_offset += scan_non_zero_count * rle_bytes_per_voxel;
	    length_offset += scan_run_count * sizeof(unsigned char);
	    run_count += scan_run_count;
	    run_data++;
	}
    }

    /* allocate space; by now data_offset/length_offset hold the exact
       totals required for the whole volume */
    rle_voxels = VPCreateRLEVoxels(vpc, ilen, jlen, klen, non_zero_count,
				   run_count, rle_bytes_per_voxel);
    rle_voxels->scan_offsets_per_slice = 1;
    rle_voxels->scan_offsets = slice_offset;
    return(rle_voxels);
}
828 
829 /*
830  * StoreNonZeroVoxel
831  *
832  * Store a nonzero voxel in an RLEVoxels object.  This function is
833  * just like CountNonZeroVoxel except that it actually stores voxel data.
834  */
835 
static void
StoreNonZeroVoxel(voxel, rle_bytes_per_voxel, data, lengths, rundata, index)
void *voxel;		/* input voxel data (NULL finishes the scanline) */
int rle_bytes_per_voxel;/* size of voxel */
void *data;		/* location to store voxel */
unsigned char *lengths;	/* location to store run lengths */
RunData *rundata;	/* run length statistics for current voxel scanline */
int index;		/* index of voxel in scanline */
{
    int run_length;

    /* store the voxel at this scanline's next data slot (the p2 offsets
       were initialized by CreateRLEVoxelsFromRunData) */
    if (voxel != NULL) {
	bcopy(voxel, (char *)data + rundata->p.p2.data_offset,
	      rle_bytes_per_voxel);
	rundata->p.p2.data_offset += rle_bytes_per_voxel;
    }

    /* update run lengths */
    if (rundata->next_index != index) {
	/* a run of zero voxels intervenes between the current index
	   and the last nonzero voxel that was processed */

	if (rundata->next_index != 0) {
	    /* add the last nonzero run to the statistics */
	    run_length = rundata->run_length;
	    while (run_length > 255) {
		/* run is too long to fit in one length byte, so split
		   it with an empty run of the opposite type in between */
		run_length -= 255;
		lengths[rundata->p.p2.length_offset++] = 255;
		lengths[rundata->p.p2.length_offset++] = 0;
	    }
	    lengths[rundata->p.p2.length_offset++] = run_length;
	}

	/* add the last zero run to the statistics */
	run_length = index - rundata->next_index;
	while (run_length > 255) {
	    /* run is too long, so split it */
	    run_length -= 255;
	    lengths[rundata->p.p2.length_offset++] = 255;
	    lengths[rundata->p.p2.length_offset++] = 0;
	}
	lengths[rundata->p.p2.length_offset++] = run_length;

	if (voxel == NULL) {
	    /* add a zero-length nonzero run to finish the scanline */
	    lengths[rundata->p.p2.length_offset++] = 0;
	} else {
	    /* start the new run */
	    rundata->run_length = 1;
	    rundata->next_index = index + 1;
	}
    } else if (voxel != NULL) {
	/* add a nonzero voxel to the current run */
	if (rundata->next_index == 0) {
	    /* scanline starts with a nonzero voxel, so emit an empty
	       initial zero run to preserve run-type alternation */
	    lengths[rundata->p.p2.length_offset++] = 0;
	}
	rundata->run_length++;
	rundata->next_index = index + 1;
    } else {
	/* scanline ends with a nonzero voxel run */
	run_length = rundata->run_length;
	while (run_length > 255) {
	    /* run is too long, so split it */
	    run_length -= 255;
	    lengths[rundata->p.p2.length_offset++] = 255;
	    lengths[rundata->p.p2.length_offset++] = 0;
	}
	lengths[rundata->p.p2.length_offset++] = run_length;
    }
}
908 
909 /*
910  * PadScanlines
911  *
912  * Make sure each scanline has an even number of runs.
913  */
914 
915 static void
PadScanlines(ilen,jlen,klen,run_data,lengths)916 PadScanlines(ilen, jlen, klen, run_data, lengths)
917 int ilen, jlen, klen;	/* size of volume in rotated object space */
918 RunData *run_data;	/* array of run statistics (jlen*klen entries) */
919 unsigned char *lengths;	/* beginning of run lengths array */
920 {
921     int scan_count;		/* number of scanlines */
922     int scan_run_count;		/* number of runs in scanline */
923     int scan;			/* current scanline number */
924 
925     scan_count = jlen * klen;
926     for (scan = 0; scan < scan_count; scan++) {
927 	StoreNonZeroVoxel(NULL, 0, NULL, lengths, run_data, ilen);
928 	run_data++;
929     }
930 }
931 
932 /*
933  * VPCreateRLEVoxels
934  *
935  *
936  * Allocate a new RLEVoxels object.
937  */
938 
939 RLEVoxels *
VPCreateRLEVoxels(vpc,ilen,jlen,klen,data_count,run_count,rle_bytes_per_voxel)940 VPCreateRLEVoxels(vpc, ilen, jlen, klen, data_count, run_count,
941 		  rle_bytes_per_voxel)
942 vpContext *vpc;		/* context */
943 int ilen, jlen, klen;	/* dimensions in rotated object space */
944 int data_count;		/* number of nonzero voxels */
945 int run_count;		/* number of runs */
946 int rle_bytes_per_voxel;/* bytes of storage for one voxel */
947 {
948     RLEVoxels *rle_voxels;
949 
950     Alloc(vpc, rle_voxels, RLEVoxels *, sizeof(RLEVoxels), "RLEVoxels");
951     rle_voxels->ilen = ilen;
952     rle_voxels->jlen = jlen;
953     rle_voxels->klen = klen;
954     rle_voxels->run_count = run_count;
955     if (run_count > 0) {
956 	Alloc(vpc, rle_voxels->run_lengths, unsigned char *, run_count,
957 	      "run_lengths");
958     } else {
959 	rle_voxels->run_lengths = NULL;
960     }
961     rle_voxels->data_count = data_count;
962     if (data_count > 0) {
963 	Alloc(vpc, rle_voxels->data, void *, data_count * rle_bytes_per_voxel,
964 	      "voxeldata");
965     } else {
966 	rle_voxels->data = NULL;
967     }
968     rle_voxels->scan_offsets_per_slice = 0;
969     rle_voxels->scan_offsets = NULL;
970     rle_voxels->mmapped = 0;
971 #ifdef INDEX_VOLUME
972     rle_voxels->voxel_index = NULL;
973 #endif
974     return(rle_voxels);
975 }
976 
977 /*
978  * VPDestroyRLEVoxels
979  *
980  * Destroy an RLEVoxels object.
981  */
982 
983 void
VPDestroyRLEVoxels(vpc,rle_voxels)984 VPDestroyRLEVoxels(vpc, rle_voxels)
985 vpContext *vpc;
986 RLEVoxels *rle_voxels;
987 {
988     if (!rle_voxels->mmapped) {
989 	if (rle_voxels->run_lengths != NULL)
990 	    Dealloc(vpc, rle_voxels->run_lengths);
991 	if (rle_voxels->data != NULL)
992 	    Dealloc(vpc, rle_voxels->data);
993 	if (rle_voxels->scan_offsets != NULL)
994 	    Dealloc(vpc, rle_voxels->scan_offsets);
995     }
996 #ifdef INDEX_VOLUME
997     if (rle_voxels->voxel_index != NULL)
998 	Dealloc(vpc, rle_voxels->voxel_index);
999 #endif
1000     Dealloc(vpc, rle_voxels);
1001 }
1002 
1003 /*
1004  * CreateConstructionBuffer
1005  *
1006  * Create a ConstructionBuffer object.
1007  */
1008 
1009 static ConstructionBuffer *
CreateConstructionBuffer(vpc)1010 CreateConstructionBuffer(vpc)
1011 vpContext *vpc;
1012 {
1013     ConstructionBuffer *cbuf;
1014     int xlen, ylen, zlen;
1015 
1016     xlen = vpc->xlen;
1017     ylen = vpc->ylen;
1018     zlen = vpc->zlen;
1019     Alloc(vpc, cbuf, ConstructionBuffer *, sizeof(ConstructionBuffer),
1020 	  "ConstructionBuffer");
1021     Alloc(vpc, cbuf->rundata_y, RunData *, xlen*zlen*sizeof(RunData),
1022 	  "rundata_y");
1023     Alloc(vpc, cbuf->rundata_z, RunData *, ylen*xlen*sizeof(RunData),
1024 	  "rundata_z");
1025     bzero(cbuf->rundata_y, xlen*zlen*sizeof(RunData));
1026     bzero(cbuf->rundata_z, ylen*xlen*sizeof(RunData));
1027     cbuf->data_buf_head = CreateGBuffer(vpc);
1028     cbuf->data_buf_tail = cbuf->data_buf_head;
1029     cbuf->lengths_buf_head = CreateGBuffer(vpc);
1030     cbuf->lengths_buf_tail = cbuf->lengths_buf_head;
1031     cbuf->non_zero_count = 0;
1032     cbuf->x_run_count = 0;
1033     cbuf->octree_scans_left = 0;
1034     cbuf->next_z = 0;
1035     cbuf->next_y = 0;
1036     return(cbuf);
1037 }
1038 
1039 /*
1040  * DestroyConstructionBuffer
1041  *
1042  * Destroy a ConstructionBuffer object.
1043  */
1044 
1045 static void
DestroyConstructionBuffer(vpc,cbuf)1046 DestroyConstructionBuffer(vpc, cbuf)
1047 vpContext *vpc;
1048 ConstructionBuffer *cbuf;
1049 {
1050     GBuffer *gbuf, *next_gbuf;
1051 
1052     Dealloc(vpc, cbuf->rundata_y);
1053     Dealloc(vpc, cbuf->rundata_z);
1054     for (gbuf = cbuf->data_buf_head; gbuf != NULL; gbuf = next_gbuf) {
1055 	next_gbuf = gbuf->next;
1056 	DestroyGBuffer(vpc, gbuf);
1057     }
1058     for (gbuf = cbuf->lengths_buf_head; gbuf != NULL; gbuf = next_gbuf) {
1059 	next_gbuf = gbuf->next;
1060 	DestroyGBuffer(vpc, gbuf);
1061     }
1062     Dealloc(vpc, cbuf);
1063 }
1064 
1065 /*
1066  * CreateGBuffer
1067  *
1068  * Create a GBuffer object.
1069  */
1070 
1071 static GBuffer *
CreateGBuffer(vpc)1072 CreateGBuffer(vpc)
1073 vpContext *vpc;
1074 {
1075     GBuffer *gbuf;
1076 
1077     Alloc(vpc, gbuf, GBuffer *, sizeof(GBuffer), "GBuffer");
1078     gbuf->bytes_left = GBUFFER_SIZE;
1079     gbuf->data_ptr = gbuf->data;
1080     gbuf->next = NULL;
1081     return(gbuf);
1082 }
1083 
1084 /*
1085  * DestroyGBuffer
1086  *
1087  * Destroy a GBuffer.
1088  */
1089 
static void
DestroyGBuffer(vpc, gbuf)
vpContext *vpc;		/* context (provides the deallocator) */
GBuffer *gbuf;		/* segment to free; caller must unlink it first */
{
    /* the GBuffer's data area is embedded in the struct, so a single
       deallocation releases everything */
    Dealloc(vpc, gbuf);
}
1097 
1098 /*
1099  * vpDestroyClassifiedVolume
1100  *
1101  * Free all memory associated with a classified volume.
1102  */
1103 
1104 vpResult
vpDestroyClassifiedVolume(vpc)1105 vpDestroyClassifiedVolume(vpc)
1106 vpContext *vpc;
1107 {
1108     if (vpc->cbuf != NULL) {
1109 	DestroyConstructionBuffer(vpc, vpc->cbuf);
1110 	vpc->cbuf = NULL;
1111     }
1112     if (vpc->rle_x != NULL) {
1113 	VPDestroyRLEVoxels(vpc, vpc->rle_x);
1114 	vpc->rle_x = NULL;
1115     }
1116     if (vpc->rle_y != NULL) {
1117 	VPDestroyRLEVoxels(vpc, vpc->rle_y);
1118 	vpc->rle_y = NULL;
1119     }
1120     if (vpc->rle_z != NULL) {
1121 	VPDestroyRLEVoxels(vpc, vpc->rle_z);
1122 	vpc->rle_z = NULL;
1123     }
1124     return(VP_OK);
1125 }
1126 
1127 #ifdef INDEX_VOLUME
1128 /*
1129  * vpComputeRLEIndex
1130  *
1131  * Compute indexes for the classified volume data in a context.
1132  */
1133 
1134 vpResult
vpComputeRLEIndex(vpc)1135 vpComputeRLEIndex(vpc)
1136 vpContext *vpc;
1137 {
1138     vpResult result;
1139 
1140     if ((result = VPComputeRLEScanOffsets(vpc)) != VP_OK)
1141 	return(result);
1142     if (vpc->rle_x != NULL) {
1143 	if ((result = ComputeIndex(vpc, vpc->rle_x)) != VP_OK)
1144 	    return(result);
1145     }
1146     if (vpc->rle_y != NULL) {
1147 	if ((result = ComputeIndex(vpc, vpc->rle_y)) != VP_OK)
1148 	    return(result);
1149     }
1150     if (vpc->rle_z != NULL) {
1151 	if ((result = ComputeIndex(vpc, vpc->rle_z)) != VP_OK)
1152 	    return(result);
1153     }
1154     return(VP_OK);
1155 }
1156 
1157 /*
1158  * ComputeIndex
1159  *
1160  * Compute an index that maps 3D voxel coordinates to the RLE run data
1161  * for the corresponding voxel.  The values stored in the index are
1162  * byte offsets to the beginning of the run containing the voxel,
1163  * plus a count indicating the position of the voxel in the run.
1164  * Return value is a result code.
1165  */
1166 
static vpResult
ComputeIndex(vpc, rle_voxels)
vpContext *vpc;
RLEVoxels *rle_voxels;
{
    int ilen, jlen, klen;	/* size of volume */
    unsigned char *RLElen;	/* pointer to current run length */
    VoxelLocation *index;	/* pointer to current index entry */
    int i, j, k;		/* current voxel coordinates */
    unsigned len_offset;	/* offset in bytes from beginning of
				   scanline to current run length */
    unsigned data_offset;	/* offset in bytes from beginning of
				   scanline to current voxel data */
    int run_is_zero;		/* true if current run is a zero run */
    int run_count;		/* voxels left in current run */
    int voxel_size;		/* size of a voxel in bytes */

    ilen = rle_voxels->ilen;
    jlen = rle_voxels->jlen;
    klen = rle_voxels->klen;
    RLElen = rle_voxels->run_lengths;
    /* the index is computed relative to per-scanline offsets, so the
       scan_offsets array must have one entry per scanline */
    if (rle_voxels->scan_offsets_per_slice != jlen)
	return(VPERROR_BAD_VOLUME);
    /* one VoxelLocation entry per voxel; reuse an existing array */
    if (rle_voxels->voxel_index == NULL) {
	Alloc(vpc, rle_voxels->voxel_index, VoxelLocation *,
	      ilen * jlen * klen * sizeof(VoxelLocation), "voxel_index");
    }
    index = rle_voxels->voxel_index;
    voxel_size = vpc->rle_bytes_per_voxel;
    run_is_zero = 0;
    run_count = 0;
    for (k = 0; k < klen; k++) {
	for (j = 0; j < jlen; j++) {
	    /* each scanline starts at the beginning of a zero run with
	       nothing carried over from the previous scanline */
	    ASSERT(run_is_zero == 0);
	    ASSERT(run_count == 0);
	    len_offset = 0;
	    data_offset = 0;
	    for (i = 0; i < ilen; i++) {
		/* record index for current voxel */
		/* NOTE(review): len_offset is stored into a narrow field
		   below; this limit check uses > 256 rather than > 255 --
		   looks intentional but verify against the declared width
		   of VoxelLocation.len_offset */
		if (len_offset > 256) {
		    Dealloc(vpc, rle_voxels->voxel_index);
		    rle_voxels->voxel_index = NULL;
		    return(VPERROR_LIMIT_EXCEEDED);
		}
		index->run_count = run_count;
		index->len_offset = len_offset;
		/* a flag bit in data_offset distinguishes zero runs */
		if (run_is_zero)
		    index->data_offset = data_offset | INDEX_RUN_IS_ZERO;
		else
		    index->data_offset = data_offset;
		index++;

		/* go on to next voxel; skip over any zero-length runs,
		   toggling the run type for each length consumed */
		while (run_count == 0) {
		    run_count = *RLElen++;
		    run_is_zero = !run_is_zero;
		    len_offset++;
		}
		run_count--;
		if (!run_is_zero)
		    data_offset += voxel_size;
	    }
	    ASSERT(run_count == 0);
	    /* scanlines always end on a nonzero run (possibly of length
	       zero), so consume the trailing length if we are mid-zero */
	    if (run_is_zero) {
		run_count = *RLElen++;
		run_is_zero = !run_is_zero;
		len_offset++;
	    }
	}
    }
    return(VP_OK);
}
1239 #endif /* INDEX_VOLUME */
1240 
1241 /*
1242  * VPComputeRLEScanOffsets
1243  *
1244  * Recompute the scan_offsets arrays for the classified volume data in
1245  * a context.  Return value is a result code.
1246  */
1247 
1248 vpResult
VPComputeRLEScanOffsets(vpc)1249 VPComputeRLEScanOffsets(vpc)
1250 vpContext *vpc;
1251 {
1252     vpResult result;
1253 
1254     if (vpc->rle_x != NULL) {
1255 	if ((result = ComputeScanOffsets(vpc, vpc->rle_x)) != VP_OK)
1256 	    return(result);
1257 #ifdef DEBUG
1258 	VPCheckScanOffsets(vpc->rle_x, vpc->rle_bytes_per_voxel);
1259 #endif
1260     }
1261 
1262     if (vpc->rle_y != NULL) {
1263 	if ((result = ComputeScanOffsets(vpc, vpc->rle_y)) != VP_OK)
1264 	    return(result);
1265 #ifdef DEBUG
1266 	VPCheckScanOffsets(vpc->rle_y, vpc->rle_bytes_per_voxel);
1267 #endif
1268     }
1269 
1270     if (vpc->rle_z != NULL) {
1271 	if ((result = ComputeScanOffsets(vpc, vpc->rle_z)) != VP_OK)
1272 	    return(result);
1273 #ifdef DEBUG
1274 	VPCheckScanOffsets(vpc->rle_z, vpc->rle_bytes_per_voxel);
1275 #endif
1276     }
1277     return(VP_OK);
1278 }
1279 
1280 /*
1281  * ComputeScanOffsets
1282  *
1283  * Recompute the scan_offsets array for a classified volume.
1284  * Return value is a result code.
1285  */
1286 
static vpResult
ComputeScanOffsets(vpc, rle_voxels)
vpContext *vpc;
RLEVoxels *rle_voxels;
{
    int ilen, jlen, klen;	/* size of volume */
    unsigned char *RLElen;	/* pointer to current run length */
    ScanOffset *scan_offset;	/* pointer to current scanline offset */
    int i, j, k;		/* current voxel coordinates */
    unsigned len_offset;	/* offset in bytes from beginning of
				   run lengths to current run length */
    unsigned data_offset;	/* offset in bytes from beginning of
				   voxel data to current voxel data */
    int voxel_size;		/* size of a voxel in bytes */
    int zerocount, nonzerocount;

    /* memory-mapped volumes are read-only, so we cannot attach offsets */
    if (rle_voxels->mmapped)
	return(VPERROR_IO);
    ilen = rle_voxels->ilen;
    jlen = rle_voxels->jlen;
    klen = rle_voxels->klen;
    RLElen = rle_voxels->run_lengths;
    /* (re)allocate the offset array if it is missing or the wrong size:
       one ScanOffset per scanline */
    if (rle_voxels->scan_offsets_per_slice != jlen) {
	if (rle_voxels->scan_offsets != NULL)
	    Dealloc(vpc, rle_voxels->scan_offsets);
	Alloc(vpc, rle_voxels->scan_offsets, ScanOffset *,
	      klen * jlen * sizeof(ScanOffset), "scan_offsets");
	rle_voxels->scan_offsets_per_slice = jlen;
    }
    scan_offset = rle_voxels->scan_offsets;
    len_offset = 0;
    data_offset = 0;
    voxel_size = vpc->rle_bytes_per_voxel;

    for (k = 0; k < klen; k++) {
	for (j = 0; j < jlen; j++) {
	    /* record where this scanline's run lengths and voxel data
	       begin, then walk its runs to advance the offsets */
	    scan_offset->first_len = len_offset;
	    scan_offset->first_data = data_offset;
	    scan_offset++;
	    /* runs come in (zero, nonzero) pairs; only nonzero runs
	       consume voxel data */
	    for (i = 0; i < ilen; ) {
		zerocount = *RLElen++;	 /* get length of run of zeros */
		nonzerocount = *RLElen++;/* get length of run of non-zeros */
		len_offset += 2;
		data_offset += nonzerocount * voxel_size;
		i += zerocount + nonzerocount;
	    }
	    ASSERT(i == ilen);
	}
    }
    return(VP_OK);
}
1338 
1339 #ifdef DEBUG
1340 /*
1341  * VPCheckScanOffsets
1342  *
1343  * Check the scan_offsets field of an RLEVolume for internal consistency.
1344  */
1345 
1346 void
VPCheckScanOffsets(rle_voxels,rle_bytes_per_voxel)1347 VPCheckScanOffsets(rle_voxels, rle_bytes_per_voxel)
1348 RLEVoxels *rle_voxels;
1349 {
1350     int i, j, k;
1351     int ilen, jlen, klen;
1352     int run_length;
1353     int is_non_zero;
1354     unsigned char *run_length_ptr;
1355     int length_offset;
1356     int data_offset;
1357     int scan_offsets_per_slice;
1358     ScanOffset *scan_offset;
1359 
1360     scan_offsets_per_slice = rle_voxels->scan_offsets_per_slice;
1361     if (scan_offsets_per_slice == 0)
1362 	return;
1363     ilen = rle_voxels->ilen;
1364     jlen = rle_voxels->jlen;
1365     klen = rle_voxels->klen;
1366     run_length_ptr = rle_voxels->run_lengths;
1367     run_length = 0;
1368     is_non_zero = 1;
1369     length_offset = 0;
1370     data_offset = 0;
1371     for (k = 0; k < klen; k++) {
1372 	for (j = 0; j < jlen; j++) {
1373 	    if (j < scan_offsets_per_slice) {
1374 		scan_offset = &rle_voxels->scan_offsets[
1375 			       k*scan_offsets_per_slice + j];
1376 		if (scan_offset->first_len != length_offset) {
1377 		    printf("Bad length offset on slice %d, scanline %d: ",k,j);
1378 		    printf("%d should be %d\n", scan_offset->first_len,
1379 			   length_offset);
1380 		}
1381 		if (scan_offset->first_data != data_offset) {
1382 		    printf("Bad data offset on slice %d, scanline %d: ",k,j);
1383 		    printf("%d should be %d\n", scan_offset->first_data,
1384 			   data_offset);
1385 		}
1386 	    }
1387 	    for (i = 0; i < ilen; i++) {
1388 		while (run_length == 0) {
1389 		    run_length = *run_length_ptr++;
1390 		    is_non_zero = !is_non_zero;
1391 		    length_offset++;
1392 		}
1393 		run_length--;
1394 		if (is_non_zero)
1395 		    data_offset += rle_bytes_per_voxel;
1396 	    }
1397 	    if (run_length != 0) {
1398 		printf("Run did not terminate at end of scanline ");
1399 		printf("on slice %d, scanline %d\n", k, j);
1400 	    }
1401 	    if (!is_non_zero) {
1402 		if (*run_length_ptr++ != 0) {
1403 		    printf("Missing zero run at end of scanline ");
1404 		    printf("on slice %d, scanline %d\n", k, j);
1405 		}
1406 		is_non_zero = !is_non_zero;
1407 		length_offset++;
1408 	    }
1409 	}
1410     }
1411 }
1412 
1413 /*
1414  * VPValidateClassifiedVolume
1415  *
1416  * Compare the classified volume to the unclassified volume.
1417  */
1418 
1419 void
VPValidateClassifiedVolume(vpc)1420 VPValidateClassifiedVolume(vpc)
1421 vpContext *vpc;
1422 {
1423     if (vpc->raw_voxels == NULL)
1424 	return;
1425     if (vpc->rle_z != NULL) {
1426 	printf("Checking Z view....\n");
1427 	ValidateRLEVoxels(vpc, vpc->rle_z, vpc->xstride, vpc->ystride,
1428 			  vpc->zstride, VP_Z_AXIS);
1429     }
1430     if (vpc->rle_y != NULL) {
1431 	printf("Checking Y view....\n");
1432 	ValidateRLEVoxels(vpc, vpc->rle_y, vpc->zstride, vpc->xstride,
1433 			  vpc->ystride, VP_Y_AXIS);
1434     }
1435     if (vpc->rle_x != NULL) {
1436 	printf("Checking X view....\n");
1437 	ValidateRLEVoxels(vpc, vpc->rle_x, vpc->ystride, vpc->zstride,
1438 			  vpc->xstride, VP_X_AXIS);
1439     }
1440 }
1441 
/*
 * ValidateRLEVoxels
 *
 * Walk one RLE view in parallel with the raw (unclassified) volume and
 * verify that the run structure and stored opacities agree with what
 * the classifier produces from the raw data.  Aborts via VPBug on the
 * first inconsistency found.
 */

static void
ValidateRLEVoxels(vpc, rle, istride, jstride, kstride, axis)
vpContext *vpc;
RLEVoxels *rle;
int istride, jstride, kstride;	/* raw-volume strides for the view's axes */
int axis;			/* which view (for diagnostics only) */
{
    char *rawvoxel;
    char *rlevoxel;
    unsigned char *lengths;
    int i, j, k;
    int count;
    int is_non_zero;
    int num_runs;
    float opacity;
    int ilen, jlen, klen;
    int founderror;
    int raw_opc_int;
    int rle_opc_int;
    int rle_bytes_per_voxel;

    rawvoxel = (char *)vpc->raw_voxels;
    rlevoxel = (char *)rle->data;
    lengths = rle->run_lengths;
    ilen = rle->ilen;
    jlen = rle->jlen;
    klen = rle->klen;
    rle_bytes_per_voxel = vpc->rle_bytes_per_voxel;
    founderror = 0;
    for (k = 0; k < klen; k++) {
	for (j = 0; j < jlen; j++) {
	    /* each scanline starts on a zero run (the first toggle below
	       flips is_non_zero from 1 to 0) */
	    count = 0;
	    is_non_zero = 1;
	    num_runs = 0;
	    for (i = 0; i < ilen; i++) {
		/* advance past zero-length runs; a scanline can never
		   contain more runs than voxels */
		while (count == 0) {
		    count = *lengths++;
		    is_non_zero = !is_non_zero;
		    if (++num_runs > rle->ilen)
			VPBug("runaway scan detected by ValidateRLEVoxels");
		}
		/* reclassify the raw voxel and compare with the run type */
		opacity = VPClassifyVoxel(vpc, rawvoxel);
		if (is_non_zero) {
		    /* a voxel inside a nonzero run must classify above
		       the opacity threshold (within a small tolerance) */
		    if (opacity <= vpc->min_opacity &&
			fabs(opacity - vpc->min_opacity) > 0.001) {
			printf("\n");
			printf("**** zero rawvoxel in nonzero rlerun ****\n");
			printf("voxel (i,j,k)=(%d,%d,%d), viewaxis %d\n",
			       i, j, k, axis);
			printf("Actual opacity: %17.15f\n", opacity);
			printf("Threshold:      %17.15f\n", vpc->min_opacity);
			founderror = 1;
		    }
		    /* the quantized opacity is stored in the last byte of
		       the RLE voxel; allow one count of rounding error */
		    raw_opc_int = (int)rint(opacity*255.);
		    rle_opc_int = ByteField(rlevoxel, rle_bytes_per_voxel-1);
		    if (abs(raw_opc_int - rle_opc_int) > 1) {
			printf("\n");
			printf("**** rawvoxel and rlevoxel disagree ****\n");
			printf("voxel (i,j,k)=(%d,%d,%d), viewaxis %d\n",
			       i, j, k, axis);
			printf("Raw opacity: %3d\n", raw_opc_int);
			printf("RLE opacity: %3d\n", rle_opc_int);
			founderror = 1;
		    }
		    rlevoxel += rle_bytes_per_voxel;
		} else {
		    /* a voxel inside a zero run must classify below the
		       opacity threshold (within a small tolerance) */
		    if (opacity > vpc->min_opacity &&
			fabs(opacity - vpc->min_opacity) > 0.001) {
			printf("\n");
			printf("**** nonzero rawvoxel in zero rlerun ****\n");
			printf("voxel (i,j,k)=(%d,%d,%d), viewaxis %d\n",
			       i, j, k, axis);
			printf("Actual opacity: %17.15f\n", opacity);
			printf("Threshold:      %17.15f\n", vpc->min_opacity);
			founderror = 1;
		    }
		}
		if (founderror) {
		    VPDumpClassifier(vpc);
		    VPBug("ValidateRLEVoxels found a problem");
		}
		rawvoxel += istride;
		count--;
	    }
	    if (count != 0)
		VPBug("Run did not terminate at end of scanline");
	    /* scanlines must end with a (possibly empty) nonzero run */
	    if (!is_non_zero) {
		if (*lengths++ != 0)
		    VPBug("Missing zero run at end of scanline");
		is_non_zero = !is_non_zero;
	    }
	    /* step the raw pointer back to the start of the scanline,
	       then forward one scanline / one slice */
	    rawvoxel += jstride - ilen*istride;
	}
	rawvoxel += kstride - jlen*jstride;
    }
}
1538 #endif
1539 
1540 void
VPDumpView(vpc)1541 VPDumpView(vpc)
1542 vpContext *vpc;
1543 {
1544     int c;
1545 
1546     printf("MODEL:\n");
1547     for (c = 0; c < 4; c++) {
1548 	printf("    %12.6f %12.6f %12.6f %12.6f\n",
1549 	       vpc->transforms[VP_MODEL][c][0],
1550 	       vpc->transforms[VP_MODEL][c][1],
1551 	       vpc->transforms[VP_MODEL][c][2],
1552 	       vpc->transforms[VP_MODEL][c][3]);
1553     }
1554     printf("VIEW:\n");
1555     for (c = 0; c < 4; c++) {
1556 	printf("    %12.6f %12.6f %12.6f %12.6f\n",
1557 	       vpc->transforms[VP_MODEL][c][0],
1558 	       vpc->transforms[VP_MODEL][c][1],
1559 	       vpc->transforms[VP_MODEL][c][2],
1560 	       vpc->transforms[VP_MODEL][c][3]);
1561     }
1562     printf("PROJECT:\n");
1563     for (c = 0; c < 4; c++) {
1564 	printf("    %12.6f %12.6f %12.6f %12.6f\n",
1565 	       vpc->transforms[VP_MODEL][c][0],
1566 	       vpc->transforms[VP_MODEL][c][1],
1567 	       vpc->transforms[VP_MODEL][c][2],
1568 	       vpc->transforms[VP_MODEL][c][3]);
1569     }
1570 }
1571 
1572 void
VPDumpClassifier(vpc)1573 VPDumpClassifier(vpc)
1574 vpContext *vpc;
1575 {
1576     int c, d;
1577 
1578     for (d = 0; d < vpc->num_clsfy_params; d++) {
1579 	printf("CLASSIFIER PARAM %d:\n   ", d);
1580 	for (c = 0; c < vpc->field_max[vpc->param_field[d]]; c++) {
1581 	    printf(" %8.6f", vpc->clsfy_table[d][c]);
1582 	    if (c % 8 == 7)
1583 		printf("\n   ");
1584 	}
1585 	printf("\n");
1586     }
1587 }
1588