Guest User

boolean weaving

a guest
Aug 27th, 2015
107
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
C 44.17 KB | None | 0 0
  1. /*                          B O O L . C
  2.  * BRL-CAD
  3.  *
  4.  * Copyright (c) 1985-2014 United States Government as represented by
  5.  * the U.S. Army Research Laboratory.
  6.  *
  7.  * This library is free software; you can redistribute it and/or
  8.  * modify it under the terms of the GNU Lesser General Public License
  9.  * version 2.1 as published by the Free Software Foundation.
  10.  *
  11.  * This library is distributed in the hope that it will be useful, but
  12.  * WITHOUT ANY WARRANTY; without even the implied warranty of
  13.  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
  14.  * Lesser General Public License for more details.
  15.  *
  16.  * You should have received a copy of the GNU Lesser General Public
  17.  * License along with this file; see the file named COPYING for more
  18.  * information.
  19.  */
  20. /** @addtogroup ray */
  21. /** @{ */
  22. /** @file librt/bool.c
  23.  *
  24.  * Ray Tracing program, Boolean region evaluator.
  25.  *
  26.  * Note to developers -
  27.  * Do not use the hit_point field in these routines, as for some solid
  28.  * types it has been filled in by the g_xxx_shot() routine, and for
  29.  * other solid types it may not have been.  In particular, copying a
  30.  * garbage hit_point from a structure which has not yet been filled
  31.  * in, into a structure which won't be filled in again, gives wrong
  32.  * results.  Thanks to Keith Bowman for finding this obscure bug.
  33.  *
  34.  */
  35. /** @} */
  36.  
  37. #include "common.h"
  38.  
  39. #include <string.h>
  40. #include "bio.h"
  41.  
  42. #include "bu/parallel.h"
  43. #include "vmath.h"
  44. #include "raytrace.h"
  45.  
  46.  
  47.  
  48. /* Boolean values.  Not easy to change, but defined symbolically */
  49. #define BOOL_FALSE 0
  50. #define BOOL_TRUE 1
  51.  
  52.  
  53. /**
  54.  * If a zero thickness segment abuts another partition, it will be
  55.  * fused in, later.
  56.  *
  57.  * If it is free standing, then it will remain as a zero thickness
  58.  * partition, which probably signals going through some solid an odd
  59.  * number of times, or hitting an NMG wire edge or NMG lone vertex.
  60.  */
void
rt_weave0seg(struct seg *segp, struct partition *PartHdp, struct application *ap)
{
    struct partition *pp;
    struct resource *res = ap->a_resource;
    struct rt_i *rtip = ap->a_rt_i;
    fastf_t tol_dist;		/* distance tolerance used to fuse nearby hit points */

    tol_dist = rtip->rti_tol.dist;

    RT_CK_PT_HD(PartHdp);
    RT_CK_RTI(ap->a_rt_i);
    RT_CK_RESOURCE(res);
    RT_CK_RTI(rtip);

    if (PartHdp->pt_forw == PartHdp) bu_bomb("rt_weave0seg() with empty partition list\n");

    /* See if this segment ends before start of first partition */
    if (segp->seg_out.hit_dist < PartHdp->pt_forw->pt_inhit->hit_dist) {
	/* Free-standing zero-thickness segment ahead of everything:
	 * record it as its own partition at the head of the list. */
	GET_PT_INIT(rtip, pp, res);
	bu_ptbl_ins_unique(&pp->pt_seglist, (long *)segp);
	pp->pt_inseg = segp;
	pp->pt_inhit = &segp->seg_in;
	pp->pt_outseg = segp;
	pp->pt_outhit = &segp->seg_out;
	APPEND_PT(pp, PartHdp);
	return;
    }

    /*
     * Cases: seg at start of pt, in middle of pt, at end of pt, or
     * past end of pt but before start of next pt.
     *
     * XXX For the first 3 cases, we might want to make a new 0-len pt,
     * XXX especially as the NMG ray-tracer starts reporting wire hits.
     */
    for (pp=PartHdp->pt_forw; pp != PartHdp; pp=pp->pt_forw) {
	/* Segment endpoint coincides (within tol) with this
	 * partition's entry point: it abuts an existing partition
	 * and will be fused in later — nothing to record here. */
	if (NEAR_EQUAL(segp->seg_in.hit_dist, pp->pt_inhit->hit_dist, tol_dist) ||
	    NEAR_EQUAL(segp->seg_out.hit_dist, pp->pt_inhit->hit_dist, tol_dist)
	    ) {
	    return;
	}
	/* Likewise for coincidence with the partition's exit point. */
	if (NEAR_EQUAL(segp->seg_in.hit_dist, pp->pt_outhit->hit_dist, tol_dist) ||
	    NEAR_EQUAL(segp->seg_out.hit_dist, pp->pt_outhit->hit_dist, tol_dist)
	    ) {
	    return;
	}
	/* Segment lies entirely inside this partition: absorbed. */
	if (segp->seg_out.hit_dist <= pp->pt_outhit->hit_dist &&
	    segp->seg_in.hit_dist >= pp->pt_inhit->hit_dist) {
	    return;
	}
	/* Segment falls in the gap after this partition (or beyond
	 * the last one): make a free-standing zero-thickness
	 * partition for it. */
	if (pp->pt_forw == PartHdp ||
	    segp->seg_out.hit_dist < pp->pt_forw->pt_inhit->hit_dist) {
	    struct partition *npp;
	    GET_PT_INIT(rtip, npp, res);
	    bu_ptbl_ins_unique(&npp->pt_seglist, (long *)segp);
	    npp->pt_inseg = segp;
	    npp->pt_inhit = &segp->seg_in;
	    npp->pt_outseg = segp;
	    npp->pt_outhit = &segp->seg_out;
	    APPEND_PT(npp, pp);
	    return;
	}
    }
    bu_bomb("rt_weave0seg() fell out of partition loop?\n");
}
  127.  
  128.  
/**
 * Weave the segments on the in_hd list into the ordered partition
 * list headed by PartHdp, moving each segment onto out_hd as it is
 * consumed.  Partitions are split, fused, or created as needed so
 * that afterwards every partition span is covered by the segments
 * recorded in its pt_seglist.
 *
 * Distances within rtip->rti_tol.dist of each other are treated as
 * equal and are forced to become exactly equal when fused.
 *
 * NOTE(review): assumes in_hd/out_hd are valid seg list heads and
 * PartHdp is a valid partition list head owned by this ray — confirm
 * against callers.
 */
void
rt_boolweave(struct seg *out_hd, struct seg *in_hd, struct partition *PartHdp, struct application *ap)
{
    struct seg *segp;
    struct partition *pp;
    struct resource *res = ap->a_resource;
    struct rt_i *rtip = ap->a_rt_i;

    fastf_t diff, diff_se;
    fastf_t tol_dist;		/* distance tolerance for fusing hit points */

    RT_CK_PT_HD(PartHdp);

    tol_dist = rtip->rti_tol.dist;

    RT_CK_RTI(ap->a_rt_i);
    RT_CK_RESOURCE(res);
    RT_CK_RTI(rtip);

    /* Consume one segment per iteration until the input list is empty. */
    while (BU_LIST_NON_EMPTY(&(in_hd->l))) {
	struct partition *newpp = PT_NULL;
	/* lastseg/lasthit/lastflip track the not-yet-woven start of
	 * the current segment; it advances to larger hit_dist values
	 * as pieces of the segment are consumed. */
	struct seg *lastseg = RT_SEG_NULL;
	struct hit *lasthit = HIT_NULL;
	int lastflip = 0;

	segp = BU_LIST_FIRST(seg, &(in_hd->l));
	RT_CHECK_SEG(segp);
	RT_CK_HIT(&(segp->seg_in));
	RT_CK_RAY(segp->seg_in.hit_rayp);
	RT_CK_HIT(&(segp->seg_out));
	RT_CK_RAY(segp->seg_out.hit_rayp);
	if ((size_t)segp->seg_stp->st_bit >= rtip->nsolids)
	    bu_bomb("rt_boolweave: st_bit");

	/* Move segment from the input list to the output list; the
	 * partitions built below keep pointers into it. */
	BU_LIST_DEQUEUE(&(segp->l));
	BU_LIST_INSERT(&(out_hd->l), &(segp->l));

	/* Make nearly zero be exactly zero */
	if (NEAR_ZERO(segp->seg_in.hit_dist, tol_dist))
	    segp->seg_in.hit_dist = 0;
	if (NEAR_ZERO(segp->seg_out.hit_dist, tol_dist))
	    segp->seg_out.hit_dist = 0;

	/* Totally ignore things behind the start position */
	if (segp->seg_out.hit_dist < -10.0)
	    continue;

	/* Reject finite solids reporting non-finite hit distances. */
	if (segp->seg_stp->st_aradius < INFINITY &&
	    !(segp->seg_in.hit_dist >= -INFINITY &&
	      segp->seg_out.hit_dist <= INFINITY)) {
	    continue;
	}
	/* Reject inverted (out before in) segments. */
	if (segp->seg_in.hit_dist > segp->seg_out.hit_dist) {
	    continue;
	}

	/*
	 * Weave this segment into the existing partitions, creating
	 * new partitions as necessary.
	 */
	if (PartHdp->pt_forw == PartHdp) {
	    /* No partitions yet, simple! */
	    GET_PT_INIT(rtip, pp, res);
	    bu_ptbl_ins_unique(&pp->pt_seglist, (long *)segp);
	    pp->pt_inseg = segp;
	    pp->pt_inhit = &segp->seg_in;
	    pp->pt_outseg = segp;
	    pp->pt_outhit = &segp->seg_out;
	    APPEND_PT(pp, PartHdp);
	    goto done_weave;
	}

	if (ap->a_no_booleans) {
	    lastseg = segp;
	    lasthit = &segp->seg_in;
	    lastflip = 0;
	    /* Just sort in ascending in-dist order */
	    for (pp=PartHdp->pt_forw; pp != PartHdp; pp=pp->pt_forw) {
		if (lasthit->hit_dist < pp->pt_inhit->hit_dist) {
		    GET_PT_INIT(rtip, newpp, res);
		    bu_ptbl_ins_unique(&newpp->pt_seglist, (long *)segp);
		    newpp->pt_inseg = segp;
		    newpp->pt_inhit = &segp->seg_in;
		    newpp->pt_outseg = segp;
		    newpp->pt_outhit = &segp->seg_out;
		    INSERT_PT(newpp, pp);
		    goto done_weave;
		}
	    }
	    /* Starts at or after every existing partition: append at end. */
	    GET_PT_INIT(rtip, newpp, res);
	    bu_ptbl_ins_unique(&newpp->pt_seglist, (long *)segp);
	    newpp->pt_inseg = segp;
	    newpp->pt_inhit = &segp->seg_in;
	    newpp->pt_outseg = segp;
	    newpp->pt_outhit = &segp->seg_out;
	    INSERT_PT(newpp, PartHdp);
	    goto done_weave;
	}

	/* Check for zero-thickness segment, within tol */
	diff = segp->seg_in.hit_dist - segp->seg_out.hit_dist;
	if (NEAR_ZERO(diff, tol_dist)) {
	    rt_weave0seg(segp, PartHdp, ap);
	    goto done_weave;
	}

	if (segp->seg_in.hit_dist >= PartHdp->pt_back->pt_outhit->hit_dist) {
	    /*
	     * Segment starts exactly at last partition's end, or
	     * beyond last partitions end.  Make new partition.
	     */
	    GET_PT_INIT(rtip, pp, res);
	    bu_ptbl_ins_unique(&pp->pt_seglist, (long *)segp);
	    pp->pt_inseg = segp;
	    pp->pt_inhit = &segp->seg_in;
	    pp->pt_outseg = segp;
	    pp->pt_outhit = &segp->seg_out;
	    APPEND_PT(pp, PartHdp->pt_back);
	    goto done_weave;
	}

	/* Loop through current partition list weaving the current
	 * input segment into the list. The following three variables
	 * keep track of the current starting point of the input
	 * segment. The starting point of the segment moves to higher
	 * hit_dist values (as it is woven in) until it is entirely
	 * consumed.
	 */
	lastseg = segp;
	lasthit = &segp->seg_in;
	lastflip = 0;
	for (pp=PartHdp->pt_forw; pp != PartHdp; pp=pp->pt_forw) {
	    diff_se = lasthit->hit_dist - pp->pt_outhit->hit_dist;
	    if (diff_se > tol_dist) {
		/* Seg starts beyond the END of the
		 * current partition.
		 *	PPPP
		 *	        SSSS
		 * Advance to next partition.
		 */
		continue;
	    }
	    diff = lasthit->hit_dist - pp->pt_inhit->hit_dist;
	    if (diff_se > -(tol_dist) && diff > tol_dist) {
		/*
		 * Seg starts almost "precisely" at the
		 * end of the current partition.
		 *	PPPP
		 *	    SSSS
		 * FUSE an exact match of the endpoints,
		 * advance to next partition.
		 */
		lasthit->hit_dist = pp->pt_outhit->hit_dist;
		continue;
	    }

	    /*
	     * diff < ~~0
	     * Seg starts before current partition ends
	     *	PPPPPPPPPPP
	     *	  SSSS...
	     */
	    if (diff > tol_dist) {
		/*
		 * lasthit->hit_dist > pp->pt_inhit->hit_dist
		 * pp->pt_inhit->hit_dist < lasthit->hit_dist
		 *
		 * Segment starts after partition starts,
		 * but before the end of the partition.
		 * Note:  pt_seglist will be updated in equal_start.
		 *	PPPPPPPPPPPP
		 *	     SSSS...
		 *	newpp|pp
		 */
		RT_DUP_PT(rtip, newpp, pp, res);
		/* new partition is the span before seg joins partition */
		pp->pt_inseg = segp;
		pp->pt_inhit = &segp->seg_in;
		pp->pt_inflip = 0;
		newpp->pt_outseg = segp;
		newpp->pt_outhit = &segp->seg_in;
		newpp->pt_outflip = 1;
		INSERT_PT(newpp, pp);
		goto equal_start;
	    }
	    if (diff > -(tol_dist)) {
		/*
		 * Make a subtle but important distinction here.  Even
		 * though the two distances are "equal" within
		 * tolerance, they are not exactly the same.  If the
		 * new segment is slightly closer to the ray origin,
		 * then use its IN point.
		 *
		 * This is an attempt to reduce the deflected normals
		 * sometimes seen along the edges of e.g. a cylinder
		 * unioned with an ARB8, where the ray hits the top of
		 * the cylinder and the *side* face of the ARB8 rather
		 * than the top face of the ARB8.
		 */
		diff = segp->seg_in.hit_dist - pp->pt_inhit->hit_dist;
		if (!pp->pt_back ||
		    pp->pt_back == PartHdp ||
		    pp->pt_back->pt_outhit->hit_dist <=
		    segp->seg_in.hit_dist) {
		    if (NEAR_ZERO(diff, tol_dist) &&
			diff < 0) {
			/* Seg IN is slightly closer: adopt it. */
			pp->pt_inseg = segp;
			pp->pt_inhit = &segp->seg_in;
			pp->pt_inflip = 0;
		    }
		}
	    equal_start:
		/*
		 * Segment and partition start at (roughly) the same
		 * point.  When fusing 2 points together i.e., when
		 * NEAR_ZERO(diff, tol) is true, the two points MUST
		 * be forced to become exactly equal!
		 */
		diff = segp->seg_out.hit_dist - pp->pt_outhit->hit_dist;
		if (diff > tol_dist) {
		    /*
		     * Seg & partition start at roughly
		     * the same spot,
		     * seg extends beyond partition end.
		     *	PPPP
		     *	SSSSSSSS
		     *	pp  |  newpp
		     */
		    bu_ptbl_ins_unique(&pp->pt_seglist, (long *)segp);
		    lasthit = pp->pt_outhit;
		    lastseg = pp->pt_outseg;
		    lastflip = 1;
		    continue;
		}
		if (diff > -(tol_dist)) {
		    /*
		     * diff ~= 0
		     * Segment and partition start & end
		     * (nearly) together.
		     *	PPPP
		     *	SSSS
		     */
		    bu_ptbl_ins_unique(&pp->pt_seglist, (long *)segp);
		    goto done_weave;
		} else {
		    /*
		     * diff < ~0
		     *
		     * Segment + Partition start together,
		     * segment ends before partition ends.
		     *	PPPPPPPPPP
		     *	SSSSSS
		     *	newpp| pp
		     */
		    RT_DUP_PT(rtip, newpp, pp, res);
		    /* new partition contains segment */
		    bu_ptbl_ins_unique(&newpp->pt_seglist, (long *)segp);
		    newpp->pt_outseg = segp;
		    newpp->pt_outhit = &segp->seg_out;
		    newpp->pt_outflip = 0;
		    pp->pt_inseg = segp;
		    pp->pt_inhit = &segp->seg_out;
		    pp->pt_inflip = 1;
		    INSERT_PT(newpp, pp);
		    goto done_weave;
		}
		/* NOTREACHED */
	    } else {
		/*
		 * diff < ~~0
		 *
		 * Seg starts before current partition starts,
		 * but after the previous partition ends.
		 *	SSSSSSSS...
		 *	     PPPPP...
		 *	newpp|pp
		 */
		GET_PT_INIT(rtip, newpp, res);
		bu_ptbl_ins_unique(&newpp->pt_seglist, (long *)segp);
		newpp->pt_inseg = lastseg;
		newpp->pt_inhit = lasthit;
		newpp->pt_inflip = lastflip;
		diff = segp->seg_out.hit_dist - pp->pt_inhit->hit_dist;
		if (diff < -(tol_dist)) {
		    /*
		     * diff < ~0
		     * Seg starts and ends before current
		     * partition, but after previous
		     * partition ends (diff < 0).
		     *		SSSS
		     *	pppp		PPPPP...
		     *		newpp	pp
		     */
		    newpp->pt_outseg = segp;
		    newpp->pt_outhit = &segp->seg_out;
		    newpp->pt_outflip = 0;
		    INSERT_PT(newpp, pp);
		    goto done_weave;
		}
		if (diff < tol_dist) {
		    /*
		     * diff ~= 0
		     *
		     * Seg starts before current
		     * partition starts, and ends at or
		     * near the start of the partition.
		     * (diff == 0).  FUSE the points.
		     *	SSSSSS
		     *	     PPPPP
		     *	newpp|pp
		     * NOTE: only copy hit point, not
		     * normals or norm private stuff.
		     */
		    newpp->pt_outseg = segp;
		    newpp->pt_outhit = &segp->seg_out;
		    newpp->pt_outhit->hit_dist = pp->pt_inhit->hit_dist;
		    newpp->pt_outflip = 0;
		    INSERT_PT(newpp, pp);
		    goto done_weave;
		}
		/*
		 * Seg starts before current partition
		 * starts, and ends after the start of the
		 * partition.  (diff > 0).
		 *	SSSSSSSSSS
		 *	      PPPPPPP
		 *	newpp| pp | ...
		 */
		newpp->pt_outseg = pp->pt_inseg;
		newpp->pt_outhit = pp->pt_inhit;
		newpp->pt_outflip = 1;
		lastseg = pp->pt_inseg;
		lasthit = pp->pt_inhit;
		lastflip = newpp->pt_outflip;
		INSERT_PT(newpp, pp);
		goto equal_start;
	    }
	    /* NOTREACHED */
	}

	/*
	 * Segment has portion which extends beyond the end
	 * of the last partition.  Tack on the remainder.
	 *	PPPPP
	 *	     SSSSS
	 */
	GET_PT_INIT(rtip, newpp, res);
	bu_ptbl_ins_unique(&newpp->pt_seglist, (long *)segp);
	newpp->pt_inseg = lastseg;
	newpp->pt_inhit = lasthit;
	newpp->pt_inflip = lastflip;
	newpp->pt_outseg = segp;
	newpp->pt_outhit = &segp->seg_out;
	APPEND_PT(newpp, PartHdp->pt_back);

    done_weave:	;	/* Sorry about the goto's, but they give clarity */
    }
}
  487.  
  488.  
  489. int
  490. rt_defoverlap (struct application *ap, struct partition *pp, struct region *reg1, struct region *reg2, struct partition *pheadp)
  491. {
  492.     RT_CK_AP(ap);
  493.     RT_CK_PT(pp);
  494.     RT_CK_REGION(reg1);
  495.     RT_CK_REGION(reg2);
  496.  
  497.     /*
  498.      * Apply heuristics as to which region should claim partition.
  499.      */
  500.     if (reg1->reg_aircode != 0) {
  501.     /* reg1 was air, replace with reg2 */
  502.     return 2;
  503.     }
  504.     if (pp->pt_back != pheadp) {
  505.     /* Repeat a prev region, if that is a choice */
  506.     if (pp->pt_back->pt_regionp == reg1)
  507.         return 1;
  508.     if (pp->pt_back->pt_regionp == reg2)
  509.         return 2;
  510.     }
  511.  
  512.     /* To provide some consistency from ray to ray, use lowest bit # */
  513.     if (reg1->reg_bit < reg2->reg_bit)
  514.     return 1;
  515.     return 2;
  516. }
  517.  
  518.  
  519. /**
  520.  * Given one of the regions that is involved in a given partition
  521.  * (whether the boolean formula for this region is BOOL_TRUE in this
  522.  * part or not), return a bu_ptbl list containing all the segments in
  523.  * this partition which came from solids that appear as terms in the
  524.  * boolean formula for the given region.
  525.  *
  526.  * The bu_ptbl is initialized here, and must be freed by the caller.
  527.  * It will contain a pointer to at least one segment.
  528.  */
  529. void
  530. rt_get_region_seglist_for_partition(struct bu_ptbl *sl, const struct partition *pp, const struct region *regp)
  531. {
  532.     const struct seg **segpp;
  533.  
  534.     bu_ptbl_init(sl, 8, "region seglist for partition");
  535.  
  536.     /* Walk the partitions segment list */
  537.     for (BU_PTBL_FOR(segpp, (const struct seg **), &pp->pt_seglist)) {
  538.     const struct region **srpp;
  539.  
  540.     RT_CK_SEG(*segpp);
  541.     /* For every segment in part, walk the solid's region list */
  542.     for (BU_PTBL_FOR(srpp, (const struct region **), &(*segpp)->seg_stp->st_regions)) {
  543.         RT_CK_REGION(*srpp);
  544.  
  545.         if (*srpp != regp) continue;
  546.         /* This segment is part of a solid in this region */
  547.         bu_ptbl_ins_unique(sl, (long *)(*segpp));
  548.     }
  549.     }
  550.  
  551.     if (BU_PTBL_LEN(sl) <= 0) bu_bomb("rt_get_region_seglist_for_partition() didn't find any segments\n");
  552. }
  553.  
  554.  
  555. /**
  556.  * Find the maximum value of the raynum (seg_rayp->index) encountered
  557.  * in the segments contributing to this region.
  558.  *
  559.  * Returns -
  560.  *  # Maximum ray index
  561.  * -1 If no rays are contributing segs for this region.
  562.  */
  563. int
  564. rt_tree_max_raynum(const union tree *tp, const struct partition *pp)
  565. {
  566.     RT_CK_TREE(tp);
  567.     RT_CK_PARTITION(pp);
  568.  
  569.     switch (tp->tr_op) {
  570.     case OP_NOP:
  571.         return -1;
  572.  
  573.     case OP_SOLID:
  574.         {
  575.         struct soltab *seek_stp = tp->tr_a.tu_stp;
  576.         struct seg **segpp;
  577.         for (BU_PTBL_FOR(segpp, (struct seg **), &pp->pt_seglist)) {
  578.             if ((*segpp)->seg_stp != seek_stp) continue;
  579.             return (*segpp)->seg_in.hit_rayp->index;
  580.         }
  581.         }
  582.         /* Maybe it hasn't been shot yet, or ray missed */
  583.         return -1;
  584.  
  585.     case OP_NOT:
  586.         return rt_tree_max_raynum(tp->tr_b.tb_left, pp);
  587.  
  588.     case OP_UNION:
  589.     case OP_INTERSECT:
  590.     case OP_SUBTRACT:
  591.     case OP_XOR:
  592.         {
  593.         int a = rt_tree_max_raynum(tp->tr_b.tb_left, pp);
  594.         int b = rt_tree_max_raynum(tp->tr_b.tb_right, pp);
  595.         if (a > b) return a;
  596.         return b;
  597.         }
  598.     default:
  599.         bu_bomb("rt_tree_max_raynum: bad op\n");
  600.     }
  601.     return 0;
  602. }
  603.  
  604.  
  605. /**
  606.  * Handle FASTGEN volume/volume overlap.  Look at underlying segs.  If
  607.  * one is less than 1/4", take the longer.  Otherwise take the
  608.  * shorter.
  609.  *
  610.  * Required to null out one of the two regions.
  611.  */
  612. void
  613. rt_fastgen_vol_vol_overlap(struct region **fr1, struct region **fr2, const struct partition *pp)
  614. {
  615.     struct bu_ptbl sl1 = BU_PTBL_INIT_ZERO;
  616.     struct bu_ptbl sl2 = BU_PTBL_INIT_ZERO;
  617.     const struct seg *s1 = (const struct seg *)NULL;
  618.     const struct seg *s2 = (const struct seg *)NULL;
  619.     fastf_t s1_in_dist;
  620.     fastf_t s2_in_dist;
  621.     fastf_t depth;
  622.     struct seg **segpp;
  623.  
  624.     RT_CK_REGION(*fr1);
  625.     RT_CK_REGION(*fr2);
  626.  
  627.     rt_get_region_seglist_for_partition(&sl1, pp, *fr1);
  628.     rt_get_region_seglist_for_partition(&sl2, pp, *fr2);
  629.  
  630.     s1_in_dist = MAX_FASTF;
  631.     s2_in_dist = MAX_FASTF;
  632.     for (BU_PTBL_FOR(segpp, (struct seg **), &sl1))
  633.     {
  634.     if ((*segpp)->seg_in.hit_dist < s1_in_dist)
  635.     {
  636.         s1 = (*segpp);
  637.         s1_in_dist = s1->seg_in.hit_dist;
  638.     }
  639.     }
  640.     for (BU_PTBL_FOR(segpp, (struct seg **), &sl2))
  641.     {
  642.     if ((*segpp)->seg_in.hit_dist < s2_in_dist)
  643.     {
  644.         s2 = (*segpp);
  645.         s2_in_dist = s2->seg_in.hit_dist;
  646.     }
  647.     }
  648.     RT_CK_SEG(s1);
  649.     RT_CK_SEG(s2);
  650.  
  651.     depth = pp->pt_outhit->hit_dist - pp->pt_inhit->hit_dist;
  652.  
  653.     /* 6.35mm = 1/4 inch */
  654.     if (depth < 6.35) {
  655.     /* take region with lowest inhit */
  656.     if (s1->seg_in.hit_dist < s2->seg_in.hit_dist) {
  657.         /* keep fr1, delete fr2 */
  658.         *fr2 = REGION_NULL;
  659.     } else {
  660.         /* keep fr2, delete fr1 */
  661.         *fr1 = REGION_NULL;
  662.     }
  663.     } else {
  664.     /*
  665.      * take the region with largest inhit
  666.      */
  667.     if (s1->seg_in.hit_dist >= s2->seg_in.hit_dist) {
  668.         /* keep fr1, delete fr2 */
  669.         *fr2 = REGION_NULL;
  670.     } else {
  671.         *fr1 = REGION_NULL;
  672.     }
  673.     }
  674.  
  675.     bu_ptbl_free(&sl1);
  676.     bu_ptbl_free(&sl2);
  677. }
  678.  
  679. /**
  680.  * Handle FASTGEN plate/volume overlap.
  681.  *
  682.  * Measure width of _preceding_ partition, which must have been
  683.  * claimed by the volume mode region fr2.
  684.  *
 * If less than 1/4", delete preceding partition, and plate wins this
 * part.  If 1/4" or more, plate wins this part, previous partition
 * untouched.  If previous partition is claimed by plate mode region
  688.  * fr1, then overlap is left in output???
  689.  *
  690.  * Required to null out one of the two regions.
  691.  */
  692. void
  693. rt_fastgen_plate_vol_overlap(struct region **fr1, struct region **fr2, struct partition *pp, struct application *ap)
  694. {
  695.     struct partition *prev;
  696.     fastf_t depth;
  697.  
  698.     RT_CK_REGION(*fr1);
  699.     RT_CK_REGION(*fr2);
  700.     RT_CK_PT(pp);
  701.     RT_CK_AP(ap);
  702.  
  703.     prev = pp->pt_back;
  704.     if (prev->pt_magic == PT_HD_MAGIC) {
  705.     /* No prev partition, this is the first.  d=0, plate wins */
  706.     *fr2 = REGION_NULL;
  707.     return;
  708.     }
  709.  
  710.     /* arbitrary tolerance is the dominant absolute tolerance from the
  711.      * now-deprecated rt_fdiff().  need to test sensitivity before
  712.      * changing to the distance tolerance.
  713.      */
  714.     if (!NEAR_EQUAL(prev->pt_outhit->hit_dist, pp->pt_inhit->hit_dist, 0.001)) {
  715.     /* There is a gap between previous partition and this one.  So
  716.      * both plate and vol start at same place, d=0, plate wins.
  717.      */
  718.     *fr2 = REGION_NULL;
  719.     return;
  720.     }
  721.  
  722.     if (prev->pt_regionp == *fr2) {
  723.     /* previous part is volume mode, ends at start of pp */
  724.     depth = prev->pt_outhit->hit_dist - prev->pt_inhit->hit_dist;
  725.     /* 6.35mm = 1/4 inch */
  726.     if (depth < 6.35) {
  727.         /* Delete previous partition from list */
  728.         DEQUEUE_PT(prev);
  729.         FREE_PT(prev, ap->a_resource);
  730.  
  731.         /* plate mode fr1 wins this partition */
  732.         *fr2 = REGION_NULL;
  733.     } else {
  734.         /* Leave previous partition alone */
  735.         /* plate mode fr1 wins this partition */
  736.         *fr2 = REGION_NULL;
  737.     }
  738.     } else if (prev->pt_regionp == *fr1) {
  739.     /* previous part is plate mode, ends at start of pp */
  740.     /* d < 0, leave overlap in output??? */
  741.     /* For now, volume mode region just loses. */
  742.     *fr2 = REGION_NULL;
  743.     } else {
  744.     /* Some other region precedes this partition */
  745.     /* So both plate and vol start at same place, d=0, plate wins */
  746.     *fr2 = REGION_NULL;
  747.     }
  748. }
  749.  
  750.  
  751. void
  752. rt_default_multioverlap(struct application *ap, struct partition *pp, struct bu_ptbl *regiontable, struct partition *InputHdp)
  753. {
  754.     struct region *lastregion = (struct region *)NULL;
  755.     int n_regions;
  756.     int n_fastgen = 0;
  757.     int code;
  758.     size_t i;
  759.     int counter;
  760.  
  761.     RT_CK_AP(ap);
  762.     RT_CK_PARTITION(pp);
  763.     BU_CK_PTBL(regiontable);
  764.     RT_CK_PT_HD(InputHdp);
  765.  
  766.     if (ap->a_overlap == RT_AFN_NULL)
  767.     ap->a_overlap = rt_defoverlap;
  768.  
  769.     /* Count number of FASTGEN regions */
  770.     n_regions = BU_PTBL_LEN(regiontable);
  771.     for (counter = n_regions-1; counter >= 0; counter--) {
  772.     struct region *regp = (struct region *)BU_PTBL_GET(regiontable, counter);
  773.     RT_CK_REGION(regp);
  774.     if (regp->reg_is_fastgen != REGION_NON_FASTGEN) n_fastgen++;
  775.     }
  776.  
  777.     /*
  778.      * Resolve all FASTGEN overlaps before considering BRL-CAD
  779.      * overlaps, because FASTGEN overlaps have strict rules.
  780.      */
  781.     if (n_fastgen >= 2) {
  782.     struct region **fr1;
  783.     struct region **fr2;
  784.  
  785.     /*
  786.      * First, resolve volume_mode/volume_mode overlaps because
  787.      * they are a simple choice.  The searches run from high to
  788.      * low in the ptbl array.
  789.      */
  790.     for (BU_PTBL_FOR(fr1, (struct region **), regiontable)) {
  791.         if (*fr1 == REGION_NULL) continue;
  792.         RT_CK_REGION(*fr1);
  793.         if ((*fr1)->reg_is_fastgen != REGION_FASTGEN_VOLUME)
  794.         continue;
  795.         for (fr2 = fr1-1; fr2 >= (struct region **)BU_PTBL_BASEADDR(regiontable); fr2--) {
  796.         if (*fr2 == REGION_NULL) continue;
  797.         RT_CK_REGION(*fr2);
  798.         if ((*fr2)->reg_is_fastgen != REGION_FASTGEN_VOLUME)
  799.             continue;
  800.         rt_fastgen_vol_vol_overlap(fr1, fr2, pp);
  801.         if (*fr1 == REGION_NULL) break;
  802.         }
  803.     }
  804.  
  805.     /* Second, resolve plate_mode/volume_mode overlaps */
  806.     for (BU_PTBL_FOR(fr1, (struct region **), regiontable)) {
  807.         if (*fr1 == REGION_NULL) continue;
  808.         RT_CK_REGION(*fr1);
  809.         if ((*fr1)->reg_is_fastgen != REGION_FASTGEN_PLATE)
  810.         continue;
  811.         for (BU_PTBL_FOR(fr2, (struct region **), regiontable)) {
  812.         if (*fr2 == REGION_NULL) continue;
  813.         RT_CK_REGION(*fr2);
  814.         if ((*fr2)->reg_is_fastgen != REGION_FASTGEN_VOLUME)
  815.             continue;
  816.         rt_fastgen_plate_vol_overlap(fr1, fr2, pp, ap);
  817.         if (*fr1 == REGION_NULL) break;
  818.         }
  819.     }
  820.  
  821.  
  822.     /* Finally, think up a way to pass plate/plate overlaps on */
  823.     n_fastgen = 0;
  824.     for (counter = n_regions-1; counter >= 0; counter--) {
  825.         struct region *regp = (struct region *)BU_PTBL_GET(regiontable, counter);
  826.         if (regp == REGION_NULL) continue;  /* empty slot in table */
  827.         RT_CK_REGION(regp);
  828.         if (regp->reg_is_fastgen != REGION_NON_FASTGEN) n_fastgen++;
  829.     }
  830.  
  831.     /* Compress out any null pointers in the table */
  832.     bu_ptbl_rm(regiontable, (long *)NULL);
  833.     }
  834.  
  835.     lastregion = (struct region *)BU_PTBL_GET(regiontable, 0);
  836.     RT_CK_REGION(lastregion);
  837.  
  838.     if (BU_PTBL_LEN(regiontable) > 1 && ap->a_rt_i->rti_save_overlaps != 0) {
  839.     /*
  840.      * Snapshot current state of overlap list, so that downstream
  841.      * application code can resolve any FASTGEN plate/plate
  842.      * overlaps.  The snapshot is not taken at the top of the
  843.      * routine because nobody is interested in FASTGEN vol/plate
  844.      * or vol/vol overlaps.  The list is terminated with a NULL
  845.      * pointer, placed courtesy of bu_calloc().
  846.      */
  847.     pp->pt_overlap_reg = (struct region **)bu_calloc(
  848.         BU_PTBL_LEN(regiontable)+1, sizeof(struct region *),
  849.         "pt_overlap_reg");
  850.     memcpy((char *)pp->pt_overlap_reg,
  851.            (char *)BU_PTBL_BASEADDR(regiontable),
  852.            BU_PTBL_LEN(regiontable) * sizeof(struct region *));
  853.     }
  854.  
  855.     /* Examine the overlapping regions, pairwise */
  856.     for (i=1; i < BU_PTBL_LEN(regiontable); i++) {
  857.     struct region *regp = (struct region *)BU_PTBL_GET(regiontable, i);
  858.     if (regp == REGION_NULL) continue;  /* empty slot in table */
  859.     RT_CK_REGION(regp);
  860.  
  861.     code = -1;              /* For debug out in policy */
  862.  
  863.     /*
  864.      * Two or more regions claim this partition
  865.      */
  866.     if (lastregion->reg_aircode != 0 && regp->reg_aircode == 0) {
  867.         /* last region is air, replace with solid regp */
  868.         goto code2;
  869.     } else if (lastregion->reg_aircode == 0 && regp->reg_aircode != 0) {
  870.         /* last region solid, regp is air, keep last */
  871.         goto code1;
  872.     } else if (lastregion->reg_aircode != 0 &&
  873.            regp->reg_aircode != 0 &&
  874.            regp->reg_aircode == lastregion->reg_aircode) {
  875.         /* both are same air, keep last */
  876.         goto code1;
  877.     }
  878.  
  879.     /*
  880.      * If a FASTGEN region overlaps a non-FASTGEN region, the
  881.      * non-FASTGEN ("traditional BRL-CAD") region wins.  No
  882.      * mixed-mode geometry like this will be built by the
  883.      * fastgen-to-BRL-CAD converters, only by human editors.
  884.      */
  885.     if (lastregion->reg_is_fastgen != regp->reg_is_fastgen) {
  886.         if (lastregion->reg_is_fastgen)
  887.         goto code2;     /* keep regp */
  888.         if (regp->reg_is_fastgen)
  889.         goto code1;     /* keep lastregion */
  890.     }
  891.  
  892.     /*
  893.      * To support ray bundles, find partition with the lower
  894.      * contributing ray number (closer to center of bundle), and
  895.      * retain that one.
  896.      */
  897.     {
  898.         int r1 = rt_tree_max_raynum(lastregion->reg_treetop, pp);
  899.         int r2 = rt_tree_max_raynum(regp->reg_treetop, pp);
  900.  
  901.         /* Only use this algorithm if one is not the main ray */
  902.         if (r1 > 0 || r2 > 0) {
  903.         if (r1 < r2) {
  904.             goto code1; /* keep lastregion */
  905.         }
  906.         goto code2;     /* keep regp */
  907.         }
  908.     }
  909.  
  910.     /*
  911.      * Hand overlap to old-style application-specific
  912.      * overlap handler, or default.
  913.      * 0 = destroy partition,
  914.      * 1 = keep part, claiming region=lastregion
  915.      * 2 = keep part, claiming region=regp
  916.      */
  917.     code = ap->a_overlap(ap, pp, lastregion, regp, InputHdp);
  918.  
  919.     /* Implement the policy in "code" */
  920.     if (code == 0) {
  921.         /*
  922.          * Destroy the whole partition.
  923.          */
  924.         bu_ptbl_reset(regiontable);
  925.         return;
  926.     } else if (code == 1) {
  927.     code1:
  928.         /* Keep partition, claiming region = lastregion */
  929.         BU_PTBL_CLEAR_I(regiontable, i);
  930.     } else {
  931.     code2:
  932.         /* Keep partition, claiming region = regp */
  933.         bu_ptbl_zero(regiontable, (long *)lastregion);
  934.         lastregion = regp;
  935.     }
  936.     }
  937. }
  938.  
  939.  
  940. void
  941. rt_silent_logoverlap(struct application *ap, const struct partition *pp, const struct bu_ptbl *regiontable, const struct partition *UNUSED(InputHdp))
  942. {
  943.     RT_CK_AP(ap);
  944.     RT_CK_PT(pp);
  945.     BU_CK_PTBL(regiontable);
  946.     return;
  947. }
  948.  
  949.  
  950. /**
  951.  * Overlap tables are NULL terminated arrays of region pointers.  The
  952.  * order of entries may be different between the two.
  953.  *
  954.  * Returns -
  955.  * 1 The tables match
  956.  * 0 The tables do not match
  957.  */
  958. int
  959. rt_overlap_tables_equal(struct region *const*a, struct region *const*b)
  960. {
  961.     int alen=0, blen=0;
  962.     struct region *const*app;
  963.     struct region *const*bpp;
  964.  
  965.     if (a == NULL && b == NULL)
  966.     return 1;
  967.  
  968.     if (a == NULL || b == NULL)
  969.     return 0;
  970.  
  971.     /* First step, compare lengths */
  972.     for (app = a; *app != NULL; app++) alen++;
  973.     for (bpp = b; *bpp != NULL; bpp++) blen++;
  974.     if (alen != blen) return 0;
  975.  
  976.     /* Second step, compare contents */
  977.     for (app = a; *app != NULL; app++) {
  978.     const struct region *t = *app;
  979.     for (bpp = b; *bpp != NULL; bpp++) {
  980.         if (*bpp == t) goto b_ok;
  981.     }
  982.     /* 't' not found in b table, no match */
  983.     return 0;
  984.     b_ok:       ;
  985.     }
  986.     /* Everything matches */
  987.     return 1;
  988. }
  989.  
  990.  
  991. /**
  992.  * Test to see if a region is ready to be evaluated over a given
  993.  * partition, i.e. if all the prerequisites have been satisfied.
  994.  *
  995.  * Returns -
  996.  * !0 Region is ready for (correct) evaluation.
  997.  *  0 Region is not ready
  998.  */
  999. int
  1000. rt_tree_test_ready(const union tree *tp, const struct bu_bitv *solidbits, const struct region *regionp, const struct partition *pp)
  1001. {
  1002.     RT_CK_TREE(tp);
  1003.     BU_CK_BITV(solidbits);
  1004.     RT_CK_REGION(regionp);
  1005.     RT_CK_PT(pp);
  1006.  
  1007.     switch (tp->tr_op) {
  1008.     case OP_NOP:
  1009.         return 1;
  1010.  
  1011.     case OP_SOLID:
  1012.         if (BU_BITTEST(solidbits, tp->tr_a.tu_stp->st_bit)) {
  1013.         /* This solid's been shot, segs are valid. */
  1014.         return 1;
  1015.         }
  1016.  
  1017.         /*
  1018.          * This solid has not been shot yet.
  1019.          */
  1020.         return 0;
  1021.  
  1022.     case OP_NOT:
  1023.         return !rt_tree_test_ready(tp->tr_b.tb_left, solidbits, regionp, pp);
  1024.  
  1025.     case OP_UNION:
  1026.     case OP_INTERSECT:
  1027.     case OP_SUBTRACT:
  1028.     case OP_XOR:
  1029.         if (!rt_tree_test_ready(tp->tr_b.tb_left, solidbits, regionp, pp))
  1030.         return 0;
  1031.         return rt_tree_test_ready(tp->tr_b.tb_right, solidbits, regionp, pp);
  1032.  
  1033.     default:
  1034.         bu_bomb("rt_tree_test_ready: bad op\n");
  1035.     }
  1036.     return 0;
  1037. }
  1038.  
  1039.  
  1040. /**
  1041.  * If every solid in every region participating in this ray-partition
  1042.  * has already been intersected with the ray, then this partition can
  1043.  * be safely evaluated.
  1044.  *
  1045.  * Returns -
  1046.  * !0 Partition is ready for (correct) evaluation.
  1047.  *  0 Partition is not ready
  1048.  */
  1049. int
  1050. rt_bool_partition_eligible(const struct bu_ptbl *regiontable, const struct bu_bitv *solidbits, const struct partition *pp)
  1051. {
  1052.     struct region **regpp;
  1053.  
  1054.     BU_CK_PTBL(regiontable);
  1055.     BU_CK_BITV(solidbits);
  1056.     RT_CK_PT(pp);
  1057.  
  1058.     for (BU_PTBL_FOR(regpp, (struct region **), regiontable)) {
  1059.     struct region *regp;
  1060.  
  1061.     regp = *regpp;
  1062.     RT_CK_REGION(regp);
  1063.  
  1064.     /* Check region prerequisites */
  1065.     if (!rt_tree_test_ready(regp->reg_treetop, solidbits, regp, pp)) {
  1066.         return 0;
  1067.     }
  1068.     }
  1069.     return 1;
  1070. }
  1071.  
  1072.  
  1073. void
  1074. rt_grow_boolstack(struct resource *resp)
  1075. {
  1076.     if (resp->re_boolstack == (union tree **)0 || resp->re_boolslen <= 0) {
  1077.     resp->re_boolslen = 128;    /* default len */
  1078.     resp->re_boolstack = (union tree **)bu_malloc(sizeof(union tree *) * resp->re_boolslen, "initial boolstack");
  1079.     } else {
  1080.     resp->re_boolslen <<= 1;
  1081.     resp->re_boolstack = (union tree **)bu_realloc((char *)resp->re_boolstack, sizeof(union tree *) * resp->re_boolslen, "extend boolstack");
  1082.     }
  1083. }
  1084.  
  1085.  
/**
 * Using a stack to recall state, evaluate a boolean expression
 * without recursion.
 *
 * The tree is walked down its left spine, pushing binary operators;
 * when a leaf value is obtained, the operator on top of the stack
 * decides whether to short-circuit or to descend into its right
 * subtree, possibly pushing a synthetic OP_NOT/OP_GUARD/OP_XNOP node
 * to post-process the right-hand result.
 *
 * For use with XOR, a pointer to the "first valid subtree" would
 * be a useful addition, for rt_boolregions().
 *
 * Returns -
 * !0 tree is BOOL_TRUE
 *  0 tree is BOOL_FALSE
 * -1 tree is in error (GUARD)
 */
int
rt_booleval(union tree *treep, struct partition *partp, struct region **trueregp, struct resource *resp)
/* Tree to evaluate */
/* Partition to evaluate */
/* XOR true (and overlap) return */
/* resource pointer for this CPU */
{
    /* One synthetic operator node per CPU (indexed by resp->re_cpu),
     * so parallel workers do not stomp on each other's state.
     */
    static union tree tree_not[MAX_PSW];    /* for OP_NOT nodes */
    static union tree tree_guard[MAX_PSW];  /* for OP_GUARD nodes */
    static union tree tree_xnop[MAX_PSW];   /* for OP_XNOP nodes */
    union tree **sp;		/* stack pointer, grows upward */
    int ret;			/* truth value of subtree just evaluated */
    union tree **stackend;	/* one past last valid stack slot */

    RT_CK_TREE(treep);
    RT_CK_PT(partp);
    RT_CK_RESOURCE(resp);
    if (treep->tr_op != OP_XOR)
	trueregp[0] = treep->tr_regionp;
    else
	trueregp[0] = trueregp[1] = REGION_NULL;
    /* Make sure this resource's boolean stack has been allocated. */
    while ((sp = resp->re_boolstack) == (union tree **)0)
	rt_grow_boolstack(resp);
    stackend = &(resp->re_boolstack[resp->re_boolslen]);
    *sp++ = TREE_NULL;		/* sentinel marking stack bottom */
stack:
    /* Descend the left spine of the current subtree. */
    switch (treep->tr_op) {
	case OP_NOP:
	    ret = 0;
	    goto pop;
	case OP_SOLID:
	    {
		/* Leaf: TRUE iff this solid contributed one of the
		 * segments making up the partition being evaluated.
		 */
		struct soltab *seek_stp = treep->tr_a.tu_stp;
		struct seg **segpp;
		for (BU_PTBL_FOR(segpp, (struct seg **), &partp->pt_seglist)) {
		    if ((*segpp)->seg_stp == seek_stp) {
			ret = 1;
			goto pop;
		    }
		}
		ret = 0;
	    }
	    goto pop;
	case OP_UNION:
	case OP_INTERSECT:
	case OP_SUBTRACT:
	case OP_XOR:
	    *sp++ = treep;
	    if (sp >= stackend) {
		/* Stack full: grow it, then re-derive sp/stackend
		 * since the array may have moved.
		 */
		int off = sp - resp->re_boolstack;
		rt_grow_boolstack(resp);
		sp = &(resp->re_boolstack[off]);
		stackend = &(resp->re_boolstack[resp->re_boolslen]);
	    }
	    treep = treep->tr_b.tb_left;
	    goto stack;
	default:
	    bu_log("rt_booleval:  bad stack op [%d]\n", treep->tr_op);
	    return BOOL_TRUE;	/* screw up output */
    }
pop:
    if ((treep = *--sp) == TREE_NULL)
	return ret;		/* top of tree again */
    /*
     * Here, each operation will look at the operation just completed
     * (the left branch of the tree generally), and rewrite the top of
     * the stack and/or branch accordingly.
     */
    switch (treep->tr_op) {
	case OP_SOLID:
	    bu_log("rt_booleval:  pop SOLID?\n");
	    return BOOL_TRUE;	/* screw up output */
	case OP_UNION:
	    if (ret) goto pop;	/* BOOL_TRUE, we are done */
	    /* lhs was false, rewrite as rhs tree */
	    treep = treep->tr_b.tb_right;
	    goto stack;
	case OP_INTERSECT:
	    if (!ret) {
		ret = BOOL_FALSE;
		goto pop;
	    }
	    /* lhs was true, rewrite as rhs tree */
	    treep = treep->tr_b.tb_right;
	    goto stack;
	case OP_SUBTRACT:
	    if (!ret) goto pop;	/* BOOL_FALSE, we are done */
	    /* lhs was true, rewrite as NOT of rhs tree */
	    /* We introduce the special NOT operator here */
	    /* NOTE(review): this push (and the GUARD/XNOP pushes
	     * below) is not checked against stackend the way the
	     * binary-operator push above is -- presumably there is
	     * always headroom, but worth confirming.
	     */
	    tree_not[resp->re_cpu].tr_op = OP_NOT;
	    *sp++ = &tree_not[resp->re_cpu];
	    treep = treep->tr_b.tb_right;
	    goto stack;
	case OP_NOT:
	    /* Special operation for subtraction */
	    ret = !ret;
	    goto pop;
	case OP_XOR:
	    if (ret) {
		/* lhs was true, rhs better not be, or we have an
		 * overlap condition.  Rewrite as guard node followed
		 * by rhs.
		 */
		if (treep->tr_b.tb_left->tr_regionp)
		    trueregp[0] = treep->tr_b.tb_left->tr_regionp;
		tree_guard[resp->re_cpu].tr_op = OP_GUARD;
		treep = treep->tr_b.tb_right;
		*sp++ = treep;		/* temp val for guard node */
		*sp++ = &tree_guard[resp->re_cpu];
	    } else {
		/* lhs was false, rewrite as xnop node and result of
		 * rhs.
		 */
		tree_xnop[resp->re_cpu].tr_op = OP_XNOP;
		treep = treep->tr_b.tb_right;
		*sp++ = treep;		/* temp val for xnop */
		*sp++ = &tree_xnop[resp->re_cpu];
	    }
	    goto stack;
	case OP_GUARD:
	    /*
	     * Special operation for XOR.  lhs was true.  If rhs
	     * subtree was true, an overlap condition exists (both
	     * sides of the XOR are BOOL_TRUE).  Return error
	     * condition.  If subtree is false, then return BOOL_TRUE
	     * (from lhs).
	     */
	    if (ret) {
		/* stacked temp val: rhs */
		if (sp[-1]->tr_regionp)
		    trueregp[1] = sp[-1]->tr_regionp;
		return -1;	/* GUARD error */
	    }
	    ret = BOOL_TRUE;
	    sp--;		/* pop temp val */
	    goto pop;
	case OP_XNOP:
	    /*
	     * Special NOP for XOR.  lhs was false.  If rhs is true,
	     * take note of its regionp.
	     */
	    sp--;		/* pop temp val */
	    if (ret) {
		if ((*sp)->tr_regionp)
		    trueregp[0] = (*sp)->tr_regionp;
	    }
	    goto pop;
	default:
	    bu_log("rt_booleval:  bad pop op [%d]\n", treep->tr_op);
	    return BOOL_TRUE;	/* screw up output */
    }
    /* NOTREACHED */
}
  1251.  
  1252.  
/**
 * Weave the segments in InputHdp into final partitions: for each
 * partition whose state is fully known within [0, enddist], evaluate
 * the boolean trees of all candidate regions, resolve any overlaps
 * via the application's a_multioverlap/a_logoverlap handlers, and
 * move claimed partitions onto FinalHdp (merging with the previous
 * final partition when they abut exactly and agree on region and
 * overlap table).
 *
 * NOTE(review): 'startdist' is accepted but never referenced in this
 * body -- presumably retained for interface compatibility (or used
 * only by debug code elsewhere); confirm before removing.
 *
 * Returns -
 * 1 enough hits are available to satisfy ap->a_onehit; caller may stop
 * 0 not done yet (more boxes/segments needed, or a_onehit unsatisfied)
 */
int
rt_boolfinal(struct partition *InputHdp, struct partition *FinalHdp, fastf_t startdist, fastf_t enddist, struct bu_ptbl *regiontable, struct application *ap, const struct bu_bitv *solidbits)
{
    struct region *lastregion = (struct region *)NULL;	/* last claiming region */
    struct region *TrueRg[2];	/* receives XOR/overlap regions from rt_booleval */
    struct partition *pp;
    int claiming_regions;
    int hits_avail = 0;		/* hit points already on FinalHdp (in+out each) */
    int hits_needed;		/* |a_onehit| */
    int ret = 0;
    int indefinite_outpt = 0;	/* pt_outhit may still change */
    fastf_t diff;

/* Hit points still required to satisfy a_onehit. */
#define HITS_TODO (hits_needed - hits_avail)

    RT_CK_PT_HD(InputHdp);
    RT_CK_PT_HD(FinalHdp);
    BU_CK_PTBL(regiontable);
    RT_CK_RTI(ap->a_rt_i);
    BU_CK_BITV(solidbits);

    /* Install default handlers if the application supplied none. */
    if (!ap->a_multioverlap)
	ap->a_multioverlap = rt_default_multioverlap;

    if (!ap->a_logoverlap)
	ap->a_logoverlap = rt_silent_logoverlap;

    /* Nothing of the ray is in front of us yet. */
    if (enddist <= 0) {
	ret = 0;
	goto out;
    }

    /* a_onehit < 0 means "count only non-air hits"; magnitude is the
     * number of hit points wanted either way.
     */
    if (ap->a_onehit < 0)
	hits_needed = -ap->a_onehit;
    else
	hits_needed = ap->a_onehit;

    /* Count hit points already delivered to FinalHdp; maybe done already. */
    if (ap->a_onehit != 0) {
	struct partition *npp = FinalHdp->pt_forw;

	for (; npp != FinalHdp; npp = npp->pt_forw) {
	    if (npp->pt_inhit->hit_dist < 0.0)
		continue;	/* behind ray start; doesn't count */
	    if (ap->a_onehit < 0 && npp->pt_regionp->reg_aircode != 0)
		continue;	/* skip air hits */
	    hits_avail += 2;	/* both in and out listed */
	}
	if (hits_avail >= hits_needed) {
	    ret = 1;
	    goto out;
	}
    }

    /* With booleans disabled, every input partition is claimed by the
     * first region of its in-segment's solid, no evaluation at all.
     */
    if (ap->a_no_booleans) {
	while ((pp = InputHdp->pt_forw) != InputHdp) {
	    RT_CK_PT(pp);
	    DEQUEUE_PT(pp);
	    pp->pt_regionp = (struct region *)
		BU_PTBL_GET(&pp->pt_inseg->seg_stp->st_regions, 0);
	    RT_CK_REGION(pp->pt_regionp);
	    INSERT_PT(pp, FinalHdp);
	}
	ret = 0;
	goto out;
    }

    /* Main loop: consider each input partition in ray order. */
    pp = InputHdp->pt_forw;
    while (pp != InputHdp) {
	RT_CK_PT(pp);
	claiming_regions = 0;
	RT_CHECK_SEG(pp->pt_inseg);
	RT_CHECK_SEG(pp->pt_outseg);

	/* Force "very thin" partitions to have exactly zero thickness. */
	if (NEAR_EQUAL(pp->pt_inhit->hit_dist, pp->pt_outhit->hit_dist, ap->a_rt_i->rti_tol.dist)) {
	    pp->pt_outhit->hit_dist = pp->pt_inhit->hit_dist;
	}


	/* Sanity checks on sorting. */
	BU_ASSERT(pp->pt_inhit->hit_dist <= pp->pt_outhit->hit_dist);

	if (pp->pt_forw != InputHdp) {
	    diff = pp->pt_outhit->hit_dist - pp->pt_forw->pt_inhit->hit_dist;
	    if (!ZERO(diff)) {
		if (NEAR_ZERO(diff, ap->a_rt_i->rti_tol.dist)) {
		    /* Within tolerance: snap the next partition's
		     * in-hit onto this partition's out-hit.
		     */
		    pp->pt_forw->pt_inhit->hit_dist = pp->pt_outhit->hit_dist;
		} else if (diff > 0) {
		    /* Next partition overlaps this one by more than
		     * tolerance -- input list not in final order yet,
		     * give up for now.
		     */
		    ret = 0;
		    goto out;
		}
	    }
	}

	/*
	 * If partition is behind ray start point, discard it.
	 *
	 * Partition may start before current box starts, because it's
	 * still waiting for all its solids to be shot.
	 */
	if (pp->pt_outhit->hit_dist < ap->a_rt_i->rti_tol.dist) {
	    struct partition *zap_pp;
	    zap_pp = pp;
	    pp = pp->pt_forw;
	    DEQUEUE_PT(zap_pp);
	    FREE_PT(zap_pp, ap->a_resource);
	    continue;
	}

	/*
	 * If partition begins beyond current box end, the state of
	 * the partition is not fully known yet, and new partitions
	 * might be added in front of this one, so stop now.
	 */
	diff = pp->pt_inhit->hit_dist - enddist;
	if (diff > ap->a_rt_i->rti_tol.dist) {
	    ret = 0;
	    goto out;
	}

	/*
	 * If partition ends somewhere beyond the end of the current
	 * box, the condition of the outhit information is not fully
	 * known yet.  The partition might be broken or shortened by
	 * subsequent segments, not discovered until entering future
	 * boxes.
	 */
	diff = pp->pt_outhit->hit_dist - enddist;
	if (diff > ap->a_rt_i->rti_tol.dist) {
	    if (ap->a_onehit != 1) {
		ret = 0;
		goto out;
	    }
	    /* pt_outhit may not be correct */
	    indefinite_outpt = 1;
	} else {
	    indefinite_outpt = 0;
	}

	/* XXX a_ray_length checking should be done here, not in
	 * rt_shootray()
	 */

	/* Start with a clean slate when evaluating this partition */
	bu_ptbl_reset(regiontable);

	/*
	 * For each segment's solid that lies in this partition, add
	 * the list of regions that refer to that solid into the
	 * "regiontable" array.
	 */
	{
	    struct seg **segpp;

	    for (BU_PTBL_FOR(segpp, (struct seg **), &pp->pt_seglist)) {
		struct soltab *stp = (*segpp)->seg_stp;
		RT_CK_SOLTAB(stp);
		bu_ptbl_cat_uniq(regiontable, &stp->st_regions);
	    }
	}

	if (indefinite_outpt) {
	    /*
	     * More hits still needed.  HITS_TODO > 0.  If every solid
	     * in every region participating in this partition has
	     * been intersected, then it is OK to evaluate it now.
	     */
	    if (!rt_bool_partition_eligible(regiontable, solidbits, pp)) {
		ret = 0;
		goto out;
	    }
	}

	/* Evaluate the boolean trees of any regions involved */
	{
	    struct region **regpp;
	    for (BU_PTBL_FOR(regpp, (struct region **), regiontable)) {
		struct region *regp;

		regp = *regpp;
		RT_CK_REGION(regp);
		/* Pure-union regions are TRUE by construction; skip
		 * the full tree evaluation.
		 */
		if (regp->reg_all_unions) {
		    claiming_regions++;
		    lastregion = regp;
		    continue;
		}
		if (rt_booleval(regp->reg_treetop, pp, TrueRg,
				ap->a_resource) == BOOL_FALSE) {
		    /* Null out non-claiming region's pointer */
		    *regpp = REGION_NULL;
		    continue;
		}
		/* This region claims partition */
		claiming_regions++;
		lastregion = regp;
	    }
	}
	if (claiming_regions == 0) {
	    pp = pp->pt_forw;		/* onwards! */
	    continue;
	}

	if (claiming_regions > 1) {
	    /*
	     * There is an overlap between two or more regions, invoke
	     * multi-overlap handler.
	     */
	    bu_ptbl_rm(regiontable, (long *)NULL);
	    ap->a_logoverlap(ap, pp, regiontable, InputHdp);
	    ap->a_multioverlap(ap, pp, regiontable, InputHdp);

	    /* Count number of remaining regions, s/b 0 or 1 */
	    claiming_regions = 0;
	    {
		struct region **regpp;
		for (BU_PTBL_FOR(regpp, (struct region **), regiontable)) {
		    if (*regpp != REGION_NULL) {
			claiming_regions++;
			lastregion = *regpp;
		    }
		}
	    }

	    /*
	     * If claiming_regions == 0, discard partition.
	     * If claiming_regions > 1, signal error and discard.
	     * There is nothing further we can do to fix it.
	     */
	    if (claiming_regions > 1) {
		bu_log("rt_boolfinal() a_multioverlap() failed to resolve overlap, discarding bad partition:\n");
		rt_pr_pt(ap->a_rt_i, pp);
	    }

	    if (claiming_regions != 1) {
		struct partition *zap_pp;

		zap_pp = pp;
		pp = pp->pt_forw;	/* onwards! */
		DEQUEUE_PT(zap_pp);
		FREE_PT(zap_pp, ap->a_resource);
		continue;
	    }
	}

	/*
	 * claiming_regions == 1
	 *
	 * Remove this partition from the input queue, and append it
	 * to the result queue.
	 */
	{
	    struct partition *newpp;
	    struct partition *lastpp;
	    newpp = pp;
	    pp=pp->pt_forw;			/* onwards! */
	    DEQUEUE_PT(newpp);
	    RT_CHECK_SEG(newpp->pt_inseg);	/* sanity */
	    RT_CHECK_SEG(newpp->pt_outseg);	/* sanity */
	    /* Record the "owning" region.  pt_regionp = NULL before here. */
	    newpp->pt_regionp = lastregion;

	    /* See if this new partition extends the previous last
	     * partition, "exactly" matching.
	     */
	    lastpp = FinalHdp->pt_back;
	    if (lastpp != FinalHdp &&
		lastregion == lastpp->pt_regionp &&
		NEAR_EQUAL(newpp->pt_inhit->hit_dist,
			   lastpp->pt_outhit->hit_dist,
			   ap->a_rt_i->rti_tol.dist) &&
		(ap->a_rt_i->rti_save_overlaps == 0 ||
		 rt_overlap_tables_equal(
		     lastpp->pt_overlap_reg,
		     newpp->pt_overlap_reg))
		) {
		/* same region, merge by extending last final partition */
		RT_CK_PT(lastpp);
		RT_CHECK_SEG(lastpp->pt_inseg);	/* sanity */
		RT_CHECK_SEG(lastpp->pt_outseg);/* sanity */
		lastpp->pt_outhit = newpp->pt_outhit;
		lastpp->pt_outflip = newpp->pt_outflip;
		lastpp->pt_outseg = newpp->pt_outseg;

		/* Don't lose the fact that the two solids of this
		 * partition contributed.
		 */
		bu_ptbl_ins_unique(&lastpp->pt_seglist, (long *)newpp->pt_inseg);
		bu_ptbl_ins_unique(&lastpp->pt_seglist, (long *)newpp->pt_outseg);

		FREE_PT(newpp, ap->a_resource);
		newpp = lastpp;
	    } else {
		APPEND_PT(newpp, lastpp);
		/* New hit points count toward a_onehit, unless this
		 * is an air region and only non-air hits are wanted.
		 */
		if (!(ap->a_onehit < 0 && newpp->pt_regionp->reg_aircode != 0))
		    hits_avail += 2;
	    }

	    RT_CHECK_SEG(newpp->pt_inseg);	/* sanity */
	    RT_CHECK_SEG(newpp->pt_outseg);	/* sanity */
	}

	/* See if it's worthwhile breaking out of partition loop early */
	if (ap->a_onehit != 0 && HITS_TODO <= 0) {
	    ret = 1;
	    goto out;
	}
    }
    if (ap->a_onehit != 0 && HITS_TODO <= 0) {
	ret = 1;
    } else {
	ret = 0;
    }
out:
    return ret;
}
  1568.  
  1569. /*
  1570.  * Local Variables:
  1571.  * mode: C
  1572.  * tab-width: 8
  1573.  * indent-tabs-mode: t
  1574.  * c-file-style: "stroustrup"
  1575.  * End:
  1576.  * ex: shiftwidth=4 tabstop=8
  1577.  */
Advertisement
Add Comment
Please, Sign In to add comment