LCOV - code coverage report
Current view: top level - gfx/cairo/cairo/src - cairo-tor-scan-converter.c (source / functions)
Test: output.info
Date: 2017-07-14 16:53:18
                 Hit    Total    Coverage
Lines:             0      730       0.0 %
Functions:          0       51       0.0 %
Legend: Lines: hit | not hit

          Line data    Source code
       1             : /* -*- Mode: c; tab-width: 8; c-basic-offset: 4; indent-tabs-mode: t; -*- */
       2             : /* glitter-paths - polygon scan converter
       3             :  *
       4             :  * Copyright (c) 2008  M Joonas Pihlaja
       5             :  * Copyright (c) 2007  David Turner
       6             :  *
       7             :  * Permission is hereby granted, free of charge, to any person
       8             :  * obtaining a copy of this software and associated documentation
       9             :  * files (the "Software"), to deal in the Software without
      10             :  * restriction, including without limitation the rights to use,
      11             :  * copy, modify, merge, publish, distribute, sublicense, and/or sell
      12             :  * copies of the Software, and to permit persons to whom the
      13             :  * Software is furnished to do so, subject to the following
      14             :  * conditions:
      15             :  *
      16             :  * The above copyright notice and this permission notice shall be
      17             :  * included in all copies or substantial portions of the Software.
      18             :  *
      19             :  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
      20             :  * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
      21             :  * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
      22             :  * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
      23             :  * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
      24             :  * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
      25             :  * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
      26             :  * OTHER DEALINGS IN THE SOFTWARE.
      27             :  */
      28             : /* This is the Glitter paths scan converter incorporated into cairo.
      29             :  * The source is from commit 734c53237a867a773640bd5b64816249fa1730f8
      30             :  * of
      31             :  *
      32             :  *   http://gitweb.freedesktop.org/?p=users/joonas/glitter-paths
      33             :  */
      34             : /* Glitter-paths is a stand alone polygon rasteriser derived from
       35             :  * David Turner's reimplementation of Tor Andersson's 15x17
      36             :  * supersampling rasteriser from the Apparition graphics library.  The
      37             :  * main new feature here is cheaply choosing per-scan line between
      38             :  * doing fully analytical coverage computation for an entire row at a
      39             :  * time vs. using a supersampling approach.
      40             :  *
      41             :  * David Turner's code can be found at
      42             :  *
      43             :  *   http://david.freetype.org/rasterizer-shootout/raster-comparison-20070813.tar.bz2
      44             :  *
      45             :  * In particular this file incorporates large parts of ftgrays_tor10.h
      46             :  * from raster-comparison-20070813.tar.bz2
      47             :  */
      48             : /* Overview
      49             :  *
       50             :  * A scan converter's basic purpose is to take polygon edges and convert
      51             :  * them into an RLE compressed A8 mask.  This one works in two phases:
      52             :  * gathering edges and generating spans.
      53             :  *
      54             :  * 1) As the user feeds the scan converter edges they are vertically
       55             :  * clipped and bucketed into a _polygon_ data structure.  The edges
      56             :  * are also snapped from the user's coordinates to the subpixel grid
      57             :  * coordinates used during scan conversion.
      58             :  *
      59             :  *     user
      60             :  *      |
      61             :  *      | edges
      62             :  *      V
      63             :  *    polygon buckets
      64             :  *
      65             :  * 2) Generating spans works by performing a vertical sweep of pixel
      66             :  * rows from top to bottom and maintaining an _active_list_ of edges
      67             :  * that intersect the row.  From the active list the fill rule
      68             :  * determines which edges are the left and right edges of the start of
      69             :  * each span, and their contribution is then accumulated into a pixel
      70             :  * coverage list (_cell_list_) as coverage deltas.  Once the coverage
      71             :  * deltas of all edges are known we can form spans of constant pixel
      72             :  * coverage by summing the deltas during a traversal of the cell list.
      73             :  * At the end of a pixel row the cell list is sent to a coverage
      74             :  * blitter for rendering to some target surface.
      75             :  *
      76             :  * The pixel coverages are computed by either supersampling the row
      77             :  * and box filtering a mono rasterisation, or by computing the exact
      78             :  * coverages of edges in the active list.  The supersampling method is
      79             :  * used whenever some edge starts or stops within the row or there are
      80             :  * edge intersections in the row.
      81             :  *
      82             :  *   polygon bucket for       \
      83             :  *   current pixel row        |
      84             :  *      |                     |
      85             :  *      | activate new edges  |  Repeat GRID_Y times if we
      86             :  *      V                     \  are supersampling this row,
      87             :  *   active list              /  or just once if we're computing
      88             :  *      |                     |  analytical coverage.
      89             :  *      | coverage deltas     |
      90             :  *      V                     |
      91             :  *   pixel coverage list     /
      92             :  *      |
      93             :  *      V
      94             :  *   coverage blitter
      95             :  */
      96             : #include "cairoint.h"
      97             : #include "cairo-spans-private.h"
      98             : #include "cairo-error-private.h"
      99             : 
     100             : #include <assert.h>
     101             : #include <stdlib.h>
     102             : #include <string.h>
     103             : #include <limits.h>
     104             : 
     105             : /*-------------------------------------------------------------------------
     106             :  * cairo specific config
     107             :  */
     108             : #define I static
     109             : 
     110             : /* Prefer cairo's status type. */
     111             : #define GLITTER_HAVE_STATUS_T 1
     112             : #define GLITTER_STATUS_SUCCESS CAIRO_STATUS_SUCCESS
     113             : #define GLITTER_STATUS_NO_MEMORY CAIRO_STATUS_NO_MEMORY
     114             : typedef cairo_status_t glitter_status_t;
     115             : 
     116             : /* The input coordinate scale and the rasterisation grid scales. */
     117             : #define GLITTER_INPUT_BITS CAIRO_FIXED_FRAC_BITS
     118             : #define GRID_X_BITS CAIRO_FIXED_FRAC_BITS
     119             : #define GRID_Y 15
     120             : 
     121             : /* Set glitter up to use a cairo span renderer to do the coverage
     122             :  * blitting. */
     123             : struct pool;
     124             : struct cell_list;
     125             : 
     126             : static glitter_status_t
     127             : blit_with_span_renderer(
     128             :     struct cell_list            *coverages,
     129             :     cairo_span_renderer_t       *span_renderer,
     130             :     struct pool                 *span_pool,
     131             :     int                          y,
     132             :     int                          height,
     133             :     int                          xmin,
     134             :     int                          xmax);
     135             : 
     136             : static glitter_status_t
     137             : blit_empty_with_span_renderer (cairo_span_renderer_t *renderer, int y, int height);
     138             : 
     139             : #define GLITTER_BLIT_COVERAGES_ARGS \
     140             :         cairo_span_renderer_t *span_renderer, \
     141             :         struct pool *span_pool
     142             : 
     143             : #define GLITTER_BLIT_COVERAGES(cells, y, height,xmin, xmax) do {        \
     144             :     cairo_status_t status = blit_with_span_renderer (cells,             \
     145             :                                                      span_renderer,     \
     146             :                                                      span_pool,         \
     147             :                                                      y, height,         \
     148             :                                                      xmin, xmax);       \
     149             :     if (unlikely (status))                                              \
     150             :         return status;                                                  \
     151             : } while (0)
     152             : 
     153             : #define GLITTER_BLIT_COVERAGES_EMPTY(y, height, xmin, xmax) do {                \
     154             :     cairo_status_t status = blit_empty_with_span_renderer (span_renderer, y, height); \
     155             :     if (unlikely (status))                                              \
     156             :         return status;                                                  \
     157             : } while (0)
     158             : 
     159             : /*-------------------------------------------------------------------------
     160             :  * glitter-paths.h
     161             :  */
     162             : 
     163             : /* "Input scaled" numbers are fixed precision reals with multiplier
     164             :  * 2**GLITTER_INPUT_BITS.  Input coordinates are given to glitter as
     165             :  * pixel scaled numbers.  These get converted to the internal grid
     166             :  * scaled numbers as soon as possible. Internal overflow is possible
     167             :  * if GRID_X/Y inside glitter-paths.c is larger than
     168             :  * 1<<GLITTER_INPUT_BITS. */
     169             : #ifndef GLITTER_INPUT_BITS
     170             : #  define GLITTER_INPUT_BITS 8
     171             : #endif
     172             : #define GLITTER_INPUT_SCALE (1<<GLITTER_INPUT_BITS)
     173             : typedef int glitter_input_scaled_t;
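
/* Illustrative sketch (not part of the original file): converting an input
 * scaled coordinate to the internal grid scale.  The real conversion happens
 * when edges are added to the polygon (later in the file, not shown in this
 * excerpt); this helper only illustrates the arithmetic and its name is
 * hypothetical.  With the cairo configuration above GRID_X_BITS equals
 * GLITTER_INPUT_BITS, so x coordinates pass through unchanged, while y is
 * rescaled from 2**GLITTER_INPUT_BITS units to the GRID_Y (15) subsample
 * rows per pixel.  The arithmetic right shift floors, matching the floored
 * convention of the grid macros below. */
static inline int
input_to_grid_y_example (glitter_input_scaled_t y)
{
    /* Multiply first to keep precision; assumes the coordinate ranges cairo
     * feeds in cannot overflow a 64-bit intermediate. */
    return (int) (((long long) y * GRID_Y) >> GLITTER_INPUT_BITS);
}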
     174             : 
     175             : #if !GLITTER_HAVE_STATUS_T
     176             : typedef enum {
     177             :     GLITTER_STATUS_SUCCESS = 0,
     178             :     GLITTER_STATUS_NO_MEMORY
     179             : } glitter_status_t;
     180             : #endif
     181             : 
     182             : #ifndef I
     183             : # define I /*static*/
     184             : #endif
     185             : 
     186             : /* Opaque type for scan converting. */
     187             : typedef struct glitter_scan_converter glitter_scan_converter_t;
     188             : 
     189             : /* Reset a scan converter to accept polygon edges and set the clip box
     190             :  * in pixels.  Allocates O(ymax-ymin) bytes of memory.  The clip box
     191             :  * is set to integer pixel coordinates xmin <= x < xmax, ymin <= y <
     192             :  * ymax. */
     193             : I glitter_status_t
     194             : glitter_scan_converter_reset(
     195             :     glitter_scan_converter_t *converter,
     196             :     int xmin, int ymin,
     197             :     int xmax, int ymax);
     198             : 
     199             : /* Add a new polygon edge from pixel (x1,y1) to (x2,y2) to the scan
     200             :  * converter.  The coordinates represent pixel positions scaled by
      201             :  * 2**GLITTER_INPUT_BITS.  If this function fails then the scan
     202             :  * converter should be reset or destroyed.  Dir must be +1 or -1,
     203             :  * with the latter reversing the orientation of the edge. */
     204             : I glitter_status_t
     205             : glitter_scan_converter_add_edge (glitter_scan_converter_t *converter,
     206             :                                  const cairo_edge_t *edge);
     207             : 
     208             : /* Render the polygon in the scan converter to the given A8 format
     209             :  * image raster.  Only the pixels accessible as pixels[y*stride+x] for
     210             :  * x,y inside the clip box are written to, where xmin <= x < xmax,
     211             :  * ymin <= y < ymax.  The image is assumed to be clear on input.
     212             :  *
     213             :  * If nonzero_fill is true then the interior of the polygon is
     214             :  * computed with the non-zero fill rule.  Otherwise the even-odd fill
     215             :  * rule is used.
     216             :  *
     217             :  * The scan converter must be reset or destroyed after this call. */
     218             : #ifndef GLITTER_BLIT_COVERAGES_ARGS
     219             : # define GLITTER_BLIT_COVERAGES_ARGS unsigned char *raster_pixels, long raster_stride
     220             : #endif
     221             : I glitter_status_t
     222             : glitter_scan_converter_render(
     223             :     glitter_scan_converter_t *converter,
     224             :     int nonzero_fill,
     225             :     GLITTER_BLIT_COVERAGES_ARGS);
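
/* Illustrative sketch (not part of the original file): how the two phases
 * described in the overview at the top of the file map onto the entry points
 * declared above.  With the cairo configuration in this file the
 * GLITTER_BLIT_COVERAGES_ARGS of _render() are a span renderer and a span
 * pool.  The function name is hypothetical; in cairo the real driver is the
 * span-renderer glue that owns the converter. */
static cairo_status_t
rasterize_edges_example (glitter_scan_converter_t *converter,
                         const cairo_edge_t       *edges,
                         int                        num_edges,
                         int xmin, int ymin, int xmax, int ymax,
                         int nonzero_fill,
                         cairo_span_renderer_t     *span_renderer,
                         struct pool               *span_pool)
{
    cairo_status_t status;
    int i;

    /* Phase 1: reset the converter for the clip box, then feed it edges;
     * they are snapped to grid coordinates and bucketed by pixel row. */
    status = glitter_scan_converter_reset (converter, xmin, ymin, xmax, ymax);
    if (unlikely (status))
        return status;

    for (i = 0; i < num_edges; i++) {
        status = glitter_scan_converter_add_edge (converter, &edges[i]);
        if (unlikely (status))
            return status;
    }

    /* Phase 2: sweep the pixel rows top to bottom and blit the resulting
     * spans through the span renderer. */
    return glitter_scan_converter_render (converter, nonzero_fill,
                                          span_renderer, span_pool);
}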
     226             : 
     227             : /*-------------------------------------------------------------------------
     228             :  * glitter-paths.c: Implementation internal types
     229             :  */
     230             : #include <stdlib.h>
     231             : #include <string.h>
     232             : #include <limits.h>
     233             : 
     234             : /* All polygon coordinates are snapped onto a subsample grid. "Grid
     235             :  * scaled" numbers are fixed precision reals with multiplier GRID_X or
     236             :  * GRID_Y. */
     237             : typedef int grid_scaled_t;
     238             : typedef int grid_scaled_x_t;
     239             : typedef int grid_scaled_y_t;
     240             : 
     241             : /* Default x/y scale factors.
     242             :  *  You can either define GRID_X/Y_BITS to get a power-of-two scale
     243             :  *  or define GRID_X/Y separately. */
     244             : #if !defined(GRID_X) && !defined(GRID_X_BITS)
     245             : #  define GRID_X_BITS 8
     246             : #endif
     247             : #if !defined(GRID_Y) && !defined(GRID_Y_BITS)
     248             : #  define GRID_Y 15
     249             : #endif
     250             : 
     251             : /* Use GRID_X/Y_BITS to define GRID_X/Y if they're available. */
     252             : #ifdef GRID_X_BITS
     253             : #  define GRID_X (1 << GRID_X_BITS)
     254             : #endif
     255             : #ifdef GRID_Y_BITS
     256             : #  define GRID_Y (1 << GRID_Y_BITS)
     257             : #endif
     258             : 
     259             : /* The GRID_X_TO_INT_FRAC macro splits a grid scaled coordinate into
     260             :  * integer and fractional parts. The integer part is floored. */
     261             : #if defined(GRID_X_TO_INT_FRAC)
     262             :   /* do nothing */
     263             : #elif defined(GRID_X_BITS)
     264             : #  define GRID_X_TO_INT_FRAC(x, i, f) \
     265             :         _GRID_TO_INT_FRAC_shift(x, i, f, GRID_X_BITS)
     266             : #else
     267             : #  define GRID_X_TO_INT_FRAC(x, i, f) \
     268             :         _GRID_TO_INT_FRAC_general(x, i, f, GRID_X)
     269             : #endif
     270             : 
     271             : #define _GRID_TO_INT_FRAC_general(t, i, f, m) do {      \
     272             :     (i) = (t) / (m);                                    \
     273             :     (f) = (t) % (m);                                    \
     274             :     if ((f) < 0) {                                   \
     275             :         --(i);                                          \
     276             :         (f) += (m);                                     \
     277             :     }                                                   \
     278             : } while (0)
     279             : 
     280             : #define _GRID_TO_INT_FRAC_shift(t, i, f, b) do {        \
     281             :     (f) = (t) & ((1 << (b)) - 1);                     \
     282             :     (i) = (t) >> (b);                                     \
     283             : } while (0)
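
/* Worked example (not part of the original file): with GRID_X_BITS == 8 the
 * shift variant splits a grid scaled x into a floored integer pixel and a
 * non-negative fraction, e.g.
 *
 *     x = 600  ->  i =  2, f =  88   (600 =  2*256 +  88)
 *     x =  -3  ->  i = -1, f = 253   ( -3 = -1*256 + 253)
 *
 * The general variant gives the same floored results when GRID_X is not a
 * power of two, which is why it fixes up negative remainders explicitly. */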
     284             : 
     285             : /* A grid area is a real in [0,1] scaled by 2*GRID_X*GRID_Y.  We want
     286             :  * to be able to represent exactly areas of subpixel trapezoids whose
     287             :  * vertices are given in grid scaled coordinates.  The scale factor
     288             :  * comes from needing to accurately represent the area 0.5*dx*dy of a
     289             :  * triangle with base dx and height dy in grid scaled numbers. */
     290             : typedef int grid_area_t;
     291             : #define GRID_XY (2*GRID_X*GRID_Y) /* Unit area on the grid. */
     292             : 
     293             : /* GRID_AREA_TO_ALPHA(area): map [0,GRID_XY] to [0,255]. */
     294             : #if GRID_XY == 510
     295             : #  define GRID_AREA_TO_ALPHA(c)   (((c)+1) >> 1)
     296             : #elif GRID_XY == 255
     297             : #  define  GRID_AREA_TO_ALPHA(c)  (c)
     298             : #elif GRID_XY == 64
     299             : #  define  GRID_AREA_TO_ALPHA(c)  (((c) << 2) | -(((c) & 0x40) >> 6))
     300             : #elif GRID_XY == 128
     301             : #  define  GRID_AREA_TO_ALPHA(c)  ((((c) << 1) | -((c) >> 7)) & 255)
     302             : #elif GRID_XY == 256
     303             : #  define  GRID_AREA_TO_ALPHA(c)  (((c) | -((c) >> 8)) & 255)
     304             : #elif GRID_XY == 15
     305             : #  define  GRID_AREA_TO_ALPHA(c)  (((c) << 4) + (c))
     306             : #elif GRID_XY == 2*256*15
     307             : #  define  GRID_AREA_TO_ALPHA(c)  (((c) + ((c)<<4) + 256) >> 9)
     308             : #else
     309             : #  define  GRID_AREA_TO_ALPHA(c)  (((c)*255 + GRID_XY/2) / GRID_XY)
     310             : #endif
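
/* Worked example (not part of the original file): with cairo's default
 * CAIRO_FIXED_FRAC_BITS of 8 we get GRID_X = 256 and GRID_Y = 15, so
 * GRID_XY = 2*256*15 = 7680 and the GRID_XY == 2*256*15 branch above is
 * selected:
 *
 *     GRID_AREA_TO_ALPHA(c) = (c + (c<<4) + 256) >> 9 = (17*c + 256) >> 9
 *
 *     c =    0 (empty pixel)  ->   0
 *     c = 3840 (half covered) -> 128
 *     c = 7680 (full pixel)   -> 255
 *
 * Since 255/7680 == 17/512 exactly, this is c*255/GRID_XY rounded to the
 * nearest integer, computed with a shift and an add instead of a divide. */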
     311             : 
     312             : #define UNROLL3(x) x x x
     313             : 
     314             : struct quorem {
     315             :     int32_t quo;
     316             :     int32_t rem;
     317             : };
     318             : 
     319             : /* Header for a chunk of memory in a memory pool. */
     320             : struct _pool_chunk {
     321             :     /* # bytes used in this chunk. */
     322             :     size_t size;
     323             : 
     324             :     /* # bytes total in this chunk */
     325             :     size_t capacity;
     326             : 
     327             :     /* Pointer to the previous chunk or %NULL if this is the sentinel
     328             :      * chunk in the pool header. */
     329             :     struct _pool_chunk *prev_chunk;
     330             : 
     331             :     /* Actual data starts here.  Well aligned for pointers. */
     332             : };
     333             : 
     334             : /* A memory pool.  This is supposed to be embedded on the stack or
     335             :  * within some other structure.  It may optionally be followed by an
     336             :  * embedded array from which requests are fulfilled until
     337             :  * malloc needs to be called to allocate a first real chunk. */
     338             : struct pool {
     339             :     /* Chunk we're allocating from. */
     340             :     struct _pool_chunk *current;
     341             : 
     342             :     /* Free list of previously allocated chunks.  All have >= default
     343             :      * capacity. */
     344             :     struct _pool_chunk *first_free;
     345             : 
     346             :     /* The default capacity of a chunk. */
     347             :     size_t default_capacity;
     348             : 
     349             :     /* Header for the sentinel chunk.  Directly following the pool
     350             :      * struct should be some space for embedded elements from which
      351             :      * the sentinel chunk allocates. */
     352             :     struct _pool_chunk sentinel[1];
     353             : };
     354             : 
     355             : /* A polygon edge. */
     356             : struct edge {
     357             :     /* Next in y-bucket or active list. */
     358             :     struct edge *next;
     359             : 
     360             :     /* Current x coordinate while the edge is on the active
     361             :      * list. Initialised to the x coordinate of the top of the
     362             :      * edge. The quotient is in grid_scaled_x_t units and the
     363             :      * remainder is mod dy in grid_scaled_y_t units.*/
     364             :     struct quorem x;
     365             : 
     366             :     /* Advance of the current x when moving down a subsample line. */
     367             :     struct quorem dxdy;
     368             : 
     369             :     /* Advance of the current x when moving down a full pixel
     370             :      * row. Only initialised when the height of the edge is large
     371             :      * enough that there's a chance the edge could be stepped by a
     372             :      * full row's worth of subsample rows at a time. */
     373             :     struct quorem dxdy_full;
     374             : 
     375             :     /* The clipped y of the top of the edge. */
     376             :     grid_scaled_y_t ytop;
     377             : 
     378             :     /* y2-y1 after orienting the edge downwards.  */
     379             :     grid_scaled_y_t dy;
     380             : 
      381             :     /* Number of subsample rows remaining to scan convert for this
     382             :      * edge. */
     383             :     grid_scaled_y_t height_left;
     384             : 
     385             :     /* Original sign of the edge: +1 for downwards, -1 for upwards
     386             :      * edges.  */
     387             :     int dir;
     388             :     int vertical;
     389             : };
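
/* Illustrative sketch (not part of the original file): how the quotient/
 * remainder pair advances an edge's x by one subsample row without a
 * division per step.  The helper name is hypothetical; the same carry
 * pattern is used with dxdy_full for full-row steps in
 * cell_list_render_edge() below, which keeps the remainder negative
 * between steps. */
static inline void
edge_step_one_subrow_example (struct edge *e)
{
    e->x.quo += e->dxdy.quo;
    e->x.rem += e->dxdy.rem;
    if (e->x.rem >= 0) {
        /* The remainder crossed zero: carry one grid unit into the
         * quotient and pull the remainder back below zero. */
        ++e->x.quo;
        e->x.rem -= e->dy;
    }
}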
     390             : 
     391             : /* Number of subsample rows per y-bucket. Must be GRID_Y. */
     392             : #define EDGE_Y_BUCKET_HEIGHT GRID_Y
     393             : 
     394             : #define EDGE_Y_BUCKET_INDEX(y, ymin) (((y) - (ymin))/EDGE_Y_BUCKET_HEIGHT)
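
/* Worked example (not part of the original file): with GRID_Y = 15 each
 * bucket covers one pixel row's worth of subsample lines, so an edge whose
 * clipped top is ytop = ymin + 40 grid lines starts in bucket
 * EDGE_Y_BUCKET_INDEX(ymin + 40, ymin) = 40/15 = 2, i.e. the third pixel
 * row of the clip box. */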
     395             : 
     396             : struct bucket {
     397             :     /* Unsorted list of edges starting within this bucket. */
     398             :     struct edge *edges;
     399             : 
     400             :     /* Set to non-zero if there are edges starting strictly within the
     401             :      * bucket. */
     402             :     unsigned     have_inside_edges;
     403             : };
     404             : 
     405             : /* A collection of sorted and vertically clipped edges of the polygon.
     406             :  * Edges are moved from the polygon to an active list while scan
     407             :  * converting. */
     408             : struct polygon {
     409             :     /* The clip extents. */
     410             :     grid_scaled_x_t xmin, xmax;
     411             :     grid_scaled_y_t ymin, ymax;
     412             : 
     413             :     /* Array of edges all starting in the same bucket.  An edge is put
     414             :      * into bucket EDGE_BUCKET_INDEX(edge->ytop, polygon->ymin) when
     415             :      * it is added to the polygon. */
     416             :     struct bucket *y_buckets;
     417             :     struct bucket  y_buckets_embedded[64];
     418             : 
     419             :     struct {
     420             :         struct pool base[1];
     421             :         struct edge embedded[32];
     422             :     } edge_pool;
     423             : };
     424             : 
     425             : /* A cell records the effect on pixel coverage of polygon edges
     426             :  * passing through a pixel.  It contains two accumulators of pixel
     427             :  * coverage.
     428             :  *
     429             :  * Consider the effects of a polygon edge on the coverage of a pixel
     430             :  * it intersects and that of the following one.  The coverage of the
     431             :  * following pixel is the height of the edge multiplied by the width
     432             :  * of the pixel, and the coverage of the pixel itself is the area of
     433             :  * the trapezoid formed by the edge and the right side of the pixel.
     434             :  *
     435             :  * +-----------------------+-----------------------+
     436             :  * |                       |                       |
     437             :  * |                       |                       |
     438             :  * |_______________________|_______________________|
     439             :  * |   \...................|.......................|\
     440             :  * |    \..................|.......................| |
     441             :  * |     \.................|.......................| |
     442             :  * |      \....covered.....|.......................| |
     443             :  * |       \....area.......|.......................| } covered height
     444             :  * |        \..............|.......................| |
     445             :  * |uncovered\.............|.......................| |
     446             :  * |  area    \............|.......................| |
     447             :  * |___________\...........|.......................|/
     448             :  * |                       |                       |
     449             :  * |                       |                       |
     450             :  * |                       |                       |
     451             :  * +-----------------------+-----------------------+
     452             :  *
     453             :  * Since the coverage of the following pixel will always be a multiple
     454             :  * of the width of the pixel, we can store the height of the covered
     455             :  * area instead.  The coverage of the pixel itself is the total
      456             :  * coverage minus the uncovered area to the left of the
     457             :  * edge.  As it's faster to compute the uncovered area we only store
     458             :  * that and subtract it from the total coverage later when forming
     459             :  * spans to blit.
     460             :  *
     461             :  * The heights and areas are signed, with left edges of the polygon
     462             :  * having positive sign and right edges having negative sign.  When
     463             :  * two edges intersect they swap their left/rightness so their
     464             :  * contribution above and below the intersection point must be
     465             :  * computed separately. */
     466             : struct cell {
     467             :     struct cell         *next;
     468             :     int                  x;
     469             :     grid_area_t          uncovered_area;
     470             :     grid_scaled_y_t      covered_height;
     471             : };
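
/* Worked example (not part of the original file): take GRID_X = 256,
 * GRID_Y = 15 and an edge rendered with sign = +1 that crosses a full pixel
 * row inside one column, entering at fractional x fx1 = 64 and leaving at
 * fx2 = 192.  The single-column branch of cell_list_render_edge() below adds
 *
 *     covered_height += GRID_Y             = 15
 *     uncovered_area += (fx1 + fx2)*GRID_Y = 3840
 *
 * When spans are formed (in the span-forming pass later in the file, not
 * shown in this excerpt) the accumulated covered height contributes
 * 15*2*256 = 7680 grid area units, i.e. a fully covered pixel, from which
 * the stored uncovered area of 3840 is subtracted.  That leaves
 * 3840 = GRID_XY/2: the pixel is half covered and maps to an alpha of
 * about 128. */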
     472             : 
     473             : /* A cell list represents the scan line sparsely as cells ordered by
     474             :  * ascending x.  It is geared towards scanning the cells in order
     475             :  * using an internal cursor. */
     476             : struct cell_list {
     477             :     /* Points to the left-most cell in the scan line. */
     478             :     struct cell *head;
     479             :     /* Sentinel node */
     480             :     struct cell tail;
     481             : 
     482             :     /* Cursor state for iterating through the cell list.  Points to
     483             :      * a pointer to the current cell: either &cell_list->head or the next
     484             :      * field of the previous cell. */
     485             :     struct cell **cursor;
     486             : 
     487             :     /* Cells in the cell list are owned by the cell list and are
     488             :      * allocated from this pool.  */
     489             :     struct {
     490             :         struct pool base[1];
     491             :         struct cell embedded[32];
     492             :     } cell_pool;
     493             : };
     494             : 
     495             : struct cell_pair {
     496             :     struct cell *cell1;
     497             :     struct cell *cell2;
     498             : };
     499             : 
     500             : /* The active list contains edges in the current scan line ordered by
     501             :  * the x-coordinate of the intercept of the edge and the scan line. */
     502             : struct active_list {
     503             :     /* Leftmost edge on the current scan line. */
     504             :     struct edge *head;
     505             : 
     506             :     /* A lower bound on the height of the active edges is used to
     507             :      * estimate how soon some active edge ends.  We can't advance the
     508             :      * scan conversion by a full pixel row if an edge ends somewhere
     509             :      * within it. */
     510             :     grid_scaled_y_t min_height;
     511             : };
     512             : 
     513             : struct glitter_scan_converter {
     514             :     struct polygon      polygon[1];
     515             :     struct active_list  active[1];
     516             :     struct cell_list    coverages[1];
     517             : 
     518             :     /* Clip box. */
     519             :     grid_scaled_x_t xmin, xmax;
     520             :     grid_scaled_y_t ymin, ymax;
     521             : };
     522             : 
     523             : /* Compute the floored division a/b. Assumes / and % perform symmetric
     524             :  * division. */
     525             : inline static struct quorem
     526           0 : floored_divrem(int a, int b)
     527             : {
     528             :     struct quorem qr;
     529           0 :     qr.quo = a/b;
     530           0 :     qr.rem = a%b;
     531           0 :     if ((a^b)<0 && qr.rem) {
     532           0 :         qr.quo -= 1;
     533           0 :         qr.rem += b;
     534             :     }
     535           0 :     return qr;
     536             : }
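
/* Worked example (not part of the original file): floored_divrem(-7, 4).
 * Symmetric (truncate-towards-zero) division gives quo = -1, rem = -3;
 * since a and b differ in sign and the remainder is non-zero, the fixup
 * yields quo = -2, rem = 1, and indeed -2*4 + 1 = -7 with 0 <= rem < b.
 * That floored form is what the DDA stepping above and below relies on. */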
     537             : 
     538             : /* Compute the floored division (x*a)/b. Assumes / and % perform symmetric
     539             :  * division. */
     540             : static struct quorem
     541           0 : floored_muldivrem(int x, int a, int b)
     542             : {
     543             :     struct quorem qr;
     544           0 :     long long xa = (long long)x*a;
     545           0 :     qr.quo = xa/b;
     546           0 :     qr.rem = xa%b;
     547           0 :     if ((xa>=0) != (b>=0) && qr.rem) {
     548           0 :         qr.quo -= 1;
     549           0 :         qr.rem += b;
     550             :     }
     551           0 :     return qr;
     552             : }
     553             : 
     554             : static void
     555           0 : _pool_chunk_init(
     556             :     struct _pool_chunk *p,
     557             :     struct _pool_chunk *prev_chunk,
     558             :     size_t capacity)
     559             : {
     560           0 :     p->prev_chunk = prev_chunk;
     561           0 :     p->size = 0;
     562           0 :     p->capacity = capacity;
     563           0 : }
     564             : 
     565             : static struct _pool_chunk *
     566           0 : _pool_chunk_create(
     567             :     struct _pool_chunk *prev_chunk,
     568             :     size_t size)
     569             : {
     570             :     struct _pool_chunk *p;
     571           0 :     size_t size_with_head = size + sizeof(struct _pool_chunk);
     572           0 :     if (size_with_head < size)
     573           0 :         return NULL;
     574           0 :     p = malloc(size_with_head);
     575           0 :     if (p)
     576           0 :         _pool_chunk_init(p, prev_chunk, size);
     577           0 :     return p;
     578             : }
     579             : 
     580             : static void
     581           0 : pool_init(
     582             :     struct pool *pool,
     583             :     size_t default_capacity,
     584             :     size_t embedded_capacity)
     585             : {
     586           0 :     pool->current = pool->sentinel;
     587           0 :     pool->first_free = NULL;
     588           0 :     pool->default_capacity = default_capacity;
     589           0 :     _pool_chunk_init(pool->sentinel, NULL, embedded_capacity);
     590           0 : }
     591             : 
     592             : static void
     593           0 : pool_fini(struct pool *pool)
     594             : {
     595           0 :     struct _pool_chunk *p = pool->current;
     596             :     do {
     597           0 :         while (NULL != p) {
     598           0 :             struct _pool_chunk *prev = p->prev_chunk;
     599           0 :             if (p != pool->sentinel)
     600           0 :                 free(p);
     601           0 :             p = prev;
     602             :         }
     603           0 :         p = pool->first_free;
     604           0 :         pool->first_free = NULL;
     605           0 :     } while (NULL != p);
     606           0 :     pool_init(pool, 0, 0);
     607           0 : }
     608             : 
     609             : /* Satisfy an allocation by first allocating a new large enough chunk
     610             :  * and adding it to the head of the pool's chunk list. This function
     611             :  * is called as a fallback if pool_alloc() couldn't do a quick
     612             :  * allocation from the current chunk in the pool. */
     613             : static void *
     614           0 : _pool_alloc_from_new_chunk(
     615             :     struct pool *pool,
     616             :     size_t size)
     617             : {
     618             :     struct _pool_chunk *chunk;
     619             :     void *obj;
     620             :     size_t capacity;
     621             : 
     622             :     /* If the allocation is smaller than the default chunk size then
     623             :      * try getting a chunk off the free list.  Force alloc of a new
     624             :      * chunk for large requests. */
     625           0 :     capacity = size;
     626           0 :     chunk = NULL;
     627           0 :     if (size < pool->default_capacity) {
     628           0 :         capacity = pool->default_capacity;
     629           0 :         chunk = pool->first_free;
     630           0 :         if (chunk) {
     631           0 :             pool->first_free = chunk->prev_chunk;
     632           0 :             _pool_chunk_init(chunk, pool->current, chunk->capacity);
     633             :         }
     634             :     }
     635             : 
     636           0 :     if (NULL == chunk) {
     637           0 :         chunk = _pool_chunk_create (pool->current, capacity);
     638           0 :         if (unlikely (NULL == chunk))
     639           0 :             return NULL;
     640             :     }
     641           0 :     pool->current = chunk;
     642             : 
     643           0 :     obj = ((unsigned char*)chunk + sizeof(*chunk) + chunk->size);
     644           0 :     chunk->size += size;
     645           0 :     return obj;
     646             : }
     647             : 
     648             : /* Allocate size bytes from the pool.  The first allocated address
     649             :  * returned from a pool is aligned to sizeof(void*).  Subsequent
     650             :  * addresses will maintain alignment as long as multiples of void* are
     651             :  * allocated.  Returns the address of a new memory area or %NULL on
     652             :  * allocation failures.  The pool retains ownership of the returned
     653             :  * memory. */
     654             : inline static void *
     655           0 : pool_alloc (struct pool *pool, size_t size)
     656             : {
     657           0 :     struct _pool_chunk *chunk = pool->current;
     658             : 
     659           0 :     if (size <= chunk->capacity - chunk->size) {
     660           0 :         void *obj = ((unsigned char*)chunk + sizeof(*chunk) + chunk->size);
     661           0 :         chunk->size += size;
     662           0 :         return obj;
     663             :     } else {
     664           0 :         return _pool_alloc_from_new_chunk(pool, size);
     665             :     }
     666             : }
     667             : 
     668             : /* Relinquish all pool_alloced memory back to the pool. */
     669             : static void
     670           0 : pool_reset (struct pool *pool)
     671             : {
     672             :     /* Transfer all used chunks to the chunk free list. */
     673           0 :     struct _pool_chunk *chunk = pool->current;
     674           0 :     if (chunk != pool->sentinel) {
     675           0 :         while (chunk->prev_chunk != pool->sentinel) {
     676           0 :             chunk = chunk->prev_chunk;
     677             :         }
     678           0 :         chunk->prev_chunk = pool->first_free;
     679           0 :         pool->first_free = pool->current;
     680             :     }
     681             :     /* Reset the sentinel as the current chunk. */
     682           0 :     pool->current = pool->sentinel;
     683           0 :     pool->sentinel->size = 0;
     684           0 : }
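
/* Illustrative sketch (not part of the original file): the intended usage
 * pattern for struct pool, mirroring the embedded cell_pool/edge_pool
 * declarations above.  The struct and function names here are hypothetical. */
struct thing { int payload; };

struct thing_store {
    struct pool  base[1];
    struct thing embedded[16];   /* served by the sentinel chunk first */
};

static cairo_status_t
thing_store_example (struct thing_store *store)
{
    struct thing *t;

    pool_init (store->base,
               256 * sizeof (struct thing),   /* default chunk capacity */
               sizeof (store->embedded));     /* embedded capacity      */

    t = pool_alloc (store->base, sizeof (struct thing));
    if (unlikely (t == NULL)) {
        pool_fini (store->base);
        return CAIRO_STATUS_NO_MEMORY;
    }
    t->payload = 42;

    pool_reset (store->base);    /* hand every allocation back at once */
    pool_fini (store->base);     /* free any malloc'ed chunks          */
    return CAIRO_STATUS_SUCCESS;
}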
     685             : 
     686             : /* Rewinds the cell list's cursor to the beginning.  After rewinding
      687             :  * we're good to cell_list_find() the cell at any x coordinate. */
     688             : inline static void
     689           0 : cell_list_rewind (struct cell_list *cells)
     690             : {
     691           0 :     cells->cursor = &cells->head;
     692           0 : }
     693             : 
     694             : /* Rewind the cell list if its cursor has been advanced past x. */
     695             : inline static void
     696           0 : cell_list_maybe_rewind (struct cell_list *cells, int x)
     697             : {
     698           0 :     struct cell *tail = *cells->cursor;
     699           0 :     if (tail->x > x)
     700           0 :         cell_list_rewind (cells);
     701           0 : }
     702             : 
     703             : static void
     704           0 : cell_list_init(struct cell_list *cells)
     705             : {
     706           0 :     pool_init(cells->cell_pool.base,
     707             :               256*sizeof(struct cell),
     708             :               sizeof(cells->cell_pool.embedded));
     709           0 :     cells->tail.next = NULL;
     710           0 :     cells->tail.x = INT_MAX;
     711           0 :     cells->head = &cells->tail;
     712           0 :     cell_list_rewind (cells);
     713           0 : }
     714             : 
     715             : static void
     716           0 : cell_list_fini(struct cell_list *cells)
     717             : {
     718           0 :     pool_fini (cells->cell_pool.base);
     719           0 : }
     720             : 
     721             : /* Empty the cell list.  This is called at the start of every pixel
     722             :  * row. */
     723             : inline static void
     724           0 : cell_list_reset (struct cell_list *cells)
     725             : {
     726           0 :     cell_list_rewind (cells);
     727           0 :     cells->head = &cells->tail;
     728           0 :     pool_reset (cells->cell_pool.base);
     729           0 : }
     730             : 
     731             : static struct cell *
     732           0 : cell_list_alloc (struct cell_list *cells,
     733             :                  struct cell **cursor,
     734             :                  struct cell *tail,
     735             :                  int x)
     736             : {
     737             :     struct cell *cell;
     738             : 
     739           0 :     cell = pool_alloc (cells->cell_pool.base, sizeof (struct cell));
     740           0 :     if (unlikely (NULL == cell))
     741           0 :         return NULL;
     742             : 
     743           0 :     *cursor = cell;
     744           0 :     cell->next = tail;
     745           0 :     cell->x = x;
     746           0 :     cell->uncovered_area = 0;
     747           0 :     cell->covered_height = 0;
     748           0 :     return cell;
     749             : }
     750             : 
     751             : /* Find a cell at the given x-coordinate.  Returns %NULL if a new cell
     752             :  * needed to be allocated but couldn't be.  Cells must be found with
     753             :  * non-decreasing x-coordinate until the cell list is rewound using
     754             :  * cell_list_rewind(). Ownership of the returned cell is retained by
     755             :  * the cell list. */
     756             : inline static struct cell *
     757           0 : cell_list_find (struct cell_list *cells, int x)
     758             : {
     759           0 :     struct cell **cursor = cells->cursor;
     760             :     struct cell *tail;
     761             : 
     762             :     while (1) {
     763           0 :         UNROLL3({
     764             :             tail = *cursor;
     765             :             if (tail->x >= x) {
     766             :                 break;
     767             :             }
     768             :             cursor = &tail->next;
     769             :         });
     770             :     }
     771           0 :     cells->cursor = cursor;
     772             : 
     773           0 :     if (tail->x == x)
     774           0 :         return tail;
     775             : 
     776           0 :     return cell_list_alloc (cells, cursor, tail, x);
     777             : }
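
/* Illustrative sketch (not part of the original file): the cursor discipline
 * cell_list_find() callers must follow.  Within one pass the x coordinates
 * looked up must be non-decreasing; rewind before a pass that may start
 * further left.  The function name and coverage delta are hypothetical. */
static glitter_status_t
accumulate_three_cells_example (struct cell_list *cells)
{
    static const int xs[3] = { 2, 5, 9 };   /* already sorted */
    int i;

    cell_list_rewind (cells);
    for (i = 0; i < 3; i++) {
        struct cell *cell = cell_list_find (cells, xs[i]);
        if (unlikely (cell == NULL))
            return GLITTER_STATUS_NO_MEMORY;
        cell->covered_height += 1;           /* example coverage delta */
    }
    return GLITTER_STATUS_SUCCESS;
}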
     778             : 
     779             : /* Find two cells at x1 and x2.  This is exactly equivalent
     780             :  * to
     781             :  *
     782             :  *   pair.cell1 = cell_list_find(cells, x1);
     783             :  *   pair.cell2 = cell_list_find(cells, x2);
     784             :  *
     785             :  * except with less function call overhead. */
     786             : inline static struct cell_pair
     787           0 : cell_list_find_pair(struct cell_list *cells, int x1, int x2)
     788             : {
     789             :     struct cell_pair pair;
     790           0 :     struct cell **cursor = cells->cursor;
     791             :     struct cell *cell1;
     792             :     struct cell *cell2;
     793             :     struct cell *newcell;
     794             : 
     795             :     /* Find first cell at x1. */
     796             :     while (1) {
     797           0 :         UNROLL3({
     798             :             cell1 = *cursor;
     799             :             if (cell1->x > x1)
     800             :                 break;
     801             : 
     802             :             if (cell1->x == x1)
     803             :                 goto found_first;
     804             : 
     805             :             cursor = &cell1->next;
     806             :         });
     807             :     }
     808             : 
     809             :     /* New first cell at x1. */
     810           0 :     newcell = pool_alloc (cells->cell_pool.base,
     811             :                           sizeof (struct cell));
     812           0 :     if (likely (NULL != newcell)) {
     813           0 :         *cursor = newcell;
     814           0 :         newcell->next = cell1;
     815           0 :         newcell->x = x1;
     816           0 :         newcell->uncovered_area = 0;
     817           0 :         newcell->covered_height = 0;
     818             :     }
     819           0 :     cell1 = newcell;
     820             :  found_first:
     821             : 
     822             :     /* Find second cell at x2. */
     823             :     while (1) {
     824           0 :         UNROLL3({
     825             :             cell2 = *cursor;
     826             :             if (cell2->x > x2)
     827             :                 break;
     828             :             if (cell2->x == x2)
     829             :                 goto found_second;
     830             :             cursor = &cell2->next;
     831             :         });
     832             :     }
     833             : 
     834             :     /* New second cell at x2. */
     835           0 :     newcell = pool_alloc (cells->cell_pool.base,
     836             :                          sizeof (struct cell));
     837           0 :     if (likely (NULL != newcell)) {
     838           0 :         *cursor = newcell;
     839           0 :         newcell->next = cell2;
     840           0 :         newcell->x = x2;
     841           0 :         newcell->uncovered_area = 0;
     842           0 :         newcell->covered_height = 0;
     843             :     }
     844           0 :     cell2 = newcell;
     845             :  found_second:
     846             : 
     847           0 :     cells->cursor = cursor;
     848           0 :     pair.cell1 = cell1;
     849           0 :     pair.cell2 = cell2;
     850           0 :     return pair;
     851             : }
     852             : 
     853             : /* Add an unbounded subpixel span covering subpixels >= x to the
     854             :  * coverage cells. */
     855             : static glitter_status_t
     856           0 : cell_list_add_unbounded_subspan (struct cell_list *cells,
     857             :                                  grid_scaled_x_t x)
     858             : {
     859             :     struct cell *cell;
     860             :     int ix, fx;
     861             : 
     862           0 :     GRID_X_TO_INT_FRAC(x, ix, fx);
     863             : 
     864           0 :     cell = cell_list_find (cells, ix);
     865           0 :     if (likely (cell != NULL)) {
     866           0 :         cell->uncovered_area += 2*fx;
     867           0 :         cell->covered_height++;
     868           0 :         return GLITTER_STATUS_SUCCESS;
     869             :     }
     870             : 
     871           0 :     return GLITTER_STATUS_NO_MEMORY;
     872             : }
     873             : 
     874             : /* Add a subpixel span covering [x1, x2) to the coverage cells. */
     875             : inline static glitter_status_t
     876           0 : cell_list_add_subspan(
     877             :     struct cell_list *cells,
     878             :     grid_scaled_x_t x1,
     879             :     grid_scaled_x_t x2)
     880             : {
     881             :     int ix1, fx1;
     882             :     int ix2, fx2;
     883             : 
     884           0 :     GRID_X_TO_INT_FRAC(x1, ix1, fx1);
     885           0 :     GRID_X_TO_INT_FRAC(x2, ix2, fx2);
     886             : 
     887           0 :     if (ix1 != ix2) {
     888             :         struct cell_pair p;
     889           0 :         p = cell_list_find_pair(cells, ix1, ix2);
     890           0 :         if (likely (p.cell1 != NULL && p.cell2 != NULL)) {
     891           0 :             p.cell1->uncovered_area += 2*fx1;
     892           0 :             ++p.cell1->covered_height;
     893           0 :             p.cell2->uncovered_area -= 2*fx2;
     894           0 :             --p.cell2->covered_height;
     895           0 :             return GLITTER_STATUS_SUCCESS;
     896             :         }
     897             :     } else {
     898           0 :         struct cell *cell = cell_list_find(cells, ix1);
     899           0 :         if (likely (cell != NULL)) {
     900           0 :             cell->uncovered_area += 2*(fx1-fx2);
     901           0 :             return GLITTER_STATUS_SUCCESS;
     902             :         }
     903             :     }
     904           0 :     return GLITTER_STATUS_NO_MEMORY;
     905             : }
     906             : 
     907             : /* Adds the analytical coverage of an edge crossing the current pixel
     908             :  * row to the coverage cells and advances the edge's x position to the
     909             :  * following row.
     910             :  *
     911             :  * This function is only called when we know that during this pixel row:
     912             :  *
     913             :  * 1) The relative order of all edges on the active list doesn't
     914             :  * change.  In particular, no edges intersect within this row to pixel
     915             :  * precision.
     916             :  *
     917             :  * 2) No new edges start in this row.
     918             :  *
     919             :  * 3) No existing edges end mid-row.
     920             :  *
     921             :  * This function depends on being called with all edges from the
     922             :  * active list in the order they appear on the list (i.e. with
     923             :  * non-decreasing x-coordinate.)  */
     924             : static glitter_status_t
     925           0 : cell_list_render_edge(
     926             :     struct cell_list *cells,
     927             :     struct edge *edge,
     928             :     int sign)
     929             : {
     930             :     grid_scaled_y_t y1, y2, dy;
     931             :     grid_scaled_x_t dx;
     932             :     int ix1, ix2;
     933             :     grid_scaled_x_t fx1, fx2;
     934             : 
     935           0 :     struct quorem x1 = edge->x;
     936           0 :     struct quorem x2 = x1;
     937             : 
     938           0 :     if (! edge->vertical) {
     939           0 :         x2.quo += edge->dxdy_full.quo;
     940           0 :         x2.rem += edge->dxdy_full.rem;
     941           0 :         if (x2.rem >= 0) {
     942           0 :             ++x2.quo;
     943           0 :             x2.rem -= edge->dy;
     944             :         }
     945             : 
     946           0 :         edge->x = x2;
     947             :     }
     948             : 
     949           0 :     GRID_X_TO_INT_FRAC(x1.quo, ix1, fx1);
     950           0 :     GRID_X_TO_INT_FRAC(x2.quo, ix2, fx2);
     951             : 
     952             :     /* Edge is entirely within a column? */
     953           0 :     if (ix1 == ix2) {
     954             :         /* We always know that ix1 is >= the cell list cursor in this
     955             :          * case due to the no-intersections precondition.  */
     956           0 :         struct cell *cell = cell_list_find(cells, ix1);
     957           0 :         if (unlikely (NULL == cell))
     958           0 :             return GLITTER_STATUS_NO_MEMORY;
     959             : 
     960           0 :         cell->covered_height += sign*GRID_Y;
     961           0 :         cell->uncovered_area += sign*(fx1 + fx2)*GRID_Y;
     962           0 :         return GLITTER_STATUS_SUCCESS;
     963             :     }
     964             : 
     965             :     /* Orient the edge left-to-right. */
     966           0 :     dx = x2.quo - x1.quo;
     967           0 :     if (dx >= 0) {
     968           0 :         y1 = 0;
     969           0 :         y2 = GRID_Y;
     970             :     } else {
     971             :         int tmp;
     972           0 :         tmp = ix1; ix1 = ix2; ix2 = tmp;
     973           0 :         tmp = fx1; fx1 = fx2; fx2 = tmp;
     974           0 :         dx = -dx;
     975           0 :         sign = -sign;
     976           0 :         y1 = GRID_Y;
     977           0 :         y2 = 0;
     978             :     }
     979           0 :     dy = y2 - y1;
     980             : 
     981             :     /* Add coverage for all pixels [ix1,ix2] on this row crossed
     982             :      * by the edge. */
     983             :     {
     984             :         struct cell_pair pair;
     985           0 :         struct quorem y = floored_divrem((GRID_X - fx1)*dy, dx);
     986             : 
     987             :         /* When rendering a previous edge on the active list we may
     988             :          * advance the cell list cursor past the leftmost pixel of the
     989             :          * current edge even though the two edges don't intersect.
     990             :          * e.g. consider two edges going down and rightwards:
     991             :          *
     992             :          *  --\_+---\_+-----+-----+----
     993             :          *      \_    \_    |     |
     994             :          *      | \_  | \_  |     |
     995             :          *      |   \_|   \_|     |
     996             :          *      |     \_    \_    |
     997             :          *  ----+-----+-\---+-\---+----
     998             :          *
     999             :          * The left edge touches cells past the starting cell of the
    1000             :          * right edge.  Fortunately such cases are rare.
    1001             :          *
    1002             :          * The rewinding is never necessary if the current edge stays
    1003             :          * within a single column because we've checked before calling
    1004             :          * this function that the active list order won't change. */
    1005           0 :         cell_list_maybe_rewind(cells, ix1);
    1006             : 
    1007           0 :         pair = cell_list_find_pair(cells, ix1, ix1+1);
    1008           0 :         if (unlikely (!pair.cell1 || !pair.cell2))
    1009           0 :             return GLITTER_STATUS_NO_MEMORY;
    1010             : 
    1011           0 :         pair.cell1->uncovered_area += sign*y.quo*(GRID_X + fx1);
    1012           0 :         pair.cell1->covered_height += sign*y.quo;
    1013           0 :         y.quo += y1;
    1014             : 
    1015           0 :         if (ix1+1 < ix2) {
    1016           0 :             struct quorem dydx_full = floored_divrem(GRID_X*dy, dx);
    1017           0 :             struct cell *cell = pair.cell2;
    1018             : 
    1019           0 :             ++ix1;
    1020             :             do {
    1021           0 :                 grid_scaled_y_t y_skip = dydx_full.quo;
    1022           0 :                 y.rem += dydx_full.rem;
    1023           0 :                 if (y.rem >= dx) {
    1024           0 :                     ++y_skip;
    1025           0 :                     y.rem -= dx;
    1026             :                 }
    1027             : 
    1028           0 :                 y.quo += y_skip;
    1029             : 
    1030           0 :                 y_skip *= sign;
    1031           0 :                 cell->uncovered_area += y_skip*GRID_X;
    1032           0 :                 cell->covered_height += y_skip;
    1033             : 
    1034           0 :                 ++ix1;
    1035           0 :                 cell = cell_list_find(cells, ix1);
    1036           0 :                 if (unlikely (NULL == cell))
    1037           0 :                     return GLITTER_STATUS_NO_MEMORY;
    1038           0 :             } while (ix1 != ix2);
    1039             : 
    1040           0 :             pair.cell2 = cell;
    1041             :         }
    1042           0 :         pair.cell2->uncovered_area += sign*(y2 - y.quo)*fx2;
    1043           0 :         pair.cell2->covered_height += sign*(y2 - y.quo);
    1044             :     }
    1045             : 
    1046           0 :     return GLITTER_STATUS_SUCCESS;
    1047             : }
    1048             : 
    1049             : static void
    1050           0 : polygon_init (struct polygon *polygon)
    1051             : {
    1052           0 :     polygon->ymin = polygon->ymax = 0;
    1053           0 :     polygon->xmin = polygon->xmax = 0;
    1054           0 :     polygon->y_buckets = polygon->y_buckets_embedded;
    1055           0 :     pool_init (polygon->edge_pool.base,
    1056             :                8192 - sizeof (struct _pool_chunk),
    1057             :                sizeof (polygon->edge_pool.embedded));
    1058           0 : }
    1059             : 
    1060             : static void
    1061           0 : polygon_fini (struct polygon *polygon)
    1062             : {
    1063           0 :     if (polygon->y_buckets != polygon->y_buckets_embedded)
    1064           0 :         free (polygon->y_buckets);
    1065             : 
    1066           0 :     pool_fini (polygon->edge_pool.base);
    1067           0 : }
    1068             : 
    1069             : /* Empties the polygon of all edges. The polygon is then prepared to
    1070             :  * receive new edges and clip them to the vertical range
    1071             :  * [ymin,ymax). */
    1072             : static glitter_status_t
    1073           0 : polygon_reset (struct polygon *polygon,
    1074             :                grid_scaled_x_t xmin,
    1075             :                grid_scaled_x_t xmax,
    1076             :                grid_scaled_y_t ymin,
    1077             :                grid_scaled_y_t ymax)
    1078             : {
    1079           0 :     unsigned h = ymax - ymin;
    1080           0 :     unsigned num_buckets = EDGE_Y_BUCKET_INDEX(ymax + EDGE_Y_BUCKET_HEIGHT-1,
    1081             :                                                ymin);
    1082             : 
    1083           0 :     pool_reset(polygon->edge_pool.base);
    1084             : 
    1085           0 :     if (unlikely (h > 0x7FFFFFFFU - EDGE_Y_BUCKET_HEIGHT))
    1086           0 :         goto bail_no_mem; /* even if you could, you wouldn't want to. */
    1087             : 
    1088           0 :     if (polygon->y_buckets != polygon->y_buckets_embedded)
    1089           0 :         free (polygon->y_buckets);
    1090             : 
    1091           0 :     polygon->y_buckets = polygon->y_buckets_embedded;
    1092           0 :     if (num_buckets > ARRAY_LENGTH (polygon->y_buckets_embedded)) {
    1093           0 :         polygon->y_buckets = _cairo_malloc_ab (num_buckets,
    1094             :                                                sizeof (struct bucket));
    1095           0 :         if (unlikely (NULL == polygon->y_buckets))
    1096           0 :             goto bail_no_mem;
    1097             :     }
    1098           0 :     memset (polygon->y_buckets, 0, num_buckets * sizeof (struct bucket));
    1099             : 
    1100           0 :     polygon->ymin = ymin;
    1101           0 :     polygon->ymax = ymax;
    1102           0 :     polygon->xmin = xmin;
    1103           0 :     polygon->xmax = xmax;
    1104           0 :     return GLITTER_STATUS_SUCCESS;
    1105             : 
    1106             :  bail_no_mem:
    1107           0 :     polygon->ymin = 0;
    1108           0 :     polygon->ymax = 0;
    1109           0 :     return GLITTER_STATUS_NO_MEMORY;
    1110             : }
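                     : 
                     : /* A hedged worked example of the num_buckets computation above, assuming
                     :  * EDGE_Y_BUCKET_INDEX(y, ymin) expands to ((y) - (ymin)) / EDGE_Y_BUCKET_HEIGHT
                     :  * as its use in _polygon_insert_edge_into_its_y_bucket() below suggests.
                     :  * The expression is then a ceiling division:
                     :  *
                     :  *   num_buckets = (ymax + H-1 - ymin) / H = ceil((ymax - ymin) / H)
                     :  *
                     :  * e.g. ymin = 0, ymax = 97 and H = 16 give (97 + 15)/16 = 7 buckets, enough
                     :  * to hold edges starting anywhere in grid rows 0..96. */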
    1111             : 
    1112             : static void
    1113           0 : _polygon_insert_edge_into_its_y_bucket(
    1114             :     struct polygon *polygon,
    1115             :     struct edge *e)
    1116             : {
    1117           0 :     unsigned j = e->ytop - polygon->ymin;
    1118           0 :     unsigned ix = j / EDGE_Y_BUCKET_HEIGHT;
    1119           0 :     unsigned offset = j % EDGE_Y_BUCKET_HEIGHT;
    1120           0 :     struct edge **ptail = &polygon->y_buckets[ix].edges;
    1121           0 :     e->next = *ptail;
    1122           0 :     *ptail = e;
    1123           0 :     polygon->y_buckets[ix].have_inside_edges |= offset;
    1124           0 : }
    1125             : 
    1126             : inline static glitter_status_t
    1127           0 : polygon_add_edge (struct polygon *polygon,
    1128             :                   const cairo_edge_t *edge)
    1129             : {
    1130             :     struct edge *e;
    1131             :     grid_scaled_x_t dx;
    1132             :     grid_scaled_y_t dy;
    1133             :     grid_scaled_y_t ytop, ybot;
    1134           0 :     grid_scaled_y_t ymin = polygon->ymin;
    1135           0 :     grid_scaled_y_t ymax = polygon->ymax;
    1136             : 
    1137           0 :     assert (edge->bottom > edge->top);
    1138             : 
    1139           0 :     if (unlikely (edge->top >= ymax || edge->bottom <= ymin))
    1140           0 :         return GLITTER_STATUS_SUCCESS;
    1141             : 
    1142           0 :     e = pool_alloc (polygon->edge_pool.base, sizeof (struct edge));
    1143           0 :     if (unlikely (NULL == e))
    1144           0 :         return GLITTER_STATUS_NO_MEMORY;
    1145             : 
    1146           0 :     dx = edge->line.p2.x - edge->line.p1.x;
    1147           0 :     dy = edge->line.p2.y - edge->line.p1.y;
    1148           0 :     e->dy = dy;
    1149           0 :     e->dir = edge->dir;
    1150             : 
    1151           0 :     ytop = edge->top >= ymin ? edge->top : ymin;
    1152           0 :     ybot = edge->bottom <= ymax ? edge->bottom : ymax;
    1153           0 :     e->ytop = ytop;
    1154           0 :     e->height_left = ybot - ytop;
    1155             : 
    1156           0 :     if (dx == 0) {
    1157           0 :         e->vertical = TRUE;
    1158           0 :         e->x.quo = edge->line.p1.x;
    1159           0 :         e->x.rem = 0;
    1160           0 :         e->dxdy.quo = 0;
    1161           0 :         e->dxdy.rem = 0;
    1162           0 :         e->dxdy_full.quo = 0;
    1163           0 :         e->dxdy_full.rem = 0;
    1164             : 
    1165             :         /* Drop edges to the right of the clip extents. */
    1166           0 :         if (e->x.quo >= polygon->xmax)
    1167           0 :             return GLITTER_STATUS_SUCCESS;
    1168             : 
    1169             :         /* Offset vertical edges at the left side of the clip extents
    1170             :          * to just shy of the left side.  We depend on this when
    1171             :          * checking for possible intersections within the clip
    1172             :          * rectangle. */
    1173           0 :         if (e->x.quo <= polygon->xmin) {
    1174           0 :             e->x.quo = polygon->xmin - 1;
    1175             :         }
    1176             :     } else {
    1177           0 :         e->vertical = FALSE;
    1178           0 :         e->dxdy = floored_divrem (dx, dy);
    1179           0 :         if (ytop == edge->line.p1.y) {
    1180           0 :             e->x.quo = edge->line.p1.x;
    1181           0 :             e->x.rem = 0;
    1182             :         } else {
    1183           0 :             e->x = floored_muldivrem (ytop - edge->line.p1.y, dx, dy);
    1184           0 :             e->x.quo += edge->line.p1.x;
    1185             :         }
    1186             : 
    1187           0 :         if (e->x.quo >= polygon->xmax && e->dxdy.quo >= 0)
    1188           0 :             return GLITTER_STATUS_SUCCESS;
    1189             : 
    1190           0 :         if (e->height_left >= GRID_Y) {
    1191           0 :             e->dxdy_full = floored_muldivrem (GRID_Y, dx, dy);
    1192             :         } else {
    1193           0 :             e->dxdy_full.quo = 0;
    1194           0 :             e->dxdy_full.rem = 0;
    1195             :         }
    1196             :     }
    1197             : 
    1198           0 :     _polygon_insert_edge_into_its_y_bucket (polygon, e);
    1199             : 
    1200           0 :     e->x.rem -= dy;          /* Bias the remainder for faster
    1201             :                                  * edge advancement. */
    1202           0 :     return GLITTER_STATUS_SUCCESS;
    1203             : }
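                     : 
                     : /* A minimal illustrative sketch of the remainder bias applied at the end of
                     :  * polygon_add_edge() above; struct dda_sketch and the two steppers are
                     :  * hypothetical names, not part of the original code.  Advancing an edge by
                     :  * one subsample row is a run-slice DDA: x.quo grows by dxdy.quo plus a carry
                     :  * whenever the accumulated remainder reaches dy.  Keeping x.rem biased by
                     :  * -dy lets the carry test in active_list_substep_edges() below compare
                     :  * against zero instead of against dy; both steppers advance quo identically
                     :  * when the biased one starts from rem - dy. */
                     : struct dda_sketch { int quo, rem; };
                     : 
                     : static void
                     : dda_step_unbiased (struct dda_sketch *x, const struct dda_sketch *dxdy, int dy)
                     : {
                     :     x->quo += dxdy->quo;
                     :     x->rem += dxdy->rem;    /* 0 <= rem < dy on entry, so rem < 2*dy here */
                     :     if (x->rem >= dy) {     /* carry: fold the remainder back into range */
                     :         ++x->quo;
                     :         x->rem -= dy;
                     :     }
                     : }
                     : 
                     : static void
                     : dda_step_biased (struct dda_sketch *x, const struct dda_sketch *dxdy, int dy)
                     : {
                     :     x->quo += dxdy->quo;
                     :     x->rem += dxdy->rem;    /* -dy <= rem < 0 on entry, so rem < dy here */
                     :     if (x->rem >= 0) {      /* carry test against 0 instead of dy */
                     :         ++x->quo;
                     :         x->rem -= dy;
                     :     }
                     : }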
    1204             : 
    1205             : static void
    1206           0 : active_list_reset (struct active_list *active)
    1207             : {
    1208           0 :     active->head = NULL;
    1209           0 :     active->min_height = 0;
    1210           0 : }
    1211             : 
    1212             : static void
    1213           0 : active_list_init(struct active_list *active)
    1214             : {
    1215           0 :     active_list_reset(active);
    1216           0 : }
    1217             : 
    1218             : /*
    1219             :  * Merge two sorted edge lists.
    1220             :  * Input:
    1221             :  *  - head_a: The head of the first list.
    1222             :  *  - head_b: The head of the second list; head_b cannot be NULL.
    1223             :  * Output:
    1224             :  * Returns the head of the merged list.
    1225             :  *
    1226             :  * Implementation notes:
    1227             :  * To make it fast (in particular, to reduce to an insertion sort whenever
    1228             :  * one of the two input lists only has a single element) we iterate through
    1229             :  * a list until its head becomes greater than the head of the other list,
    1230             :  * then we switch their roles. As soon as one of the two lists is empty, we
    1231             :  * just attach the other one to the current list and exit.
    1232             :  * Writes to memory are only needed when we "switch" lists (since that also
    1233             :  * requires attaching the list we are about to iterate to the output list) and
    1234             :  * when we attach the last non-empty list.
    1235             :  */
    1236             : static struct edge *
    1237           0 : merge_sorted_edges (struct edge *head_a, struct edge *head_b)
    1238             : {
    1239             :     struct edge *head, **next;
    1240             : 
    1241           0 :     head = head_a;
    1242           0 :     next = &head;
    1243             : 
    1244             :     while (1) {
    1245           0 :         while (head_a != NULL && head_a->x.quo <= head_b->x.quo) {
    1246           0 :             next = &head_a->next;
    1247           0 :             head_a = head_a->next;
    1248             :         }
    1249             : 
    1250           0 :         *next = head_b;
    1251           0 :         if (head_a == NULL)
    1252           0 :             return head;
    1253             : 
    1254           0 :         while (head_b != NULL && head_b->x.quo <= head_a->x.quo) {
    1255           0 :             next = &head_b->next;
    1256           0 :             head_b = head_b->next;
    1257             :         }
    1258             : 
    1259           0 :         *next = head_a;
    1260           0 :         if (head_b == NULL)
    1261           0 :             return head;
    1262             :     }
    1263             : }
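                     : 
                     : /* A minimal illustrative sketch of the role-switching merge above, using a
                     :  * hypothetical struct node_sketch with plain int keys; merge_sorted_edges()
                     :  * does the same thing with x.quo as the key.  Merging [1,4,9] with [2,3,10]
                     :  * walks 1 from the first list, splices in the second, walks 2 and 3, splices
                     :  * back, walks 4 and 9, splices again, and returns once the first list runs
                     :  * out: only three pointer writes for six elements. */
                     : struct node_sketch { int key; struct node_sketch *next; };
                     : 
                     : static struct node_sketch *
                     : merge_sorted_sketch (struct node_sketch *a, struct node_sketch *b)
                     : {
                     :     /* b must not be NULL, mirroring the head_b precondition above. */
                     :     struct node_sketch *head = a, **next = &head;
                     : 
                     :     while (1) {
                     :         while (a != NULL && a->key <= b->key) {
                     :             next = &a->next;
                     :             a = a->next;
                     :         }
                     :         *next = b;      /* attach b and start consuming it instead */
                     :         if (a == NULL)
                     :             return head;
                     : 
                     :         while (b != NULL && b->key <= a->key) {
                     :             next = &b->next;
                     :             b = b->next;
                     :         }
                     :         *next = a;      /* attach a and swap roles again */
                     :         if (b == NULL)
                     :             return head;
                     :     }
                     : }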
    1264             : 
    1265             : /*
    1266             :  * Sort (part of) a list.
    1267             :  * Input:
    1268             :  *  - list: The list to be sorted; list cannot be NULL.
    1269             :  *  - level: Recursion limit.
    1270             :  * Output:
    1271             :  *  - head_out: The head of the sorted list containing the first 2^(level+1) elements of the
    1272             :  *              input list; if the input list has fewer elements, head_out will be a sorted list
    1273             :  *              containing all the elements of the input list.
    1274             :  * Returns the head of the list of unprocessed elements (NULL if the sorted list contains
    1275             :  * all the elements of the input list).
    1276             :  *
    1277             :  * Implementation notes:
    1278             :  * Special-case the single-element list and unroll/inline the sorting of the first two elements.
    1279             :  * Some tail recursion is used since we iterate on the bottom-up solution of the problem
    1280             :  * (we start with a small sorted list and keep merging other lists of the same size into it).
    1281             :  */
    1282             : static struct edge *
    1283           0 : sort_edges (struct edge  *list,
    1284             :             unsigned int  level,
    1285             :             struct edge **head_out)
    1286             : {
    1287             :     struct edge *head_other, *remaining;
    1288             :     unsigned int i;
    1289             : 
    1290           0 :     head_other = list->next;
    1291             : 
    1292             :     /* Single element list -> return */
    1293           0 :     if (head_other == NULL) {
    1294           0 :         *head_out = list;
    1295           0 :         return NULL;
    1296             :     }
    1297             : 
    1298             :     /* Unroll the first iteration of the following loop (halves the number of calls to merge_sorted_edges):
    1299             :      *  - Initialize remaining to be the list containing the elements after the second in the input list.
    1300             :      *  - Initialize *head_out to be the sorted list containing the first two elements.
    1301             :      */
    1302           0 :     remaining = head_other->next;
    1303           0 :     if (list->x.quo <= head_other->x.quo) {
    1304           0 :         *head_out = list;
    1305             :         /* list->next = head_other; */ /* The input list is already like this. */
    1306           0 :         head_other->next = NULL;
    1307             :     } else {
    1308           0 :         *head_out = head_other;
    1309           0 :         head_other->next = list;
    1310           0 :         list->next = NULL;
    1311             :     }
    1312             : 
    1313           0 :     for (i = 0; i < level && remaining; i++) {
    1314             :         /* Extract a sorted list of the same size as *head_out
    1315             :          * (2^(i+1) elements) from the list of remaining elements. */
    1316           0 :         remaining = sort_edges (remaining, i, &head_other);
    1317           0 :         *head_out = merge_sorted_edges (*head_out, head_other);
    1318             :     }
    1319             : 
    1320             :     /* *head_out now contains (at most) 2^(level+1) elements. */
    1321             : 
    1322           0 :     return remaining;
    1323             : }
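                     : 
                     : /* A worked element count for sort_edges() above (illustrative, not from the
                     :  * original comments): after the unrolled first step *head_out holds 2
                     :  * elements, and iteration i of the loop merges in a further run of up to
                     :  * 2^(i+1) elements, so after iterations 0..level-1 the sorted prefix holds
                     :  * up to
                     :  *
                     :  *   2 + 2 + 4 + ... + 2^level = 2^(level+1)
                     :  *
                     :  * elements (fewer if the input runs out first).  That is why the callers
                     :  * below pass UINT_MAX as the level when they want the whole list sorted in
                     :  * a single call. */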
    1324             : 
    1325             : /* Test if the edges on the active list can be safely advanced by a
    1326             :  * full row without intersections or any edges ending. */
    1327             : inline static int
    1328           0 : active_list_can_step_full_row (struct active_list *active,
    1329             :                                grid_scaled_x_t     xmin)
    1330             : {
    1331             :     const struct edge *e;
    1332           0 :     grid_scaled_x_t prev_x = INT_MIN;
    1333             : 
    1334             :     /* Recomputes the minimum height of all edges on the active
    1335             :      * list if we have been dropping edges. */
    1336           0 :     if (active->min_height <= 0) {
    1337           0 :         int min_height = INT_MAX;
    1338             : 
    1339           0 :         e = active->head;
    1340           0 :         while (NULL != e) {
    1341           0 :             if (e->height_left < min_height)
    1342           0 :                 min_height = e->height_left;
    1343           0 :             e = e->next;
    1344             :         }
    1345             : 
    1346           0 :         active->min_height = min_height;
    1347             :     }
    1348             : 
    1349           0 :     if (active->min_height < GRID_Y)
    1350           0 :         return 0;
    1351             : 
    1352             :     /* Check for intersections as no edges end during the next row. */
    1353           0 :     e = active->head;
    1354           0 :     while (NULL != e) {
    1355           0 :         struct quorem x = e->x;
    1356             : 
    1357           0 :         if (! e->vertical) {
    1358           0 :             x.quo += e->dxdy_full.quo;
    1359           0 :             x.rem += e->dxdy_full.rem;
    1360           0 :             if (x.rem >= 0)
    1361           0 :                 ++x.quo;
    1362             :         }
    1363             : 
    1364             :         /* There may be an intersection if the edge sort order might
    1365             :          * change. */
    1366           0 :         if (x.quo <= prev_x) {
    1367             :             /* Ignore intersections to the left of the clip extents.
    1368             :              * This assumes that all vertical edges on or at the left
    1369             :              * side of the clip rectangle have been shifted slightly
    1370             :              * to the left in polygon_add_edge(). */
    1371           0 :             if (prev_x >= xmin || x.quo >= xmin || e->x.quo >= xmin)
    1372           0 :                 return 0;
    1373             :         }
    1374             :         else {
    1375           0 :             prev_x = x.quo;
    1376             :         }
    1377           0 :         e = e->next;
    1378             :     }
    1379             : 
    1380           0 :     return 1;
    1381             : }
    1382             : 
    1383             : /* Merges edges on the given subpixel row from the polygon to the
    1384             :  * active_list. */
    1385             : inline static void
    1386           0 : active_list_merge_edges_from_polygon(
    1387             :     struct active_list *active,
    1388             :     grid_scaled_y_t y,
    1389             :     struct polygon *polygon)
    1390             : {
    1391             :     /* Split off the edges on the current subrow and merge them into
    1392             :      * the active list. */
    1393           0 :     unsigned ix = EDGE_Y_BUCKET_INDEX(y, polygon->ymin);
    1394           0 :     int min_height = active->min_height;
    1395           0 :     struct edge *subrow_edges = NULL;
    1396           0 :     struct edge **ptail = &polygon->y_buckets[ix].edges;
    1397             : 
    1398           0 :     while (1) {
    1399           0 :         struct edge *tail = *ptail;
    1400           0 :         if (NULL == tail) break;
    1401             : 
    1402           0 :         if (y == tail->ytop) {
    1403           0 :             *ptail = tail->next;
    1404           0 :             tail->next = subrow_edges;
    1405           0 :             subrow_edges = tail;
    1406           0 :             if (tail->height_left < min_height)
    1407           0 :                 min_height = tail->height_left;
    1408             :         } else {
    1409           0 :             ptail = &tail->next;
    1410             :         }
    1411             :     }
    1412           0 :     if (subrow_edges) {
    1413           0 :         sort_edges (subrow_edges, UINT_MAX, &subrow_edges);
    1414           0 :         active->head = merge_sorted_edges (active->head, subrow_edges);
    1415           0 :         active->min_height = min_height;
    1416             :     }
    1417           0 : }
    1418             : 
    1419             : /* Advance the edges on the active list by one subsample row by
    1420             :  * updating their x positions.  Drop edges from the list that end. */
    1421             : inline static void
    1422           0 : active_list_substep_edges(
    1423             :     struct active_list *active)
    1424             : {
    1425           0 :     struct edge **cursor = &active->head;
    1426           0 :     grid_scaled_x_t prev_x = INT_MIN;
    1427           0 :     struct edge *unsorted = NULL;
    1428             : 
    1429           0 :     while (1) {
    1430             :         struct edge *edge;
    1431             : 
    1432           0 :         UNROLL3({
    1433             :             edge = *cursor;
    1434             :             if (NULL == edge)
    1435             :                 break;
    1436             : 
    1437             :             if (0 != --edge->height_left) {
    1438             :                 edge->x.quo += edge->dxdy.quo;
    1439             :                 edge->x.rem += edge->dxdy.rem;
    1440             :                 if (edge->x.rem >= 0) {
    1441             :                     ++edge->x.quo;
    1442             :                     edge->x.rem -= edge->dy;
    1443             :                 }
    1444             : 
    1445             :                 if (edge->x.quo < prev_x) {
    1446             :                     *cursor = edge->next;
    1447             :                     edge->next = unsorted;
    1448             :                     unsorted = edge;
    1449             :                 } else {
    1450             :                     prev_x = edge->x.quo;
    1451             :                     cursor = &edge->next;
    1452             :                 }
    1453             : 
    1454             :             } else {
    1455             :                 *cursor = edge->next;
    1456             :             }
    1457             :         });
    1458             :     }
    1459             : 
    1460           0 :     if (unsorted) {
    1461           0 :         sort_edges (unsorted, UINT_MAX, &unsorted);
    1462           0 :         active->head = merge_sorted_edges (active->head, unsorted);
    1463             :     }
    1464           0 : }
    1465             : 
    1466             : inline static glitter_status_t
    1467           0 : apply_nonzero_fill_rule_for_subrow (struct active_list *active,
    1468             :                                     struct cell_list *coverages)
    1469             : {
    1470           0 :     struct edge *edge = active->head;
    1471           0 :     int winding = 0;
    1472             :     int xstart;
    1473             :     int xend;
    1474             :     int status;
    1475             : 
    1476           0 :     cell_list_rewind (coverages);
    1477             : 
    1478           0 :     while (NULL != edge) {
    1479           0 :         xstart = edge->x.quo;
    1480           0 :         winding = edge->dir;
    1481             :         while (1) {
    1482           0 :             edge = edge->next;
    1483           0 :             if (NULL == edge)
    1484           0 :                 return cell_list_add_unbounded_subspan (coverages, xstart);
    1485             : 
    1486           0 :             winding += edge->dir;
    1487           0 :             if (0 == winding) {
    1488           0 :                 if (edge->next == NULL || edge->next->x.quo != edge->x.quo)
    1489             :                     break;
    1490             :             }
    1491             :         }
    1492             : 
    1493           0 :         xend = edge->x.quo;
    1494           0 :         status = cell_list_add_subspan (coverages, xstart, xend);
    1495           0 :         if (unlikely (status))
    1496           0 :             return status;
    1497             : 
    1498           0 :         edge = edge->next;
    1499             :     }
    1500             : 
    1501           0 :     return GLITTER_STATUS_SUCCESS;
    1502             : }
    1503             : 
    1504             : static glitter_status_t
    1505           0 : apply_evenodd_fill_rule_for_subrow (struct active_list *active,
    1506             :                                     struct cell_list *coverages)
    1507             : {
    1508           0 :     struct edge *edge = active->head;
    1509             :     int xstart;
    1510             :     int xend;
    1511             :     int status;
    1512             : 
    1513           0 :     cell_list_rewind (coverages);
    1514             : 
    1515           0 :     while (NULL != edge) {
    1516           0 :         xstart = edge->x.quo;
    1517             : 
    1518             :         while (1) {
    1519           0 :             edge = edge->next;
    1520           0 :             if (NULL == edge)
    1521           0 :                 return cell_list_add_unbounded_subspan (coverages, xstart);
    1522             : 
    1523           0 :             if (edge->next == NULL || edge->next->x.quo != edge->x.quo)
    1524             :                 break;
    1525             : 
    1526           0 :             edge = edge->next;
    1527             :         }
    1528             : 
    1529           0 :         xend = edge->x.quo;
    1530           0 :         status = cell_list_add_subspan (coverages, xstart, xend);
    1531           0 :         if (unlikely (status))
    1532           0 :             return status;
    1533             : 
    1534           0 :         edge = edge->next;
    1535             :     }
    1536             : 
    1537           0 :     return GLITTER_STATUS_SUCCESS;
    1538             : }
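                     : 
                     : /* A minimal illustrative sketch of the inside test that the two subrow
                     :  * walkers above apply while scanning left to right: under the nonzero rule a
                     :  * point is inside while the accumulated winding count is non-zero, and under
                     :  * the even-odd rule while the number of crossings so far is odd, which is
                     :  * what pairing up successive edges amounts to.  is_inside_sketch is a
                     :  * hypothetical helper, not part of the original code. */
                     : static int
                     : is_inside_sketch (int winding_or_crossings, int nonzero_fill)
                     : {
                     :     if (nonzero_fill)
                     :         return winding_or_crossings != 0;
                     :     return winding_or_crossings & 1;
                     : }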
    1539             : 
    1540             : static glitter_status_t
    1541           0 : apply_nonzero_fill_rule_and_step_edges (struct active_list *active,
    1542             :                                         struct cell_list *coverages)
    1543             : {
    1544           0 :     struct edge **cursor = &active->head;
    1545             :     struct edge *left_edge;
    1546             :     int status;
    1547             : 
    1548           0 :     left_edge = *cursor;
    1549           0 :     while (NULL != left_edge) {
    1550             :         struct edge *right_edge;
    1551           0 :         int winding = left_edge->dir;
    1552             : 
    1553           0 :         left_edge->height_left -= GRID_Y;
    1554           0 :         if (left_edge->height_left)
    1555           0 :             cursor = &left_edge->next;
    1556             :         else
    1557           0 :             *cursor = left_edge->next;
    1558             : 
    1559             :         while (1) {
    1560           0 :             right_edge = *cursor;
    1561           0 :             if (NULL == right_edge)
    1562           0 :                 return cell_list_render_edge (coverages, left_edge, +1);
    1563             : 
    1564           0 :             right_edge->height_left -= GRID_Y;
    1565           0 :             if (right_edge->height_left)
    1566           0 :                 cursor = &right_edge->next;
    1567             :             else
    1568           0 :                 *cursor = right_edge->next;
    1569             : 
    1570           0 :             winding += right_edge->dir;
    1571           0 :             if (0 == winding) {
    1572           0 :                 if (right_edge->next == NULL ||
    1573           0 :                     right_edge->next->x.quo != right_edge->x.quo)
    1574             :                 {
    1575             :                     break;
    1576             :                 }
    1577             :             }
    1578             : 
    1579           0 :             if (! right_edge->vertical) {
    1580           0 :                 right_edge->x.quo += right_edge->dxdy_full.quo;
    1581           0 :                 right_edge->x.rem += right_edge->dxdy_full.rem;
    1582           0 :                 if (right_edge->x.rem >= 0) {
    1583           0 :                     ++right_edge->x.quo;
    1584           0 :                     right_edge->x.rem -= right_edge->dy;
    1585             :                 }
    1586             :             }
    1587             :         }
    1588             : 
    1589           0 :         status = cell_list_render_edge (coverages, left_edge, +1);
    1590           0 :         if (unlikely (status))
    1591           0 :             return status;
    1592             : 
    1593           0 :         status = cell_list_render_edge (coverages, right_edge, -1);
    1594           0 :         if (unlikely (status))
    1595           0 :             return status;
    1596             : 
    1597           0 :         left_edge = *cursor;
    1598             :     }
    1599             : 
    1600           0 :     return GLITTER_STATUS_SUCCESS;
    1601             : }
    1602             : 
    1603             : static glitter_status_t
    1604           0 : apply_evenodd_fill_rule_and_step_edges (struct active_list *active,
    1605             :                                         struct cell_list *coverages)
    1606             : {
    1607           0 :     struct edge **cursor = &active->head;
    1608             :     struct edge *left_edge;
    1609             :     int status;
    1610             : 
    1611           0 :     left_edge = *cursor;
    1612           0 :     while (NULL != left_edge) {
    1613             :         struct edge *right_edge;
    1614           0 :         int winding = left_edge->dir;
    1615             : 
    1616           0 :         left_edge->height_left -= GRID_Y;
    1617           0 :         if (left_edge->height_left)
    1618           0 :             cursor = &left_edge->next;
    1619             :         else
    1620           0 :             *cursor = left_edge->next;
    1621             : 
    1622             :         while (1) {
    1623           0 :             right_edge = *cursor;
    1624           0 :             if (NULL == right_edge)
    1625           0 :                 return cell_list_render_edge (coverages, left_edge, +1);
    1626             : 
    1627           0 :             right_edge->height_left -= GRID_Y;
    1628           0 :             if (right_edge->height_left)
    1629           0 :                 cursor = &right_edge->next;
    1630             :             else
    1631           0 :                 *cursor = right_edge->next;
    1632             : 
    1633           0 :             winding += right_edge->dir;
    1634           0 :             if ((winding & 1) == 0) {
    1635           0 :                 if (right_edge->next == NULL ||
    1636           0 :                     right_edge->next->x.quo != right_edge->x.quo)
    1637             :                 {
    1638             :                     break;
    1639             :                 }
    1640             :             }
    1641             : 
    1642           0 :             if (! right_edge->vertical) {
    1643           0 :                 right_edge->x.quo += right_edge->dxdy_full.quo;
    1644           0 :                 right_edge->x.rem += right_edge->dxdy_full.rem;
    1645           0 :                 if (right_edge->x.rem >= 0) {
    1646           0 :                     ++right_edge->x.quo;
    1647           0 :                     right_edge->x.rem -= right_edge->dy;
    1648             :                 }
    1649             :             }
    1650             :         }
    1651             : 
    1652           0 :         status = cell_list_render_edge (coverages, left_edge, +1);
    1653           0 :         if (unlikely (status))
    1654           0 :             return status;
    1655             : 
    1656           0 :         status = cell_list_render_edge (coverages, right_edge, -1);
    1657           0 :         if (unlikely (status))
    1658           0 :             return status;
    1659             : 
    1660           0 :         left_edge = *cursor;
    1661             :     }
    1662             : 
    1663           0 :     return GLITTER_STATUS_SUCCESS;
    1664             : }
    1665             : 
    1666             : /* If the user hasn't configured a coverage blitter, use a default one
    1667             :  * that blits spans directly to an A8 raster. */
    1668             : #ifndef GLITTER_BLIT_COVERAGES
    1669             : 
    1670             : inline static void
    1671             : blit_span(
    1672             :     unsigned char *row_pixels,
    1673             :     int x, unsigned len,
    1674             :     grid_area_t coverage)
    1675             : {
    1676             :     int alpha = GRID_AREA_TO_ALPHA(coverage);
    1677             :     if (1 == len) {
    1678             :         row_pixels[x] = alpha;
    1679             :     }
    1680             :     else {
    1681             :         memset(row_pixels + x, alpha, len);
    1682             :     }
    1683             : }
    1684             : 
    1685             : #define GLITTER_BLIT_COVERAGES(coverages, y, height, xmin, xmax) \
    1686             :     do { \
    1687             :         int __y = y; \
    1688             :         int __h = height; \
    1689             :         do { \
    1690             :             blit_cells(coverages, raster_pixels + (__y++)*raster_stride, xmin, xmax); \
    1691             :         } while (--__h); \
    1692             :     } while (0)
    1693             : 
    1694             : static void
    1695             : blit_cells(
    1696             :     struct cell_list *cells,
    1697             :     unsigned char *row_pixels,
    1698             :     int xmin, int xmax)
    1699             : {
    1700             :     struct cell *cell = cells->head;
    1701             :     int prev_x = xmin;
    1702             :     int coverage = 0;
    1703             :     if (NULL == cell)
    1704             :         return;
    1705             : 
    1706             :     while (NULL != cell && cell->x < xmin) {
    1707             :         coverage += cell->covered_height;
    1708             :         cell = cell->next;
    1709             :     }
    1710             :     coverage *= GRID_X*2;
    1711             : 
    1712             :     for (; NULL != cell; cell = cell->next) {
    1713             :         int x = cell->x;
    1714             :         int area;
    1715             :         if (x >= xmax)
    1716             :             break;
    1717             :         if (x > prev_x && 0 != coverage) {
    1718             :             blit_span(row_pixels, prev_x, x - prev_x, coverage);
    1719             :         }
    1720             : 
    1721             :         coverage += cell->covered_height * GRID_X*2;
    1722             :         area = coverage - cell->uncovered_area;
    1723             :         if (area) {
    1724             :             blit_span(row_pixels, x, 1, area);
    1725             :         }
    1726             :         prev_x = x+1;
    1727             :     }
    1728             : 
    1729             :     if (0 != coverage && prev_x < xmax) {
    1730             :         blit_span(row_pixels, prev_x, xmax - prev_x, coverage);
    1731             :     }
    1732             : }
    1733             : #endif /* GLITTER_BLIT_COVERAGES */
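                     : 
                     : /* A hedged worked example of the accumulation in blit_cells() above: suppose
                     :  * the cell list for a row holds {x=2, covered_height=+GRID_Y,
                     :  * uncovered_area=0} and {x=6, covered_height=-GRID_Y, uncovered_area=0},
                     :  * e.g. a shape whose vertical sides sit exactly on the left boundaries of
                     :  * pixels 2 and 6.  Walking the cells, coverage rises to GRID_Y*GRID_X*2 at
                     :  * x=2, so pixel 2 gets area = coverage - 0 (fully covered), pixels 3..5 are
                     :  * filled by the span emitted between the two cells, and at x=6 coverage
                     :  * drops back to zero, so pixel 6 and everything to its right stay empty. */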
    1734             : 
    1735             : static void
    1736           0 : _glitter_scan_converter_init(glitter_scan_converter_t *converter)
    1737             : {
    1738           0 :     polygon_init(converter->polygon);
    1739           0 :     active_list_init(converter->active);
    1740           0 :     cell_list_init(converter->coverages);
    1741           0 :     converter->xmin=0;
    1742           0 :     converter->ymin=0;
    1743           0 :     converter->xmax=0;
    1744           0 :     converter->ymax=0;
    1745           0 : }
    1746             : 
    1747             : static void
    1748           0 : _glitter_scan_converter_fini(glitter_scan_converter_t *converter)
    1749             : {
    1750           0 :     polygon_fini(converter->polygon);
    1751           0 :     cell_list_fini(converter->coverages);
    1752           0 :     converter->xmin=0;
    1753           0 :     converter->ymin=0;
    1754           0 :     converter->xmax=0;
    1755           0 :     converter->ymax=0;
    1756           0 : }
    1757             : 
    1758             : static grid_scaled_t
    1759           0 : int_to_grid_scaled(int i, int scale)
    1760             : {
    1761             :     /* Clamp to max/min representable scaled number. */
    1762           0 :     if (i >= 0) {
    1763           0 :         if (i >= INT_MAX/scale)
    1764           0 :             i = INT_MAX/scale;
    1765             :     }
    1766             :     else {
    1767           0 :         if (i <= INT_MIN/scale)
    1768           0 :             i = INT_MIN/scale;
    1769             :     }
    1770           0 :     return i*scale;
    1771             : }
    1772             : 
    1773             : #define int_to_grid_scaled_x(x) int_to_grid_scaled((x), GRID_X)
    1774             : #define int_to_grid_scaled_y(x) int_to_grid_scaled((x), GRID_Y)
    1775             : 
    1776             : I glitter_status_t
    1777           0 : glitter_scan_converter_reset(
    1778             :     glitter_scan_converter_t *converter,
    1779             :     int xmin, int ymin,
    1780             :     int xmax, int ymax)
    1781             : {
    1782             :     glitter_status_t status;
    1783             : 
    1784           0 :     converter->xmin = 0; converter->xmax = 0;
    1785           0 :     converter->ymin = 0; converter->ymax = 0;
    1786             : 
    1787           0 :     xmin = int_to_grid_scaled_x(xmin);
    1788           0 :     ymin = int_to_grid_scaled_y(ymin);
    1789           0 :     xmax = int_to_grid_scaled_x(xmax);
    1790           0 :     ymax = int_to_grid_scaled_y(ymax);
    1791             : 
    1792           0 :     active_list_reset(converter->active);
    1793           0 :     cell_list_reset(converter->coverages);
    1794           0 :     status = polygon_reset(converter->polygon, xmin, xmax, ymin, ymax);
    1795           0 :     if (status)
    1796           0 :         return status;
    1797             : 
    1798           0 :     converter->xmin = xmin;
    1799           0 :     converter->xmax = xmax;
    1800           0 :     converter->ymin = ymin;
    1801           0 :     converter->ymax = ymax;
    1802           0 :     return GLITTER_STATUS_SUCCESS;
    1803             : }
    1804             : 
    1805             : /* INPUT_TO_GRID_X/Y (in_coord, out_grid_scaled, grid_scale)
    1806             :  *   These macros convert an input coordinate in the client's
    1807             :  *   device space to the rasterisation grid.
    1808             :  */
    1809             : /* Gah.. this bit of ugly defines INPUT_TO_GRID_X/Y so as to use
    1810             :  * shifts if possible, and something saneish if not.
    1811             :  */
    1812             : #if !defined(INPUT_TO_GRID_Y) && defined(GRID_Y_BITS) && GRID_Y_BITS <= GLITTER_INPUT_BITS
    1813             : #  define INPUT_TO_GRID_Y(in, out) (out) = (in) >> (GLITTER_INPUT_BITS - GRID_Y_BITS)
    1814             : #else
    1815             : #  define INPUT_TO_GRID_Y(in, out) INPUT_TO_GRID_general(in, out, GRID_Y)
    1816             : #endif
    1817             : 
    1818             : #if !defined(INPUT_TO_GRID_X) && defined(GRID_X_BITS) && GRID_X_BITS <= GLITTER_INPUT_BITS
    1819             : #  define INPUT_TO_GRID_X(in, out) (out) = (in) >> (GLITTER_INPUT_BITS - GRID_X_BITS)
    1820             : #else
    1821             : #  define INPUT_TO_GRID_X(in, out) INPUT_TO_GRID_general(in, out, GRID_X)
    1822             : #endif
    1823             : 
    1824             : #define INPUT_TO_GRID_general(in, out, grid_scale) do {         \
    1825             :         long long tmp__ = (long long)(grid_scale) * (in);       \
    1826             :         tmp__ >>= GLITTER_INPUT_BITS;                             \
    1827             :         (out) = tmp__;                                          \
    1828             : } while (0)
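                     : 
                     : /* A worked example with illustrative values (the real GLITTER_INPUT_BITS and
                     :  * grid sizes are set elsewhere in this file): take GLITTER_INPUT_BITS = 8.
                     :  * With GRID_X = 16 (GRID_X_BITS = 4) the shift form applies, so an input x
                     :  * of 0x2A0 (2.625 pixels with 8 fractional bits) becomes
                     :  * 0x2A0 >> (8 - 4) = 42 grid units, i.e. 2.625 * 16.  With a
                     :  * non-power-of-two scale such as GRID_Y = 15 the general form is used
                     :  * instead: 15 * 0x2A0 = 10080, and 10080 >> 8 = 39 = floor(2.625 * 15). */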
    1829             : 
    1830             : I glitter_status_t
    1831           0 : glitter_scan_converter_add_edge (glitter_scan_converter_t *converter,
    1832             :                                  const cairo_edge_t *edge)
    1833             : {
    1834             :     cairo_edge_t e;
    1835             : 
    1836           0 :     INPUT_TO_GRID_Y (edge->top, e.top);
    1837           0 :     INPUT_TO_GRID_Y (edge->bottom, e.bottom);
    1838           0 :     if (e.top >= e.bottom)
    1839           0 :         return GLITTER_STATUS_SUCCESS;
    1840             : 
    1841             :     /* XXX: possible overflows if GRID_X/Y > 2**GLITTER_INPUT_BITS */
    1842           0 :     INPUT_TO_GRID_Y (edge->line.p1.y, e.line.p1.y);
    1843           0 :     INPUT_TO_GRID_Y (edge->line.p2.y, e.line.p2.y);
    1844           0 :     if (e.line.p1.y == e.line.p2.y)
    1845           0 :         return GLITTER_STATUS_SUCCESS;
    1846             : 
    1847           0 :     INPUT_TO_GRID_X (edge->line.p1.x, e.line.p1.x);
    1848           0 :     INPUT_TO_GRID_X (edge->line.p2.x, e.line.p2.x);
    1849             : 
    1850           0 :     e.dir = edge->dir;
    1851             : 
    1852           0 :     return polygon_add_edge (converter->polygon, &e);
    1853             : }
    1854             : 
    1855             : #ifndef GLITTER_BLIT_COVERAGES_BEGIN
    1856             : # define GLITTER_BLIT_COVERAGES_BEGIN
    1857             : #endif
    1858             : 
    1859             : #ifndef GLITTER_BLIT_COVERAGES_END
    1860             : # define GLITTER_BLIT_COVERAGES_END
    1861             : #endif
    1862             : 
    1863             : #ifndef GLITTER_BLIT_COVERAGES_EMPTY
    1864             : # define GLITTER_BLIT_COVERAGES_EMPTY(y0, y1, xmin, xmax)
    1865             : #endif
    1866             : 
    1867             : static cairo_bool_t
    1868           0 : active_list_is_vertical (struct active_list *active)
    1869             : {
    1870             :     struct edge *e;
    1871             : 
    1872           0 :     for (e = active->head; e != NULL; e = e->next) {
    1873           0 :         if (! e->vertical)
    1874           0 :             return FALSE;
    1875             :     }
    1876             : 
    1877           0 :     return TRUE;
    1878             : }
    1879             : 
    1880             : static void
    1881           0 : step_edges (struct active_list *active, int count)
    1882             : {
    1883           0 :     struct edge **cursor = &active->head;
    1884             :     struct edge *edge;
    1885             : 
    1886           0 :     for (edge = *cursor; edge != NULL; edge = *cursor) {
    1887           0 :         edge->height_left -= GRID_Y * count;
    1888           0 :         if (edge->height_left)
    1889           0 :             cursor = &edge->next;
    1890             :         else
    1891           0 :             *cursor = edge->next;
    1892             :     }
    1893           0 : }
    1894             : 
    1895             : I glitter_status_t
    1896           0 : glitter_scan_converter_render(
    1897             :     glitter_scan_converter_t *converter,
    1898             :     int nonzero_fill,
    1899             :     GLITTER_BLIT_COVERAGES_ARGS)
    1900             : {
    1901             :     int i, j;
    1902           0 :     int ymax_i = converter->ymax / GRID_Y;
    1903           0 :     int ymin_i = converter->ymin / GRID_Y;
    1904             :     int xmin_i, xmax_i;
    1905           0 :     grid_scaled_x_t xmin = converter->xmin;
    1906           0 :     int h = ymax_i - ymin_i;
    1907           0 :     struct polygon *polygon = converter->polygon;
    1908           0 :     struct cell_list *coverages = converter->coverages;
    1909           0 :     struct active_list *active = converter->active;
    1910             : 
    1911           0 :     xmin_i = converter->xmin / GRID_X;
    1912           0 :     xmax_i = converter->xmax / GRID_X;
    1913           0 :     if (xmin_i >= xmax_i)
    1914           0 :         return GLITTER_STATUS_SUCCESS;
    1915             : 
    1916             :     /* Let the coverage blitter initialise itself. */
    1917             :     GLITTER_BLIT_COVERAGES_BEGIN;
    1918             : 
    1919             :     /* Render each pixel row. */
    1920           0 :     for (i = 0; i < h; i = j) {
    1921           0 :         int do_full_step = 0;
    1922           0 :         glitter_status_t status = 0;
    1923             : 
    1924           0 :         j = i + 1;
    1925             : 
    1926             :         /* Determine if we can ignore this row or use the full pixel
    1927             :          * stepper. */
    1928           0 :         if (polygon->y_buckets[i].edges == NULL) {
    1929           0 :             if (! active->head) {
    1930           0 :                 for (; j < h && ! polygon->y_buckets[j].edges; j++)
    1931             :                     ;
    1932           0 :                 GLITTER_BLIT_COVERAGES_EMPTY (i+ymin_i, j-i, xmin_i, xmax_i);
    1933           0 :                 continue;
    1934             :             }
    1935           0 :             do_full_step = active_list_can_step_full_row (active, xmin);
    1936             :         }
    1937           0 :         else if (! polygon->y_buckets[i].have_inside_edges) {
    1938           0 :             grid_scaled_y_t y = (i+ymin_i)*GRID_Y;
    1939           0 :             active_list_merge_edges_from_polygon (active, y, polygon);
    1940           0 :             do_full_step = active_list_can_step_full_row (active, xmin);
    1941             :         }
    1942             : 
    1943           0 :         if (do_full_step) {
    1944             :             /* Step by a full pixel row's worth. */
    1945           0 :             if (nonzero_fill) {
    1946           0 :                 status = apply_nonzero_fill_rule_and_step_edges (active,
    1947             :                                                                  coverages);
    1948             :             } else {
    1949           0 :                 status = apply_evenodd_fill_rule_and_step_edges (active,
    1950             :                                                                  coverages);
    1951             :             }
    1952             : 
    1953           0 :             if (active_list_is_vertical (active)) {
    1954           0 :                 while (j < h &&
    1955           0 :                        polygon->y_buckets[j].edges == NULL &&
    1956           0 :                        active->min_height >= 2*GRID_Y)
    1957             :                 {
    1958           0 :                     active->min_height -= GRID_Y;
    1959           0 :                     j++;
    1960             :                 }
    1961           0 :                 if (j != i + 1)
    1962           0 :                     step_edges (active, j - (i + 1));
    1963             :             }
    1964             :         } else {
    1965             :             /* Supersample this row. */
    1966             :             grid_scaled_y_t suby;
    1967           0 :             for (suby = 0; suby < GRID_Y; suby++) {
    1968           0 :                 grid_scaled_y_t y = (i+ymin_i)*GRID_Y + suby;
    1969             : 
    1970           0 :                 active_list_merge_edges_from_polygon (active, y, polygon);
    1971             : 
    1972           0 :                 if (nonzero_fill) {
    1973           0 :                     status |= apply_nonzero_fill_rule_for_subrow (active,
    1974             :                                                                   coverages);
    1975             :                 } else {
    1976           0 :                     status |= apply_evenodd_fill_rule_for_subrow (active,
    1977             :                                                                   coverages);
    1978             :                 }
    1979             : 
    1980           0 :                 active_list_substep_edges(active);
    1981             :             }
    1982             :         }
    1983             : 
    1984           0 :         if (unlikely (status))
    1985           0 :             return status;
    1986             : 
    1987           0 :         GLITTER_BLIT_COVERAGES(coverages, i+ymin_i, j-i, xmin_i, xmax_i);
    1988           0 :         cell_list_reset (coverages);
    1989             : 
    1990           0 :         if (! active->head)
    1991           0 :             active->min_height = INT_MAX;
    1992             :         else
    1993           0 :             active->min_height -= GRID_Y;
    1994             :     }
    1995             : 
    1996             :     /* Clean up the coverage blitter. */
    1997             :     GLITTER_BLIT_COVERAGES_END;
    1998             : 
    1999           0 :     return GLITTER_STATUS_SUCCESS;
    2000             : }
    2001             : 
    2002             : /*-------------------------------------------------------------------------
    2003             :  * cairo-specific implementation: the coverage blitter and
    2004             :  * scan converter subclass. */
    2005             : 
    2006             : static glitter_status_t
    2007           0 : blit_with_span_renderer (struct cell_list *cells,
    2008             :                          cairo_span_renderer_t *renderer,
    2009             :                          struct pool *span_pool,
    2010             :                          int y, int height,
    2011             :                          int xmin, int xmax)
    2012             : {
    2013           0 :     struct cell *cell = cells->head;
    2014           0 :     int prev_x = xmin;
    2015           0 :     int cover = 0;
    2016             :     cairo_half_open_span_t *spans;
    2017             :     unsigned num_spans;
    2018             : 
    2019           0 :     if (cell == NULL)
    2020           0 :         return blit_empty_with_span_renderer (renderer, y, height);
    2021             : 
    2022             :     /* Skip cells to the left of the clip region. */
    2023           0 :     while (cell != NULL && cell->x < xmin) {
    2024           0 :         cover += cell->covered_height;
    2025           0 :         cell = cell->next;
    2026             :     }
    2027           0 :     cover *= GRID_X*2;
    2028             : 
    2029             :     /* Count number of cells remaining. */
    2030             :     {
    2031           0 :         struct cell *next = cell;
    2032           0 :         num_spans = 1;
    2033           0 :         while (next != NULL) {
    2034           0 :             next = next->next;
    2035           0 :             ++num_spans;
    2036             :         }
    2037           0 :         num_spans = 2*num_spans;
    2038             :     }
    2039             : 
    2040             :     /* Allocate enough spans for the row. */
    2041           0 :     pool_reset (span_pool);
    2042           0 :     spans = pool_alloc (span_pool, sizeof(spans[0])*num_spans);
    2043           0 :     if (unlikely (spans == NULL))
    2044           0 :         return GLITTER_STATUS_NO_MEMORY;
    2045             : 
    2046           0 :     num_spans = 0;
    2047             : 
    2048             :     /* Form the spans from the coverages and areas. */
    2049           0 :     for (; cell != NULL; cell = cell->next) {
    2050           0 :         int x = cell->x;
    2051             :         int area;
    2052             : 
    2053           0 :         if (x >= xmax)
    2054           0 :             break;
    2055             : 
    2056           0 :         if (x > prev_x) {
    2057           0 :             spans[num_spans].x = prev_x;
    2058           0 :             spans[num_spans].coverage = GRID_AREA_TO_ALPHA (cover);
    2059           0 :             ++num_spans;
    2060             :         }
    2061             : 
    2062           0 :         cover += cell->covered_height*GRID_X*2;
    2063           0 :         area = cover - cell->uncovered_area;
    2064             : 
    2065           0 :         spans[num_spans].x = x;
    2066           0 :         spans[num_spans].coverage = GRID_AREA_TO_ALPHA (area);
    2067           0 :         ++num_spans;
    2068             : 
    2069           0 :         prev_x = x+1;
    2070             :     }
    2071             : 
    2072           0 :     if (prev_x <= xmax) {
    2073           0 :         spans[num_spans].x = prev_x;
    2074           0 :         spans[num_spans].coverage = GRID_AREA_TO_ALPHA (cover);
    2075           0 :         ++num_spans;
    2076             :     }
    2077             : 
    2078           0 :     if (prev_x < xmax && cover) {
    2079           0 :         spans[num_spans].x = xmax;
    2080           0 :         spans[num_spans].coverage = 0;
    2081           0 :         ++num_spans;
    2082             :     }
    2083             : 
    2084             :     /* Dump them into the renderer. */
    2085           0 :     return renderer->render_rows (renderer, y, height, spans, num_spans);
    2086             : }
    2087             : 
    2088             : static glitter_status_t
    2089           0 : blit_empty_with_span_renderer (cairo_span_renderer_t *renderer, int y, int height)
    2090             : {
    2091           0 :     return renderer->render_rows (renderer, y, height, NULL, 0);
    2092             : }
    2093             : 
    2094             : struct _cairo_tor_scan_converter {
    2095             :     cairo_scan_converter_t base;
    2096             : 
    2097             :     glitter_scan_converter_t converter[1];
    2098             :     cairo_fill_rule_t fill_rule;
    2099             : 
    2100             :     struct {
    2101             :         struct pool base[1];
    2102             :         cairo_half_open_span_t embedded[32];
    2103             :     } span_pool;
    2104             : };
    2105             : 
    2106             : typedef struct _cairo_tor_scan_converter cairo_tor_scan_converter_t;
    2107             : 
    2108             : static void
    2109           0 : _cairo_tor_scan_converter_destroy (void *converter)
    2110             : {
    2111           0 :     cairo_tor_scan_converter_t *self = converter;
    2112           0 :     if (self == NULL) {
    2113           0 :         return;
    2114             :     }
    2115           0 :     _glitter_scan_converter_fini (self->converter);
    2116           0 :     pool_fini (self->span_pool.base);
    2117           0 :     free(self);
    2118             : }
    2119             : 
    2120             : static cairo_status_t
    2121           0 : _cairo_tor_scan_converter_add_edge (void                *converter,
    2122             :                                     const cairo_point_t *p1,
    2123             :                                     const cairo_point_t *p2,
    2124             :                                     int top, int bottom,
    2125             :                                     int dir)
    2126             : {
    2127           0 :     cairo_tor_scan_converter_t *self = converter;
    2128             :     cairo_status_t status;
    2129             :     cairo_edge_t edge;
    2130             : 
    2131           0 :     edge.line.p1 = *p1;
    2132           0 :     edge.line.p2 = *p2;
    2133           0 :     edge.top = top;
    2134           0 :     edge.bottom = bottom;
    2135           0 :     edge.dir = dir;
    2136             : 
    2137           0 :     status = glitter_scan_converter_add_edge (self->converter, &edge);
    2138           0 :     if (unlikely (status))
    2139           0 :         return _cairo_scan_converter_set_error (self, _cairo_error (status));
    2140             : 
    2141           0 :     return CAIRO_STATUS_SUCCESS;
    2142             : }
    2143             : 
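/* Feed every edge of an already-built polygon to the converter, stopping
 * (and latching the error) at the first edge that fails. */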
    2144             : static cairo_status_t
    2145           0 : _cairo_tor_scan_converter_add_polygon (void             *converter,
    2146             :                                        const cairo_polygon_t *polygon)
    2147             : {
    2148           0 :     cairo_tor_scan_converter_t *self = converter;
    2149             :     cairo_status_t status;
    2150             :     int i;
    2151             : 
    2152           0 :     for (i = 0; i < polygon->num_edges; i++) {
    2153           0 :         status = glitter_scan_converter_add_edge (self->converter,
    2154           0 :                                                   &polygon->edges[i]);
    2155           0 :         if (unlikely (status)) {
    2156           0 :             return _cairo_scan_converter_set_error (self,
    2157             :                                                     _cairo_error (status));
    2158             :         }
    2159             :     }
    2160             : 
    2161           0 :     return CAIRO_STATUS_SUCCESS;
    2162             : }
    2163             : 
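/* Run the rasteriser over all accumulated edges, emitting spans through the
 * given renderer.  cairo's fill rule is collapsed to the boolean glitter
 * expects: non-zero/winding filling when fill_rule is
 * CAIRO_FILL_RULE_WINDING, even-odd otherwise. */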
    2164             : static cairo_status_t
    2165           0 : _cairo_tor_scan_converter_generate (void                        *converter,
    2166             :                                     cairo_span_renderer_t       *renderer)
    2167             : {
    2168           0 :     cairo_tor_scan_converter_t *self = converter;
    2169             :     cairo_status_t status;
    2170             : 
    2171           0 :     status = glitter_scan_converter_render (self->converter,
    2172           0 :                                            self->fill_rule == CAIRO_FILL_RULE_WINDING,
    2173             :                                            renderer,
    2174           0 :                                            self->span_pool.base);
    2175           0 :     if (unlikely (status))
    2176           0 :         return _cairo_scan_converter_set_error (self, _cairo_error (status));
    2177             : 
    2178           0 :     return CAIRO_STATUS_SUCCESS;
    2179             : }
    2180             : 
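/* Allocate and initialise a tor scan converter for the given pixel bounds
 * and fill rule.  On allocation or reset failure a nil converter carrying
 * the error status is returned instead of NULL. */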
    2181             : cairo_scan_converter_t *
    2182           0 : _cairo_tor_scan_converter_create (int                   xmin,
    2183             :                                   int                   ymin,
    2184             :                                   int                   xmax,
    2185             :                                   int                   ymax,
    2186             :                                   cairo_fill_rule_t     fill_rule)
    2187             : {
    2188             :     cairo_tor_scan_converter_t *self;
    2189             :     cairo_status_t status;
    2190             : 
    2191           0 :     self = calloc (1, sizeof(struct _cairo_tor_scan_converter));
    2192           0 :     if (unlikely (self == NULL)) {
    2193           0 :         status = _cairo_error (CAIRO_STATUS_NO_MEMORY);
    2194           0 :         goto bail_nomem;
    2195             :     }
    2196             : 
    2197           0 :     self->base.destroy = _cairo_tor_scan_converter_destroy;
    2198           0 :     self->base.add_edge = _cairo_tor_scan_converter_add_edge;
    2199           0 :     self->base.add_polygon = _cairo_tor_scan_converter_add_polygon;
    2200           0 :     self->base.generate = _cairo_tor_scan_converter_generate;
    2201             : 
    2202           0 :     pool_init (self->span_pool.base,
    2203             :               250 * sizeof(self->span_pool.embedded[0]),
    2204             :               sizeof(self->span_pool.embedded));
    2205             : 
    2206           0 :     _glitter_scan_converter_init (self->converter);
    2207           0 :     status = glitter_scan_converter_reset (self->converter,
    2208             :                                            xmin, ymin, xmax, ymax);
    2209           0 :     if (unlikely (status))
    2210           0 :         goto bail;
    2211             : 
    2212           0 :     self->fill_rule = fill_rule;
    2213             : 
    2214           0 :     return &self->base;
    2215             : 
    2216             :  bail:
    2217           0 :     self->base.destroy(&self->base);
    2218             :  bail_nomem:
    2219           0 :     return _cairo_scan_converter_create_in_error (status);
    2220             : }
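
Putting the pieces together, the wrapper is driven through the function pointers installed on base. The fragment below is a hedged sketch of that lifecycle, not code from cairo: it assumes cairo's internal declarations (cairoint.h) are in scope, and that my_polygon and my_renderer have been built elsewhere; error handling is reduced to a single status check.

/* Hypothetical caller; 'my_polygon' and 'my_renderer' are assumed to come
 * from elsewhere in cairo.  _cairo_tor_scan_converter_create() reports
 * failure by returning a nil converter rather than NULL (see the bail
 * paths above), so no NULL check is needed here. */
static cairo_status_t
rasterize_polygon (const cairo_polygon_t *my_polygon,
                   cairo_span_renderer_t *my_renderer,
                   int xmin, int ymin, int xmax, int ymax)
{
    cairo_scan_converter_t *converter;
    cairo_status_t status;

    converter = _cairo_tor_scan_converter_create (xmin, ymin, xmax, ymax,
                                                  CAIRO_FILL_RULE_WINDING);

    /* Accumulate geometry, then let generate() walk the scan lines and
     * push spans into the renderer. */
    status = converter->add_polygon (converter, my_polygon);
    if (status == CAIRO_STATUS_SUCCESS)
        status = converter->generate (converter, my_renderer);

    converter->destroy (converter);
    return status;
}

In cairo itself this wiring is done by the compositor code; the sketch only shows the order of calls the converter expects.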

Generated by: LCOV version 1.13